
Hadoop: finding common Facebook friends with MapReduce


I tried a MapReduce program in Hadoop (Java) to find the list of common friends from a JSON file. The JSON file content has the following pattern:

{"name":"abc","id":123} [{"name":"xyz","id":124},{"name":"def","id":125},{"name":"cxf","id":155}]
{"name":"cxf","id":155} [{"name":"xyz","id":124},{"name":"abc","id":123},{"name":"yyy","id":129}]

The pattern is explained as follows:

a friend JSON, followed by a tab, followed by an array of that friend's related-friend JSONs

So abc has xyz, def and cxf as friends, and cxf has xyz, abc and yyy as friends.

Given the above, the common friend between abc and cxf is xyz.

I tried to implement the same using MapReduce by creating a custom Writable. The mapper emits the following key-value pairs, where the key is a pair of friends and the value is the related friends of the first friend in the key (i.e. of the pair):

K->V
(abc,xyz) -> [xyz,def,cxf]
(abc,def) -> [xyz,def,cxf]
(abc,cxf) -> [xyz,def,cxf]
(cxf,xyz) -> [xyz,abc,yyy]
(cxf,abc) -> [xyz,abc,yyy]
(cxf,yyy) -> [xyz,abc,yyy]
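
For reference, a mapper that emits those records might look like the sketch below. This is my own illustration, not code from the question: it reuses the Friend and FriendPair classes shown further down and introduces a hypothetical FriendList value type (an ArrayWritable of Friend) so that the list of related friends can be used as the value.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.util.JSON;

// Hypothetical value type: an ArrayWritable of Friend, so a whole friend list
// can be used as the map output value.
class FriendList extends ArrayWritable {
    public FriendList() { super(Friend.class); }
    public FriendList(Friend[] friends) { super(Friend.class, friends); }
}

// Sketch only: the key is a (person, friend) pair, the value is the person's
// full friend list, matching the K->V listing above.
public class PairKeyMapper extends Mapper<LongWritable, Text, FriendPair, FriendList> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] parts = value.toString().split("\t");
        BasicDBObject personObj = (BasicDBObject) JSON.parse(parts[0]);
        BasicDBList friendsObj = (BasicDBList) JSON.parse(parts[1]);

        Friend person = toFriend(personObj);
        List<Friend> friends = new ArrayList<>();
        for (Object o : friendsObj) {
            friends.add(toFriend((BasicDBObject) o));
        }
        Friend[] friendArray = friends.toArray(new Friend[0]);

        // one output record per (person, friend) pair; the value is always the full list
        for (Friend f : friendArray) {
            context.write(new FriendPair(person, f), new FriendList(friendArray));
        }
    }

    private static Friend toFriend(BasicDBObject obj) {
        Friend f = new Friend();
        f.setId(new IntWritable(obj.getInt("id")));
        f.setName(new Text(obj.getString("name")));
        return f;
    }
}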

The key here is actually a custom Writable: I created a class implementing WritableComparable and overrode the compareTo method so that the pairs (a,b) and (b,a) are the same. But the problem I am facing is that compareTo is not called for all combinations of pairs, so the reducer logic fails.

Based on the example above, the mapper emits 6 K,V pairs, but compareTo is called only 5 times: key1.compareTo(key2), key2.compareTo(key3), key3.compareTo(key4), key4.compareTo(key5), key5.compareTo(key6).

Any idea why this happens?

Below is the code based on the logic f11ler suggested:

Driver class:

package com.facebook.updated;

import java.io.IOException;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Logger;

public class FacebookMain extends Configured implements Tool {

    Logger logger = Logger.getLogger(FacebookMain.class);

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new FacebookMain(), args));

    }

    @Override
    public int run(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        logger.info("Running======>");
        Job job = Job.getInstance();

        job.setJarByClass(FacebookMain.class);
        job.setJobName("FBApp");

        job.setMapOutputKeyClass(Friend.class);
        job.setMapOutputValueClass(Friend.class);

        job.setOutputKeyClass(FriendPair.class);
        job.setOutputValueClass(Friend.class);

        job.setMapperClass(FacebookMapper.class);
        job.setReducerClass(FacebookReducer.class);

        job.setInputFormatClass(org.apache.hadoop.mapreduce.lib.input.TextInputFormat.class);
        job.setOutputFormatClass(SequenceFileOutputFormat.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        boolean val = job.waitForCompletion(true);

        return val ? 0 : 1;

    }

}

The custom Writables (used to represent a friend and a friend pair):

package com.facebook.updated;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import lombok.Getter;
import lombok.Setter;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.log4j.Logger;

@Getter
@Setter
public class Friend implements WritableComparable<Friend> {

    Logger logger = Logger.getLogger(Friend.class);

    private IntWritable id;
    private Text name;

    public Friend() {
        this.id = new IntWritable();
        this.name = new Text();
    }

    @Override
    public int compareTo(Friend arg0) {
        int val = getId().compareTo(arg0.getId());
        logger.info("compareTo Friend ======> " + arg0 + " and " + this + " compare is " + val);
        return val;
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        id.readFields(in);
        name.readFields(in);
    }

    @Override
    public void write(DataOutput out) throws IOException {
        id.write(out);
        name.write(out);
    }

    @Override
    public boolean equals(Object obj) {
        Friend f2 = (Friend) obj;
        boolean val = this.getId().equals(f2.getId());
        //logger.info("equals Friend ======> " + obj + " and " + this);
        return val;
    }

    @Override
    public String toString() {
        return id + ":" + name + " ";
    }
}

package com.facebook.updated;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import lombok.Getter;
import lombok.Setter;

import org.apache.hadoop.io.WritableComparable;
import org.apache.log4j.Logger;

@Getter
@Setter
public class FriendPair implements WritableComparable<FriendPair> {

    Logger logger = Logger.getLogger(FriendPair.class);

    private Friend first;
    private Friend second;

    public FriendPair() {
        this.first = new Friend();
        this.second = new Friend();
    }

    public FriendPair(Friend f1, Friend f2) {
        this.first = f1;
        this.second = f2;
    }

    @Override
    public int compareTo(FriendPair o) {

        logger.info("compareTo FriendPair ======> " + o + " and " + this);
        FriendPair pair2 = o;
        int cmp = -1;

        if (getFirst().compareTo(pair2.getFirst()) == 0 || getFirst().compareTo(pair2.getSecond()) == 0) {
            cmp = 0;
        }
        if (cmp != 0) {
            // logger.info("compareTo FriendPair ======> " + o + " and " + this
            // + " comparison is " + cmp);
            return cmp;
        }
        cmp = -1;
        if (getSecond().compareTo(pair2.getFirst()) == 0 || getSecond().compareTo(pair2.getSecond()) == 0) {
            cmp = 0;
        }

        // logger.info("compareTo FriendPair ======> " + o + " and " + this +
        // " comparison is " + cmp);

        // logger.info("getFirst() " + getFirst());
        // logger.info("pair2.getFirst() " + pair2.getFirst());
        // logger.info("getFirst().compareTo(pair2.getFirst()) " +
        // getFirst().compareTo(pair2.getFirst()));
        // logger.info("getFirst().compareTo(pair2.getSecond()) " +
        // getFirst().compareTo(pair2.getSecond()));
        // logger.info("getSecond().compareTo(pair2.getFirst()) " +
        // getSecond().compareTo(pair2.getFirst()));
        // logger.info("getSecond().compareTo(pair2.getSecond()) " +
        // getSecond().compareTo(pair2.getSecond()));
        // logger.info("pair2.getSecond() " + pair2.getSecond());
        // logger.info("getSecond() " + getSecond());
        // logger.info("pair2.getFirst() " + pair2.getFirst());
        // logger.info("pair2.getSecond() " + pair2.getSecond());

        return cmp;
    }

    @Override
    public boolean equals(Object obj) {

        FriendPair pair1 = this;
        FriendPair pair2 = (FriendPair) obj;

        boolean eq = false;

        logger.info("equals FriendPair ======> " + obj + " and " + this);

        if (pair1.getFirst().equals(pair2.getFirst()) || pair1.getFirst().equals(pair2.getSecond()))
            eq = true;

        if (!eq) {
            // logger.info("equals FriendPair ======> " + obj + " and " + this +
            // " equality is " + eq);
            return false;
        }
        if (pair1.getSecond().equals(pair2.getFirst()) || pair1.getSecond().equals(pair2.getSecond()))
            eq = true;

        // logger.info("equals FriendPair ======> " + obj + " and " + this +
        // " equality is " + eq);

        return eq;
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        first.readFields(in);
        second.readFields(in);
    }

    @Override
    public void write(DataOutput out) throws IOException {
        first.write(out);
        second.write(out);
    }

    @Override
    public String toString() {
        return "[" + first + ";" + second + "]";
    }

    @Override
    public int hashCode() {
        logger.info("hashCode FriendPair ======> " + this);
        return first.getId().hashCode() + second.getId().hashCode();
    }
}

Mapper and Reducer

package com.facebook.updated;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.log4j.Logger;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.util.JSON;

public class FacebookMapper extends Mapper<LongWritable, Text, Friend, Friend> {

    Logger log = Logger.getLogger(FacebookMapper.class);

    @Override
    protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Friend, Friend>.Context context)
            throws IOException, InterruptedException {

        String line = value.toString();
        StringTokenizer st = new StringTokenizer(line, "\t");
        String person = st.nextToken();
        String friends = st.nextToken();

        BasicDBObject personObj = (BasicDBObject) JSON.parse(person);
        BasicDBList friendsList = (BasicDBList) JSON.parse(friends);

        List<Friend> frndJavaList = new ArrayList<>();

        for (Object frndObj : friendsList) {
            frndJavaList.add(getFriend((BasicDBObject) frndObj));
        }

        Friend frnd = getFriend(personObj);
        Friend[] array = frndJavaList.toArray(new Friend[frndJavaList.size()]);
        for (Friend f : array) {
            log.info("Map output is " + f + " and " + frnd);
            context.write(f, frnd);
        }
    }

    private static Friend getFriend(BasicDBObject personObj) {
        Friend frnd = new Friend();
        frnd.setId(new IntWritable(personObj.getInt("id")));
        frnd.setName(new Text(personObj.getString("name")));
        return frnd;
    }
}

package com.facebook.updated;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.mapreduce.Reducer;
import org.apache.log4j.Logger;

public class FacebookReducer extends Reducer<Friend, Friend, FriendPair, Friend> {

    Logger log = Logger.getLogger(FacebookReducer.class);

    @Override
    protected void reduce(Friend friend, Iterable<Friend> vals,
            Reducer<Friend, Friend, FriendPair, Friend>.Context context) throws IOException, InterruptedException {
        List<Friend> friends = new ArrayList<>();
        for (Friend frnd : vals) {
            friends.add(frnd);
        }
        log.info("Reducer output is " + friend + " and values are " + friends);
        if (friends.size() == 2) {
            FriendPair key = new FriendPair(friends.get(0), friends.get(1));
            context.write(key, friend);
        } else {
            //log.info("Size of friends is not 2 key is " + friend + " and values are " + friends);
        }

    }
}

Input JSON file containing 2 lines:

{"name":"abc","id":123} [{"name":"xyz","id":124},{"name":"def","id":125},{"name":"cxf","id":155}]
{"name":"cxf","id":155} [{"name":"xyz","id":124},{"name":"abc","id":123},{"name":"yyy","id":129}]

Output of the reducer: (abc,abc) -> xyz

1 Answer


    The compareTo method is needed for sorting, and this relation should be transitive: if a > b and b > c, then a > c. This is probably not the case for your implementation.
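
    For illustration only (my sketch, not part of the answer): one way to keep (a,b) and (b,a) together without breaking transitivity is to compare a normalized form of the pair, e.g. the smaller id first and then the larger id:

    // Sketch of a total-order compareTo for the question's FriendPair class.
    // (a,b) and (b,a) normalize to the same (min id, max id), so they compare
    // equal, while the ordering stays transitive and therefore safe for sorting.
    @Override
    public int compareTo(FriendPair o) {
        int thisMin  = Math.min(getFirst().getId().get(), getSecond().getId().get());
        int thisMax  = Math.max(getFirst().getId().get(), getSecond().getId().get());
        int otherMin = Math.min(o.getFirst().getId().get(), o.getSecond().getId().get());
        int otherMax = Math.max(o.getFirst().getId().get(), o.getSecond().getId().get());
        int cmp = Integer.compare(thisMin, otherMin);
        return cmp != 0 ? cmp : Integer.compare(thisMax, otherMax);
    }

    The same normalization would have to be reflected in equals and hashCode so that grouping in the reducer matches the sort order.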

    Why generate such records in the mapper at all? If "being friends" is a symmetric relation, you can do a map-only job with this logic (pseudocode):

    for (int i = 0; i < values.length; ++i)
        for (int j = 0; j < values.length; ++j) {
            if (i == j)
                continue
            emit (values[i], values[j]), key
        }
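
    A rough Java version of that map-only job might look like the sketch below. This is my own sketch, not the answerer's code: it reuses the Friend and FriendPair classes from the question, parses the input the same way as FacebookMapper, and the class name is only illustrative.

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;
    import com.mongodb.BasicDBList;
    import com.mongodb.BasicDBObject;
    import com.mongodb.util.JSON;

    // Map-only job for the symmetric case: every pair of entries in a person's
    // friend list has that person as a common friend, so emit (pair) -> person.
    public class MapOnlyCommonFriendsMapper extends Mapper<LongWritable, Text, FriendPair, Friend> {

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String[] parts = value.toString().split("\t");
            BasicDBObject personObj = (BasicDBObject) JSON.parse(parts[0]);
            BasicDBList friendsObj = (BasicDBList) JSON.parse(parts[1]);

            Friend person = toFriend(personObj);
            List<Friend> friends = new ArrayList<>();
            for (Object o : friendsObj) {
                friends.add(toFriend((BasicDBObject) o));
            }

            // nested loop over the friend list, skipping i == j
            for (int i = 0; i < friends.size(); ++i) {
                for (int j = 0; j < friends.size(); ++j) {
                    if (i == j) continue;
                    context.write(new FriendPair(friends.get(i), friends.get(j)), person);
                }
            }
        }

        private static Friend toFriend(BasicDBObject obj) {
            Friend f = new Friend();
            f.setId(new IntWritable(obj.getInt("id")));
            f.setName(new Text(obj.getString("name")));
            return f;
        }
    }

    Note that the loops emit both (a,b) and (b,a); restricting the inner loop to j > i would emit each unordered pair once.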
    

    Update: If the relation is not symmetric (meaning "xyz has friend abc" does not follow from "abc has friend xyz"), then we need the reverse records as well:

    Mapper:

    for (int i = 0; i < values.length; ++i)
        emit values[i], key
    

    Reducer (same logic as the previous mapper):

    for (int i = 0; i < values.length; ++i)
        for (int j = 0; j < values.length; ++j) {
            if (i == j)
                continue
            emit (values[i], values[j]), key
        }
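
    In Java that reducer could look roughly like the sketch below (again my own sketch, using the question's Friend and FriendPair classes; the values are deep-copied because Hadoop reuses the object handed out by the reducer's value iterator):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Reducer;

    public class CommonFriendsReducer extends Reducer<Friend, Friend, FriendPair, Friend> {

        @Override
        protected void reduce(Friend friend, Iterable<Friend> vals, Context context)
                throws IOException, InterruptedException {
            // The values are all the people who list "friend" as a friend, so
            // "friend" is a common friend of every pair among them.
            // Copy each value: Hadoop reuses the same Friend instance across the iterator.
            List<Friend> people = new ArrayList<>();
            for (Friend v : vals) {
                Friend copy = new Friend();
                copy.setId(new IntWritable(v.getId().get()));
                copy.setName(new Text(v.getName()));
                people.add(copy);
            }

            // nested loop over the values, skipping the comparison with self
            for (int i = 0; i < people.size(); ++i) {
                for (int j = 0; j < people.size(); ++j) {
                    if (i == j) continue;
                    context.write(new FriendPair(people.get(i), people.get(j)), friend);
                }
            }
        }
    }

    Paired with the question's FacebookMapper, which already emits friend -> person, this replaces the size() == 2 check in FacebookReducer with the nested loop from the pseudocode above.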
    

    Update2:

    Let's see how this algorithm works on your example:

    The mapper output:

    xyz -> abc
    def -> abc
    cxf -> abc
    xyz -> cxf
    abc -> cxf
    yyy -> cxf
    

    MapReduce will group these values by key, so the reducer input is:

    xyz -> [abc,cxf]
    def -> [abc]
    cxf -> [abc]
    abc -> [cxf]
    yyy -> [cxf]
    

    In the reducer we do a nested loop over the values, skipping the comparison with self. The result:

    (abc, cxf) -> xyz
    

    This is exactly what we wanted to get.
