700字范文,内容丰富有趣,生活中的好帮手!
700字范文 > MapReduce例子——找出QQ共同好友

MapReduce例子——找出QQ共同好友

时间:2024-05-09 14:44:38

相关推荐

MapReduce例子——找出QQ共同好友

///\\\\\\\\

fri.txt 如下: person: friend1, friend2, friend3, friend4, …..

A:B,C,D,F,E,OB:A,C,E,KC:F,A,D,ID:A,E,F,LE:B,C,D,M,LF:A,B,C,D,E,O,MG:A,C,D,E,FH:A,C,D,E,OI:A,OJ:B,OK:A,C,DL:D,E,FM:E,F,GO:A,H,I,J

\\\\\\

import java.io.IOException;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.Path;import org.apache.hadoop.io.LongWritable;import org.apache.hadoop.io.Text;import org.apache.hadoop.mapreduce.Mapper;import org.apache.hadoop.mapreduce.Reducer;import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;import org.apache.hadoop.mapreduce.Job;public class friends {static class FriendMapper extends Mapper<LongWritable, Text, Text, Text>{ @Overrideprotected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)throws IOException, InterruptedException {String line = value.toString();String[] person_friends = line.split(":");String person = person_friends[0];String friends = person_friends[1];for(String friend:friends.split(",")) {context.write(new Text(friend), new Text(person)); } } } static class FriendsReducer extends Reducer<Text, Text, Text, Text>{@Overrideprotected void reduce(Text friend, Iterable<Text> persons, Context context)throws IOException, InterruptedException {StringBuffer sb = new StringBuffer();for(Text person:persons) { sb.append(person).append(",");} context.write(friend, new Text(sb.toString())); } } public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException { Configuration conf = new Configuration();Job job = Job.getInstance(conf);job.setJarByClass(friends.class);job.setOutputKeyClass(Text.class);job.setOutputValueClass(Text.class); job.setMapperClass(FriendMapper.class);job.setReducerClass(FriendsReducer.class);// FileInputFormat.setInputPaths(job, new Path("hdfs://Master:9000/data/demon/friends/input"));// FileOutputFormat.setOutputPath(job, new Path("hdfs://Master:9000//data/demon/friends/output"));FileInputFormat.setInputPaths(job, new Path("/home/hadoop/examples/friends/input"));FileOutputFormat.setOutputPath(job, new 
Path("/home/hadoop/examples/friends/out_1"));job.waitForCompletion(true); }}

//\\\\

得到结果如下: friend: person1, person2, person3, ….

A I,K,C,B,G,F,H,O,D,B A,F,J,E,C A,E,B,H,F,G,K,D G,C,K,A,L,F,E,H,E G,M,L,H,A,F,B,D,F L,M,D,C,G,A,G M,H O,I O,C,J O,K B,L D,E,M E,F,O A,H,I,J,F,

/\\\\\

///\\\\

import java.io.IOException;import java.util.Arrays;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.Path;import org.apache.hadoop.io.LongWritable;import org.apache.hadoop.io.Text;import org.apache.hadoop.mapreduce.Mapper;import org.apache.hadoop.mapreduce.Reducer;import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;import org.apache.hadoop.mapreduce.Job;public class FriendStepTo {static class FriendToMapper extends Mapper<LongWritable, Text, Text, Text>{ @Overrideprotected void map(LongWritable key, Text value, Mapper<LongWritable, Text, Text, Text>.Context context)throws IOException, InterruptedException {String line = value.toString();String[] friend_person = line.split("\t");String friend = friend_person[0];String[] persons = friend_person[1].split(",");Arrays.sort(persons);for(int i=0; i<persons.length-2; i++) {for(int j=i+1; j<persons.length-1; j++) {context.write(new Text(persons[i]+"-" +persons[j]), new Text(friend)); } } } } static class FriendsToReducer extends Reducer<Text, Text, Text, Text>{@Overrideprotected void reduce(Text person_person, Iterable<Text> friends, Context context)throws IOException, InterruptedException {StringBuffer sb = new StringBuffer();for(Text friend:friends) { sb.append(friend).append(" ");} context.write(person_person, new Text(sb.toString())); } } public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {Configuration conf = new Configuration();Job job = Job.getInstance(conf);job.setJarByClass(FriendStepTo.class);job.setOutputKeyClass(Text.class);job.setOutputValueClass(Text.class); job.setMapperClass(FriendToMapper.class);job.setReducerClass(FriendsToReducer.class); // FileInputFormat.setInputPaths(job, new Path("hdfs://Master:9000/data/demon/friends/input"));// FileOutputFormat.setOutputPath(job, new Path("hdfs://Master:9000//data/demon/friends/output"));FileInputFormat.setInputPaths(job, 
new Path("/home/hadoop/examples/friends/out_1"));FileOutputFormat.setOutputPath(job, new Path("/home/hadoop/examples/friends/out_2")); job.waitForCompletion(true); }}

///\\\\\\\\\\\

得到的结果如下: person1-person2: friend1, friend2, …（注意：由于上面代码的循环边界写成了 length-2 / length-1，排序后与最后一位用户相关的所有组合（例如 B-O、H-O）都未被输出，下面的示例结果因此不完整）

A-B C E A-C F D A-D E F A-E B C D A-F C D B E O A-G D E F C A-H E O C D A-I O A-K D A-L F E B-C A B-D E A B-E C B-F E A C B-G C E A B-H E C A B-I A B-K A B-L E C-D F A C-E D C-F D A C-G F A D C-H A D C-I A C-K D A C-L F D-F E A D-G A E F D-H A E D-I A D-K A D-L F E E-F C D B E-G D C E-H D C E-K D F-G C E D A F-H C A D E O F-I A O F-K D A F-L E G-H D E C A G-I A G-K A D G-L F E H-I A O H-K A D H-L E I-K A

本内容不代表本网观点和政治立场,如有侵犯你的权益请联系我们处理。
网友评论
网友评论仅供其表达个人看法,并不表明网站立场。