Filtering Columns Using MapReduce (Map Only)
_____________________________________________________________________________
author: halitics.blogspot.in (Bharat)
input file: emp.txt
_____________________
101,amar,m,20000,hyd
102,amala,f,30000,pune
103,siva,m,40000,hyd
104,sivani,f,50000,hyd
105,hari,m,40000,pune
____________________
expected output:
Only the name, salary, and city fields should be written to the output file.
amar,20000,hyd
amala,30000,pune
siva,40000,hyd
sivani,50000,hyd
hari,40000,pune
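_____________________

Before looking at the full job, here is a minimal standalone sketch of the per-record transformation the mapper will perform: split each comma-separated line and keep fields 1 (name), 3 (salary), and 4 (city). The class name FieldPick and the hard-coded sample line (taken from emp.txt above) are just for illustration.

public class FieldPick {
    public static void main(String[] args) {
        String line = "101,amar,m,20000,hyd";  // one record from emp.txt
        String[] words = line.split(",");
        // index 0=id, 1=name, 2=gender, 3=salary, 4=city
        String newline = words[1] + "," + words[3] + "," + words[4];
        System.out.println(newline);           // prints: amar,20000,hyd
    }
}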
package my.map.red;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

public class FilterColumns {

    public static class Map1 extends Mapper<LongWritable, Text, Text, NullWritable> {
        @Override
        public void map(LongWritable k, Text v, Context con)
                throws IOException, InterruptedException {
            // input record: id,name,gender,salary,city
            String line = v.toString();
            String[] words = line.split(",");
            // keep only name (1), salary (3), and city (4)
            String newline = words[1] + "," + words[3] + "," + words[4];
            // write once, with a NullWritable value: TextOutputFormat then
            // emits only the key, so no trailing tab appears in the output
            con.write(new Text(newline), NullWritable.get());
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration c = new Configuration();
        String[] files = new GenericOptionsParser(c, args).getRemainingArgs();
        Path p1 = new Path(files[0]);  // input path
        Path p2 = new Path(files[1]);  // output path (must not already exist)
        Job j = Job.getInstance(c, "FilterColumns");
        j.setJarByClass(FilterColumns.class);
        j.setMapperClass(Map1.class);
        j.setNumReduceTasks(0);        // 0 reducers => map-only job
        j.setOutputKeyClass(Text.class);
        j.setOutputValueClass(NullWritable.class);
        FileInputFormat.addInputPath(j, p1);
        FileOutputFormat.setOutputPath(j, p2);
        System.exit(j.waitForCompletion(true) ? 0 : 1);
    }
}
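_____________________

To run the job, package the class into a jar and pass the input file and an output directory that does not already exist (the jar name filter.jar and the directory fcout below are example names, not part of the code above):

hadoop jar filter.jar my.map.red.FilterColumns emp.txt fcout

Because the job has zero reducers, each mapper writes its output directly, so the results appear in part-m-* files rather than part-r-* files:

hadoop fs -cat fcout/part-m-00000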