import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
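
// Classic Hadoop MapReduce word count: the mapper emits (word, 1) for each token
// in the input, and the reducer sums the counts per word.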
public class WordCount
{
    public static void main(String[] args) throws Exception
    {
        if (args.length != 2)
        {
            System.err.println("Usage: WordCount <input path> <output path>");
            System.exit(2);
        }
        Configuration conf = new Configuration();
        // Job(Configuration, String) is deprecated; use the Job.getInstance factory method.
        Job job = Job.getInstance(conf, "WordCount");
        job.setJarByClass(WordCount.class);
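        // Wire up the input/output paths, the mapper/reducer classes, and the output key/value types.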
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setMapperClass(WordMapper.class);
        job.setReducerClass(WordReducer.class);
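        // Optional: the reducer also works as a combiner here, pre-summing counts on the map side.
        job.setCombinerClass(WordReducer.class);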
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Propagate the job's success or failure through the process exit code.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
    // Mapper: emits (word, 1) for every non-empty whitespace-separated token.
    // Declared public static so Hadoop can instantiate it by reflection.
    public static class WordMapper extends Mapper<LongWritable, Text, Text, IntWritable>
    {
        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException
        {
            String line = value.toString();
            // Split on runs of whitespace so tabs and multiple spaces are handled.
            String[] words = line.split("\\s+");
            for (String word : words)
            {
                if (word.length() > 0)
                {
                    context.write(new Text(word), new IntWritable(1));
                }
            }
        }
    }
    // Reducer: sums the counts emitted for each word.
    // Declared public static so Hadoop can instantiate it by reflection.
    public static class WordReducer extends Reducer<Text, IntWritable, Text, IntWritable>
    {
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException
        {
            int count = 0;
            for (IntWritable i : values)
            {
                count += i.get();
            }
            context.write(key, new IntWritable(count));
        }
    }
}