user3159623

Reputation: 75

java.io.IOException: Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, recieved org.apache.hadoop.io.Text

I have configured the Mapper and Reducer classes, plus the map output key/value classes, in the main block. I do not understand what is wrong with the code that throws the error "Type mismatch in value from map: expected org.apache.hadoop.io.IntWritable, recieved org.apache.hadoop.io.Text". Can anyone please help? Thank you.

Code is:

import java.io.IOException;
import java.lang.String;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.util.StringTokenizer;

public class Alphabet {

    public static class AlphabetMapper
            extends Mapper<IntWritable, Text, LongWritable, IntWritable> {

        private Text word = new Text();

        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            StringTokenizer tokenizer = new StringTokenizer(line);

            while (tokenizer.hasMoreTokens()) {
                word.set(tokenizer.nextToken());
                context.write(new LongWritable(word.getLength()), new IntWritable(1));
            }
        }
    }
    public static class AlphabetReducer
            extends Reducer<LongWritable, IntWritable, LongWritable, IntWritable> {

        public void reduce(LongWritable key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }
    public static void main(String[] args) throws Exception {

        if (args.length != 2) {
            System.err.println("Usage: Alphabet <input path> <output path>");
            System.exit(-1);
        }

        Job job = new Job();
        job.setJarByClass(Alphabet.class);
        job.setJobName("Word Char Count");

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        job.setMapperClass(AlphabetMapper.class);
        job.setReducerClass(AlphabetReducer.class);

        job.setMapOutputKeyClass(LongWritable.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(IntWritable.class);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}

Upvotes: 1

Views: 3985

Answers (1)

Jijo

Reputation: 610

If you are using FileInputFormat, the default input key and value types passed to the mapper are LongWritable and Text, so the mapper's input key type should be LongWritable, not IntWritable.
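
To spell out why this causes the reported error: because AlphabetMapper declares IntWritable as its input key type, its map(LongWritable, Text, Context) method is an overload rather than an override of Mapper.map, so the framework falls back to the default identity map and writes the Text value where an IntWritable is expected. A minimal sketch of the corrected mapper declaration, keeping the rest of the question's code unchanged:

public static class AlphabetMapper
        extends Mapper<LongWritable, Text, LongWritable, IntWritable> {

    private Text word = new Text();

    @Override
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        StringTokenizer tokenizer = new StringTokenizer(value.toString());
        while (tokenizer.hasMoreTokens()) {
            word.set(tokenizer.nextToken());
            // key = word length, value = 1, matching the LongWritable/IntWritable
            // map output classes set in the job configuration
            context.write(new LongWritable(word.getLength()), new IntWritable(1));
        }
    }
}

Adding the @Override annotation turns this kind of mistake into a compile-time error instead of a runtime type mismatch.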

Upvotes: 2
