user2931444

Reputation: 131

Hadoop 2.6.0 and Avro

I am trying to run a MapReduce job using Avro on Hadoop 2.6.0, but after trying every option suggested on the web I still cannot get past the following error:

15/11/09 21:54:48 WARN mapred.LocalJobRunner: job_local1421922570_0001 java.lang.Exception: java.lang.NoSuchMethodError: org.apache.avro.generic.GenericData.createDatumWriter(Lorg/apache/avro/Schema;)Lorg/apache/avro/io/DatumWriter;

Below is the code (from Hadoop: The Definitive Guide):

public class AvroGenericMapTemperature extends Configured implements Tool {

private static final Schema SCHEMA = new Schema.Parser().parse(
    "{" +
    " \"type\" : \"record\"," +
    " \"name\" : \"WeatherRecord\"," +
    " \"doc\" : \"A weather reading\"," +
    " \"fields\": [" +
    "    {\"name\": \"year\", \"type\": \"int\" }," +
    "    {\"name\": \"temperature\", \"type\": \"int\" }," +
    "    {\"name\": \"stationId\", \"type\": \"string\" }" +
    "  ]" +
    "}"
);

public static class MaxTemperatureMapper extends 
    Mapper<LongWritable,  Text, 
    AvroKey<Integer>,AvroValue<GenericRecord> > {
    private NcdcRecordParser parser = new NcdcRecordParser();
    private GenericRecord record = new GenericData.Record(SCHEMA);

    @Override
    protected void map(
        LongWritable key,
        Text value,
        Mapper<LongWritable, Text, AvroKey<Integer>, 
        AvroValue<GenericRecord>>.Context context)
        throws IOException, InterruptedException {
        parser.parse(value.toString());

        if( parser.isValidTemperature() ) {
            record.put("year", parser.getYearInt());
            record.put("temperature", parser.getAirTemperature());
            record.put("stationId", parser.getStationId());
            context.write(new AvroKey<Integer>(parser.getYearInt()), 
            new AvroValue<GenericRecord>(record));
        }
    }
}

public static class MaxTemperatureReducer extends 
Reducer<AvroKey<Integer>, AvroKey<GenericRecord>, 
AvroKey<GenericRecord>, NullWritable> {

    @Override
    protected void reduce(
        AvroKey<Integer> key,
        Iterable<AvroKey<GenericRecord>> values,
        Reducer<AvroKey<Integer>, AvroKey<GenericRecord>, 
        AvroKey<GenericRecord>, NullWritable>.Context context)
        throws IOException, InterruptedException {
        GenericRecord max = null;

        for ( AvroKey<GenericRecord> value : values) {
            GenericRecord record = value.datum();
            if ( max == null ||
                 (Integer)record.get("temperature") > (Integer) 
                 max.get("termperature") ) {
                 max = newWeatherRecord(record);
            }
        }
        context.write(new AvroKey<GenericRecord>(max), 
        NullWritable.get());
    }

    private GenericRecord newWeatherRecord(GenericRecord value) {
        GenericRecord record = new GenericData.Record(SCHEMA);
        record.put("year", value.get("year"));
        record.put("temperature", value.get("temperature"));
        record.put("stationId", value.get("stationId"));
        return record;
    }
}

public int run(String[] args) throws Exception {
    // TODO Auto-generated method stub
    Job job = new Job( getConf(), "Avro mapreduce");
    job.setJarByClass(getClass());


    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    AvroJob.setMapOutputKeySchema(job, Schema.create(Schema.Type.INT));
    AvroJob.setMapOutputValueSchema(job, SCHEMA);
    AvroJob.setOutputKeySchema(job, SCHEMA);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(AvroKeyOutputFormat.class);

    job.setMapperClass(MaxTemperatureMapper.class);
    job.setReducerClass(MaxTemperatureReducer.class);

    job.waitForCompletion(true);
    return 0;
}

public static void main(String[] args) throws Exception {
    int exitcode = 
        ToolRunner.run(new AvroGenericMapTemperature(), args);
    System.exit(exitcode);
}

}

I have explicitly set all the Avro jars in HADOOP_CLASSPATH (avro, avro-mapred, avro-tools, etc. for version 1.7.5) and have also specified -D mapreduce.job.user.classpath.first=true when running the job, but I keep getting the same error. I know that the Avro version bundled with Hadoop 2.6.0 is 1.7.4, and I tried that version too, but no luck. Any help would be hugely appreciated.
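One quick sanity check would be to run something like the following with the job's classpath, to see which jar GenericData actually comes from and whether that build has the createDatumWriter(Schema) method named in the error (just a minimal sketch; the class name is made up for illustration):

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;

// Diagnostic only - not part of the MapReduce job.
public class AvroClasspathCheck {
    public static void main(String[] args) {
        // Which jar (or directory) was GenericData loaded from?
        System.out.println(GenericData.class.getProtectionDomain()
            .getCodeSource().getLocation());
        try {
            // This is the method named in the NoSuchMethodError.
            System.out.println(GenericData.class
                .getMethod("createDatumWriter", Schema.class));
        } catch (NoSuchMethodException e) {
            System.out.println("createDatumWriter(Schema) not found"
                + " - an older Avro build is being picked up");
        }
    }
}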

Upvotes: 1

Views: 238

Answers (1)

Princey James

Reputation: 738

The generic parameters of Reducer are as follows: org.apache.hadoop.mapreduce.Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>

The first and the second are KEYIN and VALUEIN respectively. So you can try changing

public static class MaxTemperatureReducer extends 
Reducer<AvroKey<Integer>, AvroKey<GenericRecord>, 
AvroKey<GenericRecord>, NullWritable> {

@Override
protected void reduce(
    AvroKey<Integer> key,
    Iterable<AvroKey<GenericRecord>> values,
    Reducer<AvroKey<Integer>, AvroKey<GenericRecord>, 
    AvroKey<GenericRecord>, NullWritable>.Context context)
    throws IOException, InterruptedException {

to

public static class MaxTemperatureReducer extends 
Reducer<AvroKey<Integer>, AvroValue<GenericRecord>, 
AvroKey<GenericRecord>, NullWritable> {

@Override
protected void reduce(
    AvroKey<Integer> key,
    Iterable<AvroValue<GenericRecord>> values,
    Reducer<AvroKey<Integer>, AvroValue<GenericRecord>, 
    AvroKey<GenericRecord>, NullWritable>.Context context)
    throws IOException, InterruptedException {

This is because in the mapper you are writing AvroKey<Integer> as the key and AvroValue<GenericRecord> as the value.
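For completeness, here is a sketch of the whole reducer with the corrected value-in type. It reuses the SCHEMA field and the newWeatherRecord helper from your class, with the same imports, so treat it as an outline rather than a drop-in file:

public static class MaxTemperatureReducer extends 
Reducer<AvroKey<Integer>, AvroValue<GenericRecord>, 
AvroKey<GenericRecord>, NullWritable> {

    @Override
    protected void reduce(
        AvroKey<Integer> key,
        Iterable<AvroValue<GenericRecord>> values,
        Context context)
        throws IOException, InterruptedException {
        GenericRecord max = null;

        for ( AvroValue<GenericRecord> value : values) {
            GenericRecord record = value.datum();
            if ( max == null ||
                 (Integer) record.get("temperature") > (Integer) 
                 max.get("temperature") ) {
                 // Copy the fields out: Hadoop reuses the value object between iterations.
                 max = newWeatherRecord(record);
            }
        }
        context.write(new AvroKey<GenericRecord>(max), 
        NullWritable.get());
    }

    private GenericRecord newWeatherRecord(GenericRecord value) {
        GenericRecord record = new GenericData.Record(SCHEMA);
        record.put("year", value.get("year"));
        record.put("temperature", value.get("temperature"));
        record.put("stationId", value.get("stationId"));
        return record;
    }
}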

Upvotes: 1
