user513164

Reputation: 1868

Hadoop Pipes problem

I have configured Hadoop in pseudo-distributed mode (single-node cluster) on Ubuntu 10.04.

I have a problem running Hadoop Pipes code. My code is the following:

#include "/home/hadoop/project/hadoop-0.20.2/c++/Linux-amd64-64/include/hadoop/Pipes.hh"
#include "/home/hadoop/project/hadoop-0.20.2/c++/Linux-amd64-64/include/hadoop/TemplateFactory.hh"
#include "/home/hadoop/project/hadoop-0.20.2/c++/Linux-amd64-64/include/hadoop/StringUtils.hh"


#include "/home/hadoop/project/hadoop-0.20.2/src/c++/libhdfs/hdfs.h"

const std::string WORDCOUNT = "WORDCOUNT";
const std::string INPUT_WORDS = "INPUT_WORDS";
const std::string OUTPUT_WORDS = "OUTPUT_WORDS";
// Global HDFS handles shared by the mapper below.
hdfsFS fs;
hdfsFile writefile;
const char* writepath = "/temp/mest";
class WordCountMap: public HadoopPipes::Mapper {
public:
  HadoopPipes::TaskContext::Counter* inputWords;

  WordCountMap(HadoopPipes::TaskContext& context) {
    fs = hdfsConnect("192.168.0.133", 54310);
    inputWords = context.getCounter(WORDCOUNT, INPUT_WORDS);
  }

  ~WordCountMap() {
    // Close the file opened in map() and release the HDFS connection.
    hdfsCloseFile(fs, writefile);
    hdfsDisconnect(fs);
  }

  void map(HadoopPipes::MapContext& context) {
    // Open the output file once, assigning to the global handle. (Declaring
    // a local hdfsFile here would shadow the global one, so the destructor
    // would close an uninitialized handle.)
    if (writefile == NULL) {
      writefile = hdfsOpenFile(fs, writepath, O_WRONLY|O_CREAT, 0, 0, 0);
    }

    std::vector<std::string> words =
        HadoopUtils::splitString(context.getInputValue(), " ");
    for (unsigned int i = 0; i < words.size(); ++i) {
      context.emit(words[i], "1");
    }
    context.incrementCounter(inputWords, words.size());
  }
};

class WordCountReduce: public HadoopPipes::Reducer {
public:
  HadoopPipes::TaskContext::Counter* outputWords;

  WordCountReduce(HadoopPipes::TaskContext& context) {
    outputWords = context.getCounter(WORDCOUNT, OUTPUT_WORDS);
  }

  void reduce(HadoopPipes::ReduceContext& context) {
    int sum = 0;
    while (context.nextValue()) {
      sum += HadoopUtils::toInt(context.getInputValue());
    }
    context.emit(context.getInputKey(), HadoopUtils::toString(sum));
    context.incrementCounter(outputWords, 1); 
  }
};

int main(int argc, char *argv[]) {
  return HadoopPipes::runTask(HadoopPipes::TemplateFactory<WordCountMap, 
                              WordCountReduce>());
}

I compiled it, and it compiled successfully.
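(For reference, a typical compile line for a Pipes program on 0.20.2 looks roughly like the following; the source file name here is assumed, and the exact library list may differ per build:)

g++ wordcount.cpp -o wordcount \
    -I/home/hadoop/project/hadoop-0.20.2/c++/Linux-amd64-64/include \
    -L/home/hadoop/project/hadoop-0.20.2/c++/Linux-amd64-64/lib \
    -lhadooppipes -lhadooputils -lhdfs -lpthread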

I run it with the following command:

bin/hadoop pipes -D java.pipes.recordreader=true -D java.pipes.recordwriter=true -input gutenberg -output manish_gut2 -program bin/cat

But when I run it, it shows the following errors:

11/05/04 16:13:12 WARN mapred.JobClient: No job jar file set.  User classes may not be found. See JobConf(Class) or JobConf#setJar(String).
11/05/04 16:13:12 INFO mapred.FileInputFormat: Total input paths to process : 3
11/05/04 16:13:13 INFO mapred.JobClient: Running job: job_201105041611_0001
11/05/04 16:13:14 INFO mapred.JobClient:  map 0% reduce 0%
11/05/04 16:13:24 INFO mapred.JobClient: Task Id : attempt_201105041611_0001_m_000000_0, Status : FAILED
java.io.IOException: pipe child exception
    at org.apache.hadoop.mapred.pipes.Application.abort(Application.java:151)
    at org.apache.hadoop.mapred.pipes.PipesMapRunner.run(PipesMapRunner.java:101)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:358)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:307)
    at org.apache.hadoop.mapred.Child.main(Child.java:170)
Caused by: java.io.EOFException
    at java.io.DataInputStream.readByte(DataInputStream.java:250)
    at org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:298)
    at org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:319)
    at org.apache.hadoop.mapred.pipes.BinaryProtocol$UplinkReaderThread.run(BinaryProtocol.java:114)

attempt_201105041611_0001_m_000000_0: Hadoop Pipes Exception: RecordReader not defined at /export/crawlspace/chris/work/branch-0.20/src/c++/pipes/impl/HadoopPipes.cc:692 in virtual void HadoopPipes::TaskContextImpl::runMap(std::string, int, bool)
[... the same "pipe child exception" / EOFException stack trace and "Hadoop Pipes Exception: RecordReader not defined" message repeat for the remaining five map task attempts ...]
11/05/04 16:13:44 INFO mapred.JobClient: Job complete: job_201105041611_0001
11/05/04 16:13:44 INFO mapred.JobClient: Counters: 3
11/05/04 16:13:44 INFO mapred.JobClient:   Job Counters 
11/05/04 16:13:44 INFO mapred.JobClient:     Launched map tasks=8
11/05/04 16:13:44 INFO mapred.JobClient:     Data-local map tasks=8
11/05/04 16:13:44 INFO mapred.JobClient:     Failed map tasks=1
Exception in thread "main" java.io.IOException: Job failed!
    at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:1252)
    at org.apache.hadoop.mapred.pipes.Submitter.runJob(Submitter.java:248)
    at org.apache.hadoop.mapred.pipes.Submitter.run(Submitter.java:479)
    at org.apache.hadoop.mapred.pipes.Submitter.main(Submitter.java:494)

I don't know what I'm doing wrong. How can I run this program, and how do I resolve these errors?

Upvotes: 1

Views: 3773

Answers (2)

Mark

Reputation: 11

The problem here is as @Nija describes.

hadoop.pipes.java.recordreader is not being specified, so it defaults to false. That means Pipes expects your C++ code to provide a RecordReader. You don't define one, which is why the tasks fail with "RecordReader not defined".
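Setting both properties to true tells Pipes to use the built-in Java record reader and writer. Based on the command in the question, the corrected invocation would look something like this (input, output, and program paths kept exactly as in the question):

bin/hadoop pipes -D hadoop.pipes.java.recordreader=true \
                 -D hadoop.pipes.java.recordwriter=true \
                 -input gutenberg -output manish_gut2 -program bin/cat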

Upvotes: 1

QuinnG

Reputation: 6424

I would start with what they do here: http://wiki.apache.org/hadoop/C%2B%2BWordCount, get it working, and then expand it toward your implementation.

You can also use that page to spot differences between your implementation and theirs and try to solve the problem that way. One difference I notice is in your record reader and record writer properties: you pass java.pipes.recordreader and java.pipes.recordwriter, while the example at the link uses hadoop.pipes.java.recordreader and hadoop.pipes.java.recordwriter.

I haven't used Hadoop this way before, so this is just me finding a similar example and noticing the differences. :)

Hopefully this will point you in the right direction.

Upvotes: 2
