ddsprasad

Reputation: 82

ERROR Executor: Exception in task 1.0 in stage 1.0 (TID 1) java.net.NoRouteToHostException: No route to host

Every time I try to run a word count Spark app I get this error. Please help. Below is the WordCount.scala file; after running sbt package I ran the spark-submit command.

package main

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf

object WordCount {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("Word Count")
    val sc = new SparkContext(conf)

    // Read the input file, split each line into words,
    // and count the occurrences of each word.
    val textFile = sc.textFile("file:///usr/local/spark/README.md")
    val tokenizedData = textFile.flatMap(line => line.split(" "))
    val countPrep = tokenizedData.map(word => (word, 1))
    val counts = countPrep.reduceByKey((accumValue, newValue) => accumValue + newValue)

    // Sort by count, descending, and write the result out.
    val sortedCount = counts.sortBy(kvPair => kvPair._2, ascending = false)
    sortedCount.saveAsTextFile("file:///usr/local/wordcount")

    sc.stop()
  }
}
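For completeness, a minimal build.sbt consistent with the jar name below (word-count_2.10-1.0.jar) might look roughly like this; the original file is not shown in the question, so this is a reconstruction and the exact Spark version is an assumption:

name := "Word Count"

version := "1.0"

scalaVersion := "2.10.4"

// Spark itself is supplied by spark-submit at run time,
// so the dependency can be marked provided. Version is a guess.
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.1.1" % "provided"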

Then I ran the following command:

 bin/spark-submit --class "main.WordCount" --master "local[*]" "/home/hadoop/SparkApps/target/scala-2.10/word-count_2.10-1.0.jar"

Spark assembly has been built with Hive, including Datanucleus jars on classpath
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=128m; support was removed in 8.0
15/11/28 07:38:51 ERROR Executor: Exception in task 1.0 in stage 1.0 (TID 1)
java.net.NoRouteToHostException: No route to host
    at java.net.PlainSocketImpl.socketConnect(Native Method)
    at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350)
    at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206)
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188)
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    at java.net.Socket.connect(Socket.java:589)
    at sun.net.NetworkClient.doConnect(NetworkClient.java:175)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
    at sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
    at sun.net.www.http.HttpClient.New(HttpClient.java:308)
    at sun.net.www.http.HttpClient.New(HttpClient.java:326)
    at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1169)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1105)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:999)
    at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:933)
    at org.apache.spark.util.Utils$.fetchFile(Utils.scala:375)
    at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:325)
    at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:323)
    at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
    at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
    at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
    at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:226)
    at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
    at scala.collection.mutable.HashMap.foreach(HashMap.scala:98)
    at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
    at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:323)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:158)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)

Upvotes: 0

Views: 4357

Answers (1)

Youngv

Reputation: 1

Maybe you should add .setMaster("local") to your SparkConf when you create it.
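For example, a minimal sketch of the change (only the SparkConf lines differ from the code in the question; note that a master set in code takes precedence over the --master flag passed to spark-submit):

// Hard-code local mode on the conf itself, so the app never
// tries to reach a remote host regardless of how it is launched.
val conf = new SparkConf()
  .setAppName("Word Count")
  .setMaster("local")
val sc = new SparkContext(conf)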

Upvotes: 0
