Yes, the error changed when I gave it on the command line.
conf/spark-defaults.conf:
spark.tachyonStore.url tachyon://ri:19998
conf/spark-env.sh:
export SPARK_WORKER_DIR=/home/guptapu/spark-1.3.0-bin-hadoop2.3
export SPARK_CLASSPATH=/home/guptapu/tachyon-0.5.0/core/target/tachyon-0.5.0-jar-with-dependencies.jar
export SPARK_MASTER_IP=192.168.140.254
I tried giving the IP earlier, to rule out a DNS issue, when it wasn't connecting.
logs:
15/05/04 11:46:55 WARN component.AbstractLifeCycle: FAILED org.spark-project.jetty.server.Server@68db7c81: java.net.BindException: Address already in use
java.net.BindException: Address already in use
at sun.nio.ch.Net.bind0(Native Method)
at sun.nio.ch.Net.bind(Net.java:444)
at sun.nio.ch.Net.bind(Net.java:436)
at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
at org.spark-project.jetty.server.nio.SelectChannelConnector.open(SelectChannelConnector.java:187)
at org.spark-project.jetty.server.AbstractConnector.doStart(AbstractConnector.java:316)
at org.spark-project.jetty.server.nio.SelectChannelConnector.doStart(SelectChannelConnector.java:265)
at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
at org.spark-project.jetty.server.Server.doStart(Server.java:293)
at org.spark-project.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:64)
at org.apache.spark.ui.JettyUtils$.org$apache$spark$ui$JettyUtils$$connect$1(JettyUtils.scala:199)
at org.apache.spark.ui.JettyUtils$$anonfun$2.apply(JettyUtils.scala:209)
at org.apache.spark.ui.JettyUtils$$anonfun$2.apply(JettyUtils.scala:209)
at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1832)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1823)
at org.apache.spark.ui.JettyUtils$.startJettyServer(JettyUtils.scala:209)
at org.apache.spark.ui.WebUI.bind(WebUI.scala:102)
at org.apache.spark.SparkContext$$anonfun$12.apply(SparkContext.scala:307)
at org.apache.spark.SparkContext$$anonfun$12.apply(SparkContext.scala:307)
at scala.Option.foreach(Option.scala:236)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:307)
at org.apache.spark.examples.SparkTachyonPi$.main(SparkTachyonPi.scala:32)
at org.apache.spark.examples.SparkTachyonPi.main(SparkTachyonPi.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
15/05/04 11:46:55 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null}
15/05/04 11:46:55 WARN util.Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.
15/05/04 11:46:55 INFO server.Server: jetty-8.y.z-SNAPSHOT
15/05/04 11:46:55 INFO server.AbstractConnector: Started
SelectChann...@0.0.0.0:4041
15/05/04 11:46:55 INFO util.Utils: Successfully started service 'SparkUI' on port 4041.
15/05/04 11:46:55 INFO ui.SparkUI: Started SparkUI at http://ri:4041
15/05/04 11:46:57 INFO spark.SparkContext: Added JAR file:/home/guptapu/spark-1.3.0-bin-hadoop2.3/lib/spark-examples-1.3.0-hadoop2.3.0.jar at
http://192.168.140.254:57307/jars/spark-examples-1.3.0-hadoop2.3.0.jar with timestamp 1430754417664
15/05/04 11:46:57 INFO executor.Executor: Starting executor ID <driver> on host localhost
15/05/04 11:46:57 INFO util.AkkaUtils: Connecting to HeartbeatReceiver: akka.tcp://sparkDriver@ri:51041/user/HeartbeatReceiver
15/05/04 11:46:57 INFO netty.NettyBlockTransferService: Server created on 33815
15/05/04 11:46:57 INFO storage.BlockManagerMaster: Trying to register BlockManager
15/05/04 11:46:57 INFO storage.BlockManagerMasterActor: Registering block manager localhost:33815 with 265.4 MB RAM, BlockManagerId(<driver>, localhost, 33815)
15/05/04 11:46:57 INFO storage.BlockManagerMaster: Registered BlockManager