NoSuchMethodError in connecting Spark-shell with Cassandra


Shyam Sarkar

Sep 22, 2015, 5:59:24 PM
to DataStax Spark Connector for Apache Cassandra
Hello,

I am getting errors when executing methods of the spark-cassandra-connector. Please see the steps below and suggest what I should do next:

=====================================================

sarkar@sarkar-D900C:~$ ps -efl | grep spark
0 S sarkar 11628 1 1 80 0 - 761289 futex_ 14:27 ? 00:00:06 /usr/bin/java -Dlog4j.configuration=file:///home/sarkar/spark/spark-1.3.0/conf/log4j.properties -cp /home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/sbin/../conf/:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-assembly-1.4.1-hadoop2.6.0.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-rdbms-3.2.9.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-api-jdo-3.2.6.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-core-3.2.10.jar -Xms512m -Xmx512m -XX:MaxPermSize=256m org.apache.spark.deploy.master.Master --ip sarkar-D900C --port 7077 --webui-port 8080
0 S sarkar 12009 1 1 80 0 - 759489 futex_ 14:28 ? 00:00:05 /usr/lib/jvm/java-7-oracle/jre/bin/java -Dlog4j.configuration=file:///home/sarkar/spark/spark-1.3.0/conf/log4j.properties -cp /home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/sbin/../conf/:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-assembly-1.4.1-hadoop2.6.0.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-rdbms-3.2.9.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-api-jdo-3.2.6.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-core-3.2.10.jar -Xms512m -Xmx512m -XX:MaxPermSize=256m org.apache.spark.deploy.worker.Worker --webui-port 8081 spark://sarkar-D900C:7077
0 S sarkar 12104 1 1 80 0 - 759489 futex_ 14:28 ? 00:00:05 /usr/lib/jvm/java-7-oracle/jre/bin/java -Dlog4j.configuration=file:///home/sarkar/spark/spark-1.3.0/conf/log4j.properties -cp /home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/sbin/../conf/:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-assembly-1.4.1-hadoop2.6.0.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-rdbms-3.2.9.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-api-jdo-3.2.6.jar:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/datanucleus-core-3.2.10.jar -Xms512m -Xmx512m -XX:MaxPermSize=256m org.apache.spark.deploy.worker.Worker --webui-port 8082 spark://sarkar-D900C:7077
0 S sarkar 13693 13566 0 80 0 - 3394 pipe_w 14:34 pts/2 00:00:00 grep --color=auto spark
sarkar@sarkar-D900C:~$
sarkar@sarkar-D900C:~$ ./spark/spark-1.4.1-bin-hadoop2.6/sbin/stop-all.sh
sarkar@localhost's password:
localhost: stopping org.apache.spark.deploy.worker.Worker
localhost: stopping org.apache.spark.deploy.worker.Worker
stopping org.apache.spark.deploy.master.Master
sarkar@sarkar-D900C:~$
sarkar@sarkar-D900C:~$
sarkar@sarkar-D900C:~$ ./spark/spark-1.4.1-bin-hadoop2.6/bin/spark-shell --driver-class-path $(echo ./spark/spark-cassandra-connector/target/scala-2.11/*.jar ./spark/spark-1.4.1-bin-hadoop2.6/lib/* |sed 's/ /:/g')
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-assembly-1.4.1-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-examples-1.4.1-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 1.4.1
      /_/

Using Scala version 2.10.4 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_60)
Type in expressions to have them evaluated.
Type :help for more information.
Spark context available as sc.
SQL context available as sqlContext.

scala> history
<console>:20: error: not found: value history
history
^

scala>

scala> import com.datastax.spark.connector._
import com.datastax.spark.connector._

scala> import org.apache.spark.SparkContext
import org.apache.spark.SparkContext

scala> import org.apache.spark.SparkContext._
import org.apache.spark.SparkContext._

scala> import org.apache.spark.SparkConf
import org.apache.spark.SparkConf

scala> sc.stop

scala> val conf = new SparkConf(true).set("spark.cassandra.connection.host", "127.0.0.1")
conf: org.apache.spark.SparkConf = org.apache.spark.SparkConf@7bdfb70a

scala> val sc = new SparkContext("local[2]", "test", conf)
sc: org.apache.spark.SparkContext = org.apache.spark.SparkContext@75c10c36

scala> val rdd = sc.cassandraTable("test", "kv")
rdd: com.datastax.spark.connector.rdd.CassandraTableScanRDD[com.datastax.spark.connector.CassandraRow] = CassandraTableScanRDD[0] at RDD at CassandraRDD.scala:18

scala> println(rdd.first)
java.lang.NoSuchMethodError: scala.runtime.ObjectRef.zero()Lscala/runtime/ObjectRef;
at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:150)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:150)
at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31)
at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56)
at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:81)
at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:120)
at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:241)
at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:51)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:59)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:59)
at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:150)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:59)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:143)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
at scala.Option.getOrElse(Option.scala:120)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
at org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1255)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:286)
at org.apache.spark.rdd.RDD.take(RDD.scala:1250)
at com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:121)
at com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:122)
at org.apache.spark.rdd.RDD$$anonfun$first$1.apply(RDD.scala:1290)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:286)
at org.apache.spark.rdd.RDD.first(RDD.scala:1289)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:39)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:41)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:43)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:45)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:47)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:49)
at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:51)
at $iwC$$iwC$$iwC$$iwC.<init>(<console>:53)
at $iwC$$iwC$$iwC.<init>(<console>:55)
at $iwC$$iwC.<init>(<console>:57)
at $iwC.<init>(<console>:59)
at <init>(<console>:61)
at .<init>(<console>:65)
at .<clinit>(<console>)
at .<init>(<console>:7)
at .<clinit>(<console>)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1338)
at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
at org.apache.spark.repl.Main$.main(Main.scala:31)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:665)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:170)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:193)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:112)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)


scala> val rdd = sc.cassandraTable("test", "kv")
rdd: com.datastax.spark.connector.rdd.CassandraTableScanRDD[com.datastax.spark.connector.CassandraRow] = CassandraTableScanRDD[2] at RDD at CassandraRDD.scala:18

scala> val collection = sc.parallelize(Seq(("key3", 3), ("key4", 4)))
collection: org.apache.spark.rdd.RDD[(String, Int)] = ParallelCollectionRDD[3] at parallelize at <console>:31

scala> collection.saveToCassandra("test", "kv", SomeColumns("key", "value"))
java.lang.NoSuchMethodError: scala.reflect.api.JavaUniverse.runtimeMirror(Ljava/lang/ClassLoader;)Lscala/reflect/api/JavaUniverse$JavaMirror;
at com.datastax.spark.connector.mapper.ColumnMapper$.tuple2ColumnMapper(ColumnMapper.scala:57)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:39)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:41)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:43)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:45)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:47)
at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:49)
at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:51)
at $iwC$$iwC$$iwC$$iwC.<init>(<console>:53)
at $iwC$$iwC$$iwC.<init>(<console>:55)
at $iwC$$iwC.<init>(<console>:57)
at $iwC.<init>(<console>:59)
at <init>(<console>:61)
at .<init>(<console>:65)
at .<clinit>(<console>)
at .<init>(<console>:7)
at .<clinit>(<console>)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1338)
at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
at org.apache.spark.repl.Main$.main(Main.scala:31)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:665)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:170)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:193)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:112)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)


scala>
=========================================================

Thanks,
SS

Piotr Kołaczkowski

Sep 22, 2015, 6:02:35 PM
to spark-conn...@lists.datastax.com

Looks like you are using the wrong Scala version for that connector build. You must use a connector version matching the Scala version. Check your classpath.
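
For example, with the prebuilt Spark 1.4.1 package (which ships with Scala 2.10), something like the following should line the versions up (the connector version here is only illustrative; the point is the _2.10 suffix):

./spark/spark-1.4.1-bin-hadoop2.6/bin/spark-shell \
  --packages com.datastax.spark:spark-cassandra-connector_2.10:1.4.0 \
  --conf spark.cassandra.connection.host=127.0.0.1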


Shyam Sarkar

Sep 22, 2015, 6:11:57 PM
to DataStax Spark Connector for Apache Cassandra
I am using Scala 2.11.4. I do not know why spark-shell reports
Scala 2.10.4.

See below:

sarkar@sarkar-D900C:~$ scala -version
Scala code runner version 2.11.4 -- Copyright 2002-2013, LAMP/EPFL

How do I fix it?

Thx.

Piotr Kołaczkowski

Sep 22, 2015, 6:16:57 PM
to spark-conn...@lists.datastax.com

Check your classpath. Why do you think it would use your own Scala installation? Spark comes with its own.

Shyam Sarkar

Sep 22, 2015, 9:52:41 PM
to DataStax Spark Connector for Apache Cassandra
I have the following files in lib:

sarkar@sarkar-D900C:~$ ls ./spark/spark-1.4.1-bin-hadoop2.6/lib/
apache-cassandra-clientutil-2.2.1.jar
apache-cassandra-thrift-2.2.1.jar
cassandra-driver-core-2.2.0-rc2-SNAPSHOT-20150617-shaded.jar
datanucleus-api-jdo-3.2.6.jar
datanucleus-core-3.2.10.jar
datanucleus-rdbms-3.2.9.jar
guava-16.0.jar
joda-time-2.4.jar
libthrift-0.9.2.jar
spark-1.4.1-yarn-shuffle.jar
spark-assembly-1.4.1-hadoop2.6.0.jar
spark-cassandra-connector-assembly-1.5.0-M1-SNAPSHOT.jar
spark-examples-1.4.1-hadoop2.6.0.jar

I used the following command to run spark-shell with --driver-class-path:

./spark/spark-1.4.1-bin-hadoop2.6/bin/spark-shell --driver-class-path $(echo ./spark/spark-cassandra-connector/target/scala-2.11/*.jar ./spark/spark-1.4.1-bin-hadoop2.6/lib/* |sed 's/ /:/g')

I do not have anything in the path
./spark/spark-cassandra-connector/target/scala-2.11/*.jar

Any suggestions about what is missing?
Regards,
SS

Etienne

Sep 23, 2015, 3:19:30 AM
to spark-conn...@lists.datastax.com
Did you try https://github.com/crakjie/sbt-spark-plugin ?

Sent from my iPhone

Artem Aliev

Sep 23, 2015, 4:28:35 AM
to spark-conn...@lists.datastax.com
The Spark distribution is built with Scala 2.10 and includes Scala 2.10 in the assembly, which is why your spark-shell reports that it is running 2.10.
So either build Spark yourself for Scala 2.11 (http://spark.apache.org/docs/latest/building-spark.html#building-for-scala-211) or use the Scala 2.10 connector.
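
A quick way to check what a local connector build targets: sbt writes the assembly under a directory named after the Scala version it was built for, so (paths assumed from your earlier commands):

ls ./spark/spark-cassandra-connector/target/
# scala-2.10/  -> assembly built for Scala 2.10 (matches the prebuilt Spark 1.4.1)
# scala-2.11/  -> assembly built for Scala 2.11 (will not run on it)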

Shyam Sarkar

Sep 23, 2015, 3:32:20 PM
to DataStax Spark Connector for Apache Cassandra

I built the Spark connector with Scala 2.10 (Spark 1.4.1 is already built with 2.10),
but now I am getting an AbstractMethodError:

sarkar@sarkar-D900C:~$ ./spark/spark-1.4.1-bin-hadoop2.6/bin/spark-shell --driver-class-path $(echo ./spark/spark-cassandra-connector/target/scala-2.11/*.jar ./spark/spark-1.4.1-bin-hadoop2.6/lib/* |sed 's/ /:/g')
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-assembly-1.4.1-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-examples-1.4.1-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 1.4.1
      /_/

Using Scala version 2.10.4 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_60)
Type in expressions to have them evaluated.
Type :help for more information.
Spark context available as sc.
SQL context available as sqlContext.

scala> import com.datastax.spark.connector._
import com.datastax.spark.connector._

scala> import org.apache.spark.SparkContext
import org.apache.spark.SparkContext

scala> import org.apache.spark.SparkContext._
import org.apache.spark.SparkContext._

scala> import org.apache.spark.SparkConf
import org.apache.spark.SparkConf

scala> sc.stop

scala> val conf = new SparkConf(true).set("spark.cassandra.connection.host", "127.0.0.1")

conf: org.apache.spark.SparkConf = org.apache.spark.SparkConf@19dad040

scala> val sc = new SparkContext("local[2]", "test", conf)

sc: org.apache.spark.SparkContext = org.apache.spark.SparkContext@7a4c0965

scala> val rdd = sc.cassandraTable("test", "kv")

rdd: com.datastax.spark.connector.rdd.CassandraTableScanRDD[com.datastax.spark.connector.CassandraRow] = CassandraTableScanRDD[0] at RDD at CassandraRDD.scala:15

scala> println(rdd.first)
java.lang.AbstractMethodError: com.datastax.spark.connector.cql.LocalNodeFirstLoadBalancingPolicy.close()V
at com.datastax.driver.core.Cluster$Manager.close(Cluster.java:1417)
at com.datastax.driver.core.Cluster$Manager.access$200(Cluster.java:1167)
at com.datastax.driver.core.Cluster.closeAsync(Cluster.java:461)
at com.datastax.driver.core.Cluster.close(Cluster.java:472)
at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:163)


scala>
======================================

Can I please get some help ?
Thanks,
SS

Artem Aliev

Sep 23, 2015, 5:04:57 PM
to spark-conn...@lists.datastax.com
You need to use Spark connector v1.5 with Cassandra 2.2.



Shyam Sarkar

Sep 24, 2015, 1:17:46 AM
to DataStax Spark Connector for Apache Cassandra
Hello,

I found the following connector versions and their relationships to Spark, Cassandra, and the Cassandra Java driver:

The connector project has several branches, each of which map into different supported versions of Spark and Cassandra. Refer to the compatibility table below which shows the major.minor version range supported between the connector, Spark, Cassandra, and the Cassandra Java driver:

Connector   Spark      Cassandra   Cassandra Java Driver
1.5         1.5        2.1.5+      2.1 (planned 2.2)
1.4         1.4        2.1.5+      2.1
1.3         1.3        2.1.5+      2.1
1.2         1.2        2.1, 2.0    2.1
1.1         1.1, 1.0   2.1, 2.0    2.1
1.0         1.0, 0.9   2.0         2.0

Can I download and build Spark-Cassandra-Connector 1.5 with all the latest fixes for the Cassandra Java Driver (is the planned 2.2 support already implemented)?

Please suggest what exactly I should do.

Regards,
SS

Shyam Sarkar

Sep 24, 2015, 3:47:30 PM
to DataStax Spark Connector for Apache Cassandra
I did the following:

git clone https://github.com/datastax/spark-cassandra-connector.git

git checkout tags/v1.5.0-M1

./sbt/sbt assembly

Will it have the fix ?

Shyam Sarkar

Sep 24, 2015, 4:07:01 PM
to DataStax Spark Connector for Apache Cassandra
It did not fix the problem...

Russell Spitzer

Sep 24, 2015, 4:45:10 PM
to DataStax Spark Connector for Apache Cassandra
By stopping and recreating the SparkContext you are wiping out the executor JVMs, which had the dependencies correctly loaded. Don't shut down the context; pass the configuration via --conf instead.
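
Roughly like this: start the shell with the host set up front and keep using the sc it gives you (the jar path and version below are only placeholders for whatever assembly you actually built):

./spark/spark-1.4.1-bin-hadoop2.6/bin/spark-shell \
  --jars ./spark/spark-cassandra-connector/target/scala-2.10/spark-cassandra-connector-assembly-1.5.0-M2-SNAPSHOT.jar \
  --conf spark.cassandra.connection.host=127.0.0.1

scala> import com.datastax.spark.connector._
scala> sc.cassandraTable("test", "kv").first   // use the provided sc; no sc.stop, no new SparkContext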

On Thu, Sep 24, 2015 at 1:07 PM Shyam Sarkar <ssarkaray...@gmail.com> wrote:
It did not fix the problem...


Artem Aliev

Sep 24, 2015, 5:44:39 PM
to spark-conn...@lists.datastax.com
It will be in v1.5.0-M2.
So this will help:
#> git checkout b1.5
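
End to end that would look roughly like this (directory names assume the defaults from your earlier git clone):

cd ./spark/spark-cassandra-connector
git checkout b1.5
./sbt/sbt clean assembly
ls target/scala-2.10/    # pick up the rebuilt assembly jar from here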

On Thu, Sep 24, 2015 at 1:07 PM, Shyam Sarkar <ssarkaray...@gmail.com> wrote:
It did not fix the problem...

Shyam Sarkar

Sep 24, 2015, 6:50:37 PM
to DataStax Spark Connector for Apache Cassandra
I tried again after building spark-cassandra-connector-java-assembly-1.5.0-M2-SNAPSHOT.jar,
but now I am getting a new error:

===================
scala> println(rdd.first)
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost): java.io.IOException: Exception during preparation of SELECT "key", "value" FROM "test"."kv" WHERE token("key") > ? AND token("key") <= ? LIMIT 1 ALLOW FILTERING: com.datastax.driver.core.Session.newSimpleStatement(Ljava/lang/String;)Lcom/datastax/driver/core/SimpleStatement;
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.createStatement(CassandraTableScanRDD.scala:187)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.com$datastax$spark$connector$rdd$CassandraTableScanRDD$$fetchTokenRange(CassandraTableScanRDD.scala:201)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$13.apply(CassandraTableScanRDD.scala:227)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$13.apply(CassandraTableScanRDD.scala:227)
at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
at com.datastax.spark.connector.util.CountingIterator.hasNext(CountingIterator.scala:11)
at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:308)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
at scala.collection.AbstractIterator.to(Iterator.scala:1157)
at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1276)
at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1276)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1767)
at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1767)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
at org.apache.spark.scheduler.Task.run(Task.scala:70)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.NoSuchMethodError: com.datastax.driver.core.Session.newSimpleStatement(Ljava/lang/String;)Lcom/datastax/driver/core/SimpleStatement;
at com.datastax.spark.connector.cql.SessionProxy.invoke(SessionProxy.scala:28)
at com.sun.proxy.$Proxy24.prepare(Unknown Source)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.createStatement(CassandraTableScanRDD.scala:173)
... 27 more

Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1273)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1264)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1263)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1263)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:730)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:730)
at scala.Option.foreach(Option.scala:236)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:730)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1457)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1418)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)

==========================================

Please suggest what I can do next.
Regards,
SS

Shyam Sarkar

Sep 25, 2015, 2:39:54 PM
to DataStax Spark Connector for Apache Cassandra
Why is this error coming up?

sarkar@sarkar-D900C:~$ ./spark/spark-1.4.1-bin-hadoop2.6/bin/spark-shell --driver-class-path $(echo ./spark/spark-cassandra-connector/target/scala-2.11/*.jar ./spark/spark-1.4.1-bin-hadoop2.6/lib/* |sed 's/ /:/g')
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-assembly-1.4.1-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/sarkar/spark/spark-1.4.1-bin-hadoop2.6/lib/spark-examples-1.4.1-hadoop2.6.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 1.4.1
      /_/

Using Scala version 2.10.4 (Java HotSpot(TM) 64-Bit Server VM, Java 1.7.0_60)
Type in expressions to have them evaluated.
Type :help for more information.
Spark context available as sc.
SQL context available as sqlContext.

scala> import org.apache.spark.SparkContext
import org.apache.spark.SparkContext

scala> import org.apache.spark.SparkContext._
import org.apache.spark.SparkContext._

scala> import org.apache.spark.SparkConf
import org.apache.spark.SparkConf

scala> sc.stop

scala> val conf = new SparkConf(true).set("spark.cassandra.connection.host", "127.0.0.1")

conf: org.apache.spark.SparkConf = org.apache.spark.SparkConf@4d7c97ae

scala> val sc = new SparkContext("local[2]", "test", conf)

sc: org.apache.spark.SparkContext = org.apache.spark.SparkContext@d895866

scala> import com.datastax.spark.connector._
import com.datastax.spark.connector._

scala> val rdd = sc.cassandraTable("test", "kv")


rdd: com.datastax.spark.connector.rdd.CassandraTableScanRDD[com.datastax.spark.connector.CassandraRow] = CassandraTableScanRDD[0] at RDD at CassandraRDD.scala:15

scala> rdd.first


scala>


I created the assembly spark-cassandra-connector-java-assembly-1.5.0-M2-SNAPSHOT.jar with the latest fixes, as suggested.

Thanks,
SS
