java.util.NoSuchElementException: key not found: date


Safat Siddiqui

Feb 18, 2016, 2:20:16 AM
to DataStax Spark Connector for Apache Cassandra
Hi,

I'm getting an exception when loading data from Cassandra into Spark. Could you please tell me where the problem is? In Cassandra I have a primary key column with the "date" data type. My Spark version is 1.5.1 and my connector version is 2.2.4. The connector dependency is:

<dependency>
    <groupId>com.datastax.spark</groupId>
    <artifactId>spark-cassandra-connector_2.10</artifactId>
    <version>1.5.0-M3</version>
</dependency>


Here is my sample code:

import java.util.UUID
import org.apache.log4j.{ Level, Logger, LogManager }
import org.joda.time.DateTime
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import com.datastax.spark.connector._
import org.apache.spark.sql.cassandra.CassandraSQLContext
import org.apache.spark.sql.cassandra._
import org.apache.spark.sql._
import com.datastax.spark.connector.cql.CassandraConnector

val rdd = sc.cassandraTable(keyspace, table_name)
val cnt = rdd.count
println("count is: " + cnt)
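Here sc is a SparkContext configured for the connector, set up roughly like this (the host and the keyspace/table names below are placeholders for my actual values):

import org.apache.spark.{SparkConf, SparkContext}

// Minimal context setup; "127.0.0.1" stands in for the cluster address
val conf = new SparkConf()
  .setAppName("UpdateWishTable")
  .setMaster("local[*]")
  .set("spark.cassandra.connection.host", "127.0.0.1")
val sc = new SparkContext(conf)
val keyspace = "keyspace"
val table_name = "wish_counter"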


And I'm getting an error like this:

16/02/18 12:45:04 INFO CassandraConnector: Connected to Cassandra cluster: RC2
Exception in thread "main" java.util.NoSuchElementException: key not found: date
at scala.collection.MapLike$class.default(MapLike.scala:228)
at scala.collection.AbstractMap.default(Map.scala:58)
at scala.collection.MapLike$class.apply(MapLike.scala:141)
at scala.collection.AbstractMap.apply(Map.scala:58)
at com.datastax.spark.connector.types.ColumnType$.fromDriverType(ColumnType.scala:81)
at com.datastax.spark.connector.cql.ColumnDef$.apply(Schema.scala:117)
at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchPartitionKey$1.apply(Schema.scala:199)
at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchPartitionKey$1.apply(Schema.scala:198)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.Iterator$class.foreach(Iterator.scala:727)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchPartitionKey(Schema.scala:198)
at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:239)
at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:238)
at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
at scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchTables$1(Schema.scala:238)
at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:247)
at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:246)
at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
at scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1(Schema.scala:246)
at com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:252)
at com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:249)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:121)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:120)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:120)
at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:249)
at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:51)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:59)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:59)
at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:146)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:59)
at com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:143)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
at scala.Option.getOrElse(Option.scala:120)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1919)
at org.apache.spark.rdd.RDD.count(RDD.scala:1121)
at org.qm.UpdateWishTable$.main(UpdateWishTable.scala:27)
at org.qm.UpdateWishTable.main(UpdateWishTable.scala)
16/02/18 12:45:09 INFO CassandraConnector: Disconnected from Cassandra cluster: RC2

Russell Spitzer

Feb 18, 2016, 1:15:32 PM
to DataStax Spark Connector for Apache Cassandra
There is no connector version 2.2.4, so I'm guessing that's actually your Cassandra version. The date type was added in Cassandra 2.2, and connector support for it only landed in 1.5.0-RC1; your 1.5.0-M3 build doesn't recognize the type when fetching the table schema, which is the "key not found: date" you're seeing. Try upgrading the connector to 1.5.0-RC1 or later.
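For example (assuming you're on Maven), the dependency would become:

<dependency>
    <groupId>com.datastax.spark</groupId>
    <artifactId>spark-cassandra-connector_2.10</artifactId>
    <version>1.5.0-RC1</version>
</dependency>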

Safat Siddiqui

Feb 23, 2016, 12:17:53 AM
to spark-conn...@lists.datastax.com
Hello Russell,

Thanks for your solution. My Cassandra version is 2.2.4; I mistyped it earlier as the connector version.
After upgrading to 1.5.0-RC1 I have date support, but now I'm getting an error whenever I try to save data back into Cassandra. Would you please help me figure out the problem? I'm sharing my sample code:


import java.util.UUID
import org.joda.time.DateTime

class WishCountTable extends Serializable {
  var wish_date: DateTime = new DateTime(0)
  var wish_published_time: UUID = new UUID(0L, 0L)
  var wish_counter_value: Long = 0L
}

In the main method:

val wishCounterRDD = sc.cassandraTable[WishCountTable](keyspace, "wish_counter")

// Reset every counter to zero, then save the rows back
val saveRDD = wishCounterRDD.map { row =>
  row.wish_counter_value = 0L
  row
}

saveRDD.saveToCassandra(keyspace, "wish_counter", SomeColumns("wish_date", "wish_published_time", "wish_counter_value"))

Error:
16/02/23 11:08:56 ERROR TaskResultGetter: Could not deserialize TaskEndReason: ClassNotFound with classloader org.apache.spark.util.MutableURLClassLoader@4d41cee
16/02/23 11:08:56 ERROR TaskResultGetter: Could not deserialize TaskEndReason: ClassNotFound with classloader org.apache.spark.util.MutableURLClassLoader@4d41cee
16/02/23 11:08:56 ERROR TaskResultGetter: Could not deserialize TaskEndReason: ClassNotFound with classloader org.apache.spark.util.MutableURLClassLoader@4d41cee
16/02/23 11:08:56 ERROR TaskResultGetter: Could not deserialize TaskEndReason: ClassNotFound with classloader org.apache.spark.util.MutableURLClassLoader@4d41cee
16/02/23 11:08:56 ERROR TaskResultGetter: Could not deserialize TaskEndReason: ClassNotFound with classloader org.apache.spark.util.MutableURLClassLoader@4d41cee
16/02/23 11:08:56 ERROR TaskSetManager: Task 2 in stage 2.0 failed 4 times; aborting job
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 2 in stage 2.0 failed 4 times, most recent failure: Lost task 2.3 in stage 2.0 (TID 18, 192.168.3.159): UnknownReason
Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1283)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1271)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1270)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1270)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:697)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:697)
    at scala.Option.foreach(Option.scala:236)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:697)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1496)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1458)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1447)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:567)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1822)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1835)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1912)
    at com.datastax.spark.connector.RDDFunctions.saveToCassandra(RDDFunctions.scala:37)
    at org.qm.UpdateWishTable$.main(UpdateWishTable.scala:81)
    at org.qm.UpdateWishTable.main(UpdateWishTable.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:672)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/02/23 11:08:56 ERROR TaskResultGetter: Could not deserialize TaskEndReason: ClassNotFound with classloader org.apache.spark.util.MutableURLClassLoader@4d41cee




Thanks,
Safat Siddiqui
Student
Department of CSE
Shahjalal University of Science and Technology
Sylhet, Bangladesh.

Russell Spitzer

Feb 23, 2016, 1:13:03 AM
to spark-conn...@lists.datastax.com
I'm guessing the issue is that your WishCountTable class is not being properly set on the executor classpath. Unfortunately you haven't sent the full trace, so it's impossible to tell which class was not found. Try looking at the executor logs for your job (stdout and stderr in a directory created just for this application run, most likely /tmp/spark/work or something like that).

I would also recommend rewriting your row class as a case class. Your current class:

class WishCountTable extends Serializable {
  var wish_date: DateTime = new DateTime(0)
  var wish_published_time: UUID = new UUID(0L, 0L)
  var wish_counter_value: Long = 0L
}

uses mutable variables (and you won't be changing them, since you are using the rows in an RDD).

I would suggest:

case class WishCountRow(wish_date: DateTime, wish_published_time: UUID, wish_counter_value: Long)
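With a case class, resetting the counter becomes an immutable copy instead of a field mutation; a rough sketch, assuming the same keyspace and table names:

val rows = sc.cassandraTable[WishCountRow](keyspace, "wish_counter")
val reset = rows.map(_.copy(wish_counter_value = 0L))  // new row per record, no mutation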

Anyway, please send your executor logs with the full exception. It would also be very helpful to see how you are starting your application (what command-line call are you using?).

Safat Siddiqui

Feb 23, 2016, 6:25:33 AM
to spark-conn...@lists.datastax.com
Thanks, Russell, for your quick reply. I will be out of town for a week; after that I will follow your suggestions and let you know what happens.



Safat Siddiqui

Mar 6, 2016, 7:44:16 AM
to spark-conn...@lists.datastax.com
Hello Russell,

I'm using Scala IDE (Eclipse), and my Spark master is set to 'local' mode. The connector dependency is:

<dependencies>
    <dependency>
        <groupId>com.datastax.spark</groupId>
        <artifactId>spark-cassandra-connector_2.10</artifactId>
        <version>1.5.0-RC1</version>
    </dependency>
</dependencies>

My Cassandra table is:

CREATE TABLE keyspace.wish_counter (
    wish_date date,
    wish_published_time timeuuid,
    wish_counter_value counter,
    PRIMARY KEY (wish_date, wish_published_time)
) WITH CLUSTERING ORDER BY (wish_published_time ASC);


Whenever I use the following 'Test' class, I can save data into Cassandra without any problem. But when I use the 'WishCountTable' class, I get an error.

class Test extends Serializable {
  var word: String = ""
  var count: Integer = 0
}

class WishCountTable extends Serializable {
  var wish_date: DateTime = new DateTime(0)
  var wish_published_time: UUID = new UUID(0L, 0L)
  var wish_counter_value: Long = 0L
}

ERROR:
16/03/06 18:17:59 ERROR Executor: Exception in task 0.0 in stage 1.0 (TID 5)
com.datastax.spark.connector.types.TypeConversionException: Cannot convert object 2016-02-21T06:00:00.000+06:00 of type class org.joda.time.DateTime to com.datastax.driver.core.LocalDate.
    at com.datastax.spark.connector.types.TypeConverter$$anonfun$convert$1.apply(TypeConverter.scala:45)
    at com.datastax.spark.connector.types.TypeConverter$$anonfun$convert$1.apply(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$$anonfun$convertPF$20.applyOrElse(TypeConverter.scala:447)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:437)
    at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:56)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$.convert(TypeConverter.scala:437)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter$$anonfun$convertPF$28.applyOrElse(TypeConverter.scala:756)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:749)
    at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:56)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.convert(TypeConverter.scala:749)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1$$anonfun$convertPF$1$$anonfun$applyOrElse$1.apply$mcVI$sp(MappedToGettableDataConverter.scala:170)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1$$anonfun$convertPF$1.applyOrElse(MappedToGettableDataConverter.scala:169)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1.convert(MappedToGettableDataConverter.scala:18)
    at com.datastax.spark.connector.writer.DefaultRowWriter.readColumnValues(DefaultRowWriter.scala:21)
    at com.datastax.spark.connector.writer.BoundStatementBuilder.bind(BoundStatementBuilder.scala:35)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:106)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:31)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.foreach(GroupingBatchBuilder.scala:31)
    at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:155)
    at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:139)

    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
    at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
    at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
    at com.datastax.spark.connector.writer.TableWriter.write(TableWriter.scala:139)
    at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:37)
    at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:37)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
    at org.apache.spark.scheduler.Task.run(Task.scala:88)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
16/03/06 18:17:59 ERROR TaskSetManager: Task 0 in stage 1.0 failed 1 times; aborting job
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 1.0 failed 1 times, most recent failure: Lost task 0.0 in stage 1.0 (TID 5, localhost): com.datastax.spark.connector.types.TypeConversionException: Cannot convert object 2016-02-21T06:00:00.000+06:00 of type class org.joda.time.DateTime to com.datastax.driver.core.LocalDate.
    at com.datastax.spark.connector.types.TypeConverter$$anonfun$convert$1.apply(TypeConverter.scala:45)
    at com.datastax.spark.connector.types.TypeConverter$$anonfun$convert$1.apply(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$$anonfun$convertPF$20.applyOrElse(TypeConverter.scala:447)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:437)
    at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:56)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$.convert(TypeConverter.scala:437)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter$$anonfun$convertPF$28.applyOrElse(TypeConverter.scala:756)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:749)
    at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:56)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.convert(TypeConverter.scala:749)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1$$anonfun$convertPF$1$$anonfun$applyOrElse$1.apply$mcVI$sp(MappedToGettableDataConverter.scala:170)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1$$anonfun$convertPF$1.applyOrElse(MappedToGettableDataConverter.scala:169)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1.convert(MappedToGettableDataConverter.scala:18)
    at com.datastax.spark.connector.writer.DefaultRowWriter.readColumnValues(DefaultRowWriter.scala:21)
    at com.datastax.spark.connector.writer.BoundStatementBuilder.bind(BoundStatementBuilder.scala:35)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:106)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:31)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.foreach(GroupingBatchBuilder.scala:31)
    at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:155)
    at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:139)

    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
    at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
    at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
    at com.datastax.spark.connector.writer.TableWriter.write(TableWriter.scala:139)
    at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:37)
    at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:37)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
    at org.apache.spark.scheduler.Task.run(Task.scala:88)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)


Driver stacktrace:
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1283)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1271)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1270)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1270)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:697)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:697)
    at scala.Option.foreach(Option.scala:236)
    at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:697)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1496)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1458)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1447)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:567)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1822)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1835)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1912)
    at com.datastax.spark.connector.RDDFunctions.saveToCassandra(RDDFunctions.scala:37)
    at org.qm.UpdateWishTable$.main(UpdateWishTable.scala:87)
    at org.qm.UpdateWishTable.main(UpdateWishTable.scala)
Caused by: com.datastax.spark.connector.types.TypeConversionException: Cannot convert object 2016-02-21T06:00:00.000+06:00 of type class org.joda.time.DateTime to com.datastax.driver.core.LocalDate.
    at com.datastax.spark.connector.types.TypeConverter$$anonfun$convert$1.apply(TypeConverter.scala:45)
    at com.datastax.spark.connector.types.TypeConverter$$anonfun$convert$1.apply(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$$anonfun$convertPF$20.applyOrElse(TypeConverter.scala:447)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:437)
    at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:56)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$.convert(TypeConverter.scala:437)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter$$anonfun$convertPF$28.applyOrElse(TypeConverter.scala:756)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:749)
    at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:56)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.convert(TypeConverter.scala:749)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1$$anonfun$convertPF$1$$anonfun$applyOrElse$1.apply$mcVI$sp(MappedToGettableDataConverter.scala:170)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1$$anonfun$convertPF$1.applyOrElse(MappedToGettableDataConverter.scala:169)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1.convert(MappedToGettableDataConverter.scala:18)
    at com.datastax.spark.connector.writer.DefaultRowWriter.readColumnValues(DefaultRowWriter.scala:21)
    at com.datastax.spark.connector.writer.BoundStatementBuilder.bind(BoundStatementBuilder.scala:35)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:106)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:31)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.foreach(GroupingBatchBuilder.scala:31)
    at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:155)
    at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:139)

    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
    at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
    at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
    at com.datastax.spark.connector.writer.TableWriter.write(TableWriter.scala:139)
    at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:37)
    at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:37)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
    at org.apache.spark.scheduler.Task.run(Task.scala:88)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
16/03/06 18:17:59 ERROR Executor: Exception in task 1.0 in stage 1.0 (TID 6)
com.datastax.spark.connector.types.TypeConversionException: Cannot convert object 2016-02-28T06:00:00.000+06:00 of type class org.joda.time.DateTime to com.datastax.driver.core.LocalDate.
    at com.datastax.spark.connector.types.TypeConverter$$anonfun$convert$1.apply(TypeConverter.scala:45)
    at com.datastax.spark.connector.types.TypeConverter$$anonfun$convert$1.apply(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$$anonfun$convertPF$20.applyOrElse(TypeConverter.scala:447)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:437)
    at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:56)
    at com.datastax.spark.connector.types.TypeConverter$LocalDateConverter$.convert(TypeConverter.scala:437)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter$$anonfun$convertPF$28.applyOrElse(TypeConverter.scala:756)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.com$datastax$spark$connector$types$NullableTypeConverter$$super$convert(TypeConverter.scala:749)
    at com.datastax.spark.connector.types.NullableTypeConverter$class.convert(TypeConverter.scala:56)
    at com.datastax.spark.connector.types.TypeConverter$OptionToNullConverter.convert(TypeConverter.scala:749)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1$$anonfun$convertPF$1$$anonfun$applyOrElse$1.apply$mcVI$sp(MappedToGettableDataConverter.scala:170)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1$$anonfun$convertPF$1.applyOrElse(MappedToGettableDataConverter.scala:169)
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:43)
    at com.datastax.spark.connector.writer.MappedToGettableDataConverter$$anon$1.convert(MappedToGettableDataConverter.scala:18)
    at com.datastax.spark.connector.writer.DefaultRowWriter.readColumnValues(DefaultRowWriter.scala:21)
    at com.datastax.spark.connector.writer.BoundStatementBuilder.bind(BoundStatementBuilder.scala:35)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:106)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.next(GroupingBatchBuilder.scala:31)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at com.datastax.spark.connector.writer.GroupingBatchBuilder.foreach(GroupingBatchBuilder.scala:31)
    at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:155)
    at com.datastax.spark.connector.writer.TableWriter$$anonfun$write$1.apply(TableWriter.scala:139)

    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
    at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
    at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
    at com.datastax.spark.connector.writer.TableWriter.write(TableWriter.scala:139)
    at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:37)
    at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:37)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
    at org.apache.spark.scheduler.Task.run(Task.scala:88)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
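Looking at the trace, the writer seems to want a com.datastax.driver.core.LocalDate for the `date` column rather than a Joda DateTime. Would declaring the field with the driver's type fix it? Just a guess:

import java.util.UUID
import com.datastax.driver.core.LocalDate

class WishCountTable extends Serializable {
  var wish_date: LocalDate = LocalDate.fromDaysSinceEpoch(0)  // driver type for CQL `date`
  var wish_published_time: UUID = new UUID(0L, 0L)
  var wish_counter_value: Long = 0L
}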
 