// imports ...
val test = sc.newAPIHadoopRDD(conf, classOf[AvroTableInputFormat[Data]], classOf[ImmutableBytesWritable], classOf[Data])
test.count()
$ MASTER=local[4] ./spark-shell -cp <some jars to include>
$ MASTER=<master_ip> ./spark-shell -cp <some jars to include>
java.lang.IllegalStateException: unread block data
	at java.io.ObjectInputStream$BlockDataInputStream.setBlockDataMode(ObjectInputStream.java:2400)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1379)
	at java.io.ObjectInputStream.skipCustomData(ObjectInputStream.java:1935)
	at java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1829)
	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
	at spark.JavaDeserializationStream.readObject(JavaSerializer.scala:23)
	at spark.JavaSerializerInstance.deserialize(JavaSerializer.scala:45)
	at spark.executor.Executor$TaskRunner.run(Executor.scala:98)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:722)
scala> sc.parallelize(1 to 10000, 100).count()
...
res0: Long = 10000
--
You received this message because you are subscribed to the Google Groups "Spark Users" group.
To unsubscribe from this group and stop receiving emails from it, send an email to spark-users...@googlegroups.com.
For more options, visit https://groups.google.com/groups/opt_out.