2020-06-15 15:48:13,765 - ERROR [Executor task launch worker for task 0:o.a.s.e.Executor@91] - Exception in task 0.0 in stage 0.0 (TID 0)
java.lang.LinkageError: loader constraint violation: loader (instance of org/apache/spark/repl/ExecutorClassLoader) previously initiated loading for a different type with name "io/cdap/plugin/common/script/ScriptContext"
    at java.lang.Class.forName0(Native Method) ~[na:1.8.0_252]
    at java.lang.Class.forName(Class.java:264) ~[na:1.8.0_252]
    at com.sun.proxy.$Proxy133.<clinit>(Unknown Source) ~[na:na]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_252]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_252]
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_252]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[na:1.8.0_252]
    at java.lang.reflect.Proxy.newProxyInstance(Proxy.java:739) ~[na:1.8.0_252]
    at py4j.Gateway.createProxy(Gateway.java:368) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at py4j.CallbackClient.getPythonServerEntryPoint(CallbackClient.java:418) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at py4j.GatewayServer.getPythonServerEntryPoint(GatewayServer.java:803) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at io.cdap.plugin.python.transform.Py4jPythonExecutor.initialize(Py4jPythonExecutor.java:194) ~[1592236085781-0/:na]
    at io.cdap.plugin.python.transform.PythonEvaluator.initialize(PythonEvaluator.java:160) ~[1592236085781-0/:na]
    at io.cdap.cdap.etl.common.plugin.WrappedTransform.lambda$initialize$3(WrappedTransform.java:72) ~[cdap-etl-core-6.2.0.jar:na]
    at io.cdap.cdap.etl.common.plugin.Caller$1.call(Caller.java:30) ~[cdap-etl-core-6.2.0.jar:na]
    at io.cdap.cdap.etl.common.plugin.WrappedTransform.initialize(WrappedTransform.java:71) ~[cdap-etl-core-6.2.0.jar:na]
    at io.cdap.cdap.etl.spark.function.TransformFunction.call(TransformFunction.java:43) ~[hydrator-spark-core2_2.11-6.2.0.jar:na]
    at io.cdap.cdap.etl.spark.Compat$FlatMapAdapter.call(Compat.java:126) ~[hydrator-spark-core2_2.11-6.2.0.jar:na]
    at org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$1$1.apply(JavaRDDLike.scala:125) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.api.java.JavaRDDLike$$anonfun$fn$1$1.apply(JavaRDDLike.scala:125) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:434) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:439) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.memory.MemoryStore.putIteratorAsValues(MemoryStore.scala:215) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1016) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1007) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:947) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1007) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:711) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:285) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:336) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD$$anonfun$8.apply(RDD.scala:334) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1016) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager$$anonfun$doPutIterator$1.apply(BlockManager.scala:1007) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager.doPut(BlockManager.scala:947) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager.doPutIterator(BlockManager.scala:1007) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.storage.BlockManager.getOrElseUpdate(BlockManager.scala:711) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.getOrCompute(RDD.scala:334) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:285) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:287) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.scheduler.Task.run(Task.scala:100) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:325) ~[io.cdap.cdap.spark-assembly-2.1.3.jar:na]
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [na:1.8.0_252]
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [na:1.8.0_252]
    at java.lang.Thread.run(Thread.java:748) [na:1.8.0_252]
# First transform script: adds a 'file_list' field to each record.
def transform(record, emitter, context):
    file_list = "new key"
    record['file_list'] = file_list
    emitter.emit(record)

# Second transform script: sets 'form_type' to 'P' on each record.
def transform(record, emitter, context):
    record['form_type'] = 'P'
    emitter.emit(record)
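For reference, here is a minimal sketch of how one of these transform functions can be exercised outside the pipeline, assuming a hypothetical StubEmitter class and passing None for the context (the stub is illustrative only and is not part of the CDAP API):

# Illustrative local harness: a stub that collects records the same way the
# script expects emitter.emit(...) to behave; context is unused by these scripts.
class StubEmitter(object):
    def __init__(self):
        self.records = []

    def emit(self, record):
        self.records.append(record)

def transform(record, emitter, context):
    record['form_type'] = 'P'
    emitter.emit(record)

emitter = StubEmitter()
transform({'name': 'example'}, emitter, None)
print(emitter.records)  # [{'name': 'example', 'form_type': 'P'}]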