Hi,
I have created a small Scala program that queries some data using a JDBC driver.
In the IDE and with different JVMs (1.7, 1.8, etc.), invoking the Scala program from the command line works great.
But on one of the target systems I get the following error, and I don't know whether that's a problem with my code, the JVM implementation, Spark, or Scala. The exception is attached below — any ideas how to fix this?
java.lang.ExceptionInInitializerError
at java.lang.J9VMInternals.ensureError(J9VMInternals.java:134)
at java.lang.J9VMInternals.recordInitializationFailure(J9VMInternals.java:123)
at org.apache.spark.sql.jdbc.JDBCRDD$.org$apache$spark$sql$jdbc$JDBCRDD$$getCatalystType(JDBCRDD.scala:62)
at org.apache.spark.sql.jdbc.JDBCRDD$$anonfun$1.apply(JDBCRDD.scala:137)
at org.apache.spark.sql.jdbc.JDBCRDD$$anonfun$1.apply(JDBCRDD.scala:137)
at scala.Option.getOrElse(Option.scala:120)
at org.apache.spark.sql.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:136)
at org.apache.spark.sql.jdbc.JDBCRelation.<init>(JDBCRelation.scala:128)
at org.apache.spark.sql.jdbc.DefaultSource.createRelation(JDBCRelation.scala:113)
at org.apache.spark.sql.sources.ResolvedDataSource$.apply(ddl.scala:269)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:114)
at com.ibm.imstest.IMSAccessSample$delayedInit$body.apply(IMSAccessSample.scala:26)
at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:71)
at scala.App$$anonfun$main$1.apply(App.scala:71)
at scala.collection.immutable.List.foreach(List.scala:318)
at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:32)
at scala.App$class.main(App.scala:71)
at com.ibm.imstest.IMSAccessSample$.main(IMSAccessSample.scala:8)
at com.ibm.imstest.IMSAccessSample.main(IMSAccessSample.scala)
Caused by: scala.reflect.internal.MissingRequirementError: error while loading package, Scala signature package has
wrong version
expected: 5.0
found: 45.0 in scala.package
at scala.reflect.internal.MissingRequirementError$.signal(MissingRequirementError.scala:16)
at scala.reflect.runtime.JavaMirrors$JavaMirror.handleError$1(JavaMirrors.scala:535)
at scala.reflect.runtime.JavaMirrors$JavaMirror.unpickleClass(JavaMirrors.scala:584)
at scala.reflect.runtime.SymbolLoaders$TopClassCompleter.complete(SymbolLoaders.scala:32)
at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1231)
at scala.reflect.internal.SymbolTable.openPackageModule(SymbolTable.scala:244)
at scala.reflect.internal.SymbolTable.openPackageModule(SymbolTable.scala:300)
at scala.reflect.runtime.SymbolLoaders$LazyPackageType.complete(SymbolLoaders.scala:89)
at scala.reflect.internal.Symbols$Symbol.info(Symbols.scala:1231)
at scala.reflect.internal.Definitions$DefinitionsClass.AnyValClass$lzycompute(Definitions.scala:275)
at scala.reflect.internal.Definitions$DefinitionsClass.AnyValClass(Definitions.scala:275)
at scala.reflect.runtime.JavaMirrors$class.init(JavaMirrors.scala:50)
at scala.reflect.runtime.JavaUniverse.init(JavaUniverse.scala:12)
at scala.reflect.runtime.JavaUniverse.<init>(JavaUniverse.scala:26)
at scala.reflect.runtime.package$.universe$lzycompute(package.scala:16)
at scala.reflect.runtime.package$.universe(package.scala:16)
at org.apache.spark.sql.types.AtomicType.<init>(DataType.scala:95)
at org.apache.spark.sql.types.StringType.<init>(StringType.scala:33)
at org.apache.spark.sql.types.StringType$.<init>(StringType.scala:49)
at org.apache.spark.sql.types.StringType$.<clinit>(StringType.scala)
... 19 more
Thanks, Denis.