[PFSLAP860012:~/opt/spark] sbpb131% sudo sbt/sbt package assembely
Password:
[info] Loading project definition from /Users/sbpb131/opt/spark/project/project
[info] Loading project definition from /Users/sbpb131/opt/spark/project
[info] Set current project to root (in build file:/Users/sbpb131/opt/spark/)
[info] Updating {file:/Users/sbpb131/opt/spark/}core...
[info] Updating {file:/Users/sbpb131/opt/spark/}root...
[info] Resolving com.typesafe.akka#akka-zeromq_2.9.3;2.0.5 ...
[warn] module not found: com.typesafe.akka#akka-zeromq_2.9.3;2.0.5
[warn] ==== local: tried
[warn] /var/root/.ivy2/local/com.typesafe.akka/akka-zeromq_2.9.3/2.0.5/ivys/ivy.xml
[warn] ==== Akka Repository: tried
[warn] http://repo.akka.io/releases/com/typesafe/akka/akka-zeromq_2.9.3/2.0.5/akka-zeromq_2.9.3-2.0.5.pom
[warn] ==== Akka rep2: tried
[warn] ==== sonatype-snapshots: tried
[warn] ==== sonatype-staging: tried
[warn] ==== JBoss Repository: tried
[warn] ==== Spray Repository: tried
[warn] http://repo.spray.cc/com/typesafe/akka/akka-zeromq_2.9.3/2.0.5/akka-zeromq_2.9.3-2.0.5.pom
[warn] ==== Cloudera Repository: tried
[warn] ==== Typesafe11 Repository: tried
[warn] ==== Akka11 Repo: tried
[warn] http://repo.akka.io/repository/com/typesafe/akka/akka-zeromq_2.9.3/2.0.5/akka-zeromq_2.9.3-2.0.5.pom
[warn] ==== Akka22 Repo: tried
[warn] http://repo.akka.io/releases/com/typesafe/akka/akka-zeromq_2.9.3/2.0.5/akka-zeromq_2.9.3-2.0.5.pom
[warn] ==== Akka33 Repo: tried
[warn] https://repo.akka.io/releases/com/typesafe/akka/akka-zeromq_2.9.3/2.0.5/akka-zeromq_2.9.3-2.0.5.pom
[warn] ==== Maven Repository: tried
[warn] ==== Maven2 Repository: tried
[warn] http://repo1.maven.org/maven2/com/typesafe/akka/akka-zeromq_2.9.3/2.0.5/akka-zeromq_2.9.3-2.0.5.pom
[warn] ==== Maven3 Repository: tried
[warn] ==== mvn55: tried
[warn] ==== Akka44 Repo: tried
[warn] ==== Akka333 Repo: tried
[warn] http://repo.akka.io/repository/com/typesafe/akka/akka-zeromq_2.9.3/2.0.5/akka-zeromq_2.9.3-2.0.5.pom
[warn] ==== maven333: tried
[warn] ==== mv1: tried
[warn] ==== mv2: tried
[warn] ==== public: tried
[warn] http://repo1.maven.org/maven2/com/typesafe/akka/akka-zeromq_2.9.3/2.0.5/akka-zeromq_2.9.3-2.0.5.pom
[info] Resolving org.apache.derby#derby;10.4.2.0 ...
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[warn] :: UNRESOLVED DEPENDENCIES ::
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[warn] :: com.typesafe.akka#akka-zeromq_2.9.3;2.0.5: not found
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[info] Resolving org.objenesis#objenesis;1.2 ...
[info] Done updating.
[info] Packaging /Users/sbpb131/opt/spark/target/scala-2.9.3/root_2.9.3-0.8.0-SNAPSHOT.jar ...
[info] Done packaging.
sbt.ResolveException: unresolved dependency: com.typesafe.akka#akka-zeromq_2.9.3;2.0.5: not found
at sbt.IvyActions$.sbt$IvyActions$$resolve(IvyActions.scala:214)
at sbt.IvyActions$$anonfun$update$1.apply(IvyActions.scala:122)
at sbt.IvyActions$$anonfun$update$1.apply(IvyActions.scala:121)
at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:117)
at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:117)
at sbt.IvySbt$$anonfun$withIvy$1.apply(Ivy.scala:105)
at sbt.IvySbt.liftedTree1$1(Ivy.scala:52)
at sbt.IvySbt.action$1(Ivy.scala:52)
at sbt.IvySbt$$anon$3.call(Ivy.scala:61)
at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:75)
at xsbt.boot.Locks$GlobalLock.withChannelRetries$1(Locks.scala:58)
at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:79)
at xsbt.boot.Using$.withResource(Using.scala:11)
at xsbt.boot.Using$.apply(Using.scala:10)
at xsbt.boot.Locks$GlobalLock.liftedTree1$1(Locks.scala:51)
at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:51)
at xsbt.boot.Locks$.apply0(Locks.scala:30)
at xsbt.boot.Locks$.apply(Locks.scala:27)
at sbt.IvySbt.withDefaultLogger(Ivy.scala:61)
at sbt.IvySbt.withIvy(Ivy.scala:102)
at sbt.IvySbt.withIvy(Ivy.scala:98)
at sbt.IvySbt$Module.withModule(Ivy.scala:117)
at sbt.IvyActions$.update(IvyActions.scala:121)
at sbt.Classpaths$$anonfun$work$1$1.apply(Defaults.scala:955)
at sbt.Classpaths$$anonfun$work$1$1.apply(Defaults.scala:953)
at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$58.apply(Defaults.scala:976)
at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$58.apply(Defaults.scala:974)
at sbt.Tracked$$anonfun$lastOutput$1.apply(Tracked.scala:35)
at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:978)
at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:973)
at sbt.Tracked$$anonfun$inputChanged$1.apply(Tracked.scala:45)
at sbt.Classpaths$.cachedUpdate(Defaults.scala:981)
at sbt.Classpaths$$anonfun$47.apply(Defaults.scala:858)
at sbt.Classpaths$$anonfun$47.apply(Defaults.scala:855)
at sbt.Scoped$$anonfun$hf10$1.apply(Structure.scala:586)
at sbt.Scoped$$anonfun$hf10$1.apply(Structure.scala:586)
at scala.Function1$$anonfun$compose$1.apply(Function1.scala:49)
at sbt.Scoped$Reduced$$anonfun$combine$1$$anonfun$apply$12.apply(Structure.scala:311)
at sbt.Scoped$Reduced$$anonfun$combine$1$$anonfun$apply$12.apply(Structure.scala:311)
at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:41)
at sbt.std.Transform$$anon$5.work(System.scala:71)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:232)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:232)
at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
at sbt.Execute.work(Execute.scala:238)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:232)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:232)
at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
[error] (core/*:update) sbt.ResolveException: unresolved dependency: com.typesafe.akka#akka-zeromq_2.9.3;2.0.5: not found
[error] Total time: 8 s, completed 10-Jan-2018 15:08:20
[PFSLAP860012:~/opt/spark] sbpb131%
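Two notes on the sbt run above, in case they help. First, the command asks for "package assembely"; the assembly task is normally spelled "assembly", so that second step would most likely not have been recognized even if resolution had succeeded. Second, because the build is run with sudo, Ivy resolves into root's cache (/var/root/.ivy2, as shown in the "local: tried" line), not the invoking user's cache. The actual failure is that com.typesafe.akka#akka-zeromq_2.9.3;2.0.5 could not be found in any of the configured repositories. Purely as a hypothetical sketch of pointing sbt at additional repositories, using plain sbt resolver syntax rather than anything taken from Spark's own build files, and assuming the artifact is actually published at one of these URLs:

    // build.sbt-style fragment (or inside the settings Seq of project/SparkBuild.scala).
    // Extra repositories for Ivy to try when resolving akka-zeromq_2.9.3 version 2.0.5.
    resolvers ++= Seq(
      "Akka Repository"     at "https://repo.akka.io/releases/",
      "Typesafe Repository" at "https://repo.typesafe.com/typesafe/releases/"
    )

If the artifact is no longer hosted anywhere, adding resolvers will not help and the dependency would have to be dropped or replaced instead.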
[PFSLAP860012:~/opt/spark] sbpb131% sudo mvn clean -Dmaven.test.skip=true -e install
Password:
[INFO] Error stacktraces are turned on.
[INFO] Scanning for projects...
[WARNING] The project org.spark-project:spark-parent:pom:0.8.0-SNAPSHOT uses prerequisites which is only intended for maven-plugin projects but not for non maven-plugin projects. For such purposes you should use the maven-enforcer-plugin. See https://maven.apache.org/enforcer/enforcer-rules/requireMavenVersion.html
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Build Order:
[INFO]
[INFO] Spark Project Parent POM
[INFO] Spark Project Core
[INFO] Spark Project Bagel
[INFO] Spark Project Examples
[INFO] Spark Project ML Library
[INFO] Spark Project Tools
[INFO] Spark Project Streaming
[INFO] Spark Project REPL
[INFO] Spark Project REPL binary packaging
[INFO]
[INFO] ------------------------------------------------------------------------
[INFO] Building Spark Project Parent POM 0.8.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO]
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ spark-parent ---
[INFO] Deleting /Users/sbpb131/opt/spark/target
[INFO]
[INFO] --- maven-enforcer-plugin:1.1.1:enforce (enforce-versions) @ spark-parent ---
[INFO]
[INFO] --- build-helper-maven-plugin:1.7:add-source (add-scala-sources) @ spark-parent ---
[INFO] Source directory: /Users/sbpb131/opt/spark/src/main/scala added.
[INFO]
[INFO] --- scala-maven-plugin:3.1.5:compile (scala-compile-first) @ spark-parent ---
[INFO] No sources to compile
[INFO]
[INFO] --- build-helper-maven-plugin:1.7:add-test-source (add-scala-test-sources) @ spark-parent ---
[INFO] Test Source directory: /Users/sbpb131/opt/spark/src/test/scala added.
[INFO]
[INFO] --- scala-maven-plugin:3.1.5:testCompile (scala-test-compile-first) @ spark-parent ---
[INFO]
[INFO] --- maven-source-plugin:2.2.1:jar-no-fork (create-source-jar) @ spark-parent ---
[INFO]
[INFO] >>> scala-maven-plugin:3.1.5:doc-jar (attach-scaladocs) > generate-sources @ spark-parent >>>
[INFO]
[INFO] --- maven-enforcer-plugin:1.1.1:enforce (enforce-versions) @ spark-parent ---
[INFO]
[INFO] --- build-helper-maven-plugin:1.7:add-source (add-scala-sources) @ spark-parent ---
[INFO] Source directory: /Users/sbpb131/opt/spark/src/main/scala added.
[INFO]
[INFO] <<< scala-maven-plugin:3.1.5:doc-jar (attach-scaladocs) < generate-sources @ spark-parent <<<
[INFO]
[INFO]
[INFO] --- scala-maven-plugin:3.1.5:doc-jar (attach-scaladocs) @ spark-parent ---
[INFO] No source files found
[INFO]
[INFO] --- maven-install-plugin:2.4:install (default-install) @ spark-parent ---
[INFO] Installing /Users/sbpb131/opt/spark/pom.xml to /var/root/.m2/repository/org/spark-project/spark-parent/0.8.0-SNAPSHOT/spark-parent-0.8.0-SNAPSHOT.pom
[INFO]
[INFO] ------------------------------------------------------------------------
[INFO] Building Spark Project Core 0.8.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO]
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ spark-core ---
[INFO] Deleting /Users/sbpb131/opt/spark/core/target
[INFO]
[INFO] --- maven-enforcer-plugin:1.1.1:enforce (enforce-versions) @ spark-core ---
[INFO]
[INFO] --- build-helper-maven-plugin:1.7:add-source (add-scala-sources) @ spark-core ---
[INFO] Source directory: /Users/sbpb131/opt/spark/core/src/main/scala added.
[INFO]
[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ spark-core ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 6 resources
[INFO]
[INFO] --- scala-maven-plugin:3.1.5:compile (scala-compile-first) @ spark-core ---
[WARNING] Zinc server is not available at port 3030 - reverting to normal incremental compile
[INFO] Using incremental compilation
[INFO] Compiling 247 Scala sources and 16 Java sources to /Users/sbpb131/opt/spark/core/target/scala-2.9.3/classes...
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:20: object fs is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.fs.FileSystem
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:21: object fs is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.fs.Path
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:39: not found: type HadoopMapRedUtil
[ERROR] class HadoopWriter(@transient jobConf: JobConf) extends Logging with HadoopMapRedUtil with Serializable {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:39: not found: type JobConf
[ERROR] class HadoopWriter(@transient jobConf: JobConf) extends Logging with HadoopMapRedUtil with Serializable {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SerializableWritable.scala:23: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.Writable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:47: not found: type JobID
[ERROR] private var jID: SerializableWritable[JobID] = null
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:48: not found: type TaskAttemptID
[ERROR] private var taID: SerializableWritable[TaskAttemptID] = null
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:50: not found: type RecordWriter
[ERROR] @transient private var writer: RecordWriter[AnyRef,AnyRef] = null
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:51: not found: type OutputFormat
[ERROR] @transient private var format: OutputFormat[AnyRef,AnyRef] = null
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:52: not found: type OutputCommitter
[ERROR] @transient private var committer: OutputCommitter = null
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:53: not found: type JobContext
[ERROR] @transient private var jobContext: JobContext = null
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:54: not found: type TaskAttemptContext
[ERROR] @transient private var taskContext: TaskAttemptContext = null
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:149: not found: type JobContext
[ERROR] private def getJobContext(): JobContext = {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:142: not found: type OutputCommitter
[ERROR] private def getOutputCommitter(): OutputCommitter = {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:76: not found: value FileOutputFormat
[ERROR] val path = FileOutputFormat.getOutputPath(conf.value)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:134: not found: type OutputFormat
[ERROR] private def getOutputFormat(): OutputFormat[AnyRef,AnyRef] = {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:156: not found: type TaskAttemptContext
[ERROR] private def getTaskContext(): TaskAttemptContext = {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:137: not found: type OutputFormat
[ERROR] .asInstanceOf[OutputFormat[AnyRef,AnyRef]]
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:151: not found: value newJobContext
[ERROR] jobContext = newJobContext(conf.value, jID.value)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:158: not found: value newTaskAttemptContext
[ERROR] taskContext = newTaskAttemptContext(conf.value, taID.value)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:168: not found: type JobID
[ERROR] jID = new SerializableWritable[JobID](HadoopWriter.createJobID(now, jobid))
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:169: not found: type TaskAttemptID
[ERROR] taID = new SerializableWritable[TaskAttemptID](
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:183: not found: type JobID
[ERROR] def createJobID(time: Date, id: Int): JobID = {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:186: not found: type JobID
[ERROR] return new JobID(jobtrackerID, id)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/HadoopWriter.scala:189: not found: type JobConf
[ERROR] def createPathFromString(path: String, conf: JobConf): Path = {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:29: object conf is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.conf.Configuration
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:30: object fs is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.fs.Path
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:31: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.compress.CompressionCodec
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:32: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.SequenceFile.CompressionType
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:33: FileOutputCommitter is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.FileOutputCommitter
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:34: FileOutputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.FileOutputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:36: JobConf is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.JobConf
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:37: OutputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.OutputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:39: object mapreduce is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat => NewFileOutputFormat}
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:40: object mapreduce is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat, RecordWriter => NewRecordWriter, Job => NewAPIHadoopJob, HadoopMapReduceUtil}
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:41: object security is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.security.UserGroupInformation
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:48: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.Writable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:61: SparkHadoopUtil is not a member of spark.deploy
[ERROR] import spark.deploy.{LocalSparkCluster, SparkHadoopUtil}
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:57: object security is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.security.UserGroupInformation
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:56: object mapreduce is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat => NewFileInputFormat}
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:55: object mapreduce is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.mapreduce.{Job => NewHadoopJob}
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:54: object mapreduce is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:53: TextInputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.TextInputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:52: SequenceFileInputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.SequenceFileInputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:51: JobConf is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.JobConf
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:50: InputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.InputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:49: FileInputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.FileInputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:47: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.Text
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:46: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.NullWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:45: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.LongWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:44: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.IntWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:43: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.FloatWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:42: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.DoubleWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:41: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.BytesWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:40: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.BooleanWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:39: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.ArrayWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:38: object fs is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.fs.Path
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:37: object conf is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.conf.Configuration
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SequenceFileRDDFunctions.scala:39: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.Writable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:925: could not find implicit value for evidence parameter of type ClassManifest[<error>]
[ERROR] implicit def intWritableConverter() = simpleWritableConverter[Int, IntWritable](_.get)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:927: could not find implicit value for evidence parameter of type ClassManifest[<error>]
[ERROR] implicit def longWritableConverter() = simpleWritableConverter[Long, LongWritable](_.get)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:929: could not find implicit value for evidence parameter of type ClassManifest[<error>]
[ERROR] implicit def doubleWritableConverter() = simpleWritableConverter[Double, DoubleWritable](_.get)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:931: could not find implicit value for evidence parameter of type ClassManifest[<error>]
[ERROR] implicit def floatWritableConverter() = simpleWritableConverter[Float, FloatWritable](_.get)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:933: could not find implicit value for evidence parameter of type ClassManifest[<error>]
[ERROR] implicit def booleanWritableConverter() = simpleWritableConverter[Boolean, BooleanWritable](_.get)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:935: could not find implicit value for evidence parameter of type ClassManifest[<error>]
[ERROR] implicit def bytesWritableConverter() = simpleWritableConverter[Array[Byte], BytesWritable](_.getBytes)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:937: could not find implicit value for evidence parameter of type ClassManifest[<error>]
[ERROR] implicit def stringWritableConverter() = simpleWritableConverter[String, Text](_.toString)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:528: not found: type OutputFormat
[ERROR] def saveAsHadoopFile[F <: OutputFormat[K, V]](path: String)(implicit fm: ClassManifest[F]) {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:623: not found: type OutputFormat
[ERROR] outputFormatClass: Class[_ <: OutputFormat[_, _]],
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:624: not found: type JobConf
[ERROR] conf: JobConf = new JobConf(self.context.hadoopConfiguration),
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:609: not found: type OutputFormat
[ERROR] outputFormatClass: Class[_ <: OutputFormat[_, _]],
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:537: not found: type OutputFormat
[ERROR] def saveAsHadoopFile[F <: OutputFormat[K, V]](
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SparkContext.scala:244: not found: value SparkHadoopUtil
[ERROR] val conf = SparkHadoopUtil.newConfiguration()
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:573: not found: value newTaskAttemptID
[ERROR] val attemptId = newTaskAttemptID(jobtrackerID, stageId, false, context.splitId, attemptNumber)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:574: not found: value newTaskAttemptContext
[ERROR] val hadoopContext = newTaskAttemptContext(wrappedConf.value, attemptId)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:576: value getOutputCommitter is not a member of Any
[ERROR] val committer = format.getOutputCommitter(hadoopContext)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:578: value getRecordWriter is not a member of Any
[ERROR] val writer = format.getRecordWriter(hadoopContext).asInstanceOf[NewRecordWriter[K,V]]
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:592: not found: value newTaskAttemptID
[ERROR] val jobAttemptId = newTaskAttemptID(jobtrackerID, stageId, true, 0, 0)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:593: not found: value newTaskAttemptContext
[ERROR] val jobTaskContext = newTaskAttemptContext(wrappedConf.value, jobAttemptId)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:594: value getOutputCommitter is not a member of Any
[ERROR] val jobCommitter = jobFormat.getOutputCommitter(jobTaskContext)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:612: not found: type JobConf
[ERROR] new JobConf(self.context.hadoopConfiguration), Some(codec))
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:624: not found: type JobConf
[ERROR] conf: JobConf = new JobConf(self.context.hadoopConfiguration),
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:638: not found: value FileOutputFormat
[ERROR] FileOutputFormat.setOutputPath(conf, HadoopWriter.createPathFromString(path, conf))
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/PairRDDFunctions.scala:648: not found: type JobConf
[ERROR] def saveAsHadoopDataset(conf: JobConf) {
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDD.scala:26: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.BytesWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDD.scala:27: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.compress.CompressionCodec
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDD.scala:28: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.NullWritable
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDD.scala:29: object io is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.io.Text
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDD.scala:30: TextOutputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.TextOutputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDD.scala:814: value saveAsHadoopFile is not a member of spark.RDD[T]
possible cause: maybe a semicolon is missing before `value saveAsHadoopFile'?
[ERROR] .saveAsHadoopFile[TextOutputFormat[NullWritable, Text]](path)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDD.scala:822: value saveAsHadoopFile is not a member of spark.RDD[T]
possible cause: maybe a semicolon is missing before `value saveAsHadoopFile'?
[ERROR] .saveAsHadoopFile[TextOutputFormat[NullWritable, Text]](path, codec)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDD.scala:831: value saveAsSequenceFile is not a member of spark.RDD[T]
possible cause: maybe a semicolon is missing before `value saveAsSequenceFile'?
[ERROR] .saveAsSequenceFile(path)
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/Utils.scala:37: SparkHadoopUtil is not a member of spark.deploy
[ERROR] import spark.deploy.SparkHadoopUtil
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/Utils.scala:34: object fs is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.fs.{Path, FileSystem, FileUtil}
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDDCheckpointData.scala:20: object fs is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.fs.Path
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/RDDCheckpointData.scala:21: object conf is not a member of package org.apache.hadoop
[ERROR] import org.apache.hadoop.conf.Configuration
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SequenceFileRDDFunctions.scala:32: JobConf is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.JobConf
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SequenceFileRDDFunctions.scala:33: OutputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.OutputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SequenceFileRDDFunctions.scala:34: TextOutputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.TextOutputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SequenceFileRDDFunctions.scala:35: SequenceFileOutputFormat is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.SequenceFileOutputFormat
[ERROR] ^
[ERROR] /Users/sbpb131/opt/spark/core/src/main/scala/spark/SequenceFileRDDFunctions.scala:36: OutputCommitter is not a member of org.apache.hadoop.mapred
[ERROR] import org.apache.hadoop.mapred.OutputCommitter
[ERROR] ^
[WARNING] /Users/sbpb131/opt/spark/core/src/main/scala/spark/rdd/NewHadoopRDD.scala:114: type <error> in type pattern <error> is unchecked since it is eliminated by erasure
[WARNING] case e: Exception => logWarning("Exception in RecordReader.close()", e)
[WARNING] ^
[WARNING] one warning found
[ERROR] 204 errors found
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO]
[INFO] Spark Project Parent POM ........................... SUCCESS [ 1.690 s]
[INFO] Spark Project Core ................................. FAILURE [ 25.226 s]
[INFO] Spark Project Bagel ................................ SKIPPED
[INFO] Spark Project Examples ............................. SKIPPED
[INFO] Spark Project ML Library ........................... SKIPPED
[INFO] Spark Project Tools ................................ SKIPPED
[INFO] Spark Project Streaming ............................ SKIPPED
[INFO] Spark Project REPL ................................. SKIPPED
[INFO] Spark Project REPL binary packaging ................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 27.218 s
[INFO] Finished at: 2018-01-10T14:53:29Z
[INFO] Final Memory: 29M/1140M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal net.alchim31.maven:scala-maven-plugin:3.1.5:compile (scala-compile-first) on project spark-core: Execution scala-compile-first of goal net.alchim31.maven:scala-maven-plugin:3.1.5:compile failed.: CompileFailed -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal net.alchim31.maven:scala-maven-plugin:3.1.5:compile (scala-compile-first) on project spark-core: Execution scala-compile-first of goal net.alchim31.maven:scala-maven-plugin:3.1.5:compile failed.
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:213)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:606)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: org.apache.maven.plugin.PluginExecutionException: Execution scala-compile-first of goal net.alchim31.maven:scala-maven-plugin:3.1.5:compile failed.
at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:145)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:606)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
Caused by: sbt.compiler.CompileFailed
at sbt.compiler.AnalyzingCompiler.call (AnalyzingCompiler.scala:76)
at sbt.compiler.AnalyzingCompiler.compile (AnalyzingCompiler.scala:35)
at sbt.compiler.AnalyzingCompiler.compile (AnalyzingCompiler.scala:29)
at sbt.compiler.AggressiveCompile$$anonfun$4$$anonfun$compileScala$1$1.apply$mcV$sp (AggressiveCompile.scala:71)
at sbt.compiler.AggressiveCompile$$anonfun$4$$anonfun$compileScala$1$1.apply (AggressiveCompile.scala:71)
at sbt.compiler.AggressiveCompile$$anonfun$4$$anonfun$compileScala$1$1.apply (AggressiveCompile.scala:71)
at sbt.compiler.AggressiveCompile.sbt$compiler$AggressiveCompile$$timed (AggressiveCompile.scala:101)
at sbt.compiler.AggressiveCompile$$anonfun$4.compileScala$1 (AggressiveCompile.scala:70)
at sbt.compiler.AggressiveCompile$$anonfun$4.apply (AggressiveCompile.scala:88)
at sbt.compiler.AggressiveCompile$$anonfun$4.apply (AggressiveCompile.scala:60)
at sbt.inc.IncrementalCompile$$anonfun$doCompile$1.apply (Compile.scala:24)
at sbt.inc.IncrementalCompile$$anonfun$doCompile$1.apply (Compile.scala:22)
at sbt.inc.Incremental$.cycle (Incremental.scala:40)
at sbt.inc.Incremental$.compile (Incremental.scala:25)
at sbt.inc.IncrementalCompile$.apply (Compile.scala:20)
at sbt.compiler.AggressiveCompile.compile2 (AggressiveCompile.scala:96)
at sbt.compiler.AggressiveCompile.compile1 (AggressiveCompile.scala:44)
at com.typesafe.zinc.Compiler.compile (Compiler.scala:158)
at com.typesafe.zinc.Compiler.compile (Compiler.scala:142)
at sbt_inc.SbtIncrementalCompiler.compile (SbtIncrementalCompiler.java:77)
at scala_maven.ScalaCompilerSupport.incrementalCompile (ScalaCompilerSupport.java:296)
at scala_maven.ScalaCompilerSupport.compile (ScalaCompilerSupport.java:123)
at scala_maven.ScalaCompilerSupport.doExecute (ScalaCompilerSupport.java:104)
at scala_maven.ScalaMojoSupport.execute (ScalaMojoSupport.java:460)
at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo (DefaultBuildPluginManager.java:134)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:208)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:154)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute (MojoExecutor.java:146)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:117)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject (LifecycleModuleBuilder.java:81)
at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build (SingleThreadedBuilder.java:51)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute (LifecycleStarter.java:128)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:309)
at org.apache.maven.DefaultMaven.doExecute (DefaultMaven.java:194)
at org.apache.maven.DefaultMaven.execute (DefaultMaven.java:107)
at org.apache.maven.cli.MavenCli.execute (MavenCli.java:955)
at org.apache.maven.cli.MavenCli.doMain (MavenCli.java:290)
at org.apache.maven.cli.MavenCli.main (MavenCli.java:194)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:606)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced (Launcher.java:289)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch (Launcher.java:229)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode (Launcher.java:415)
at org.codehaus.plexus.classworlds.launcher.Launcher.main (Launcher.java:356)
[ERROR]
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/PluginExecutionException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR] mvn <goals> -rf :spark-core
[PFSLAP860012:~/opt/spark] sbpb131%
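Every compiler error in the Maven run is the same symptom: nothing under org.apache.hadoop (conf, fs, io, mapred, mapreduce, security) is on spark-core's compile classpath, so scalac cannot see the Hadoop client classes at all, and the 204 errors follow from that. The log does not show why; it could be that a Hadoop version or profile needs to be selected explicitly for this POM, or that running under sudo resolves into root's local repository (/var/root/.m2, as in the spark-parent install line above) rather than the user's. Purely as an illustration of what has to end up on that classpath, here is a minimal sbt-style sketch; the artifact and version are placeholders I am assuming, not values taken from this build:

    // Hypothetical sketch (sbt syntax): spark-core compiles against the Hadoop
    // client API, so an artifact like this must resolve onto the compile classpath.
    libraryDependencies += "org.apache.hadoop" % "hadoop-client" % "1.0.4"  // assumed placeholder version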