Hi team,
I am trying to install the Tapster demo by following http://predictionio.incubator.apache.org/demo/tapster/. It took me more than a week to get PredictionIO installed. Now that PredictionIO is installed, it throws an error when I run the $ pio build command. The error goes like this:
darshan@darshu:~/PredictionIO/tapster-episode-similar$ pio build
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/darshan/PredictionIO/lib/spark/pio-data-hdfs-assembly-0.11.0-incubating.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/darshan/PredictionIO/lib/pio-assembly-0.11.0-incubating.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
[INFO] [Engine$] Using command '/home/darshan/PredictionIO/sbt/sbt' at /home/darshan/PredictionIO/tapster-episode-similar to build.
[INFO] [Engine$] If the path above is incorrect, this process will fail.
[INFO] [Engine$] Uber JAR disabled. Making sure lib/pio-assembly-0.11.0-incubating.jar is absent.
[INFO] [Engine$] Going to run: /home/darshan/PredictionIO/sbt/sbt package assemblyPackageDependency in /home/darshan/PredictionIO/tapster-episode-similar
[ERROR] [Engine$] [error] /home/darshan/PredictionIO/tapster-episode-similar/src/main/scala/DataSource.scala:63: not found: value eventsDb
[ERROR] [Engine$] [error] val viewEventsRDD: RDD[ViewEvent] = eventsDb.find(
[ERROR] [Engine$] [error] ^
[ERROR] [Engine$] [error] one error found
[ERROR] [Engine$] [error] (compile:compileIncremental) Compilation failed
[ERROR] [Engine$] [error] Total time: 5 s, completed 26 Jul, 2017 1:56:50 AM
[ERROR] [Engine$] Return code of build command: /home/darshan/PredictionIO/sbt/sbt package assemblyPackageDependency is 1. Aborting.
[INFO] [Engine$] Looking for an engine...
[INFO] [Engine$] Found template-scala-parallel-similarproduct_2.10-0.1-SNAPSHOT.jar
[INFO] [Engine$] Found template-scala-parallel-similarproduct-assembly-0.1-SNAPSHOT-deps.jar
[INFO] [Engine$] Build finished successfully.
[INFO] [Pio$] Your engine is ready for training.
Here is the file DataSource.scala:
package org.example.similarproduct

import org.apache.predictionio.controller.PDataSource
import org.apache.predictionio.controller.EmptyEvaluationInfo
import org.apache.predictionio.controller.EmptyActualResult
import org.apache.predictionio.controller.Params
import org.apache.predictionio.data.storage.Event
import org.apache.predictionio.data.store.PEventStore
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import grizzled.slf4j.Logger

case class DataSourceParams(appName: String) extends Params

class DataSource(val dsp: DataSourceParams)
  extends PDataSource[TrainingData,
      EmptyEvaluationInfo, Query, EmptyActualResult] {

  @transient lazy val logger = Logger[this.type]

  override
  def readTraining(sc: SparkContext): TrainingData = {

    // create a RDD of (entityID, User)
    val usersRDD: RDD[(String, User)] = PEventStore.aggregateProperties(
      appName = dsp.appName,
      entityType = "user"
    )(sc).map { case (entityId, properties) =>
      val user = try {
        User()
      } catch {
        case e: Exception => {
          logger.error(s"Failed to get properties ${properties} of" +
            s" user ${entityId}. Exception: ${e}.")
          throw e
        }
      }
      (entityId, user)
    }.cache()

    // create a RDD of (entityID, Item)
    val itemsRDD: RDD[(String, Item)] = PEventStore.aggregateProperties(
      appName = dsp.appName,
      entityType = "item"
    )(sc).map { case (entityId, properties) =>
      val item = try {
        // Assume categories is optional property of item.
        Item(categories = properties.getOpt[List[String]]("categories"))
      } catch {
        case e: Exception => {
          logger.error(s"Failed to get properties ${properties} of" +
            s" item ${entityId}. Exception: ${e}.")
          throw e
        }
      }
      (entityId, item)
    }.cache()

    // get all "user" "view" "item" events
    val viewEventsRDD: RDD[ViewEvent] = eventsDb.find(
      appId = dsp.appId,
      entityType = Some("user"),
      eventNames = Some(List("like")),
      // targetEntityType is optional field of an event.
      targetEntityType = Some(Some("item")))(sc)
      // eventsDb.find() returns RDD[Event]
      .map { event =>
        val viewEvent = try {
          event.event match {
            case "like" => ViewEvent(
              user = event.entityId,
              item = event.targetEntityId.get,
              t = event.eventTime.getMillis)
            case _ => throw new Exception(s"Unexpected event ${event} is read.")
          }
        } catch {
          case e: Exception => {
            logger.error(s"Cannot convert ${event} to ViewEvent." +
              s" Exception: ${e}.")
            throw e
          }
        }
        viewEvent
      }.cache()

    new TrainingData(
      users = usersRDD,
      items = itemsRDD,
      viewEvents = viewEventsRDD
    )
  }
}

case class User()

case class Item(categories: Option[List[String]])

case class ViewEvent(user: String, item: String, t: Long)

class TrainingData(
  val users: RDD[(String, User)],
  val items: RDD[(String, Item)],
  val viewEvents: RDD[ViewEvent]
) extends Serializable {
  override def toString = {
    s"users: [${users.count()} (${users.take(2).toList}...)]" +
    s"items: [${items.count()} (${items.take(2).toList}...)]" +
    s"viewEvents: [${viewEvents.count()}] (${viewEvents.take(2).toList}...)"
  }
}
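My guess (and I am not at all sure about this) is that the viewEventsRDD block is left over from an older template: it calls eventsDb, which is never defined or imported anywhere in the file, and it uses dsp.appId even though my DataSourceParams only has appName. Since PEventStore is already imported at the top, I tried rewriting that block inside readTraining roughly like this, but please tell me if this is wrong:

    // my attempt: use PEventStore (already imported above) instead of eventsDb,
    // and appName instead of appId, since DataSourceParams only defines appName
    val viewEventsRDD: RDD[ViewEvent] = PEventStore.find(
      appName = dsp.appName,
      entityType = Some("user"),
      eventNames = Some(List("like")),
      // targetEntityType is optional field of an event.
      targetEntityType = Some(Some("item")))(sc)
      // PEventStore.find() returns RDD[Event]
      .map { event =>
        val viewEvent = try {
          event.event match {
            case "like" => ViewEvent(
              user = event.entityId,
              item = event.targetEntityId.get,
              t = event.eventTime.getMillis)
            case _ => throw new Exception(s"Unexpected event ${event} is read.")
          }
        } catch {
          case e: Exception => {
            logger.error(s"Cannot convert ${event} to ViewEvent." +
              s" Exception: ${e}.")
            throw e
          }
        }
        viewEvent
      }.cache()

Is that the right direction, or am I missing something else?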
While training, it gives me the following error:
darshan@darshu:~/PredictionIO/tapster-episode-similar/src/main/scala$ pio train
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/darshan/PredictionIO/lib/spark/pio-data-hdfs-assembly-0.11.0-incubating.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/darshan/PredictionIO/lib/pio-assembly-0.11.0-incubating.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
[WARN] [Template$] /home/darshan/PredictionIO/tapster-episode-similar/src/main/scala/template.json does not exist. Template metadata will not be available. (This is safe to ignore if you are not working on a template.)
Exception in thread "main" java.io.FileNotFoundException: /home/darshan/PredictionIO/tapster-episode-similar/src/main/scala/engine.json (No such file or directory)
at java.io.FileInputStream.open0(Native Method)
at java.io.FileInputStream.open(FileInputStream.java:195)
at java.io.FileInputStream.<init>(FileInputStream.java:138)
at scala.io.Source$.fromFile(Source.scala:90)
at scala.io.Source$.fromFile(Source.scala:75)
at org.apache.predictionio.tools.console.Console$.getEngineInfo(Console.scala:724)
at org.apache.predictionio.tools.RunWorkflow$.runWorkflow(RunWorkflow.scala:54)
at org.apache.predictionio.tools.commands.Engine$.train(Engine.scala:186)
at org.apache.predictionio.tools.console.Pio$.train(Pio.scala:85)
at org.apache.predictionio.tools.console.Console$$anonfun$main$1.apply(Console.scala:626)
at org.apache.predictionio.tools.console.Console$$anonfun$main$1.apply(Console.scala:611)
at scala.Option.map(Option.scala:145)
at org.apache.predictionio.tools.console.Console$.main(Console.scala:611)
at org.apache.predictionio.tools.console.Console.main(Console.scala)
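I also notice that I ran pio train from src/main/scala, and the exception says engine.json is missing from that same directory. Should I be running it from the engine root (where engine.json is supposed to live) instead, for example:

darshan@darshu:~/PredictionIO/tapster-episode-similar/src/main/scala$ cd ~/PredictionIO/tapster-episode-similar
darshan@darshu:~/PredictionIO/tapster-episode-similar$ pio train

Or is the missing engine.json a sign that something else went wrong during setup?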
I have tried almost everything, but I could not find a proper solution. Please help.

Thanks,
-DAN