[io.druid.extensions:druid-examples, io.druid.extensions:druid-kafka-eight, io.druid.extensions:mysql-metadata-storage, io.druid.extensions:druid-hdfs-storage:0.8.1, org.apache.hadoop:hadoop-client:2.7.1], defaultVersion='0.8.1', localRepository='extensions-repo', remoteRepositories=[https://repo1.maven.org/maven2/, https://metamx.artifactoryonline.com/metamx/pub-libs-releases-local]}]
2015-10-30T18:54:09,571 INFO [task-runner-0] io.druid.indexing.common.task.HadoopIndexTask - Starting a hadoop determine configuration job...
2015-10-30T18:54:09,985 WARN [task-runner-0] org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
2015-10-30T18:54:10,036 INFO [task-runner-0] io.druid.indexer.path.StaticPathSpec - Adding paths[words.txt]
2015-10-30T18:54:10,049 ERROR [task-runner-0] io.druid.indexing.overlord.ThreadPoolTaskRunner - Exception while running task[HadoopIndexTask{id=index_hadoop_hdfs-words_2015-10-30T18:53:48.749Z, type=index_hadoop, dataSource=hdfs-words}]
java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
at com.google.api.client.repackaged.com.google.common.base.Throwables.propagate(Throwables.java:160) ~[google-http-client-1.15.0-rc.jar:?]
at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:132) ~[druid-indexing-service-0.8.1.jar:0.8.1]
at io.druid.indexing.common.task.HadoopIndexTask.run(HadoopIndexTask.java:173) ~[druid-indexing-service-0.8.1.jar:0.8.1]
at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:235) [druid-indexing-service-0.8.1.jar:0.8.1]
at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:214) [druid-indexing-service-0.8.1.jar:0.8.1]
at java.util.concurrent.FutureTask.run(FutureTask.java:262) [?:1.7.0_79]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) [?:1.7.0_79]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) [?:1.7.0_79]
at java.lang.Thread.run(Thread.java:745) [?:1.7.0_79]
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.7.0_79]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) ~[?:1.7.0_79]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.7.0_79]
at java.lang.reflect.Method.invoke(Method.java:606) ~[?:1.7.0_79]
at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:129) ~[druid-indexing-service-0.8.1.jar:0.8.1]
... 7 more
Caused by: java.lang.IllegalStateException: Optional.get() cannot be called on an absent value
at com.google.common.base.Absent.get(Absent.java:47) ~[guava-16.0.1.jar:?]
at io.druid.indexer.HadoopDruidDetermineConfigurationJob.run(HadoopDruidDetermineConfigurationJob.java:61) ~[druid-indexing-hadoop-0.8.1.jar:0.8.1]
at io.druid.indexing.common.task.HadoopIndexTask$HadoopDetermineConfigInnerProcessing.runTask(HadoopIndexTask.java:289) ~[druid-indexing-service-0.8.1.jar:0.8.1]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.7.0_79]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) ~[?:1.7.0_79]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.7.0_79]
at java.lang.reflect.Method.invoke(Method.java:606) ~[?:1.7.0_79]
at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:129) ~[druid-indexing-service-0.8.1.jar:0.8.1]
... 7 more
2015-10-30T18:54:10,069 INFO [task-runner-0] io.druid.indexing.worker.executor.ExecutorLifecycle - Task completed with status: {
"id" : "index_hadoop_hdfs-words_2015-10-30T18:53:48.749Z",
"status" : "FAILED",
"duration" : 9299
}
2015-10-
# Extensions (no deep storage model is listed - using local fs for deep storage - not recommended for production)
# Also, for production to use mysql add, "io.druid.extensions:mysql-metadata-storage"
druid.extensions.coordinates=["io.druid.extensions:druid-examples","io.druid.extensions:druid-kafka-eight", "io.druid.extensions:mysql-metadata-storage", "io.druid.extensions:druid-hdfs-storage:0.8.1", "org.apache.hadoop:hadoop-client:2.7.1"]
druid.extensions.localRepository=extensions-repo
# Zookeeper
druid.zk.service.host=localhost
# Metadata Storage (use something like mysql in production by uncommenting properties below)
# by default druid will use derby
druid.metadata.storage.type=mysql
druid.metadata.storage.connector.connectURI=jdbc:mysql://localhost:3306/druid
druid.metadata.storage.connector.user=druid
druid.metadata.storage.connector.password=diurd
# Deep storage (local filesystem for examples - don't use this in production)
druid.storage.type=hdfs
druid.storage.storageDirectory=/
# Query Cache (we use a simple 10mb heap-based local cache on the broker)
druid.cache.type=local
druid.cache.sizeInBytes=10000000
# Indexing service discovery
druid.selectors.indexing.serviceName=overlord
# Monitoring (disabled for examples, if you enable SysMonitor, make sure to include sigar jar in your cp)
# druid.monitoring.monitors=["com.metamx.metrics.SysMonitor","com.metamx.metrics.JvmMonitor"]
# Metrics logging (disabled for examples - change this to logging or http in production)
druid.emitter=noop

{
  "type": "index_hadoop",
  "spec": {
    "dataSchema": {
      "dataSource": "hdfs-words",
      "parser": {
        "type": "string",
        "parseSpec": {
          "format": "tsv",
          "timestampSpec": { "column": "timestamp", "format": "iso" },
          "columns": ["timestamp", "word"],
          "dimensionsSpec": { "dimensions": ["word"] }
        }
      },
      "metricsSpec": [
        { "type": "count", "name": "count" }
      ],
      "granularitySpec": {
        "type": "uniform",
        "segmentGranularity": "minute",
        "queryGranularity": "none"
      }
    },
    "ioConfig": {
      "type": "hadoop",
      "inputSpec": {
        "type": "static",
        "paths": "hdfs://words.txt"
      }
    },
    "tuningConfig": {
      "type": "hadoop"
    }
  }
}
2015-11-03T00:37:11,791 INFO [task-runner-0] io.druid.indexer.JobHelper - Deleting path[/tmp/druid-indexing/hdfs-words/2015-11-03T003611.991Z]
2015-11-03T00:37:11,835 ERROR [task-runner-0] io.druid.indexing.overlord.ThreadPoolTaskRunner - Exception while running task[HadoopIndexTask{id=index_hadoop_hdfs-words_2015-11-03T00:36:11.967Z, type=index_hadoop, dataSource=hdfs-words}]
java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
at com.google.api.client.repackaged.com.google.common.base.Throwables.propagate(Throwables.java:160) ~[google-http-client-1.15.0-rc.jar:?]
at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:132) ~[druid-indexing-service-0.8.1.jar:0.8.1]
at io.druid.indexing.common.task.HadoopIndexTask.run(HadoopIndexTask.java:206) ~[druid-indexing-service-0.8.1.jar:0.8.1]
at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:235) [druid-indexing-service-0.8.1.jar:0.8.1]
at io.druid.indexing.overlord.ThreadPoolTaskRunner$ThreadPoolTaskRunnerCallable.call(ThreadPoolTaskRunner.java:214) [druid-indexing-service-0.8.1.jar:0.8.1]
at java.util.concurrent.FutureTask.run(FutureTask.java:262) [?:1.7.0_79]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) [?:1.7.0_79]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) [?:1.7.0_79]
at java.lang.Thread.run(Thread.java:745) [?:1.7.0_79]
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.7.0_79]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) ~[?:1.7.0_79]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.7.0_79]
at java.lang.reflect.Method.invoke(Method.java:606) ~[?:1.7.0_79]
at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:129) ~[druid-indexing-service-0.8.1.jar:0.8.1]
... 7 more
Caused by: com.metamx.common.ISE: Job[class io.druid.indexer.LegacyIndexGeneratorJob] failed!
at io.druid.indexer.JobHelper.runJobs(JobHelper.java:202) ~[druid-indexing-hadoop-0.8.1.jar:0.8.1]
at io.druid.indexer.HadoopDruidIndexerJob.run(HadoopDruidIndexerJob.java:96) ~[druid-indexing-hadoop-0.8.1.jar:0.8.1]
at io.druid.indexing.common.task.HadoopIndexTask$HadoopIndexGeneratorInnerProcessing.runTask(HadoopIndexTask.java:259) ~[druid-indexing-service-0.8.1.jar:0.8.1]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.7.0_79]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) ~[?:1.7.0_79]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.7.0_79]
at java.lang.reflect.Method.invoke(Method.java:606) ~[?:1.7.0_79]
at io.druid.indexing.common.task.HadoopTask.invokeForeignLoader(HadoopTask.java:129) ~[druid-indexing-service-0.8.1.jar:0.8.1]
... 7 more
2015-11-03T00:37:11,847 INFO [task-runner-0] io.druid.indexing.worker.executor.ExecutorLifecycle - Task completed with status: {
"id" : "index_hadoop_hdfs-words_2015-11-03T00:36:11.967Z",
"status" : "FAILED",
"duration" : 48394
}
--
You received this message because you are subscribed to the Google Groups "Druid User" group.
To unsubscribe from this group and stop receiving emails from it, send an email to druid-user+...@googlegroups.com.
To post to this group, send email to druid...@googlegroups.com.
To view this discussion on the web visit https://groups.google.com/d/msgid/druid-user/1270b94f-9312-488a-b042-cfb163c379cf%40googlegroups.com.
Hi Jonathan,

My previous post is actually a snippet of the Hadoop index task log. I have attached the full version in this reply.
--
You received this message because you are subscribed to the Google Groups "Druid User" group.
To unsubscribe from this group and stop receiving emails from it, send an email to druid-user+...@googlegroups.com.
To post to this group, send email to druid...@googlegroups.com.
To view this discussion on the web visit https://groups.google.com/d/msgid/druid-user/4339801f-3400-4911-abd8-3cf715ed8b0b%40googlegroups.com.
# Deep storage (local filesystem for examples - don't use this in production)
druid.storage.type=hadoop
druid.storage.storageDirectory=hdfs://tmp/druid-indexing

# Deep storage (local filesystem for examples - don't use this in production)
druid.storage.type=local
druid.storage.storageDirectory=/tmp/druid-indexing

2015-11-03T00:37:11,791 INFO [task-runner-0] io.druid.indexer.JobHelper - Deleting path[/tmp/druid-indexing/hdfs-words/2015-11-03T003611.991Z]
--
You received this message because you are subscribed to the Google Groups "Druid User" group.
To unsubscribe from this group and stop receiving emails from it, send an email to druid-user+...@googlegroups.com.
To post to this group, send email to druid...@googlegroups.com.
To view this discussion on the web visit https://groups.google.com/d/msgid/druid-user/5b7d1c3b-551e-4ee9-b7c9-eb032562ffe1%40googlegroups.com.