Caused by: java.lang.NullPointerException: Null timestamp in input: {ver=1.0}
A larger snippet of the log:
2016-06-05T23:20:05,534 INFO [LocalJobRunner Map Task Executor #0] io.druid.indexer.HadoopDruidIndexerConfig - Running with config:
{
  "spec" : {
    "dataSchema" : {
      "dataSource" : "vmonitor.site.telemetry",
      "parser" : {
        "type" : "string",
        "parseSpec" : {
          "format" : "json",
          "timestampSpec" : {
            "column" : "t",
            "format" : "iso"
          },
          "flattenSpec" : {
            "useFieldDiscovery" : true,
            "fields" : [ {
              "type" : "path",
              "name" : "id",
              "expr" : "$.arr.id"
            }, {
              "type" : "path",
              "name" : "t",
              "expr" : "$.arr.data.t"
            }, {
              "type" : "path",
              "name" : "orig_t",
              "expr" : "$.arr.data.orig_t"
            }, {
              "type" : "path",
              "name" : "f",
              "expr" : "$.arr.data.f"
            }, {
              "type" : "path",
              "name" : "v_0",
              "expr" : "$.arr.data.v[0]"
            }, {
              "type" : "path",
              "name" : "v_1",
              "expr" : "$.arr.data.v[1]"
            }, {
              "type" : "path",
              "name" : "v_2",
              "expr" : "$.arr.data.v[2]"
            }, {
              "type" : "path",
              "name" : "i_0",
              "expr" : "$.arr.data.i[0]"
            }, {
              "type" : "path",
              "name" : "i_1",
              "expr" : "$.arr.data.i[1]"
            }, {
              "type" : "path",
              "name" : "i_2",
              "expr" : "$.arr.data.i[2]"
            }, {
              "type" : "path",
              "name" : "kw_0",
              "expr" : "$.arr.data.kw[0]"
            }, {
              "type" : "path",
              "name" : "kw_1",
              "expr" : "$.arr.data.kw[1]"
            }, {
              "type" : "path",
              "name" : "kw_2",
              "expr" : "$.arr.data.kw[2]"
            }, {
              "type" : "path",
              "name" : "delta_t",
              "expr" : "$.arr.data.delta_t"
            } ]
          },
          "dimensionsSpec" : {
            "dimensions" : [ "ver", "id" ]
          }
        }
      },
      "metricsSpec" : [ {
        "type" : "count",
        "name" : "views"
      }, {
        "type" : "count",
        "name" : "kw_0"
      }, {
        "type" : "count",
        "name" : "kw_1"
      }, {
        "type" : "count",
        "name" : "kw_2"
      } ],
      "granularitySpec" : {
        "type" : "uniform",
        "segmentGranularity" : "DAY",
        "queryGranularity" : {
          "type" : "none"
        },
        "intervals" : null
      }
    },
    "ioConfig" : {
      "type" : "hadoop",
      "inputSpec" : {
        "type" : "static",
        "paths" : "/home/ubuntu/datawarehouse/vmonitor.site.telemetry.json"
      },
      "metadataUpdateSpec" : null,
      "segmentOutputPath" : "file:/home/ubuntu/druid-0.9.0/var/druid/segments/vmonitor.site.telemetry"
    },
    "tuningConfig" : {
      "type" : "hadoop",
      "workingPath" : "var/druid/hadoop-tmp",
      "version" : "2016-06-05T23:19:59.090Z",
      "partitionsSpec" : {
        "type" : "hashed",
        "targetPartitionSize" : 5000000,
        "maxPartitionSize" : 7500000,
        "assumeGrouped" : false,
        "numShards" : -1
      },
      "shardSpecs" : { },
      "indexSpec" : {
        "bitmap" : {
          "type" : "concise"
        },
        "dimensionCompression" : null,
        "metricCompression" : null
      },
      "maxRowsInMemory" : 80000,
      "leaveIntermediate" : false,
      "cleanupOnFailure" : true,
      "overwriteFiles" : false,
      "ignoreInvalidRows" : false,
      "jobProperties" : { },
      "combineText" : false,
      "useCombiner" : false,
      "buildV9Directly" : false,
      "numBackgroundPersistThreads" : 0
    },
    "uniqueId" : "be11a9ca18e748b2b4f681ff9d42cdf7"
  }
}
2016-06-05T23:20:05,544 INFO [LocalJobRunner Map Task Executor #0] org.apache.hadoop.mapred.MapTask - Starting flush of map output
2016-06-05T23:20:05,550 INFO [Thread-21] org.apache.hadoop.mapred.LocalJobRunner - map task executor complete.
2016-06-05T23:20:05,551 WARN [Thread-21] org.apache.hadoop.mapred.LocalJobRunner - job_local761517617_0001
java.lang.Exception: com.metamx.common.RE: Failure on row[{"arr": [{"data": [{"f": 60, "i": [-1, -1, -1], "delta_t": 1, "kw": [68.948, 79.242, 67.05], "t": "2015-07-28T15:19:18.769", "v": [-1, -1, -1], "orig_t": "2015-07-28T15:19:18.769"}], "id": "pgx.hq.stem-8e-71-6b.vmonitor.site.telemetry"}], "ver": "1.0"}]
at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462) ~[hadoop-mapreduce-client-common-2.3.0.jar:?]
at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522) [hadoop-mapreduce-client-common-2.3.0.jar:?]
Caused by: com.metamx.common.RE: Failure on row[{"arr": [{"data": [{"f": 60, "i": [-1, -1, -1], "delta_t": 1, "kw": [68.948, 79.242, 67.05], "t": "2015-07-28T15:19:18.769", "v": [-1, -1, -1], "orig_t": "2015-07-28T15:19:18.769"}], "id": "pgx.hq.stem-8e-71-6b.vmonitor.site.telemetry"}], "ver": "1.0"}]
at io.druid.indexer.HadoopDruidIndexerMapper.map(HadoopDruidIndexerMapper.java:88) ~[druid-indexing-hadoop-0.9.0.jar:0.9.0]
at io.druid.indexer.DetermineHashedPartitionsJob$DetermineCardinalityMapper.run(DetermineHashedPartitionsJob.java:282) ~[druid-indexing-hadoop-0.9.0.jar:0.9.0]
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:764) ~[hadoop-mapreduce-client-core-2.3.0.jar:?]
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:340) ~[hadoop-mapreduce-client-core-2.3.0.jar:?]
at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.3.0.jar:?]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_101]
at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_101]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_101]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_101]
at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_101]
Caused by: com.metamx.common.parsers.ParseException: Unparseable timestamp found!
at io.druid.data.input.impl.MapInputRowParser.parse(MapInputRowParser.java:72) ~[druid-api-0.3.16.jar:0.3.16]
at io.druid.data.input.impl.StringInputRowParser.parseMap(StringInputRowParser.java:136) ~[druid-api-0.3.16.jar:0.3.16]
at io.druid.data.input.impl.StringInputRowParser.parse(StringInputRowParser.java:131) ~[druid-api-0.3.16.jar:0.3.16]
at io.druid.indexer.HadoopDruidIndexerMapper.parseInputRow(HadoopDruidIndexerMapper.java:98) ~[druid-indexing-hadoop-0.9.0.jar:0.9.0]
at io.druid.indexer.HadoopDruidIndexerMapper.map(HadoopDruidIndexerMapper.java:69) ~[druid-indexing-hadoop-0.9.0.jar:0.9.0]
at io.druid.indexer.DetermineHashedPartitionsJob$DetermineCardinalityMapper.run(DetermineHashedPartitionsJob.java:282) ~[druid-indexing-hadoop-0.9.0.jar:0.9.0]
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:764) ~[hadoop-mapreduce-client-core-2.3.0.jar:?]
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:340) ~[hadoop-mapreduce-client-core-2.3.0.jar:?]
at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.3.0.jar:?]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_101]
at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_101]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_101]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_101]
at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_101]
Caused by: java.lang.NullPointerException: Null timestamp in input: {ver=1.0}
at io.druid.data.input.impl.MapInputRowParser.parse(MapInputRowParser.java:63) ~[druid-api-0.3.16.jar:0.3.16]
at io.druid.data.input.impl.StringInputRowParser.parseMap(StringInputRowParser.java:136) ~[druid-api-0.3.16.jar:0.3.16]
at io.druid.data.input.impl.StringInputRowParser.parse(StringInputRowParser.java:131) ~[druid-api-0.3.16.jar:0.3.16]
at io.druid.indexer.HadoopDruidIndexerMapper.parseInputRow(HadoopDruidIndexerMapper.java:98) ~[druid-indexing-hadoop-0.9.0.jar:0.9.0]
at io.druid.indexer.HadoopDruidIndexerMapper.map(HadoopDruidIndexerMapper.java:69) ~[druid-indexing-hadoop-0.9.0.jar:0.9.0]
at io.druid.indexer.DetermineHashedPartitionsJob$DetermineCardinalityMapper.run(DetermineHashedPartitionsJob.java:282) ~[druid-indexing-hadoop-0.9.0.jar:0.9.0]
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:764) ~[hadoop-mapreduce-client-core-2.3.0.jar:?]
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:340) ~[hadoop-mapreduce-client-core-2.3.0.jar:?]
at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) ~[hadoop-mapreduce-client-common-2.3.0.jar:?]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) ~[?:1.7.0_101]
at java.util.concurrent.FutureTask.run(FutureTask.java:262) ~[?:1.7.0_101]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) ~[?:1.7.0_101]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) ~[?:1.7.0_101]
at java.lang.Thread.run(Thread.java:745) ~[?:1.7.0_101]
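A likely root cause, reading the failing row against the spec: "arr" and "data" are both JSON arrays, but all fourteen path expressions ("$.arr.data.t" and friends) address them as plain objects, so every lookup comes back empty. Field discovery then keeps only the top-level "ver", and the parser is handed {ver=1.0} with no "t" column, which is exactly what the NullPointerException reports. A sketch of a corrected flattenSpec, assuming each input line carries exactly one element in both arrays (if they can hold more, an indexed path such as $.arr[0].data[0].t reads only the first element, and the records would need to be split upstream instead):

"flattenSpec" : {
  "useFieldDiscovery" : true,
  "fields" : [ {
    "type" : "path",
    "name" : "id",
    "expr" : "$.arr[0].id"
  }, {
    "type" : "path",
    "name" : "t",
    "expr" : "$.arr[0].data[0].t"
  }, {
    "type" : "path",
    "name" : "v_0",
    "expr" : "$.arr[0].data[0].v[0]"
  } ]
}

The remaining fields (orig_t, f, v_1, v_2, i_0 through i_2, kw_0 through kw_2, delta_t) take the same $.arr[0].data[0] prefix.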
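The expressions can be sanity-checked outside of Druid with Jayway JsonPath, which as far as I know is the library behind the flattenSpec "path" type in 0.9.x (treat that, and the exact exception below, as assumptions to verify against your druid-api jar):

import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.PathNotFoundException;

public class PathCheck {
    public static void main(String[] args) {
        // One failing input row, abbreviated from the log above.
        String row = "{\"arr\": [{\"data\": [{\"f\": 60, \"t\": \"2015-07-28T15:19:18.769\"}],"
                + " \"id\": \"pgx.hq.stem-8e-71-6b.vmonitor.site.telemetry\"}], \"ver\": \"1.0\"}";

        // Indexed path: resolves the timestamp as expected.
        String t = JsonPath.read(row, "$.arr[0].data[0].t");
        System.out.println(t); // 2015-07-28T15:19:18.769

        // Original expression: treats both arrays as objects, so the lookup fails.
        // Druid appears to swallow this and simply gets no value for the column,
        // which is why the first visible symptom is the missing timestamp.
        try {
            JsonPath.read(row, "$.arr.data.t");
        } catch (PathNotFoundException e) {
            System.out.println("original expr failed: " + e.getMessage());
        }
    }
}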