at io.druid.query.aggregation.HistogramAggregatorFactory.getTypeName(HistogramAggregatorFactory.java:158)
at io.druid.segment.incremental.IncrementalIndex.<init>(IncrementalIndex.java:104)
at io.druid.segment.incremental.IncrementalIndex.<init>(IncrementalIndex.java:130)
at io.druid.query.groupby.GroupByQueryQueryToolChest.mergeGroupByResults(GroupByQueryQueryToolChest.java:125)
at io.druid.query.groupby.GroupByQueryQueryToolChest.access$100(GroupByQueryQueryToolChest.java:57)
at io.druid.query.groupby.GroupByQueryQueryToolChest$2.run(GroupByQueryQueryToolChest.java:84)
at io.druid.query.FinalizeResultsQueryRunner.run(FinalizeResultsQueryRunner.java:102)
at io.druid.query.BaseQuery.run(BaseQuery.java:78)
at io.druid.query.BaseQuery.run(BaseQuery.java:73)
My indexing configuration:
{
"type" : "index",
"dataSource" : "test",
"granularitySpec" : {
"type" : "uniform",
"gran" : "day",
"intervals" : [ "2013-12-06/2013-12-07" ]
},
"aggregators" : [
],
"firehose" : {
"type" : "local",
"baseDir" : "/Users/test/Downloads/druid-services-0.6.26/examples/cucina/",
"filter" : "access_logs_2013120610_3_sample.json",
"parser" : {
"timestampSpec" : {
"column" : "time",
"format" : "ruby"
},
"data" : {
"format" : "json",
"dimensions" : ["remote","path","agent","my_metric"]
}
}
}
}
My query:
{
"queryType" : "groupBy",
"dataSource": "test",
"granularity": "all",
"dimensions": [""],
"aggregations" : [
{
"type" : "histogram",
"name" : "my_metric",
"fieldName" : "my_metric",
"breaks" : [0.0, 20.0]
}
],
"intervals": ["2013-12-06T00:00/2013-12-31T00:00"]
}
Thanks.
-- Nicolas