Hi,
I am giving up on Kafka 0.8 and going back to 0.7. Now, with the latest version of Druid, I am getting this error in the realtime node. How do I resolve it?
[{
"schema" : { "dataSource":"datasource-pixel",
"aggregators":[ {"type":"count", "name":"impressions"}],
"indexGranularity":"minute",
"shardSpec" : { "type": "none" } },
"config" : { "maxRowsInMemory" : 500000,
"intermediatePersistPeriod" : "PT10m" },
"firehose" : { "type" : "kafka-0.7.2",
"consumerProps" : { "zk.connect" : "1.dzk.do.development.singapore.testcom:2181",
"
zk.connectiontimeout.ms" : "15000",
"
zk.sessiontimeout.ms" : "15000",
"
zk.synctime.ms" : "5000",
"groupid" : "topic-pixel",
"autooffset.reset" : "largest",
"autocommit.enable" : "false" },
"feed" : "topic-pixel",
"parser" : { "timestampSpec" : { "column" : "utcdt", "format" : "iso" },
"data" : { "format" : "json" },
"dimensionExclusions" : ["deleteme"] } },
"plumber" : { "type" : "realtime",
"windowPeriod" : "PT10m",
"segmentGranularity":"hour",
"basePersistDirectory" : "/tmp/realtime/basePersist" }
}
]
2014-02-28 12:01:38,857 INFO [main] kafka.consumer.ZookeeperConsumerConnector - topic-pixel_do-druidrealtime-sg-development-20140228111325-1393588884141-b7feaab3 begin rebalancing consumer topic-pixel_do-druidrealtime-sg-development-20140228111325-1393588884141-b7feaab3 try #2
2014-02-28 12:01:40,221 INFO [main] kafka.consumer.ZookeeperConsumerConnector - topic-pixel_do-druidrealtime-sg-development-20140228111325-1393588884141-b7feaab3 exception during rebalance
java.lang.NumberFormatException: null
at java.lang.Integer.parseInt(Integer.java:454)
at java.lang.Integer.parseInt(Integer.java:527)
at scala.collection.immutable.StringLike$class.toInt(StringLike.scala:231)
at scala.collection.immutable.StringOps.toInt(StringOps.scala:31)
at kafka.utils.ZkUtils$$anonfun$getPartitionsForTopics$1$$anonfun$apply$2.apply(ZkUtils.scala:216)
at kafka.utils.ZkUtils$$anonfun$getPartitionsForTopics$1$$anonfun$apply$2.apply(ZkUtils.scala:215)
at scala.collection.Iterator$class.foreach(Iterator.scala:772)
at scala.collection.JavaConversions$JIteratorWrapper.foreach(JavaConversions.scala:573)
at scala.collection.IterableLike$class.foreach(IterableLike.scala:73)
at scala.collection.JavaConversions$JListWrapper.foreach(JavaConversions.scala:615)
at kafka.utils.ZkUtils$$anonfun$getPartitionsForTopics$1.apply(ZkUtils.scala:215)
at kafka.utils.ZkUtils$$anonfun$getPartitionsForTopics$1.apply(ZkUtils.scala:212)
at scala.collection.Iterator$class.foreach(Iterator.scala:772)
at scala.collection.mutable.HashMap$$anon$3.foreach(HashMap.scala:108)
at kafka.utils.ZkUtils$.getPartitionsForTopics(ZkUtils.scala:212)
at kafka.consumer.ZookeeperConsumerConnector$ZKRebalancerListener.kafka$consumer$ZookeeperConsumerConnector$ZKRebalancerListener$$rebalance(ZookeeperConsumerConnector.scala:479)
at kafka.consumer.ZookeeperConsumerConnector$ZKRebalancerListener$$anonfun$syncedRebalance$1.apply$mcVI$sp(ZookeeperConsumerConnector.scala:449)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:78)
at kafka.consumer.ZookeeperConsumerConnector$ZKRebalancerListener.syncedRebalance(ZookeeperConsumerConnector.scala:444)
at kafka.consumer.ZookeeperConsumerConnector.kafka$consumer$ZookeeperConsumerConnector$$reinitializeConsumer(ZookeeperConsumerConnector.scala:733)
at kafka.consumer.ZookeeperConsumerConnector.consume(ZookeeperConsumerConnector.scala:207)
at kafka.javaapi.consumer.ZookeeperConsumerConnector.createMessageStreams(ZookeeperConsumerConnector.scala:77)
at kafka.javaapi.consumer.ZookeeperConsumerConnector.createMessageStreams(ZookeeperConsumerConnector.scala:91)
at io.druid.firehose.kafka.KafkaSevenFirehoseFactory.connect(KafkaSevenFirehoseFactory.java:91)
at io.druid.segment.realtime.FireDepartment.connect(FireDepartment.java:90)
at io.druid.segment.realtime.RealtimeManager$FireChief.init(RealtimeManager.java:150)
at io.druid.segment.realtime.RealtimeManager.start(RealtimeManager.java:87)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at com.metamx.common.lifecycle.Lifecycle$AnnotationBasedHandler.start(Lifecycle.java:331)
at com.metamx.common.lifecycle.Lifecycle.start(Lifecycle.java:250)
at io.druid.guice.LifecycleModule$2.start(LifecycleModule.java:136)
at io.druid.cli.GuiceRunnable.initLifecycle(GuiceRunnable.java:72)
at io.druid.cli.ServerRunnable.run(ServerRunnable.java:40)
at io.druid.cli.Main.main(Main.java:91)
2014-02-28 12:01:40,224 INFO [main] kafka.consumer.ZookeeperConsumerConnector - topic-pixel_do-druidrealtime-sg-development-20140228111325-1393588884141-b7feaab3 end rebalancing consumer topic-pixel_do-druidrealtime-sg-development-20140228111325-1393588884141-b7feaab3 try #2
2014-02-28 12:01:40,224 INFO [main] kafka.consumer.ZookeeperConsumerConnector - topic-pixel_do-druidrealtime-sg-development-20140228111325-1393588884141-b7feaab3 Rebalancing attempt failed. Clearing the cache before the next rebalancing operation is triggered
2014-02-28 12:01:40,225 INFO [main] kafka.consumer.Fetcher - Cleared all relevant queues for this fetcher
2014-02-28 12:01:40,225 INFO [main] kafka.consumer.ConsumerIterator - Clearing the current data chunk for this consumer iterator
2014-02-28 12:01:40,226 INFO [main] kafka.consumer.Fetcher - Cleared the data chunks in all the consumer message iterators
2014-02-28 12:01:40,226 INFO [main] kafka.consumer.ZookeeperConsumerConnector - topic-pixel_do-druidrealtime-sg-development-20140228111325-1393588884141-b7feaab3 Committing all offsets after clearing the fetcher queues