bootstrap.servers=kafka:9092
key.converter=org.apache.kafka.connect.storage.StringConverter
value.converter=io.confluent.connect.avro.AvroConverter
value.converter.schema.registry.url=http://avro-schema-registry:8081
internal.key.converter=org.apache.kafka.connect.json.JsonConverter
internal.value.converter=org.apache.kafka.connect.json.JsonConverter
internal.key.converter.schemas.enable=false
internal.value.converter.schemas.enable=false
offset.storage.file.filename=/tmp/connect.offsets.auth
Sink connector properties:
name=auth-file-sink
connector.class=org.apache.kafka.connect.file.FileStreamSinkConnector
tasks.max=1
file=auth.sink.txt
topics=test
Avro schema:
{
"doc": "The payload",
"name": "Event",
"type": "record",
"fields": [
{
"name": "people",
"type": {
"type": "array",
"items": {
"name": "Person",
"type": "record",
"fields": [
{
"doc": "Name of person",
"name": "name",
"type": "string"
}
]
}
}
}
]
}
Message:
{
  "people": [
    {
      "name": "adam"
    },
    {
      "name": "klein"
    }
  ]
}
I'm getting this error:
[2016-03-24 14:01:32,519] ERROR Thread WorkerSinkTask-auth-file-sink-0 exiting with uncaught exception: (org.apache.kafka.connect.util.ShutdownableThread)
java.lang.NullPointerException
at io.confluent.connect.avro.AvroData.toConnectSchema(AvroData.java:1043)
at io.confluent.connect.avro.AvroData.toConnectSchema(AvroData.java:1068)
at io.confluent.connect.avro.AvroData.toConnectSchema(AvroData.java:1068)
at io.confluent.connect.avro.AvroData.toConnectSchema(AvroData.java:980)
at io.confluent.connect.avro.AvroData.toConnectData(AvroData.java:750)
at io.confluent.connect.avro.AvroConverter.toConnectData(AvroConverter.java:103)
at org.apache.kafka.connect.runtime.WorkerSinkTask.convertMessages(WorkerSinkTask.java:266)
at org.apache.kafka.connect.runtime.WorkerSinkTask.poll(WorkerSinkTask.java:175)
at org.apache.kafka.connect.runtime.WorkerSinkTaskThread.iteration(WorkerSinkTaskThread.java:90)
at org.apache.kafka.connect.runtime.WorkerSinkTaskThread.execute(WorkerSinkTaskThread.java:58)
at org.apache.kafka.connect.util.ShutdownableThread.run(ShutdownableThread.java:82)
To view this discussion on the web visit https://groups.google.com/d/msgid/confluent-platform/44b6e743-8a8b-4de9-894b-22487773898d%40googlegroups.com.
--
You received this message because you are subscribed to the Google Groups "Confluent Platform" group.
To unsubscribe from this group and stop receiving emails from it, send an email to confluent-platf...@googlegroups.com.
To post to this group, send email to confluent...@googlegroups.com.
--
You received this message because you are subscribed to the Google Groups "Confluent Platform" group.
To view this discussion on the web visit https://groups.google.com/d/msgid/confluent-platform/4228fea8-91e8-4781-8a52-98aaf2d8bf1e%40googlegroups.com.
To unsubscribe from this group and stop receiving emails from it, send an email to confluent-platform+unsubscribe@googlegroups.com.
To post to this group, send email to confluent-platform@googlegroups.com.
org.apache.avro.AvroRuntimeException: Not an array: "long"
at org.apache.avro.Schema.getElementType(Schema.java:262)
at io.confluent.connect.avro.AvroData.fromConnectData(AvroData.java:415)
at io.confluent.connect.avro.AvroData.fromConnectData(AvroData.java:477)
at io.confluent.connect.avro.AvroData.fromConnectData(AvroData.java:437)
at io.confluent.connect.avro.AvroData.fromConnectData(AvroData.java:487)
at io.confluent.connect.avro.AvroData.fromConnectData(AvroData.java:295)
at io.confluent.connect.s3.format.avro.AvroRecordWriterProvider$1.write(AvroRecordWriterProvider.java:75)
at io.confluent.connect.s3.TopicPartitionWriter.writeRecord(TopicPartitionWriter.java:393)
at io.confluent.connect.s3.TopicPartitionWriter.write(TopicPartitionWriter.java:197)
at io.confluent.connect.s3.S3SinkTask.put(S3SinkTask.java:173)
at org.apache.kafka.connect.runtime.WorkerSinkTask.deliverMessages(WorkerSinkTask.java:429)
at org.apache.kafka.connect.runtime.WorkerSinkTask.poll(WorkerSinkTask.java:250)
at org.apache.kafka.connect.runtime.WorkerSinkTask.iteration(WorkerSinkTask.java:179)
at org.apache.kafka.connect.runtime.WorkerSinkTask.execute(WorkerSinkTask.java:148)
at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:139)
at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:182)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:748)