[2020-07-14 10:04:50,527] INFO Finished creating connector replicator (org.apache.kafka.connect.runtime.Worker:257)
[2020-07-14 10:04:50,527] INFO SourceConnectorConfig values:
    config.action.reload = restart
    connector.class = io.confluent.connect.replicator.ReplicatorSourceConnector
    errors.log.enable = false
    errors.log.include.messages = false
    errors.retry.timeout = 0
    errors.tolerance = none
    header.converter = null
    key.converter = class io.confluent.connect.replicator.util.ByteArrayConverter
    name = replicator
    tasks.max = 1
    transforms = []
    value.converter = class io.confluent.connect.replicator.util.ByteArrayConverter
[2020-07-14 10:04:50,527] INFO EnrichedConnectorConfig values:
    config.action.reload = restart
    connector.class = io.confluent.connect.replicator.ReplicatorSourceConnector
    errors.log.enable = false
    errors.log.include.messages = false
    errors.retry.timeout = 0
    errors.tolerance = none
    header.converter = null
    key.converter = class io.confluent.connect.replicator.util.ByteArrayConverter
    name = replicator
    tasks.max = 1
    transforms = []
    value.converter = class io.confluent.connect.replicator.util.ByteArrayConverter
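The two dumps above simply echo the connector settings as submitted, once as SourceConnectorConfig and once enriched with defaults. For reference, a minimal sketch of how such a connector could be registered through the Kafka Connect REST API; the worker address connect:8083 is a placeholder, and only keys visible in the dumps below are included:

```python
import requests

# Hypothetical Connect worker address; substitute your own host/port.
CONNECT_URL = "http://connect:8083"

# Only settings that appear in the config dumps in this log;
# everything else falls back to Replicator's defaults.
connector = {
    "name": "replicator",
    "config": {
        "connector.class": "io.confluent.connect.replicator.ReplicatorSourceConnector",
        "tasks.max": "1",
        "key.converter": "io.confluent.connect.replicator.util.ByteArrayConverter",
        "value.converter": "io.confluent.connect.replicator.util.ByteArrayConverter",
        "src.kafka.bootstrap.servers": "sourceKafkaDNS:9092",
        "dest.kafka.bootstrap.servers": "destKafkaDNS:9092",
        "topic.regex": ".*",
    },
}

# POST /connectors creates the connector; Connect then logs the
# SourceConnectorConfig / EnrichedConnectorConfig blocks seen above.
resp = requests.post(f"{CONNECT_URL}/connectors", json=connector)
resp.raise_for_status()
print(resp.json())
```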
[2020-07-14 10:04:50,550] INFO ReplicatorSourceTaskConfig values:
    confluent.license =
    confluent.topic = _confluent-command
    dest.kafka.bootstrap.servers = [destKafkaDNS:9092]
    dest.kafka.metric.reporters = []
    dest.kafka.metrics.num.samples = 2
    dest.kafka.receive.buffer.bytes = 65536
    dest.kafka.send.buffer.bytes = 131072
    dest.zookeeper.connect =
    offset.timestamps.commit = true
    offset.topic.commit = true
    offset.translator.tasks.max = -1
    offset.translator.tasks.separate = false
    partition.assignment = aVeryLongHash...=
    provenance.header.enable = false
    provenance.header.filter.overrides =
    src.consumer.check.crcs = true
    src.consumer.fetch.max.bytes = 52428800
    src.consumer.fetch.min.bytes = 1
    src.consumer.interceptor.classes = []
    src.consumer.max.partition.fetch.bytes = 1048576
    src.consumer.max.poll.records = 500
    src.header.converter = class io.confluent.connect.replicator.util.ByteArrayConverter
    src.kafka.bootstrap.servers = [sourceKafkaDNS:9092]
    src.kafka.metric.reporters = []
    src.kafka.metrics.num.samples = 2
    src.kafka.receive.buffer.bytes = 65536
    src.kafka.sasl.client.callback.handler.class = null
    src.kafka.sasl.jaas.config = null
    src.kafka.sasl.kerberos.kinit.cmd = /usr/bin/kinit
    src.kafka.sasl.kerberos.min.time.before.relogin = 60000
    src.kafka.sasl.kerberos.ticket.renew.jitter = 0.05
    src.kafka.sasl.kerberos.ticket.renew.window.factor = 0.8
    src.kafka.sasl.login.callback.handler.class = null
    src.kafka.sasl.login.class = null
    src.kafka.sasl.login.refresh.buffer.seconds = 300
    src.kafka.sasl.login.refresh.min.period.seconds = 60
    src.kafka.sasl.login.refresh.window.factor = 0.8
    src.kafka.sasl.login.refresh.window.jitter = 0.05
    src.kafka.sasl.mechanism = GSSAPI
    src.kafka.security.protocol = PLAINTEXT
    src.kafka.send.buffer.bytes = 131072
    src.kafka.ssl.cipher.suites = null
    src.kafka.ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
    src.kafka.ssl.endpoint.identification.algorithm = https
    src.kafka.ssl.key.password = null
    src.kafka.ssl.keymanager.algorithm = SunX509
    src.kafka.ssl.keystore.location = null
    src.kafka.ssl.keystore.password = null
    src.kafka.ssl.keystore.type = JKS
    src.kafka.ssl.protocol = TLS
    src.kafka.ssl.provider = null
    src.kafka.ssl.secure.random.implementation = null
    src.kafka.ssl.trustmanager.algorithm = PKIX
    src.kafka.ssl.truststore.location = null
    src.kafka.ssl.truststore.password = null
    src.kafka.ssl.truststore.type = JKS
    src.key.converter = class io.confluent.connect.replicator.util.ByteArrayConverter
    src.value.converter = class io.confluent.connect.replicator.util.ByteArrayConverter
    src.zookeeper.connect =
    topic.auto.create = true
    topic.blacklist = []
    topic.config.sync = false
    topic.preserve.partitions = true
    topic.regex = .*
    topic.rename.format = ${topic}
    topic.timestamp.type = CreateTime
    topic.whitelist = []
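The ReplicatorSourceTaskConfig dump above means the task itself has been created: with topic.regex = .* and src.kafka.security.protocol = PLAINTEXT it will copy every topic it can see on sourceKafkaDNS:9092, unauthenticated, preserving partitions and topic names (topic.rename.format = ${topic}). Whether the connector and task actually reached RUNNING can be confirmed through the Connect REST API; a minimal sketch, using the same placeholder worker address as above:

```python
import requests

CONNECT_URL = "http://connect:8083"  # hypothetical Connect worker address

# GET /connectors/{name}/status reports connector and per-task state;
# a healthy Replicator shows RUNNING for the connector and each task.
status = requests.get(f"{CONNECT_URL}/connectors/replicator/status").json()
print("connector:", status["connector"]["state"])
for task in status["tasks"]:
    print(f"task {task['id']}:", task["state"])
```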