Everything was working with MSK and Debezium against MySQL Aurora until now. Two days ago we started adding one new table, and it is not working properly — the connector throws the error: "Encountered change event for table whose schema isn't known to this connector".
We have tried many solutions, but nothing worked for us. The strange thing is this: we added only one table, but each time a few entries (8 entries) are inserted into the history topic, after which it stops working with the error above. We have tried the following:
1. Created new connector.
2. Delete History topics.
3. Tried multiple configuration options.
The scenario is this: sometimes the connector shows UP status, but whenever any change event fires, it logs "Encountered change event for table whose schema isn't known to this connector" and the connector stops. When we set "inconsistent.schema.handling.mode" to "warn", the connector no longer goes down — it stays up and running — but the events are still not reflected in the topic either. Below is my connector configuration.
The weird part is that no new table works, while everything existing works fine; and if I add a table from a different database schema, it works.
{
"connector.class": "io.debezium.connector.mysql.MySqlConnector",
"snapshot.locking.mode": "none",
"errors.log.include.messages": "true",
"topic.creation.default.partitions": "1",
"tasks.max": "1",
"database.history.consumer.sasl.jaas.config": "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"xxxx\" password=\"gSJsdsdsdsdsksdszj\";",
"database.history.kafka.topic": "xxx-history.sxxx-account-table-history",
"bootstrap.servers": "
b-2.dev-msk-cluster.cic8o1.c9.kafka.us-east-1.amazonaws.com:9096,
b-1.dev-msk-cluster.cic8o1.c9.kafka.us-east-1.amazonaws.com:9096",
"database.history.consumer.security.protocol": "SASL_SSL",
"table.whitelist": "xxxx.tbl_account",
"tombstones.on.delete": "true",
"schema.history.internal.kafka.topic": "xxxx-history.speed-account-schmea-history",
"snapshot.new.tables": "parallel",
"topic.creation.default.replication.factor": "2",
"database.history.skip.unparseable.ddl": "true",
"value.converter": "org.apache.kafka.connect.json.JsonConverter",
"errors.log.enable": "true",
"database.whitelist": "speed",
"key.converter": "org.apache.kafka.connect.json.JsonConverter",
"database.allowPublicKeyRetrieval": "true",
"database.history.producer.sasl.mechanism": "SCRAM-SHA-512",
"database.history.producer.sasl.jaas.config": "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"xxxx\" password=\"gSJxxxxxxxx\";",
"database.user": "xxxxx_debezium",
"
database.server.id": "87945",
"database.history.producer.security.protocol": "SASL_SSL",
"database.history.kafka.bootstrap.servers": "
b-2.dev-msk-cluster.cic8o1.c9.kafka.us-east-1.amazonaws.com:9096,
b-1.dev-msk-cluster.cic8o1.c9.kafka.us-east-1.amazonaws.com:9096",
"event.deserialization.failure.handling.mode": "warn",
"
database.server.name": "xxxx-tbl-account",
"schema.history.internal.skip.unparseable.ddl": "false",
"database.port": "4307",
"inconsistent.schema.handling.mode": "skip",
"autoReconnect": "true",
"gtid.new.channel.position": "latest",
"gtid.source.filter.dml.events": "true",
"key.converter.schemas.enable": "false",
"database.hostname": "
aurora-cluster-stage.cluster-cxxxxxxg9yht.us-east-1.rds.amazonaws.com",
"database.password": "xxxxxxxxxx",
"value.converter.schemas.enable": "false",
"name": "xxxxx-account-table-1",
"table.include.list": "xxxx.tbl_account",
"database.history.consumer.sasl.mechanism": "SCRAM-SHA-512",
"snapshot.mode": "initial",
"ddl.parser.mode": "antlr",
"database.include.list": "xxxxx"
}