Issue resolved. I modified the existing Spark job by adding the Bigtable jar as a dependency in pom.xml and making the code change below. Thanks for your help.
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory}

case class HBaseConnectionWithClosure(zookeeperQuorum: String, zookeeperPort: String) {
  def getConnection(): Connection = {
    val c = HBaseConfiguration.create
    // ZooKeeper settings are not needed when connecting to Bigtable, so they stay commented out
    //c.set("hbase.zookeeper.quorum", zookeeperQuorum)
    //c.set("hbase.zookeeper.property.clientPort", zookeeperPort)
    c.set("hbase.meta.replicas.use", "true")
    // Route the HBase client through the Bigtable connection implementation
    c.set("hbase.client.connection.impl", "com.google.cloud.bigtable.hbase1_x.BigtableConnection")
    ConnectionFactory.createConnection(c)
  }
}
and
<dependency>
<groupId>com.google.cloud.bigtable</groupId>
<artifactId>bigtable-hbase-1.x-shaded</artifactId>
<version>1.3.0</version>
</dependency>
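For context, here is a minimal sketch of how this case class can be used from the Spark job; the table name, column family, and RDD of key/value pairs are assumptions for illustration, but the pattern of opening one connection per partition inside foreachPartition is what the change above supports.

import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.rdd.RDD

// Hypothetical usage: write an RDD of (rowKey, value) pairs to a Bigtable table
// through the HBase API, building one connection per partition on the executors.
def writePartitions(rdd: RDD[(String, String)], connInfo: HBaseConnectionWithClosure): Unit = {
  rdd.foreachPartition { rows =>
    val connection = connInfo.getConnection()                       // BigtableConnection under the hood
    val table = connection.getTable(TableName.valueOf("my-table"))  // table name is an assumption
    try {
      rows.foreach { case (rowKey, value) =>
        val put = new Put(Bytes.toBytes(rowKey))
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("col"), Bytes.toBytes(value))
        table.put(put)
      }
    } finally {
      table.close()
      connection.close()
    }
  }
}

Because the case class only carries strings, it serializes cleanly into the closure, and the actual Connection is created on each executor rather than on the driver.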