#!/usr/bin/env bash
#
# Submit the AvroRecordReceiver Spark streaming job (ELT2Hive) to YARN.
#
# Required environment variables:
#   LIB   - directory containing all dependency jars listed below
#   FILES - comma-separated list of files to ship with the job
#           (must include log4j-elt.properties, referenced by the
#           driver/executor -Dlog4j.configuration options below)

set -euo pipefail

# Fail fast with a clear message instead of building a garbage --jars
# list like "/hive-jdbc-..." when the caller forgot to export these.
: "${LIB:?LIB must point to the directory containing the dependency jars}"
: "${FILES:?FILES must list the files to ship with the job (--files)}"

export HIVE_HOME=/opt/cloudera/parcels/CDH/lib/hive
export HCAT_HOME=/opt/cloudera/parcels/CDH/lib/hive-hcatalog

readonly SPARK_SUBMIT=/opt/app/dev/spark-1.6.1/bin/spark-submit
readonly APP_JAR=/home/damian/ELT2Hive-0.10.jar
readonly MAIN_CLASS=com.cigna.damian.AvroRecordReceiver

# Dependency jars shipped via --jars, one per line so version bumps show
# up as clean single-line diffs. Order preserved from the original command.
jar_names=(
  hive-jdbc-1.1.0-cdh5.5.2.jar
  hive-common-1.1.0-cdh5.5.2.jar
  hive-service-1.1.0-cdh5.5.2.jar
  hive-serde-1.1.0-cdh5.5.2.jar
  hive-metastore-1.1.0-cdh5.5.2.jar
  hive-shims-0.23-1.1.0-cdh5.5.2.jar
  hive-shims-1.1.0-cdh5.5.2.jar
  hive-exec-1.1.0-cdh5.5.2.jar
  hive-shims-common-1.1.0-cdh5.5.2.jar
  hive-shims-scheduler-1.1.0-cdh5.5.2.jar
  commons-logging-1.2.jar
  kite-data-core-1.0.0.jar
  kite-data-mapreduce-1.1.0.jar
  kite-hadoop-compatibility-1.0.0.jar
  metrics-core-2.2.0.jar
  kafka-clients-0.8.2.1.jar
  kafka_2.10-0.8.2.1.jar
  zkclient-0.3.jar
  datanucleus-core-3.2.2.jar
  datanucleus-api-jdo-3.2.1.jar
  datanucleus-rdbms-3.2.1.jar
  hbase-common-1.0.0-cdh5.5.2.jar
  hbase-client-1.0.0-cdh5.5.2.jar
  hbase-protocol-1.0.0-cdh5.5.2.jar
  htrace-core4-4.0.1-incubating.jar
  avro-tools-1.7.6-cdh5.5.2.jar
  hbase-annotations-1.0.0-cdh5.5.2.jar
  accumulo-core-1.6.0.jar
  hbase-server-1.0.0-cdh5.5.2.jar
  avro-mapred-1.7.6-cdh5.5.2-hadoop2.jar
  htrace-core-3.2.0-incubating.jar
)

# Join as "$LIB/a.jar,$LIB/b.jar,..." — spark-submit expects a single
# comma-separated value for --jars.
jars=""
for jar in "${jar_names[@]}"; do
  jars+="${jars:+,}${LIB}/${jar}"
done

# Full argument vector, identical to the original one-line invocation.
spark_args=(
  --verbose
  --jars "$jars"
  --queue g_hadoop_d_developers
  --master yarn-cluster
  --files "$FILES"
  --conf "spark.driver.extraJavaOptions=-XX:MaxPermSize=4G -XX:+UseConcMarkSweepGC -Dlog4j.configuration=log4j-elt.properties"
  --conf "spark.sql.tungsten.enabled=false"
  --conf "spark.eventLog.dir=hdfs://nameservice1/user/spark/applicationHistory"
  --conf "spark.eventLog.enabled=true"
  --conf "spark.sql.codegen=false"
  --conf "spark.sql.unsafe.enabled=false"
  --conf "spark.executor.extraJavaOptions=-XX:+UseConcMarkSweepGC -Dlog4j.configuration=log4j-elt.properties"
  --conf "spark.streaming.backpressure.enabled=true"
  --conf "spark.locality.wait=1s"
  --conf "spark.cores.max=12"
  --conf "spark.streaming.blockInterval=1500ms"
  --class "$MAIN_CLASS"
  "$APP_JAR"
)

"$SPARK_SUBMIT" "${spark_args[@]}"