Hi everyone!
How do I limit the maximum number of executor retries in a Spark worker?
I tried:
{
"engineId": "rb",
"engineFactory": "com.actionml.engines.ur.UREngine",
"sparkConf": {
"master": "spark://spark-master:7077",
"spark.serializer": "org.apache.spark.serializer.KryoSerializer",
"spark.kryo.registrator": "org.apache.mahout.sparkbindings.io.MahoutKryoRegistrator",
"spark.kryo.referenceTracking": "false",
"spark.kryoserializer.buffer": "300m",
"spark.executor.memory": "2g",
"spark.driver.memory": "2g",
"spark.es.index.auto.create": "true",
"spark.es.nodes": "elasticsearch",
"spark.es.nodes.wan.only": "true",
"spark.dynamicAllocation.maxExecutors": "3",
"spark.worker.cleanup.enabled": "true"
},
"algorithm":{
"indicators": [
{
"name": "buy"
},
{
"name": "add-to-cart"
},
{
"name": "view"
},
{
"name": "like"
}
]
}
}
But this configuration does not limit the executor retries as I expected.