Hi there,
Thank you for the quick reply. I'm looking to create a policy that isn't tied to an instance pool, but instead applies to any job cluster in the workflow.
Here is the policy I'm currently experimenting with. Please let me know if you see where it's off.
{
  "spark_conf.spark.databricks.cluster.profile": {
    "type": "fixed",
    "value": "singleNode",
    "hidden": true
  },
  "spark_version": {
    "type": "unlimited",
    "defaultValue": "auto:latest-lts"
  },
  "enable_elastic_disk": {
    "type": "fixed",
    "value": true,
    "hidden": true
  },
  "node_type_id": {
    "type": "unlimited",
    "defaultValue": "i3.xlarge",
    "isOptional": true
  },
  "num_workers": {
    "type": "fixed",
    "value": 0,
    "hidden": true
  },
  "aws_attributes.availability": {
    "type": "fixed",
    "value": "SPOT",
    "hidden": true
  },
  "aws_attributes.zone_id": {
    "type": "unlimited",
    "defaultValue": "auto",
    "hidden": true
  },
  "aws_attributes.spot_bid_price_percent": {
    "type": "fixed",
    "value": 100,
    "hidden": true
  },
  "instance_pool_id": {
    "type": "forbidden",
    "hidden": true
  },
  "driver_instance_pool_id": {
    "type": "forbidden",
    "hidden": true
  },
  "cluster_type": {
    "type": "fixed",
    "value": "job"
  }
}
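
For context, my assumption is that once the policy exists, each job cluster in the workflow would reference it through the policy_id field of its new_cluster spec, along the lines of the sketch below (Jobs API 2.1 job_clusters format; the job_cluster_key name and the policy ID value are placeholders I made up, not values from your answer):

{
  "job_clusters": [
    {
      "job_cluster_key": "single-node-job-cluster",
      "new_cluster": {
        "policy_id": "<policy-id-placeholder>",
        "spark_version": "auto:latest-lts",
        "node_type_id": "i3.xlarge",
        "num_workers": 0
      }
    }
  ]
}

My understanding is that the policy's fixed and hidden attributes would then be enforced on top of whatever the job specifies, so the single-node settings can't be overridden per job. Please correct me if that's not how it works.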
P.S. When I copy your code into the policy editor, it reports that singleNodePoolId1 does not exist.