Hi,
is there really no option to take VMs from a Pool for a new workflow job cluster (Azure cloud)?
This is the default schema for a new cluster:
{
  "num_workers": 0,
  "spark_version": "10.4.x-scala2.12",
  "spark_conf": {
    "spark.master": "local[*, 4]",
    "spark.databricks.cluster.profile": "singleNode"
  },
  "azure_attributes": {
    "first_on_demand": 1,
    "availability": "ON_DEMAND_AZURE",
    "spot_bid_max_price": -1
  },
  "node_type_id": "Standard_DS3_v2",
  "ssh_public_keys": [],
  "custom_tags": {
    "ResourceClass": "SingleNode"
  },
  "spark_env_vars": {
    "PYSPARK_PYTHON": "/databricks/python3/bin/python3"
  },
  "enable_elastic_disk": true,
  "cluster_source": "JOB",
  "init_scripts": [],
  "data_security_mode": "SINGLE_USER",
  "runtime_engine": "STANDARD"
}
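
For reference, what I'm trying to end up with is something like the spec below, where the cluster draws its nodes from a pool via "instance_pool_id" instead of a fixed "node_type_id". The pool ID here is just a placeholder, and my understanding is that "node_type_id", the "azure_attributes" availability settings, and "enable_elastic_disk" would then be defined by the pool itself rather than the cluster spec, so I've dropped them:

{
  "num_workers": 0,
  "spark_version": "10.4.x-scala2.12",
  "spark_conf": {
    "spark.master": "local[*, 4]",
    "spark.databricks.cluster.profile": "singleNode"
  },
  "instance_pool_id": "<my-pool-id>",
  "custom_tags": {
    "ResourceClass": "SingleNode"
  },
  "spark_env_vars": {
    "PYSPARK_PYTHON": "/databricks/python3/bin/python3"
  },
  "cluster_source": "JOB",
  "data_security_mode": "SINGLE_USER",
  "runtime_engine": "STANDARD"
}

Is something like this supported for job clusters created from the workflow UI, or only via the Clusters/Jobs API?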