I use the payload below to submit my job, which includes an init script saved on S3. The instance profile and init script both work on an interactive cluster, but when I move to a job cluster the init script cannot be configured.
{
  "new_cluster": {
    "spark_version": "8.0.x-scala2.12",
    "spark_conf": {
      "spark.databricks.repl.allowedLanguages": "python,sql"
    },
    "aws_attributes": {
      "instance_profile_arn": "<my instance profile>"
    },
    "init_scripts": [
      {
        "s3": {
          "destination": "s3://<my bucket>/init.sh"
        }
      }
    ],
    "instance_pool_id": "<pool id>",
    "autoscale": {
      "min_workers": 2,
      "max_workers": 6
    }
  },
  "notebook_task": {
    "notebook_path": "<my notebook>"
  }
}
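
For reference, this is roughly how I submit the payload. It is a minimal sketch against the single-task Runs Submit endpoint (/api/2.0/jobs/runs/submit); payload.json, the workspace host, and the token are placeholders.

import json
import requests

# Load the payload shown above (assumed saved locally as payload.json).
with open("payload.json") as f:
    payload = json.load(f)

# Submit a one-time run via the Jobs Runs Submit API.
resp = requests.post(
    "https://<my workspace>.cloud.databricks.com/api/2.0/jobs/runs/submit",
    headers={"Authorization": "Bearer <my token>"},
    json=payload,
)
resp.raise_for_status()
print(resp.json())  # the API returns a run_id for the submitted run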