{
  "error_code": "INVALID_PARAMETER_VALUE",
  "message": "Missing required field: job_id"
}
I have a test job cluster, and I need to update the docker_image field to a different image version using the reset/update Jobs API. I went through the Databricks documentation and ran all of the commands related to reset, but I keep getting the error above. The command I ran and a snippet of my resetjob.json are below.
curl --netrc --request POST \
  https://databricksinstance/api/2.0/jobs/update job_id=1 \
  --data @resetjob.json \
  | jq .
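For comparison, as far as I can tell from the Jobs API 2.0 documentation, job_id is supposed to travel inside the JSON body rather than on the command line, and the changed settings go under a new_settings key. Roughly like this, where the hostname and the inline body are just placeholders:

curl --netrc --request POST \
  https://databricksinstance/api/2.0/jobs/update \
  --data '{ "job_id": 1, "new_settings": { ... } }' \
  | jq .

My actual resetjob.json looks like this (sensitive values blanked out):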
{
  "job_id": 1,
  "settings": {
    "name": "testjob",
    "new_cluster": {
      "cluster_name": "",
      "spark_version": "",
      "spark_conf": {
        "spark.sql.legacy.parser.havingWithoutGroupByAsWhere": "=True"
      },
      "aws_attributes": {
        "zone_id": "",
        "first_on_demand": 0,
        "availability": "SPOT_WITH_FALLBACK",
        "instance_profile_arn": "",
        "spot_bid_price_percent": ,
        "ebs_volume_type": "GENERAL_PURPOSE_SSD",
        "ebs_volume_count": 1,
        "ebs_volume_size": 100
      },
      "node_type_id": "i3.xlarge",
      "driver_node_type_id": "i3.xlarge",
      "custom_tags": {
        "Contractor": "",
        "USER": ""
      },
      "cluster_log_conf": {
        "s3": {
          "destination": "",
          "region": "",
          "enable_encryption": true,
          "canned_acl": ""
        }
      },
      "enable_elastic_disk": false,
      "init_scripts": [
        {
          "dbfs": {
            "destination": ""
          }
        },
        {
          "dbfs": {
            "destination": ""
          }
        }
      ],
      "docker_image": {
        "url": "*",
        "basic_auth": {}
      },
      "policy_id": "",
      "num_workers": 12
    },
    "libraries": [
      {
        "egg": ""
      }
    ],
    "email_notifications": {
      "no_alert_for_skipped_runs": false
    },
    "timeout_seconds": 0,
    "notebook_task": {
      "notebook_path": ""
    },
    "max_concurrent_runs": 10
  },
  "created_time": ,
  "creator_user_name": "",
  "run_as_user_name": ""
}
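From the same docs, my understanding is that both /jobs/update and /jobs/reset expect the settings wrapped in a new_settings key rather than settings (the settings/created_time/creator_user_name layout above is what /jobs/get returns). This is a minimal sketch of what I think a valid update payload for swapping the Docker image would look like, with placeholder values and only the fields relevant to the image change:

{
  "job_id": 1,
  "new_settings": {
    "new_cluster": {
      "spark_version": "<spark-version>",
      "node_type_id": "i3.xlarge",
      "num_workers": 12,
      "docker_image": {
        "url": "<new-image-url>",
        "basic_auth": {}
      }
    }
  }
}

(As I understand it, reset replaces the job's entire settings object, while update only replaces the top-level fields that are passed in new_settings, so a new_cluster block like this would still need every cluster field I want to keep.)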
Any idea what is going wrong here?