I created a repository for testing Databricks Asset Bundles (DAB), and the job run fails with this error:

```
run failed with error message
Python wheel with name dab_test_repo could not be found. Please check the driver logs for more details
```
This is my databricks.yml:
```yaml
bundle:
  name: dab_test_repo

# Declare to Databricks Asset Bundles that this is a Python project
artifacts:
  default:
    type: whl
    build: poetry build
    path: .

resources:
  jobs:
    dab_test_repo:
      name: dab_test_repo
      tasks:
        - task_key: solve
          job_cluster_key: ${bundle.target}-${bundle.name}-job-cluster
          python_wheel_task:
            package_name: dab_test_repo
            entry_point: solver
          # libraries:
          #   - whl: ./dist/*.whl
        - task_key: forecast
          depends_on:
            - task_key: solve
          job_cluster_key: ${bundle.target}-${bundle.name}-job-cluster
          python_wheel_task:
            package_name: dab_test_repo
            entry_point: forecast
          # libraries:
          #   - whl: ./dist/*.whl
        - task_key: regularize
          depends_on:
            - task_key: solve
          job_cluster_key: ${bundle.target}-${bundle.name}-job-cluster
          python_wheel_task:
            package_name: dab_test_repo
            entry_point: regularize
          # libraries:
          #   - whl: ./dist/*.whl
        - task_key: persist_data
          depends_on:
            - task_key: forecast
            - task_key: regularize
          job_cluster_key: ${bundle.target}-${bundle.name}-job-cluster
          python_wheel_task:
            package_name: dab_test_repo
            entry_point: save_in_database
          # libraries:
          #   - whl: ./dist/*.whl

targets:
  dev:
    mode: development
    default: true
    run_as:
      user_name: my_mail@generic_mail.com
    resources:
      jobs:
        dab_test_repo:
          job_clusters:
            - job_cluster_key: ${bundle.target}-${bundle.name}-job-cluster
              new_cluster:
                num_workers: 2
                spark_version: "13.3.x-cpu-ml-scala2.12" # Specify the Spark version
                node_type_id: Standard_DS3_v2 # Specify the node type
```
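
For context, the wheel is built with Poetry, and the entry point names used in the `python_wheel_task` blocks (`solver`, `forecast`, `regularize`, `save_in_database`) are declared as console scripts in `pyproject.toml`. A minimal sketch of what that section looks like (the module paths and version below are placeholders, not my real layout):

```toml
[tool.poetry]
name = "dab_test_repo"
version = "0.1.0"
description = "Test project for Databricks Asset Bundles"

# These console scripts become the wheel's entry points; as far as I
# understand, python_wheel_task.entry_point is looked up against them.
# The module paths are placeholders.
[tool.poetry.scripts]
solver = "dab_test_repo.solver:main"
forecast = "dab_test_repo.forecast:main"
regularize = "dab_test_repo.regularize:main"
save_in_database = "dab_test_repo.persistence:main"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
```

I left the per-task `libraries:` blocks commented out, since I expected the bundle-level `artifacts` section to build and attach the wheel.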
Thanks in advance