cancel
Showing results for 
Search instead for 
Did you mean: 
Community Platform Discussions
Connect with fellow community members to discuss general topics related to the Databricks platform, industry trends, and best practices. Share experiences, ask questions, and foster collaboration within the community.
cancel
Showing results for 
Search instead for 
Did you mean: 

Migrating dashboards from one workspace to another workspace

unity_Catalog
New Contributor III

I'm exporting dashboard objects from an existing workspace to a new workspace, but after importing, the underlying dashboard data does not appear in the new workspace. I'm using the code below. Can anyone help?

import os
import requests
import json
import logging

# Set up logging: all progress/errors also go to a local log file.
log_file = 'import_dashboards_log.log'
logging.basicConfig(filename=log_file, level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')

# Target Databricks workspace URL and token.
# NOTE(review): hardcoding a personal access token in source is a security
# risk — prefer environment variables. The original literals are kept only
# as fallback defaults so existing behavior is unchanged when the env vars
# are not set.
target_workspace_url = os.environ.get('DATABRICKS_TARGET_URL',
                                      'https://.azuredatabricks.net')
target_workspace_token = os.environ.get('DATABRICKS_TARGET_TOKEN',
                                        'dapib2e-3')

def create_folder(workspace_url, token, folder_path):
    """Create a folder in the Databricks workspace if it doesn't exist.

    Args:
        workspace_url: Base URL of the target workspace.
        token: Personal access token used as a Bearer credential.
        folder_path: Absolute workspace path to create.
    """
    url = f'{workspace_url}/api/2.0/workspace/mkdirs'
    headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
    payload = {"path": folder_path}
    # json= lets requests serialize the body (equivalent to data=json.dumps(...)).
    response = requests.post(url, headers=headers, json=payload)
    # NOTE(review): treating every 400 as "folder already exists" is fragile —
    # 400 also covers malformed paths etc.; confirm error_code in the response
    # body if stricter handling is needed.
    if response.status_code in (200, 400):
        logging.info(f"Folder created or already exists: {folder_path}")
        print(f"Folder created or already exists: {folder_path}")
    else:
        logging.error(f"Failed to create folder {folder_path}. Error: {response.content}")
        print(f"Failed to create folder {folder_path}. Error: {response.content}")

def import_dashboard(workspace_url, token, file_path, folder_path):
    """Import a dashboard JSON file into the new workspace.

    Args:
        workspace_url: Base URL of the target workspace.
        token: Personal access token used as a Bearer credential.
        file_path: Path to the exported dashboard JSON file.
        folder_path: Workspace folder the dashboard should be parented to.
    """
    with open(file_path, 'r') as f:
        dashboard_data = json.load(f)

    # Prepare the import payload based on the provided JSON sample.
    import_dashboards = {
        "name": dashboard_data.get('name'),
        "parent": folder_path,
        "tags": dashboard_data.get('tags', []),
        "options": dashboard_data.get('options'),
        "widgets": dashboard_data.get('widgets'),
        "user": dashboard_data.get('user')
    }

    url = f'{workspace_url}/api/2.0/preview/sql/dashboards'
    headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
    # json= lets requests serialize the body (equivalent to data=json.dumps(...)).
    response = requests.post(url, headers=headers, json=import_dashboards)
    if response.status_code == 200:
        logging.info(f"Imported dashboard: {file_path}")
        print(f"Imported dashboard: {file_path}")
    else:
        logging.error(f"Failed to import dashboard {file_path}. Error: {response.content}")
        print(f"Failed to import dashboard {file_path}. Error: {response.content}")

def main():
    """Main function to import dashboards into the new workspace.

    Reads every *.json file from the export directory, ensures the target
    folder exists, and imports each dashboard one by one; a failure on one
    file is logged and does not stop the rest.
    """
    exported_dir = 'exported_dashboards'  # Directory where exported dashboards are saved
    folder_path = "/Workspace/folders/new_dashboard_folder"  # Target folder in the new workspace

    print("\033[33mImporting dashboards...\033[0m")  # Yellow color
    logging.info("Starting to import dashboards.")

    # Create folder in the workspace before importing anything into it.
    create_folder(target_workspace_url, target_workspace_token, folder_path)

    for filename in os.listdir(exported_dir):
        if filename.endswith('.json'):
            file_path = os.path.join(exported_dir, filename)
            try:
                import_dashboard(target_workspace_url, target_workspace_token,
                                 file_path, folder_path)
            except Exception as e:
                # Best-effort: log and continue with the remaining dashboards.
                logging.error(f"An error occurred while importing {file_path}: {e}")
                print(f"An error occurred while importing {file_path}: {e}")

    print("\033[32mDashboards import process completed\033[0m")  # Green color
    logging.info("Dashboards import process completed.")

if __name__ == "__main__":
    main()

1 ACCEPTED SOLUTION

Accepted Solutions

unity_Catalog
New Contributor III

Hi romy,
  Thanks for your quick reply. I have migrated the dashboards to the targeted workspace. But what about legacy dashboards — can we export those too? When I try to migrate them, only the names of the legacy dashboards are migrated, not the queries and datasets. I am adding the code below.

import requests

import json

import os

 

# Define your target Databricks workspace URL and personal access token.
# NOTE(review): the values were redacted in the original post; the URL
# literal was missing its closing quote (a syntax error) — fixed here.
# Avoid hardcoding real tokens in source; load them from the environment.
TARGET_WORKSPACE_URL = "*******"

TARGET_WORKSPACE_TOKEN = "*******"

# Set up headers for authentication, shared by every API call below.
headers = {
    'Authorization': f'Bearer {TARGET_WORKSPACE_TOKEN}',
    'Content-Type': 'application/json'
}

 

def create_folder(folder_path):
    """Create the given folder in the target workspace via the mkdirs API.

    Raises an HTTPError if the API call fails (including when the path is
    invalid); a successful call returns None.
    """
    endpoint = f'{TARGET_WORKSPACE_URL}/api/2.0/workspace/mkdirs'
    resp = requests.post(endpoint, headers=headers, json={"path": folder_path})
    resp.raise_for_status()

 

def import_dashboard(dashboard_data):
    """POST a dashboard definition to the preview SQL dashboards API.

    Returns the created dashboard as a dict (includes its new 'id').
    Raises an HTTPError on failure.
    """
    resp = requests.post(
        f'{TARGET_WORKSPACE_URL}/api/2.0/preview/sql/dashboards',
        headers=headers,
        json=dashboard_data,
    )
    resp.raise_for_status()
    return resp.json()

 

def set_acls(dashboard_id, acl_data):
    """Apply an access-control list to an imported dashboard.

    Prints the request and response for debugging, then raises an
    HTTPError if the call failed.
    """
    endpoint = (
        f'{TARGET_WORKSPACE_URL}/api/2.0/preview/sql'
        f'/permissions/dashboards/{dashboard_id}'
    )
    print(f"Sending request to: {endpoint}")
    print(f"Request data: {json.dumps(acl_data, indent=2)}")
    resp = requests.post(endpoint, headers=headers, json=acl_data)  # Use POST method
    print(f"Response status code: {resp.status_code}")
    print(f"Response content: {resp.text}")
    resp.raise_for_status()

 

def import_dashboards():
    """Import dashboards from 'exported_dashboards_with_acls.json'.

    Creates the target folder, then imports each dashboard and applies its
    exported ACLs when present. A failure on one dashboard is reported and
    skipped so the remaining dashboards still import (the original aborted
    the whole run on the first HTTP error).

    NOTE(review): the preview dashboards API appears to create only the
    dashboard shell; widgets/visualizations reference query IDs from the
    *source* workspace, which would explain why only names survive the
    migration — the underlying queries likely must be migrated (and their
    IDs remapped) first. Confirm against the Databricks SQL API docs.
    """
    with open('exported_dashboards_with_acls.json', 'r') as f:
        dashboards = json.load(f)

    # Define the folder path in the target workspace.
    legacy_folder_path = '/Shared/Legacy dashboards'
    create_folder(legacy_folder_path)

    for dashboard in dashboards:
        # Use .get consistently so a missing 'name' can't raise KeyError.
        name = dashboard.get('name', '')

        # Prepare dashboard data for import.
        dashboard_data = {
            "name": name,
            "widgets": dashboard.get('widgets', []),
            "visualizations": dashboard.get('visualizations', []),
            "description": dashboard.get('description', ''),
            "options": dashboard.get('options', {}),
        }

        try:
            # Import dashboard and capture the ID assigned by the target.
            imported_dashboard = import_dashboard(dashboard_data)
            dashboard_id = imported_dashboard['id']

            # Set ACLs for the imported dashboard, if any were exported.
            if 'acl' in dashboard:
                acl_data = {
                    "access_control_list": dashboard['acl']['access_control_list']
                }
                set_acls(dashboard_id, acl_data)

            print(f"Imported dashboard: {name} with ID: {dashboard_id}")
        except Exception as e:
            # Report and continue with the remaining dashboards.
            print(f"Failed to import dashboard {name}: {e}")


if __name__ == '__main__':
    import_dashboards()
    print('Dashboards imported successfully.')







View solution in original post

3 REPLIES 3

romy
New Contributor III
New Contributor III

unity_Catalog
New Contributor III

Hi romy,
  Thanks for your quick reply. I have migrated the dashboards to the targeted workspace. But what about legacy dashboards — can we export those too? When I try to migrate them, only the names of the legacy dashboards are migrated, not the queries and datasets. I am adding the code below.

import requests

import json

import os

 

# Define your target Databricks workspace URL and personal access token.
# NOTE(review): the values were redacted in the original post; the URL
# literal was missing its closing quote (a syntax error) — fixed here.
# Avoid hardcoding real tokens in source; load them from the environment.
TARGET_WORKSPACE_URL = "*******"

TARGET_WORKSPACE_TOKEN = "*******"

# Set up headers for authentication, shared by every API call below.
headers = {
    'Authorization': f'Bearer {TARGET_WORKSPACE_TOKEN}',
    'Content-Type': 'application/json'
}

 

def create_folder(folder_path):
    """Create the given folder in the target workspace via the mkdirs API.

    Raises an HTTPError if the API call fails (including when the path is
    invalid); a successful call returns None.
    """
    endpoint = f'{TARGET_WORKSPACE_URL}/api/2.0/workspace/mkdirs'
    resp = requests.post(endpoint, headers=headers, json={"path": folder_path})
    resp.raise_for_status()

 

def import_dashboard(dashboard_data):
    """POST a dashboard definition to the preview SQL dashboards API.

    Returns the created dashboard as a dict (includes its new 'id').
    Raises an HTTPError on failure.
    """
    resp = requests.post(
        f'{TARGET_WORKSPACE_URL}/api/2.0/preview/sql/dashboards',
        headers=headers,
        json=dashboard_data,
    )
    resp.raise_for_status()
    return resp.json()

 

def set_acls(dashboard_id, acl_data):
    """Apply an access-control list to an imported dashboard.

    Prints the request and response for debugging, then raises an
    HTTPError if the call failed.
    """
    endpoint = (
        f'{TARGET_WORKSPACE_URL}/api/2.0/preview/sql'
        f'/permissions/dashboards/{dashboard_id}'
    )
    print(f"Sending request to: {endpoint}")
    print(f"Request data: {json.dumps(acl_data, indent=2)}")
    resp = requests.post(endpoint, headers=headers, json=acl_data)  # Use POST method
    print(f"Response status code: {resp.status_code}")
    print(f"Response content: {resp.text}")
    resp.raise_for_status()

 

def import_dashboards():
    """Import dashboards from 'exported_dashboards_with_acls.json'.

    Creates the target folder, then imports each dashboard and applies its
    exported ACLs when present. A failure on one dashboard is reported and
    skipped so the remaining dashboards still import (the original aborted
    the whole run on the first HTTP error).

    NOTE(review): the preview dashboards API appears to create only the
    dashboard shell; widgets/visualizations reference query IDs from the
    *source* workspace, which would explain why only names survive the
    migration — the underlying queries likely must be migrated (and their
    IDs remapped) first. Confirm against the Databricks SQL API docs.
    """
    with open('exported_dashboards_with_acls.json', 'r') as f:
        dashboards = json.load(f)

    # Define the folder path in the target workspace.
    legacy_folder_path = '/Shared/Legacy dashboards'
    create_folder(legacy_folder_path)

    for dashboard in dashboards:
        # Use .get consistently so a missing 'name' can't raise KeyError.
        name = dashboard.get('name', '')

        # Prepare dashboard data for import.
        dashboard_data = {
            "name": name,
            "widgets": dashboard.get('widgets', []),
            "visualizations": dashboard.get('visualizations', []),
            "description": dashboard.get('description', ''),
            "options": dashboard.get('options', {}),
        }

        try:
            # Import dashboard and capture the ID assigned by the target.
            imported_dashboard = import_dashboard(dashboard_data)
            dashboard_id = imported_dashboard['id']

            # Set ACLs for the imported dashboard, if any were exported.
            if 'acl' in dashboard:
                acl_data = {
                    "access_control_list": dashboard['acl']['access_control_list']
                }
                set_acls(dashboard_id, acl_data)

            print(f"Imported dashboard: {name} with ID: {dashboard_id}")
        except Exception as e:
            # Report and continue with the remaining dashboards.
            print(f"Failed to import dashboard {name}: {e}")


if __name__ == '__main__':
    import_dashboards()
    print('Dashboards imported successfully.')







Kaniz_Fatma
Community Manager
Community Manager

Hi @unity_Catalog, Hi, Thank you for reaching out to our community! We're here to help you.

To ensure we provide you with the best support, could you please take a moment to review the response and choose the one that best answers your question? Your feedback not only helps us assist you better but also benefits other community members who may have similar questions in the future.

If you found the answer helpful, consider giving it a kudo. If the response fully addresses your question, please mark it as the accepted solution. This will help us close the thread and ensure your question is resolved.

We appreciate your participation and are here to assist you further if you need it!

Connect with Databricks Users in Your Area

Join a Regional User Group to connect with local Databricks users. Events will be happening in your city, and you won’t want to miss the chance to attend and share knowledge.

If there isn’t a group near you, start one and help create a community that brings people together.

Request a New Group