I use notebooks as part of asset bundle deployments to perform dynamic configuration based on the target workspace (e.g., Development, Test, Production). Alongside this, I developed a helper Python library with a number of functions used for administrative tasks. Attached is the one I created for adding schema permissions.
from typing import Tuple
import re
def addSchemaPermissions(schemaName: str, principal: str, permissionList: str, clearAll: bool) -> Tuple[bool, str]:
    """Grant the given privileges on a schema to a principal.

    Args:
        schemaName: Fully qualified schema name (e.g. "catalog.schema").
        principal: Workspace principal (user, group, or service principal name).
        permissionList: Comma-separated privilege list, e.g. "USE SCHEMA, SELECT".
        clearAll: When True, revoke all existing privileges on the schema first.

    Returns:
        (True, "") on success, or (False, "<error class>: <message>") on failure.
    """
    # NOTE(review): schemaName/principal/permissionList are interpolated directly
    # into SQL; Spark SQL DDL cannot be parameterized, so callers must only pass
    # trusted, deployment-controlled values here.
    try:
        if clearAll:
            spark.sql(f"REVOKE ALL PRIVILEGES ON SCHEMA {schemaName} FROM `{principal}`")
        spark.sql(f"GRANT {permissionList} ON SCHEMA {schemaName} TO `{principal}`")
        return True, ""
    except Exception as e:
        fullErrorMessage = str(e)
        # Try to extract the Databricks error class and readable message.
        errorClassMatch = re.search(r"ErrorClass=([A-Z_\.]+)", fullErrorMessage)
        # BUG FIX: the original pattern r"] (.+?)\\n" matched a literal
        # backslash followed by 'n' (raw string), never an actual newline,
        # so the main message was never captured. r"\n" matches the newline.
        mainMessageMatch = re.search(r"] (.+?)\n", fullErrorMessage)
        errorClass = errorClassMatch.group(1) if errorClassMatch else "Unknown Error"
        if mainMessageMatch:
            mainMessage = mainMessageMatch.group(1)
        else:
            # splitlines() on an empty message is [] — guard against IndexError.
            lines = fullErrorMessage.splitlines()
            mainMessage = lines[0] if lines else fullErrorMessage
        return False, f"{errorClass}: {mainMessage}"
# Example: grant usage and read access on a schema, revoking prior grants first.
result, detail = addSchemaPermissions('your_catalog_name.your_schema_name', 'Your Principal Name', 'USE SCHEMA, SELECT', True)
if not result:
    print(f"Failed to update schema permissions: {detail}")