<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Re: Permission denied on shallow cloned table write on single cluster in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/permission-denied-on-shallow-cloned-table-write-on-single/m-p/120718#M46240</link>
    <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/145555"&gt;@Isi&lt;/a&gt;&amp;nbsp;Thank you for the link to the documentation. I had not been able to find it myself!&lt;/P&gt;</description>
    <pubDate>Mon, 02 Jun 2025 13:50:18 GMT</pubDate>
    <dc:creator>der</dc:creator>
    <dc:date>2025-06-02T13:50:18Z</dc:date>
    <item>
      <title>Permission denied on shallow cloned table write on single cluster</title>
      <link>https://community.databricks.com/t5/data-engineering/permission-denied-on-shallow-cloned-table-write-on-single/m-p/120306#M46130</link>
      <description>&lt;P&gt;When I try to modify a shallow cloned table with partitionOverwriteMode dynamic on a "dedicated/single user" cluster (DBR 16.4), I get the following error message:&lt;/P&gt;&lt;PRE&gt;Py4JJavaError: An error occurred while calling o483.saveAsTable.&lt;BR /&gt;: org.apache.spark.SparkException: [TASK_WRITE_FAILED] Task failed while writing rows to abfss://&amp;lt;TABLE_STORAGE_PLACE&amp;gt;. SQLSTATE: 58030&lt;BR /&gt;at org.apache.spark.sql.errors.QueryExecutionErrors$.taskFailedWhileWritingRowsError(QueryExecutionErrors.scala:996)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatDataWriter.enrichWriteError(FileFormatDataWriter.scala:109)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatDataWriter.writeWithMetrics(FileFormatDataWriter.scala:120)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatDataWriter.writeWithIterator(FileFormatDataWriter.scala:128)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.$anonfun$executeTask$1(FileFormatWriter.scala:559)&lt;BR /&gt;at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1628)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.executeTask(FileFormatWriter.scala:566)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.WriteFilesExec.$anonfun$doExecuteWrite$1(WriteFiles.scala:125)&lt;BR /&gt;at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2(RDD.scala:938)&lt;BR /&gt;at org.apache.spark.rdd.RDD.$anonfun$mapPartitionsInternal$2$adapted(RDD.scala:938)&lt;BR /&gt;at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:60)&lt;BR /&gt;at org.apache.spark.rdd.RDD.$anonfun$computeOrReadCheckpoint$1(RDD.scala:413)&lt;BR /&gt;at com.databricks.spark.util.ExecutorFrameProfiler$.record(ExecutorFrameProfiler.scala:110)&lt;BR /&gt;at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:410)&lt;BR /&gt;at org.apache.spark.rdd.RDD.iterator(RDD.scala:377)&lt;BR /&gt;at org.apache.spark.scheduler.ResultTask.$anonfun$runTask$3(ResultTask.scala:82)&lt;BR /&gt;at com.databricks.spark.util.ExecutorFrameProfiler$.record(ExecutorFrameProfiler.scala:110)&lt;BR /&gt;at org.apache.spark.scheduler.ResultTask.$anonfun$runTask$1(ResultTask.scala:82)&lt;BR /&gt;at com.databricks.spark.util.ExecutorFrameProfiler$.record(ExecutorFrameProfiler.scala:110)&lt;BR /&gt;at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)&lt;BR /&gt;at org.apache.spark.TaskContext.runTaskWithListeners(TaskContext.scala:225)&lt;BR /&gt;at org.apache.spark.scheduler.Task.doRunTask(Task.scala:199)&lt;BR /&gt;at org.apache.spark.scheduler.Task.$anonfun$run$5(Task.scala:161)&lt;BR /&gt;at com.databricks.unity.UCSEphemeralState$Handle.runWith(UCSEphemeralState.scala:51)&lt;BR /&gt;at com.databricks.unity.HandleImpl.runWith(UCSHandle.scala:104)&lt;BR /&gt;at com.databricks.unity.HandleImpl.$anonfun$runWithAndClose$1(UCSHandle.scala:109)&lt;BR /&gt;at scala.util.Using$.resource(Using.scala:269)&lt;BR /&gt;at com.databricks.unity.HandleImpl.runWithAndClose(UCSHandle.scala:108)&lt;BR /&gt;at org.apache.spark.scheduler.Task.$anonfun$run$1(Task.scala:155)&lt;BR /&gt;at com.databricks.spark.util.ExecutorFrameProfiler$.record(ExecutorFrameProfiler.scala:110)&lt;BR /&gt;at org.apache.spark.scheduler.Task.run(Task.scala:102)&lt;BR /&gt;at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$10(Executor.scala:1043)&lt;BR /&gt;at 
org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)&lt;BR /&gt;at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:111)&lt;BR /&gt;at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:1046)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at com.databricks.spark.util.ExecutorFrameProfiler$.record(ExecutorFrameProfiler.scala:110)&lt;BR /&gt;at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:933)&lt;BR /&gt;at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)&lt;BR /&gt;at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)&lt;BR /&gt;at java.base/java.lang.Thread.run(Thread.java:840)&lt;BR /&gt;at org.apache.spark.scheduler.DAGScheduler.$anonfun$runJob$1(DAGScheduler.scala:1413)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)&lt;BR /&gt;at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:1401)&lt;BR /&gt;at org.apache.spark.SparkContext.runJobInternal(SparkContext.scala:3171)&lt;BR /&gt;at org.apache.spark.SparkContext.runJob(SparkContext.scala:3152)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.$anonfun$executeWrite$6(FileFormatWriter.scala:435)&lt;BR /&gt;at org.apache.spark.sql.catalyst.MetricKeyUtils$.measureMs(MetricKey.scala:1195)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.$anonfun$executeWrite$5(FileFormatWriter.scala:433)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.writeAndCommit(FileFormatWriter.scala:395)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.executeWrite(FileFormatWriter.scala:431)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.$anonfun$write$1(FileFormatWriter.scala:300)&lt;BR /&gt;at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:121)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.commands.WriteIntoDeltaCommand.run(WriteIntoDeltaCommand.scala:121)&lt;BR /&gt;at org.apache.spark.sql.execution.command.DataWritingCommandExec.$anonfun$sideEffectResult$5(commands.scala:137)&lt;BR /&gt;at org.apache.spark.sql.execution.SparkPlan.runCommandInAetherOrSpark(SparkPlan.scala:189)&lt;BR /&gt;at org.apache.spark.sql.execution.command.DataWritingCommandExec.$anonfun$sideEffectResult$4(commands.scala:137)&lt;BR /&gt;at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)&lt;BR /&gt;at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:133)&lt;BR /&gt;at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:132)&lt;BR /&gt;at org.apache.spark.sql.execution.command.DataWritingCommandExec.$anonfun$doExecute$4(commands.scala:161)&lt;BR /&gt;at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)&lt;BR /&gt;at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:161)&lt;BR /&gt;at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$2(SparkPlan.scala:341)&lt;BR /&gt;at 
com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)&lt;BR /&gt;at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:341)&lt;BR /&gt;at org.apache.spark.sql.execution.SparkPlan$.org$apache$spark$sql$execution$SparkPlan$$withExecuteQueryLogging(SparkPlan.scala:132)&lt;BR /&gt;at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:399)&lt;BR /&gt;at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:165)&lt;BR /&gt;at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:395)&lt;BR /&gt;at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:336)&lt;BR /&gt;at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$doExecute$1(AdaptiveSparkPlanExec.scala:981)&lt;BR /&gt;at org.apache.spark.sql.execution.adaptive.ResultQueryStageExec.$anonfun$doMaterialize$1(QueryStageExec.scala:663)&lt;BR /&gt;at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1210)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$7(SQLExecution.scala:905)&lt;BR /&gt;at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$6(SQLExecution.scala:905)&lt;BR /&gt;at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$5(SQLExecution.scala:905)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$4(SQLExecution.scala:904)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$3(SQLExecution.scala:903)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.OptimisticTransaction$.withActive(OptimisticTransaction.scala:216)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.ConcurrencyHelpers$.withOptimisticTransaction(ConcurrencyHelpers.scala:54)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$2(SQLExecution.scala:902)&lt;BR /&gt;at org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:97)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$1(SQLExecution.scala:886)&lt;BR /&gt;at java.base/java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1768)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:157)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at com.databricks.spark.util.IdentityClaim$.withClaim(IdentityClaim.scala:48)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.$anonfun$runWithCaptured$4(SparkThreadLocalForwardingThreadPoolExecutor.scala:113)&lt;BR /&gt;at com.databricks.unity.UCSEphemeralState$Handle.runWith(UCSEphemeralState.scala:51)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:112)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:89)&lt;BR /&gt;at 
org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:154)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:157)&lt;BR /&gt;at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)&lt;BR /&gt;at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)&lt;BR /&gt;at java.base/java.lang.Thread.run(Thread.java:840)&lt;BR /&gt;Caused by: com.databricks.sql.managedcatalog.acl.UnauthorizedAccessException: PERMISSION_DENIED: User does not have MODIFY on Table 'catalog1.schema1.product_sales'.&lt;BR /&gt;at com.databricks.managedcatalog.UCReliableHttpClient.reliablyAndTranslateExceptions(UCReliableHttpClient.scala:152)&lt;BR /&gt;at com.databricks.managedcatalog.UCReliableHttpClient.postJsonWithOptions(UCReliableHttpClient.scala:190)&lt;BR /&gt;at com.databricks.managedcatalog.ManagedCatalogClientImpl.generateTemporaryTableCredentials(ManagedCatalogClientImpl.scala:3463)&lt;BR /&gt;at com.databricks.managedcatalog.ManagedCatalogClientImpl.$anonfun$getTableCredentials$1(ManagedCatalogClientImpl.scala:3521)&lt;BR /&gt;at com.databricks.managedcatalog.ManagedCatalogClientImpl.$anonfun$recordAndWrapException$2(ManagedCatalogClientImpl.scala:6873)&lt;BR /&gt;at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)&lt;BR /&gt;at com.databricks.managedcatalog.ManagedCatalogClientImpl.$anonfun$recordAndWrapException$1(ManagedCatalogClientImpl.scala:6872)&lt;BR /&gt;at com.databricks.managedcatalog.ErrorDetailsHandler.wrapServiceException(ErrorDetailsHandler.scala:37)&lt;BR /&gt;at com.databricks.managedcatalog.ErrorDetailsHandler.wrapServiceException$(ErrorDetailsHandler.scala:35)&lt;BR /&gt;at com.databricks.managedcatalog.ManagedCatalogClientImpl.wrapServiceException(ManagedCatalogClientImpl.scala:216)&lt;BR /&gt;at com.databricks.managedcatalog.ManagedCatalogClientImpl.recordAndWrapException(ManagedCatalogClientImpl.scala:6853)&lt;BR /&gt;at com.databricks.managedcatalog.ManagedCatalogClientImpl.getTableCredentials(ManagedCatalogClientImpl.scala:3503)&lt;BR /&gt;at com.databricks.sql.managedcatalog.ManagedCatalogClient.getTemporaryCredentials(ManagedCatalogClient.scala:2398)&lt;BR /&gt;at com.databricks.sql.managedcatalog.ManagedCatalogClient.getTemporaryCredentials$(ManagedCatalogClient.scala:2383)&lt;BR /&gt;at com.databricks.managedcatalog.ManagedCatalogClientImpl.getTemporaryCredentials(ManagedCatalogClientImpl.scala:216)&lt;BR /&gt;at com.databricks.unity.TempCredCache.$anonfun$getInternal$7(TemporaryCredentials.scala:392)&lt;BR /&gt;at com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4724)&lt;BR /&gt;at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3522)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2315)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2278)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2193)&lt;BR /&gt;at com.google.common.cache.LocalCache.get(LocalCache.java:3932)&lt;BR /&gt;at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4721)&lt;BR /&gt;at com.databricks.unity.TempCredCache.liftedTree1$1(TemporaryCredentials.scala:391)&lt;BR /&gt;at com.databricks.unity.TempCredCache.getInternal(TemporaryCredentials.scala:390)&lt;BR /&gt;at 
com.databricks.unity.TempCredCache.get(TemporaryCredentials.scala:319)&lt;BR /&gt;at com.databricks.unity.UnityCredentialManager.getTemporaryCredentials(CredentialManager.scala:471)&lt;BR /&gt;at com.databricks.unity.CredentialManager$.getTemporaryCredentials(CredentialManager.scala:849)&lt;BR /&gt;at com.databricks.unity.CredentialManagerRpcHelper.$anonfun$getTemporaryCredentials$1(UCSDriver.scala:280)&lt;BR /&gt;at com.databricks.unity.UCSEphemeralState$Handle.runWith(UCSEphemeralState.scala:51)&lt;BR /&gt;at com.databricks.unity.HandleImpl.runWith(UCSHandle.scala:104)&lt;BR /&gt;at com.databricks.unity.HandleImpl.$anonfun$runWithAndClose$1(UCSHandle.scala:109)&lt;BR /&gt;at scala.util.Using$.resource(Using.scala:269)&lt;BR /&gt;at com.databricks.unity.HandleImpl.runWithAndClose(UCSHandle.scala:108)&lt;BR /&gt;at com.databricks.unity.CredentialManagerRpcHelper.runWithScopeAndClose(UCSDriver.scala:254)&lt;BR /&gt;at com.databricks.unity.CredentialManagerRpcHelper.runWithScopeAndClose$(UCSDriver.scala:251)&lt;BR /&gt;at com.databricks.unity.CredentialManagerRpcHelper$.runWithScopeAndClose(UCSDriver.scala:285)&lt;BR /&gt;at com.databricks.unity.CredentialManagerRpcHelper.getTemporaryCredentials(UCSDriver.scala:280)&lt;BR /&gt;at com.databricks.unity.CredentialManagerRpcHelper.getTemporaryCredentials$(UCSDriver.scala:278)&lt;BR /&gt;at com.databricks.unity.CredentialManagerRpcHelper$.getTemporaryCredentials(UCSDriver.scala:285)&lt;BR /&gt;at org.apache.spark.unity.CredentialRpcEndpoint$$anonfun$receiveAndReply$1.applyOrElse(CredentialRpcEndpoint.scala:45)&lt;BR /&gt;at org.apache.spark.rpc.netty.Inbox.$anonfun$process$1(Inbox.scala:104)&lt;BR /&gt;at org.apache.spark.rpc.netty.Inbox.safelyCall(Inbox.scala:216)&lt;BR /&gt;at org.apache.spark.rpc.netty.Inbox.process(Inbox.scala:101)&lt;BR /&gt;at org.apache.spark.rpc.netty.MessageLoop.org$apache$spark$rpc$netty$MessageLoop$$receiveLoop(MessageLoop.scala:76)&lt;BR /&gt;at org.apache.spark.rpc.netty.MessageLoop$$anon$1.run(MessageLoop.scala:42)&lt;BR /&gt;... 12 more&lt;/PRE&gt;&lt;P&gt;&lt;FONT color="#000000"&gt;I have full access on the cloned table, but only SELECT rights on the source table.&lt;/FONT&gt;&lt;/P&gt;&lt;P&gt;&lt;FONT color="#FF0000"&gt;&lt;STRONG&gt;On a "Standard/Shared" cluster it works without error! It also works if the clone is deep, for obvious reasons.&lt;/STRONG&gt;&lt;/FONT&gt;&lt;/P&gt;&lt;P&gt;Example script:&lt;/P&gt;&lt;LI-CODE lang="python"&gt;from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StructField, StringType, IntegerType

# Clean up and setup
partitioned_table = f"catalog1.schema1.product_sales"
cloned_table = f"catalog2.schema2.product_sales_clone"

spark.sql(f"DROP TABLE IF EXISTS {partitioned_table}")
spark.sql(f"DROP TABLE IF EXISTS  {cloned_table}")

# 1. Create dummy DataFrame
schema_def = StructType([
    StructField("region", StringType(), True),
    StructField("product", StringType(), True),
    StructField("revenue", IntegerType(), True)
])

data = [
    ("North", "Widgets", 300),
    ("North", "Gadgets", 200),
    ("South", "Widgets", 150),
    ("South", "Gadgets", 100),
]
df = spark.createDataFrame(data, schema=schema_def)

# 2. Write and revoke MODIFY rights
(df.write.format("delta")
    .partitionBy("region")
    .mode("overwrite")
    .saveAsTable(partitioned_table))

# TODO: reproduce your permission setup here (catalog, schema, or table level)
# catalog_name = partitioned_table.split(".")[0]
# schema_name = partitioned_table.split(".")[1]
# spark.sql(f"REVOKE MODIFY ON TABLE {partitioned_table} FROM CURRENT_USER()")
# spark.sql(f"REVOKE MODIFY ON SCHEMA {catalog_name}.{schema_name} FROM CURRENT_USER()")
# spark.sql(f"REVOKE MODIFY ON CATALOG {catalog_name} FROM CURRENT_USER()")


# 3. Create a shallow clone
spark.sql(f"""
  CREATE OR REPLACE TABLE {cloned_table}
  SHALLOW CLONE {partitioned_table}
""")

# 4. Create new dummy data for a single partition
new_data = [
    ("North", "Widgets", 999),
    ("North", "Gadgets", 888)
]
new_df = spark.createDataFrame(new_data, schema=schema_def)

# 5. Overwrite only the 'North' partition of the cloned table dynamically
(new_df.write.format("delta")
    .partitionBy("region")
    .mode("overwrite")
    .option("partitionOverwriteMode", "dynamic")
    .saveAsTable(cloned_table))&lt;/LI-CODE&gt;
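&lt;P&gt;For reference, a minimal sketch of the deep-clone variant mentioned above, which does not hit the error (it reuses the tables and DataFrame from the script; an illustration, not part of the failing repro):&lt;/P&gt;&lt;LI-CODE lang="python"&gt;# A DEEP CLONE copies the source data files into the clone, so subsequent
# writes only need permissions on the clone itself, not on the source table.
spark.sql(f"""
  CREATE OR REPLACE TABLE {cloned_table}
  DEEP CLONE {partitioned_table}
""")

# The same dynamic partition overwrite now succeeds on the deep clone.
(new_df.write.format("delta")
    .partitionBy("region")
    .mode("overwrite")
    .option("partitionOverwriteMode", "dynamic")
    .saveAsTable(cloned_table))&lt;/LI-CODE&gt;</description>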
      <pubDate>Tue, 27 May 2025 11:07:09 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/permission-denied-on-shallow-cloned-table-write-on-single/m-p/120306#M46130</guid>
      <dc:creator>der</dc:creator>
      <dc:date>2025-05-27T11:07:09Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied on shallow cloned table write on single cluster</title>
      <link>https://community.databricks.com/t5/data-engineering/permission-denied-on-shallow-cloned-table-write-on-single/m-p/120479#M46175</link>
      <description>&lt;P&gt;Hey&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/154226"&gt;@der&lt;/a&gt;&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;&lt;EM&gt;"I have full access on the cloned table, but only SELECT rights on the source table."&lt;/EM&gt;&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;When working with &lt;STRONG&gt;shallow clones in Unity Catalog&lt;/STRONG&gt; on a &lt;STRONG&gt;dedicated or single-user cluster&lt;/STRONG&gt;, Databricks enforces &lt;STRONG&gt;strict permission inheritance from the source table&lt;/STRONG&gt;.&lt;/P&gt;&lt;P&gt;To perform &lt;STRONG&gt;any update/insert&lt;/STRONG&gt; on a &lt;I&gt;shallow cloned table&lt;/I&gt;, you must have:&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;&lt;P&gt;USE permission on the &lt;STRONG&gt;source catalog and schema&lt;/STRONG&gt;&lt;/P&gt;&lt;/LI&gt;&lt;LI&gt;&lt;P&gt;SELECT permission on the &lt;STRONG&gt;source table&lt;/STRONG&gt;&lt;/P&gt;&lt;/LI&gt;&lt;LI&gt;&lt;P&gt;&lt;STRONG&gt;MODIFY permission on the source table&lt;/STRONG&gt;&lt;/P&gt;&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;Even if you are modifying only the &lt;STRONG&gt;cloned table&lt;/STRONG&gt;, the underlying Delta metadata and lineage trace back to the original table, and Unity Catalog enforces this by default. See &lt;A href="https://docs.databricks.com/aws/en/delta/clone-unity-catalog#work-with-shallow-cloned-tables-in-dedicated-access-mode" target="_self"&gt;Docs_dedicated&lt;/A&gt;.&lt;/P&gt;&lt;P&gt;On a standard/shared cluster it works because standard access mode follows a different privilege model. See &lt;A href="https://docs.databricks.com/aws/en/delta/clone-unity-catalog#query-or-modify-a-shallow-cloned-table-on-unity-catalog" target="_self"&gt;Docs_standard&lt;/A&gt;.&lt;/P&gt;
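&lt;P&gt;A minimal sketch of those grants, using the table names from the original post (the principal is a placeholder and must be run by someone allowed to grant on the source objects):&lt;/P&gt;&lt;LI-CODE lang="python"&gt;# Hypothetical example: source-side privileges needed to write to a shallow
# clone in dedicated access mode. `writer@example.com` is a placeholder principal.
spark.sql("GRANT USE CATALOG ON CATALOG catalog1 TO `writer@example.com`")
spark.sql("GRANT USE SCHEMA ON SCHEMA catalog1.schema1 TO `writer@example.com`")
spark.sql("GRANT SELECT, MODIFY ON TABLE catalog1.schema1.product_sales TO `writer@example.com`")&lt;/LI-CODE&gt;&lt;P&gt;Hope this helps, &lt;span class="lia-unicode-emoji" title=":slightly_smiling_face:"&gt;🙂&lt;/span&gt;&lt;BR /&gt;&lt;BR /&gt;Isi&lt;/P&gt;</description>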
      <pubDate>Wed, 28 May 2025 21:37:17 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/permission-denied-on-shallow-cloned-table-write-on-single/m-p/120479#M46175</guid>
      <dc:creator>Isi</dc:creator>
      <dc:date>2025-05-28T21:37:17Z</dc:date>
    </item>
    <item>
      <title>Re: Permission denied on shallow cloned table write on single cluster</title>
      <link>https://community.databricks.com/t5/data-engineering/permission-denied-on-shallow-cloned-table-write-on-single/m-p/120718#M46240</link>
      <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/145555"&gt;@Isi&lt;/a&gt;&amp;nbsp;Thank you for the link to the documentation. I had not been able to find it myself!&lt;/P&gt;</description>
      <pubDate>Mon, 02 Jun 2025 13:50:18 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/permission-denied-on-shallow-cloned-table-write-on-single/m-p/120718#M46240</guid>
      <dc:creator>der</dc:creator>
      <dc:date>2025-06-02T13:50:18Z</dc:date>
    </item>
  </channel>
</rss>

