<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: DBFS_DOWN in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105949#M42326</link>
    <description>&lt;P&gt;Can you please try the same in a shared access mode cluster?&lt;BR /&gt;&lt;BR /&gt;Also can you please try setting up spark configuration &lt;CODE&gt;spark.databricks.driver.enableWriteDbfsCommandResultInDp&lt;/CODE&gt; to &lt;CODE&gt;false&lt;/CODE&gt;.&lt;BR /&gt;&lt;BR /&gt;&lt;BR /&gt;This will disable the feature that writes DBFS command results directly in the Data Plane&lt;/P&gt;</description>
    <pubDate>Thu, 16 Jan 2025 16:38:27 GMT</pubDate>
    <dc:creator>Walter_C</dc:creator>
    <dc:date>2025-01-16T16:38:27Z</dc:date>
    <item>
      <title>DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105935#M42321</link>
      <description>&lt;P&gt;I have an Azure Databricks workspace with Unity Catalog setup, using VNet and private endpoints. Serverless works great; however, the regular clusters have problems showing large results:&lt;/P&gt;&lt;DIV class=""&gt;&lt;PRE&gt;Failed to store the result. Try rerunning the command.

Failed to upload command result to DBFS. Error message: PUT request to create file failed with statusCode=403, error=HttpResponseProxy{HTTP/1.1 403 This request is not authorized to perform this operation.&lt;/PRE&gt;&lt;/DIV&gt;&lt;P&gt;Also, I can’t list (ls) into DBFS. The event log shows:&lt;/P&gt;&lt;DIV class=""&gt;&lt;PRE&gt;DBFS_DOWN.&lt;/PRE&gt;&lt;/DIV&gt;&lt;P&gt;Perhaps this is an actual Databricks issue. My network and firewall setup has been validated many times. Somehow, the cluster has no access to the DBFS root. But as this is a Databricks-managed resource group, it should all work out of the box, right?&lt;/P&gt;</description>
      <pubDate>Thu, 16 Jan 2025 16:15:06 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105935#M42321</guid>
      <dc:creator>markbaas</dc:creator>
      <dc:date>2025-01-16T16:15:06Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105941#M42324</link>
      <description>&lt;P&gt;What access mode is being used on these clusters?&lt;/P&gt;
&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Thu, 16 Jan 2025 16:31:03 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105941#M42324</guid>
      <dc:creator>Walter_C</dc:creator>
      <dc:date>2025-01-16T16:31:03Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105942#M42325</link>
      <description>&lt;P&gt;&lt;SPAN&gt;Unrestricted and Single User mode.&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 16 Jan 2025 16:31:57 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105942#M42325</guid>
      <dc:creator>markbaas</dc:creator>
      <dc:date>2025-01-16T16:31:57Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105949#M42326</link>
      <description>&lt;P&gt;Can you please try the same in a shared access mode cluster?&lt;BR /&gt;&lt;BR /&gt;Also can you please try setting up spark configuration &lt;CODE&gt;spark.databricks.driver.enableWriteDbfsCommandResultInDp&lt;/CODE&gt; to &lt;CODE&gt;false&lt;/CODE&gt;.&lt;BR /&gt;&lt;BR /&gt;&lt;BR /&gt;This will disable the feature that writes DBFS command results directly in the Data Plane&lt;/P&gt;</description>
      <pubDate>Thu, 16 Jan 2025 16:38:27 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105949#M42326</guid>
      <dc:creator>Walter_C</dc:creator>
      <dc:date>2025-01-16T16:38:27Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105983#M42331</link>
      <description>&lt;P&gt;Function display and show work now again, thanks!&lt;/P&gt;&lt;P&gt;The dbutils.fs.ls("dbfs:/") command still results in an error. I really wonder how Databricks has set up those managed resources. I think something is going wrong there.&lt;/P&gt;</description>
      <pubDate>Thu, 16 Jan 2025 18:07:17 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/105983#M42331</guid>
      <dc:creator>markbaas</dc:creator>
      <dc:date>2025-01-16T18:07:17Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/106057#M42368</link>
      <description>&lt;P&gt;From spark and dbutils I also get this message:&lt;/P&gt;&lt;P&gt;Caused by: com.microsoft.azure.storage.StorageException: This request is not authorized to perform this operation.&lt;/P&gt;</description>
      <pubDate>Fri, 17 Jan 2025 10:43:56 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/106057#M42368</guid>
      <dc:creator>markbaas</dc:creator>
      <dc:date>2025-01-17T10:43:56Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/106058#M42369</link>
      <description>&lt;P&gt;I don't understand how the cluster authenticates with the storage account; perhaps if someone at Databricks could clear this up for me, I would be able to better debug the issue.&lt;/P&gt;</description>
      <pubDate>Fri, 17 Jan 2025 10:44:52 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/106058#M42369</guid>
      <dc:creator>markbaas</dc:creator>
      <dc:date>2025-01-17T10:44:52Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/106339#M42450</link>
      <description>&lt;P&gt;I solved the issue myself. The Databricks documentation is hard, but it seems necessary to create private endpoints in the managed storage account of Databricks.&lt;/P&gt;</description>
      <pubDate>Mon, 20 Jan 2025 14:02:08 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/106339#M42450</guid>
      <dc:creator>markbaas</dc:creator>
      <dc:date>2025-01-20T14:02:08Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/107790#M42920</link>
      <description>&lt;P&gt;I'm having the same issue when I try to save a large delta table (80M rows). Could you please share how you solved the problem?&lt;/P&gt;</description>
      <pubDate>Thu, 30 Jan 2025 11:39:44 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/107790#M42920</guid>
      <dc:creator>LeandroDias</dc:creator>
      <dc:date>2025-01-30T11:39:44Z</dc:date>
    </item>
    <item>
      <title>Re: DBFS_DOWN</title>
      <link>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/107989#M42959</link>
      <description>&lt;P&gt;The dbfs (dbstorage) resource in the managed azure resource group needs to have private endpoints to your virtual network. You can create those manually or through iac (bicep/terraform).&lt;/P&gt;</description>
      <pubDate>Fri, 31 Jan 2025 09:33:38 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/dbfs-down/m-p/107989#M42959</guid>
      <dc:creator>markbaas</dc:creator>
      <dc:date>2025-01-31T09:33:38Z</dc:date>
    </item>
  </channel>
</rss>

