<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Connecting to an S3 compatible bucket in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141707#M51799</link>
    <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/200523"&gt;@demo-user&lt;/a&gt;&amp;nbsp;Serverless SQL Warehouses and the Express account type are designed for simplicity and rely almost exclusively on the cloud provider's secure identity mechanisms.&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;Serverless SQL Warehouses (and Serverless Jobs) do not currently support configuring generic S3-compatible endpoint URLs and access keys&lt;/LI&gt;&lt;LI&gt;Complex, non-standard cloud integrations are often limited or unsupported compared to the Enterprise account type&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;The only reliable and supported way to connect to a generic S3-compatible object store using a custom endpoint and access keys is by using Provisioned Compute&amp;nbsp;where you control the Spark configuration.&lt;/P&gt;</description>
    <pubDate>Thu, 11 Dec 2025 23:07:37 GMT</pubDate>
    <dc:creator>Raman_Unifeye</dc:creator>
    <dc:date>2025-12-11T23:07:37Z</dc:date>
    <item>
      <title>Connecting to an S3 compatible bucket</title>
      <link>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141696#M51793</link>
      <description>&lt;P&gt;Hi everyone,&lt;/P&gt;&lt;P&gt;I’m trying to connect Databricks to an S3-compatible bucket using a custom endpoint URL and access keys.&lt;BR /&gt;I’m using an Express account with Serverless SQL Warehouses, but the only external storage options I see are AWS IAM roles or Cloudflare R2.&lt;BR /&gt;Is there any supported way to connect to a generic S3-compatible object store (via access key/secret + endpoint)? What is the workaround?&lt;/P&gt;</description>
      <pubDate>Thu, 11 Dec 2025 18:30:34 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141696#M51793</guid>
      <dc:creator>demo-user</dc:creator>
      <dc:date>2025-12-11T18:30:34Z</dc:date>
    </item>
    <item>
      <title>Re: Connecting to an S3 compatible bucket</title>
      <link>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141707#M51799</link>
      <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/200523"&gt;@demo-user&lt;/a&gt;&amp;nbsp;Serverless SQL Warehouses and the Express account type are designed for simplicity and rely almost exclusively on the cloud provider's secure identity mechanisms.&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;Serverless SQL Warehouses (and Serverless Jobs) do not currently support configuring generic S3-compatible endpoint URLs and access keys&lt;/LI&gt;&lt;LI&gt;Complex, non-standard cloud integrations are often limited or unsupported compared to the Enterprise account type&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;The only reliable and supported way to connect to a generic S3-compatible object store using a custom endpoint and access keys is by using Provisioned Compute&amp;nbsp;where you control the Spark configuration.&lt;/P&gt;</description>
      <pubDate>Thu, 11 Dec 2025 23:07:37 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141707#M51799</guid>
      <dc:creator>Raman_Unifeye</dc:creator>
      <dc:date>2025-12-11T23:07:37Z</dc:date>
    </item>
    <item>
      <title>Re: Connecting to an S3 compatible bucket</title>
      <link>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141709#M51801</link>
      <description>&lt;P&gt;Thank you for your response! Are Spark configurations enabled on Enterprise accounts with serverless compute? I am getting errors when I attempt the Spark configurations&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Thu, 11 Dec 2025 23:19:37 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141709#M51801</guid>
      <dc:creator>demo-user</dc:creator>
      <dc:date>2025-12-11T23:19:37Z</dc:date>
    </item>
    <item>
      <title>Re: Connecting to an S3 compatible bucket</title>
      <link>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141711#M51803</link>
      <description>&lt;P&gt;Serverless compute does not support setting most Apache Spark configuration properties&amp;nbsp;irrespective of Enterprise Tier as Databricks fully manages the underlying infrastructure.&lt;/P&gt;</description>
      <pubDate>Thu, 11 Dec 2025 23:28:53 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/connecting-to-an-s3-compatible-bucket/m-p/141711#M51803</guid>
      <dc:creator>Raman_Unifeye</dc:creator>
      <dc:date>2025-12-11T23:28:53Z</dc:date>
    </item>
  </channel>
</rss>

