<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Databricks Runtime 16.4 LTS has inconsistent Spark and Delta Lake versions in Administration &amp; Architecture</title>
    <link>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/133590#M4145</link>
    <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/113908"&gt;@Angus-Dawson&lt;/a&gt;&amp;nbsp;&lt;SPAN&gt;encountered the same and used an override (like a pip constraints.txt file or&amp;nbsp;&lt;/SPAN&gt;&lt;A href="https://pdm-project.org/latest/usage/dependency/#dependency-overrides" target="_self" rel="nofollow noopener noreferrer"&gt;PDM resolution override specification&lt;/A&gt;&lt;SPAN&gt;) to make sure my local development environment matched the runtime.&lt;/SPAN&gt;&lt;/P&gt;</description>
    <pubDate>Thu, 02 Oct 2025 22:23:03 GMT</pubDate>
    <dc:creator>SamAdams</dc:creator>
    <dc:date>2025-10-02T22:23:03Z</dc:date>
    <item>
      <title>Databricks Runtime 16.4 LTS has inconsistent Spark and Delta Lake versions</title>
      <link>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/120969#M3442</link>
      <description>&lt;P&gt;Per the release notes for Databricks Runtime 16.4 LTS, the environment has Apache Spark 3.5.2 and Delta Lake 3.3.1:&lt;/P&gt;&lt;P&gt;&lt;A href="https://docs.databricks.com/aws/en/release-notes/runtime/16.4lts" target="_blank"&gt;https://docs.databricks.com/aws/en/release-notes/runtime/16.4lts&lt;/A&gt;&lt;/P&gt;&lt;P&gt;However, Delta Lake 3.3.1 is built on Spark 3.5.3; the newest version of Delta Lake compatible with Spark 3.5.2 is Delta Lake 3.2.0.&lt;/P&gt;&lt;P&gt;Whatever custom modifications to Spark and Delta Lake have been done behind the scenes to enable this, it makes it impossible to build an equivalent environment for local development and testing. This is not what one expects from an LTS version.&lt;/P&gt;</description>
      <pubDate>Wed, 04 Jun 2025 21:34:10 GMT</pubDate>
      <guid>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/120969#M3442</guid>
      <dc:creator>Angus-Dawson</dc:creator>
      <dc:date>2025-06-04T21:34:10Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks Runtime 16.4 LTS has inconsistent Spark and Delta Lake versions</title>
      <link>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/121067#M3445</link>
      <description>&lt;P&gt;We saw the same thing in previous runtime versions, and even a point-point version broke our code.&amp;nbsp; We actually log the spark version in one pipeline and see different versions popping up from time to time.&amp;nbsp; Apparently the long term goal is to move to "versionless runtimes" so you don't know what you're using, but the execution environment will be monitored for errors and rolled back if errors are detected.&lt;/P&gt;</description>
      <pubDate>Thu, 05 Jun 2025 15:16:57 GMT</pubDate>
      <guid>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/121067#M3445</guid>
      <dc:creator>Rjdudley</dc:creator>
      <dc:date>2025-06-05T15:16:57Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks Runtime 16.4 LTS has inconsistent Spark and Delta Lake versions</title>
      <link>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/133590#M4145</link>
      <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/113908"&gt;@Angus-Dawson&lt;/a&gt;&amp;nbsp;&lt;SPAN&gt;encountered the same and used an override (like a pip constraints.txt file or&amp;nbsp;&lt;/SPAN&gt;&lt;A href="https://pdm-project.org/latest/usage/dependency/#dependency-overrides" target="_self" rel="nofollow noopener noreferrer"&gt;PDM resolution override specification&lt;/A&gt;&lt;SPAN&gt;) to make sure my local development environment matched the runtime.&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 02 Oct 2025 22:23:03 GMT</pubDate>
      <guid>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/133590#M4145</guid>
      <dc:creator>SamAdams</dc:creator>
      <dc:date>2025-10-02T22:23:03Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks Runtime 16.4 LTS has inconsistent Spark and Delta Lake versions</title>
      <link>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/133711#M4148</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/113908"&gt;@Angus-Dawson&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;Use&lt;SPAN&gt;&amp;nbsp; &lt;STRONG&gt;Databricks Connect&lt;/STRONG&gt;&lt;/SPAN&gt;&lt;SPAN&gt;&amp;nbsp;&lt;/SPAN&gt;for local development/testing against a remote Databricks cluster—this ensures your code runs in the actual Databricks environment with Databricks-managed runtimes, which are different from open-source versions (&lt;SPAN&gt;DBR versions often include&amp;nbsp;&lt;/SPAN&gt;&lt;STRONG&gt;custom builds and backports&lt;/STRONG&gt;&lt;SPAN&gt;&amp;nbsp;of Spark and Delta Lake&lt;/SPAN&gt;).&lt;/LI&gt;&lt;LI&gt;Always validate and test on a real Databricks cluster before deploying to production.&lt;/LI&gt;&lt;/UL&gt;</description>
      <pubDate>Fri, 03 Oct 2025 14:50:40 GMT</pubDate>
      <guid>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/133711#M4148</guid>
      <dc:creator>saurabh18cs</dc:creator>
      <dc:date>2025-10-03T14:50:40Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks Runtime 16.4 LTS has inconsistent Spark and Delta Lake versions</title>
      <link>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/133753#M4151</link>
      <description>&lt;P&gt;Exactly as&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/22314"&gt;@saurabh18cs&lt;/a&gt;&amp;nbsp;wrote. Databricks is not equal to spark+delta. If you want to perform real tests in a local environment, simply use Databricks Connect and install the matching version of Python in your virtual environment (venv). However, the local Python version, etc., will be automatically advised if you are using, for example, a VSCode extension.&lt;/P&gt;</description>
      <pubDate>Fri, 03 Oct 2025 20:21:50 GMT</pubDate>
      <guid>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/133753#M4151</guid>
      <dc:creator>Hubert-Dudek</dc:creator>
      <dc:date>2025-10-03T20:21:50Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks Runtime 16.4 LTS has inconsistent Spark and Delta Lake versions</title>
      <link>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/135607#M4252</link>
      <description>&lt;P&gt;Okay, so then it is meaningless to put Spark and Delta Lake versions in your runtime specifications.&lt;/P&gt;</description>
      <pubDate>Wed, 22 Oct 2025 00:39:26 GMT</pubDate>
      <guid>https://community.databricks.com/t5/administration-architecture/databricks-runtime-16-4-lts-has-inconsistent-spark-and-delta/m-p/135607#M4252</guid>
      <dc:creator>Angus-Dawson</dc:creator>
      <dc:date>2025-10-22T00:39:26Z</dc:date>
    </item>
  </channel>
</rss>

