<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: databricks-jdbc lists `spark_catalog` among catalogs for Standard tier Azure workspace in Get Started Discussions</title>
    <link>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/64214#M6845</link>
    <description>&lt;P&gt;short answer is no&lt;BR /&gt;&lt;BR /&gt;Databricks uses a single catalog -&amp;gt; Unity Catalog for managing tables created with Hive or Spark SQL, simplifying things. In spark sql you can do&lt;/P&gt;&lt;LI-CODE lang="python"&gt;spark-sql (default)&amp;gt; show catalogs;
spark_catalog&lt;/LI-CODE&gt;&lt;P&gt;HTH&lt;BR /&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Wed, 20 Mar 2024 21:25:39 GMT</pubDate>
    <dc:creator>MichTalebzadeh</dc:creator>
    <dc:date>2024-03-20T21:25:39Z</dc:date>
    <item>
      <title>databricks-jdbc lists `spark_catalog` among catalogs for Standard tier Azure workspace</title>
      <link>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/63542#M6841</link>
      <description>&lt;P&gt;databricks-jdbc lists `spark_catalog` among catalogs for Standard tier Azure workspace. The UI lists `hive_metastore`. It would be better if these two were consistent.&lt;/P&gt;</description>
      <pubDate>Wed, 13 Mar 2024 12:41:12 GMT</pubDate>
      <guid>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/63542#M6841</guid>
      <dc:creator>HakanNordgren</dc:creator>
      <dc:date>2024-03-13T12:41:12Z</dc:date>
    </item>
    <item>
      <title>Re: databricks-jdbc lists `spark_catalog` among catalogs for Standard tier Azure workspace</title>
      <link>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/64110#M6843</link>
      <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/9"&gt;@Retired_mod&lt;/a&gt;&amp;nbsp;: Could I convince you to add the above as a bug to the backlog?&lt;/P&gt;</description>
      <pubDate>Tue, 19 Mar 2024 17:36:48 GMT</pubDate>
      <guid>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/64110#M6843</guid>
      <dc:creator>HakanNordgren</dc:creator>
      <dc:date>2024-03-19T17:36:48Z</dc:date>
    </item>
    <item>
      <title>Re: databricks-jdbc lists `spark_catalog` among catalogs for Standard tier Azure workspace</title>
      <link>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/64188#M6844</link>
      <description>&lt;P&gt;Also,&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/9"&gt;@Retired_mod&lt;/a&gt;&amp;nbsp;: Is it possible to have both a `hive_metastore` catalog and a `spark_catalog` catalog?&lt;/P&gt;</description>
      <pubDate>Wed, 20 Mar 2024 13:19:11 GMT</pubDate>
      <guid>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/64188#M6844</guid>
      <dc:creator>HakanNordgren</dc:creator>
      <dc:date>2024-03-20T13:19:11Z</dc:date>
    </item>
    <item>
      <title>Re: databricks-jdbc lists `spark_catalog` among catalogs for Standard tier Azure workspace</title>
      <link>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/64214#M6845</link>
      <description>&lt;P&gt;short answer is no&lt;BR /&gt;&lt;BR /&gt;Databricks uses a single catalog -&amp;gt; Unity Catalog for managing tables created with Hive or Spark SQL, simplifying things. In spark sql you can do&lt;/P&gt;&lt;LI-CODE lang="python"&gt;spark-sql (default)&amp;gt; show catalogs;
spark_catalog&lt;/LI-CODE&gt;&lt;P&gt;HTH&lt;BR /&gt;&lt;BR /&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 20 Mar 2024 21:25:39 GMT</pubDate>
      <guid>https://community.databricks.com/t5/get-started-discussions/databricks-jdbc-lists-spark-catalog-among-catalogs-for-standard/m-p/64214#M6845</guid>
      <dc:creator>MichTalebzadeh</dc:creator>
      <dc:date>2024-03-20T21:25:39Z</dc:date>
    </item>
  </channel>
</rss>

