<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Spark Sql Connector : in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11448#M6419</link>
    <description>&lt;P&gt;It seems that&amp;nbsp;.option("databaseName", "test") is redundant here as you need to &lt;A href="https://docs.microsoft.com/en-us/sql/connect/spark/connector?view=sql-server-ver16#write-to-a-new-sql-table" alt="https://docs.microsoft.com/en-us/sql/connect/spark/connector?view=sql-server-ver16#write-to-a-new-sql-table" target="_blank"&gt;include the db name in the url&lt;/A&gt;.&lt;/P&gt;&lt;P&gt;Please verify that you use a connector compatible to your cluster's Spark version : &lt;A href="https://docs.microsoft.com/en-us/sql/connect/spark/connector" alt="https://docs.microsoft.com/en-us/sql/connect/spark/connector" target="_blank"&gt;Apache Spark connector: SQL Server &amp;amp; Azure SQL&lt;/A&gt;&lt;/P&gt;</description>
    <pubDate>Wed, 03 Aug 2022 18:11:03 GMT</pubDate>
    <dc:creator>artsheiko</dc:creator>
    <dc:date>2022-08-03T18:11:03Z</dc:date>
    <item>
      <title>Spark Sql Connector :</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11447#M6418</link>
      <description>&lt;P&gt;i am trying to read data from azure sql database from databricks. azure sql database is created with private link endpoint.Using DBR 10.4 LTS Cluster and expectation is the connector is pre installed as per documentation.&lt;/P&gt;&lt;P&gt;using the below code to fetch but getting java.lang.ClassNotFoundException: &lt;/P&gt;&lt;P&gt;df =&amp;nbsp;spark.read \&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;.format("com.microsoft.sqlserver.jdbc.spark") \&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;.option("url", "&lt;A href="https://xyz-server.database.windows.net" alt="https://xyz-server.database.windows.net" target="_blank"&gt;xyz-server.database.windows.net&lt;/A&gt;") \&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;.option("dbtable", "test") \&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;.option("databaseName","test") \&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;.option("user", 'test') \&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;.option("password", 'abc') \&lt;/P&gt;&lt;P&gt;&amp;nbsp;&amp;nbsp;&amp;nbsp;&amp;nbsp;.load()&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Any help is appreciated&lt;/P&gt;</description>
      <pubDate>Wed, 03 Aug 2022 09:03:06 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11447#M6418</guid>
      <dc:creator>Rahul_Samant</dc:creator>
      <dc:date>2022-08-03T09:03:06Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Sql Connector :</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11448#M6419</link>
      <description>&lt;P&gt;It seems that&amp;nbsp;.option("databaseName", "test") is redundant here as you need to &lt;A href="https://docs.microsoft.com/en-us/sql/connect/spark/connector?view=sql-server-ver16#write-to-a-new-sql-table" alt="https://docs.microsoft.com/en-us/sql/connect/spark/connector?view=sql-server-ver16#write-to-a-new-sql-table" target="_blank"&gt;include the db name in the url&lt;/A&gt;.&lt;/P&gt;&lt;P&gt;Please verify that you use a connector compatible to your cluster's Spark version : &lt;A href="https://docs.microsoft.com/en-us/sql/connect/spark/connector" alt="https://docs.microsoft.com/en-us/sql/connect/spark/connector" target="_blank"&gt;Apache Spark connector: SQL Server &amp;amp; Azure SQL&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Wed, 03 Aug 2022 18:11:03 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11448#M6419</guid>
      <dc:creator>artsheiko</dc:creator>
      <dc:date>2022-08-03T18:11:03Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Sql Connector :</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11449#M6420</link>
      <description>&lt;P&gt;Hi @Artem Sheiko​&amp;nbsp;,&lt;/P&gt;&lt;P&gt;i read its preinstalled in DBR Runtime and we don't have to worry about it ?&lt;/P&gt;&lt;P&gt;btw as per the link you shared  . connector is only available for 3.1.x and DBR 10.4 LTS is 3.2.X. so that's why its not working ?&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;ConnectorMaven CoordinateSpark 2.4.x compatible connector&lt;/P&gt;&lt;P&gt;com.microsoft.azure:spark-mssql-connector:1.0.2&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Spark 3.0.x compatible connector&lt;/P&gt;&lt;P&gt;com.microsoft.azure:spark-mssql-connector_2.12:1.1.0&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Spark 3.1.x compatible connector&lt;/P&gt;&lt;P&gt;com.microsoft.azure:spark-mssql-connector_2.12:1.2.0&lt;/P&gt;&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 04 Aug 2022 11:33:13 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11449#M6420</guid>
      <dc:creator>Rahul_Samant</dc:creator>
      <dc:date>2022-08-04T11:33:13Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Sql Connector :</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11450#M6421</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;My point was just to verify that you use a version compatible to DBR.&lt;/P&gt;&lt;P&gt;No, it's not preinstalled, you need to import it using maven coordinate. You can see &lt;A href="https://docs.microsoft.com/en-us/azure/databricks/libraries/cluster-libraries#cluster" alt="https://docs.microsoft.com/en-us/azure/databricks/libraries/cluster-libraries#cluster" target="_blank"&gt;here&lt;/A&gt; how to proceed to install it.&lt;/P&gt;&lt;P&gt;The list of installed libraries could be found in the &lt;A href="https://docs.microsoft.com/en-gb/azure/databricks/release-notes/runtime/10.4" alt="https://docs.microsoft.com/en-gb/azure/databricks/release-notes/runtime/10.4" target="_blank"&gt;DBR 10.4 LTS release notes&lt;/A&gt;.&lt;/P&gt;</description>
      <pubDate>Thu, 04 Aug 2022 13:52:16 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11450#M6421</guid>
      <dc:creator>artsheiko</dc:creator>
      <dc:date>2022-08-04T13:52:16Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Sql Connector :</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11451#M6422</link>
      <description>&lt;P&gt;i downgraded to 9.1 LTS and installed below . working now&lt;/P&gt;&lt;P&gt;Spark 3.1.x compatible connector&lt;/P&gt;&lt;P&gt;com.microsoft.azure:spark-mssql-connector_2.12:1.2.0&lt;/P&gt;</description>
      <pubDate>Thu, 04 Aug 2022 13:58:15 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-sql-connector/m-p/11451#M6422</guid>
      <dc:creator>Rahul_Samant</dc:creator>
      <dc:date>2022-08-04T13:58:15Z</dc:date>
    </item>
  </channel>
</rss>

