<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic When trying to use pyodbc connector to write files to SQL server receiving error. java.lang.ClassNotFoundException Any alternatives or ways to fix this? in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/when-trying-to-use-pyodbc-connector-to-write-files-to-sql-server/m-p/33946#M24824</link>
    <description>&lt;PRE&gt;&lt;CODE&gt;jdbcUsername = ********
jdbcPassword = ***************
server_name = "jdbc:sqlserver://***********:******"
database_name = "********"
url = server_name + ";" + "databaseName=" + database_name + ";"
&amp;nbsp;
table_name = "PatientTEST"
&amp;nbsp;
try:
  df.write \
    .format("com.microsoft.sqlserver.jdbc.spark") \
    .mode("overwrite") \
    .option("url", url) \
    .option("dbtable", table_name) \
    .option("user", jdbcUsername) \
    .option("password", jdbcPassword) \
    .save()
except ValueError as error :
    print("Connector write failed", error)&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;java.lang.ClassNotFoundException: &lt;/P&gt;&lt;P&gt;---------------------------------------------------------------------------&lt;/P&gt;&lt;P&gt;Py4JJavaError                             Traceback (most recent call last)&lt;/P&gt;&lt;P&gt;&amp;lt;command-1025811192119468&amp;gt; in &amp;lt;module&amp;gt;&lt;/P&gt;&lt;P&gt;     10 &lt;/P&gt;&lt;P&gt;     11 try:&lt;/P&gt;&lt;P&gt;---&amp;gt; 12   df.write \&lt;/P&gt;&lt;P&gt;     13     .format("com.microsoft.sqlserver.jdbc.spark") \&lt;/P&gt;&lt;P&gt;     14     .mode("append") \&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;/databricks/spark/python/pyspark/sql/readwriter.py in save(self, path, format, mode, partitionBy, **options)&lt;/P&gt;&lt;P&gt;    736             self.format(format)&lt;/P&gt;&lt;P&gt;    737         if path is None:&lt;/P&gt;&lt;P&gt;--&amp;gt; 738             self._jwrite.save()&lt;/P&gt;&lt;P&gt;    739         else:&lt;/P&gt;&lt;P&gt;    740             self._jwrite.save(path)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/java_gateway.py in __call__(self, *args)&lt;/P&gt;&lt;P&gt;   1302 &lt;/P&gt;&lt;P&gt;   1303         answer = self.gateway_client.send_command(command)&lt;/P&gt;&lt;P&gt;-&amp;gt; 1304         return_value = get_return_value(&lt;/P&gt;&lt;P&gt;   1305             answer, self.gateway_client, self.target_id, self.name)&lt;/P&gt;&lt;P&gt;   1306 &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)&lt;/P&gt;&lt;P&gt;    115     def deco(*a, **kw):&lt;/P&gt;&lt;P&gt;    116         try:&lt;/P&gt;&lt;P&gt;--&amp;gt; 117             return f(*a, **kw)&lt;/P&gt;&lt;P&gt;    118         except py4j.protocol.Py4JJavaError as e:&lt;/P&gt;&lt;P&gt;    119             converted = convert_exception(e.java_exception)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/protocol.py in 
get_return_value(answer, gateway_client, target_id, name)&lt;/P&gt;&lt;P&gt;    324             value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)&lt;/P&gt;&lt;P&gt;    325             if answer[1] == REFERENCE_TYPE:&lt;/P&gt;&lt;P&gt;--&amp;gt; 326                 raise Py4JJavaError(&lt;/P&gt;&lt;P&gt;    327                     "An error occurred while calling {0}{1}{2}.\n".&lt;/P&gt;&lt;P&gt;    328                     format(target_id, ".", name), value)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Py4JJavaError: An error occurred while calling o386.save.&lt;/P&gt;&lt;P&gt;: java.lang.ClassNotFoundException: &lt;/P&gt;&lt;P&gt;Failed to find data source: com.microsoft.sqlserver.jdbc.spark. Please find packages at&lt;/P&gt;&lt;P&gt;&lt;A href="http://spark.apache.org/third-party-projects.html" target="_blank"&gt;http://spark.apache.org/third-party-projects.html&lt;/A&gt;&lt;/P&gt;&lt;P&gt;       &lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.errors.QueryExecutionErrors$.failedToFindDataSourceError(QueryExecutionErrors.scala:511)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:747)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSourceV2(DataSource.scala:797)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.DataFrameWriter.lookupV2Provider(DataFrameWriter.scala:962)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:285)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:257)&lt;/P&gt;&lt;P&gt;	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;/P&gt;&lt;P&gt;	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)&lt;/P&gt;&lt;P&gt;	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/P&gt;&lt;P&gt;	at java.lang.reflect.Method.invoke(Method.java:498)&lt;/P&gt;&lt;P&gt;	at 
py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)&lt;/P&gt;&lt;P&gt;	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)&lt;/P&gt;&lt;P&gt;	at py4j.Gateway.invoke(Gateway.java:295)&lt;/P&gt;&lt;P&gt;	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)&lt;/P&gt;&lt;P&gt;	at py4j.commands.CallCommand.execute(CallCommand.java:79)&lt;/P&gt;&lt;P&gt;	at py4j.GatewayConnection.run(GatewayConnection.java:251)&lt;/P&gt;&lt;P&gt;	at java.lang.Thread.run(Thread.java:748)&lt;/P&gt;&lt;P&gt;Caused by: java.lang.ClassNotFoundException: com.microsoft.sqlserver.jdbc.spark.DefaultSource&lt;/P&gt;&lt;P&gt;	at java.net.URLClassLoader.findClass(URLClassLoader.java:382)&lt;/P&gt;&lt;P&gt;	at java.lang.ClassLoader.loadClass(ClassLoader.java:419)&lt;/P&gt;&lt;P&gt;	at java.lang.ClassLoader.loadClass(ClassLoader.java:352)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$lookupDataSource$5(DataSource.scala:733)&lt;/P&gt;&lt;P&gt;	at scala.util.Try$.apply(Try.scala:213)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$lookupDataSource$4(DataSource.scala:733)&lt;/P&gt;&lt;P&gt;	at scala.util.Failure.orElse(Try.scala:224)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:733)&lt;/P&gt;&lt;P&gt;	... 15 more&lt;/P&gt;</description>
    <pubDate>Wed, 01 Dec 2021 18:38:11 GMT</pubDate>
    <dc:creator>Chris_Shehu</dc:creator>
    <dc:date>2021-12-01T18:38:11Z</dc:date>
    <item>
      <title>When trying to use pyodbc connector to write files to SQL server receiving error. java.lang.ClassNotFoundException Any alternatives or ways to fix this?</title>
      <link>https://community.databricks.com/t5/data-engineering/when-trying-to-use-pyodbc-connector-to-write-files-to-sql-server/m-p/33946#M24824</link>
      <description>&lt;PRE&gt;&lt;CODE&gt;jdbcUsername = ********
jdbcPassword = ***************
server_name = "jdbc:sqlserver://***********:******"
database_name = "********"
url = server_name + ";" + "databaseName=" + database_name + ";"
&amp;nbsp;
table_name = "PatientTEST"
&amp;nbsp;
try:
  df.write \
    .format("com.microsoft.sqlserver.jdbc.spark") \
    .mode("overwrite") \
    .option("url", url) \
    .option("dbtable", table_name) \
    .option("user", jdbcUsername) \
    .option("password", jdbcPassword) \
    .save()
except ValueError as error :
    print("Connector write failed", error)&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;java.lang.ClassNotFoundException: &lt;/P&gt;&lt;P&gt;---------------------------------------------------------------------------&lt;/P&gt;&lt;P&gt;Py4JJavaError                             Traceback (most recent call last)&lt;/P&gt;&lt;P&gt;&amp;lt;command-1025811192119468&amp;gt; in &amp;lt;module&amp;gt;&lt;/P&gt;&lt;P&gt;     10 &lt;/P&gt;&lt;P&gt;     11 try:&lt;/P&gt;&lt;P&gt;---&amp;gt; 12   df.write \&lt;/P&gt;&lt;P&gt;     13     .format("com.microsoft.sqlserver.jdbc.spark") \&lt;/P&gt;&lt;P&gt;     14     .mode("append") \&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;/databricks/spark/python/pyspark/sql/readwriter.py in save(self, path, format, mode, partitionBy, **options)&lt;/P&gt;&lt;P&gt;    736             self.format(format)&lt;/P&gt;&lt;P&gt;    737         if path is None:&lt;/P&gt;&lt;P&gt;--&amp;gt; 738             self._jwrite.save()&lt;/P&gt;&lt;P&gt;    739         else:&lt;/P&gt;&lt;P&gt;    740             self._jwrite.save(path)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/java_gateway.py in __call__(self, *args)&lt;/P&gt;&lt;P&gt;   1302 &lt;/P&gt;&lt;P&gt;   1303         answer = self.gateway_client.send_command(command)&lt;/P&gt;&lt;P&gt;-&amp;gt; 1304         return_value = get_return_value(&lt;/P&gt;&lt;P&gt;   1305             answer, self.gateway_client, self.target_id, self.name)&lt;/P&gt;&lt;P&gt;   1306 &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)&lt;/P&gt;&lt;P&gt;    115     def deco(*a, **kw):&lt;/P&gt;&lt;P&gt;    116         try:&lt;/P&gt;&lt;P&gt;--&amp;gt; 117             return f(*a, **kw)&lt;/P&gt;&lt;P&gt;    118         except py4j.protocol.Py4JJavaError as e:&lt;/P&gt;&lt;P&gt;    119             converted = convert_exception(e.java_exception)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/protocol.py in 
get_return_value(answer, gateway_client, target_id, name)&lt;/P&gt;&lt;P&gt;    324             value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)&lt;/P&gt;&lt;P&gt;    325             if answer[1] == REFERENCE_TYPE:&lt;/P&gt;&lt;P&gt;--&amp;gt; 326                 raise Py4JJavaError(&lt;/P&gt;&lt;P&gt;    327                     "An error occurred while calling {0}{1}{2}.\n".&lt;/P&gt;&lt;P&gt;    328                     format(target_id, ".", name), value)&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Py4JJavaError: An error occurred while calling o386.save.&lt;/P&gt;&lt;P&gt;: java.lang.ClassNotFoundException: &lt;/P&gt;&lt;P&gt;Failed to find data source: com.microsoft.sqlserver.jdbc.spark. Please find packages at&lt;/P&gt;&lt;P&gt;&lt;A href="http://spark.apache.org/third-party-projects.html" target="_blank"&gt;http://spark.apache.org/third-party-projects.html&lt;/A&gt;&lt;/P&gt;&lt;P&gt;       &lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.errors.QueryExecutionErrors$.failedToFindDataSourceError(QueryExecutionErrors.scala:511)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:747)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSourceV2(DataSource.scala:797)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.DataFrameWriter.lookupV2Provider(DataFrameWriter.scala:962)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:285)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:257)&lt;/P&gt;&lt;P&gt;	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)&lt;/P&gt;&lt;P&gt;	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)&lt;/P&gt;&lt;P&gt;	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/P&gt;&lt;P&gt;	at java.lang.reflect.Method.invoke(Method.java:498)&lt;/P&gt;&lt;P&gt;	at 
py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)&lt;/P&gt;&lt;P&gt;	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)&lt;/P&gt;&lt;P&gt;	at py4j.Gateway.invoke(Gateway.java:295)&lt;/P&gt;&lt;P&gt;	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)&lt;/P&gt;&lt;P&gt;	at py4j.commands.CallCommand.execute(CallCommand.java:79)&lt;/P&gt;&lt;P&gt;	at py4j.GatewayConnection.run(GatewayConnection.java:251)&lt;/P&gt;&lt;P&gt;	at java.lang.Thread.run(Thread.java:748)&lt;/P&gt;&lt;P&gt;Caused by: java.lang.ClassNotFoundException: com.microsoft.sqlserver.jdbc.spark.DefaultSource&lt;/P&gt;&lt;P&gt;	at java.net.URLClassLoader.findClass(URLClassLoader.java:382)&lt;/P&gt;&lt;P&gt;	at java.lang.ClassLoader.loadClass(ClassLoader.java:419)&lt;/P&gt;&lt;P&gt;	at java.lang.ClassLoader.loadClass(ClassLoader.java:352)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$lookupDataSource$5(DataSource.scala:733)&lt;/P&gt;&lt;P&gt;	at scala.util.Try$.apply(Try.scala:213)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$lookupDataSource$4(DataSource.scala:733)&lt;/P&gt;&lt;P&gt;	at scala.util.Failure.orElse(Try.scala:224)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:733)&lt;/P&gt;&lt;P&gt;	... 15 more&lt;/P&gt;</description>
      <pubDate>Wed, 01 Dec 2021 18:38:11 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/when-trying-to-use-pyodbc-connector-to-write-files-to-sql-server/m-p/33946#M24824</guid>
      <dc:creator>Chris_Shehu</dc:creator>
      <dc:date>2021-12-01T18:38:11Z</dc:date>
    </item>
    <item>
      <title>Re: When trying to use pyodbc connector to write files to SQL server receiving error. java.lang.ClassNotFoundException Any alternatives or ways to fix this?</title>
      <link>https://community.databricks.com/t5/data-engineering/when-trying-to-use-pyodbc-connector-to-write-files-to-sql-server/m-p/33947#M24825</link>
      <description>&lt;P&gt;please check following code:&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;df.write.jdbc(
     url="jdbc:sqlserver://&amp;lt;host&amp;gt;:1433;database=&amp;lt;db&amp;gt;;user=&amp;lt;user&amp;gt;;password=&amp;lt;password&amp;gt;;encrypt=true;trustServerCertificate=false;hostNameInCertificate=*.database.windows.net;loginTimeout=30;driver=com.microsoft.sqlserver.jdbc.SQLServerDriver",
     table='PatientTEST',
     mode='overwrite')&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Wed, 01 Dec 2021 19:15:20 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/when-trying-to-use-pyodbc-connector-to-write-files-to-sql-server/m-p/33947#M24825</guid>
      <dc:creator>Hubert-Dudek</dc:creator>
      <dc:date>2021-12-01T19:15:20Z</dc:date>
    </item>
    <item>
      <title>Re: When trying to use pyodbc connector to write files to SQL server receiving error. java.lang.ClassNotFoundException Any alternatives or ways to fix this?</title>
      <link>https://community.databricks.com/t5/data-engineering/when-trying-to-use-pyodbc-connector-to-write-files-to-sql-server/m-p/33948#M24826</link>
      <description>&lt;P&gt;You are a life saver! Thanks!&lt;/P&gt;</description>
      <pubDate>Wed, 01 Dec 2021 19:39:34 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/when-trying-to-use-pyodbc-connector-to-write-files-to-sql-server/m-p/33948#M24826</guid>
      <dc:creator>Chris_Shehu</dc:creator>
      <dc:date>2021-12-01T19:39:34Z</dc:date>
    </item>
  </channel>
</rss>

