Data Engineering
Join discussions on data engineering best practices, architectures, and optimization strategies within the Databricks Community. Exchange insights and solutions with fellow data engineers.

When trying to use the pyodbc connector to write files to SQL Server, I'm receiving the error java.lang.ClassNotFoundException. Are there any alternatives or ways to fix this?

Chris_Shehu
Valued Contributor III
jdbcUsername = ********
jdbcPassword = ***************
server_name = "jdbc:sqlserver://***********:******"
database_name = "********"
url = server_name + ";" + "databaseName=" + database_name + ";"
 
table_name = "PatientTEST"
 
try:
  df.write \
    .format("com.microsoft.sqlserver.jdbc.spark") \
    .mode("overwrite") \
    .option("url", url) \
    .option("dbtable", table_name) \
    .option("user", jdbcUsername) \
    .option("password", jdbcPassword) \
    .save()
except ValueError as error :
    print("Connector write failed", error)

java.lang.ClassNotFoundException:

---------------------------------------------------------------------------
Py4JJavaError                             Traceback (most recent call last)
<command-1025811192119468> in <module>
     10
     11 try:
---> 12   df.write \
     13     .format("com.microsoft.sqlserver.jdbc.spark") \
     14     .mode("append") \

/databricks/spark/python/pyspark/sql/readwriter.py in save(self, path, format, mode, partitionBy, **options)
    736             self.format(format)
    737         if path is None:
--> 738             self._jwrite.save()
    739         else:
    740             self._jwrite.save(path)

/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1302
   1303         answer = self.gateway_client.send_command(command)
-> 1304         return_value = get_return_value(
   1305             answer, self.gateway_client, self.target_id, self.name)
   1306

/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
    115     def deco(*a, **kw):
    116         try:
--> 117             return f(*a, **kw)
    118         except py4j.protocol.Py4JJavaError as e:
    119             converted = convert_exception(e.java_exception)

/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    324                 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
    325                 if answer[1] == REFERENCE_TYPE:
--> 326                     raise Py4JJavaError(
    327                         "An error occurred while calling {0}{1}{2}.\n".
    328                         format(target_id, ".", name), value)

Py4JJavaError: An error occurred while calling o386.save.
: java.lang.ClassNotFoundException:
Failed to find data source: com.microsoft.sqlserver.jdbc.spark. Please find packages at
http://spark.apache.org/third-party-projects.html
	at org.apache.spark.sql.errors.QueryExecutionErrors$.failedToFindDataSourceError(QueryExecutionErrors.scala:511)
	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:747)
	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSourceV2(DataSource.scala:797)
	at org.apache.spark.sql.DataFrameWriter.lookupV2Provider(DataFrameWriter.scala:962)
	at org.apache.spark.sql.DataFrameWriter.saveInternal(DataFrameWriter.scala:285)
	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:257)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
	at py4j.Gateway.invoke(Gateway.java:295)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:251)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.ClassNotFoundException: com.microsoft.sqlserver.jdbc.spark.DefaultSource
	at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:419)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:352)
	at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$lookupDataSource$5(DataSource.scala:733)
	at scala.util.Try$.apply(Try.scala:213)
	at org.apache.spark.sql.execution.datasources.DataSource$.$anonfun$lookupDataSource$4(DataSource.scala:733)
	at scala.util.Failure.orElse(Try.scala:224)
	at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:733)
	... 15 more

1 ACCEPTED SOLUTION

Hubert-Dudek
Esteemed Contributor III

Please check the following code:

df.write.jdbc(
     url="jdbc:sqlserver://<host>:1433;database=<db>;user=<user>;password=<password>;encrypt=true;trustServerCertificate=false;hostNameInCertificate=*.database.windows.net;loginTimeout=30;driver=com.microsoft.sqlserver.jdbc.SQLServerDriver",
     table='PatientTEST',
     mode='overwrite')
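
The original error occurs because the com.microsoft.sqlserver.jdbc.spark connector is not installed on the cluster, so the workaround above goes through Spark's built-in JDBC data source instead, which needs no extra library. A minimal sketch of the same write using the generic "jdbc" format, passing the credentials as options rather than embedding them in the URL (the host, database, user, and password below are placeholders, not values from this thread):

# Sketch only: <host>, <db>, <user>, and <password> are placeholders.
jdbc_url = "jdbc:sqlserver://<host>:1433;databaseName=<db>;encrypt=true;trustServerCertificate=false;loginTimeout=30"

(df.write
    .format("jdbc")  # built-in JDBC data source, no extra connector required
    .option("url", jdbc_url)
    .option("dbtable", "PatientTEST")
    .option("user", "<user>")
    .option("password", "<password>")
    .option("driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver")
    .mode("overwrite")
    .save())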


2 REPLIES


Chris_Shehu
Valued Contributor III

You are a lifesaver! Thanks!
