<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Databricks JDBC Driver 2.7.3 with OAuth2 M2M on Databricks in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/databricks-jdbc-driver-2-7-3-with-oauth2-m2m-on-databricks/m-p/129085#M48433</link>
    <description>&lt;P&gt;According to the support team, I had to set the JDBC parameter&amp;nbsp;&lt;STRONG&gt;OAuthEnabledIPAddressRanges.&amp;nbsp;&lt;/STRONG&gt;&lt;SPAN&gt;The range of the IP should be the resolved private link IP (usually starting with 10.x) of the hostname for the Databricks workspace URL.&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;</description>
    <pubDate>Thu, 21 Aug 2025 08:15:55 GMT</pubDate>
    <dc:creator>der</dc:creator>
    <dc:date>2025-08-21T08:15:55Z</dc:date>
    <item>
      <title>Databricks JDBC Driver 2.7.3 with OAuth2 M2M on Databricks</title>
      <link>https://community.databricks.com/t5/data-engineering/databricks-jdbc-driver-2-7-3-with-oauth2-m2m-on-databricks/m-p/122928#M46905</link>
      <description>&lt;P&gt;We have an application implemented in Java and installed as a JAR on the cluster. The application reads data from Unity Catalog over the Databricks JDBC Driver.&lt;/P&gt;&lt;P&gt;We used PAT Tokens for the Service Principal in the past and everything worked fine. Now we changed to OAuth2 Secrets generated in the Databricks Account. This works perfectly if the JDBC Driver runs outside the Databricks cluster. If we run the same code in a Databricks Notebook, we get an error.&lt;/P&gt;&lt;P&gt;Scala code run in Notebook&lt;/P&gt;&lt;LI-CODE lang="markup"&gt;import java.sql.{Connection, DriverManager, ResultSet}

Class.forName("com.databricks.client.jdbc.Driver")

val jdbcUrl = "jdbc:databricks://&amp;lt;HOST&amp;gt;:443;httpPath=&amp;lt;HTTP_PATH&amp;gt;;AuthMech=11;Auth_Flow=1;OAuth2ClientId=&amp;lt;CLIENT_ID&amp;gt;;OAuth2Secret=&amp;lt;DATABRICKS_GENERATED_SP_SECRET&amp;gt;"

val conn: Connection = DriverManager.getConnection(jdbcUrl)
val stmt = conn.createStatement()
val rs = stmt.executeQuery("SELECT current_user(), current_date()")

while (rs.next()) {
  println(s"${rs.getString(1)}\t${rs.getString(2)}")
}

rs.close()
stmt.close()
conn.close()&lt;/LI-CODE&gt;&lt;P&gt;Error:&lt;/P&gt;&lt;LI-CODE lang="markup"&gt;SQLException: [Databricks][JDBCDriver](500151) Error setting/closing session: Invalid local Address .
Caused by: GeneralException: [Databricks][JDBCDriver](500151) Error setting/closing session: Invalid local Address .
Caused by: TException: Invalid local Address 
	at com.databricks.client.hivecommon.api.HS2Client.openSession(Unknown Source)
	at com.databricks.client.hivecommon.api.HS2Client.&amp;lt;init&amp;gt;(Unknown Source)
	at com.databricks.client.spark.jdbc.DownloadableFetchClient.&amp;lt;init&amp;gt;(Unknown Source)
	at com.databricks.client.spark.jdbc.DownloadableFetchClientFactory.createClient(Unknown Source)
	at com.databricks.client.hivecommon.core.HiveJDBCCommonConnection.connectToServer(Unknown Source)
	at com.databricks.client.spark.core.SparkJDBCConnection.connectToServer(Unknown Source)
	at com.databricks.client.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.databricks.client.spark.core.SparkJDBCConnection.establishConnection(Unknown Source)
	at com.databricks.client.jdbc.core.LoginTimeoutConnection$1.call(Unknown Source)
	at com.databricks.client.jdbc.core.LoginTimeoutConnection$1.call(Unknown Source)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
Caused by: com.databricks.client.support.exceptions.GeneralException: [Databricks][JDBCDriver](500151) Error setting/closing session: Invalid local Address .
	at com.databricks.client.hivecommon.api.HS2Client.openSession(Unknown Source)
	at com.databricks.client.hivecommon.api.HS2Client.&amp;lt;init&amp;gt;(Unknown Source)
	at com.databricks.client.spark.jdbc.DownloadableFetchClient.&amp;lt;init&amp;gt;(Unknown Source)
	at com.databricks.client.spark.jdbc.DownloadableFetchClientFactory.createClient(Unknown Source)
	at com.databricks.client.hivecommon.core.HiveJDBCCommonConnection.connectToServer(Unknown Source)
	at com.databricks.client.spark.core.SparkJDBCConnection.connectToServer(Unknown Source)
	at com.databricks.client.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.databricks.client.spark.core.SparkJDBCConnection.establishConnection(Unknown Source)
	at com.databricks.client.jdbc.core.LoginTimeoutConnection$1.call(Unknown Source)
	at com.databricks.client.jdbc.core.LoginTimeoutConnection$1.call(Unknown Source)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
Caused by: com.databricks.client.jdbc42.internal.apache.thrift.TException: Invalid local Address 
	at com.databricks.client.jdbc.oauth.ClientCredentialOAuthProvider.obtainAccessToken(Unknown Source)
	at com.databricks.client.hivecommon.api.HS2OAuthClientWrapper.validateTokens(Unknown Source)
	at com.databricks.client.hivecommon.api.HS2OAuthClientWrapper.OpenSession(Unknown Source)
	at com.databricks.client.hivecommon.api.HS2Client.openSession(Unknown Source)
	at com.databricks.client.hivecommon.api.HS2Client.&amp;lt;init&amp;gt;(Unknown Source)
	at com.databricks.client.spark.jdbc.DownloadableFetchClient.&amp;lt;init&amp;gt;(Unknown Source)
	at com.databricks.client.spark.jdbc.DownloadableFetchClientFactory.createClient(Unknown Source)
	at com.databricks.client.hivecommon.core.HiveJDBCCommonConnection.connectToServer(Unknown Source)
	at com.databricks.client.spark.core.SparkJDBCConnection.connectToServer(Unknown Source)
	at com.databricks.client.hivecommon.core.HiveJDBCCommonConnection.establishConnection(Unknown Source)
	at com.databricks.client.spark.core.SparkJDBCConnection.establishConnection(Unknown Source)
	at com.databricks.client.jdbc.core.LoginTimeoutConnection$1.call(Unknown Source)
	at com.databricks.client.jdbc.core.LoginTimeoutConnection$1.call(Unknown Source)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
	at java.base/java.lang.Thread.run(Thread.java:840)&lt;/LI-CODE&gt;</description>
      <pubDate>Thu, 26 Jun 2025 10:06:55 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/databricks-jdbc-driver-2-7-3-with-oauth2-m2m-on-databricks/m-p/122928#M46905</guid>
      <dc:creator>der</dc:creator>
      <dc:date>2025-06-26T10:06:55Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks JDBC Driver 2.7.3 with OAuth2 M2M on Databricks</title>
      <link>https://community.databricks.com/t5/data-engineering/databricks-jdbc-driver-2-7-3-with-oauth2-m2m-on-databricks/m-p/122930#M46906</link>
      <description>&lt;P&gt;If I generate the Auth Token myself and use this token, it also works.&lt;/P&gt;&lt;P&gt;Create Token:&lt;/P&gt;&lt;LI-CODE lang="markup"&gt;import java.net.{URL, HttpURLConnection}
import java.io.{OutputStreamWriter}
import scala.io.Source
import scala.util.parsing.json.JSON

// Databricks OAuth client credentials
val clientId = "&amp;lt;CLIENT_ID"
val clientSecret = "&amp;lt;DATABRICKS_GENERATED_SP_SECRET&amp;gt;"
val tokenEndpoint = "https://&amp;lt;HOST&amp;gt;/oidc/v1/token"

val requestBody =
  s"grant_type=client_credentials&amp;amp;client_id=$clientId&amp;amp;client_secret=$clientSecret&amp;amp;scope=all-apis"

val url = new URL(tokenEndpoint)
val conn = url.openConnection().asInstanceOf[HttpURLConnection]
conn.setRequestMethod("POST")
conn.setDoOutput(true)
conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded")

val writer = new OutputStreamWriter(conn.getOutputStream)
writer.write(requestBody)
writer.flush()
writer.close()

val response = Source.fromInputStream(conn.getInputStream).mkString
val parsed = JSON.parseFull(response).get.asInstanceOf[Map[String, Any]]
val accessToken = parsed("access_token").asInstanceOf[String]&lt;/LI-CODE&gt;&lt;P&gt;Use Auth Token without error:&lt;/P&gt;&lt;LI-CODE lang="markup"&gt;import java.sql.{Connection, DriverManager}

Class.forName("com.databricks.client.jdbc.Driver")

val jdbcUrl = s"jdbc:databricks://&amp;lt;HOST&amp;gt;:443;httpPath=&amp;lt;HTTP_PATH&amp;gt;;AuthMech=11;Auth_Flow=0;Auth_AccessToken=$accessToken"

val conn: Connection = DriverManager.getConnection(jdbcUrl)
val stmt = conn.createStatement()
val rs = stmt.executeQuery("SELECT current_user(), current_date()")

while (rs.next()) {
  println(s"${rs.getString(1)}\t${rs.getString(2)}")
}

rs.close()
stmt.close()
conn.close()&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Thu, 26 Jun 2025 10:12:09 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/databricks-jdbc-driver-2-7-3-with-oauth2-m2m-on-databricks/m-p/122930#M46906</guid>
      <dc:creator>der</dc:creator>
      <dc:date>2025-06-26T10:12:09Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks JDBC Driver 2.7.3 with OAuth2 M2M on Databricks</title>
      <link>https://community.databricks.com/t5/data-engineering/databricks-jdbc-driver-2-7-3-with-oauth2-m2m-on-databricks/m-p/129085#M48433</link>
      <description>&lt;P&gt;According to the support team, I had to set the JDBC parameter&amp;nbsp;&lt;STRONG&gt;OAuthEnabledIPAddressRanges.&amp;nbsp;&lt;/STRONG&gt;&lt;SPAN&gt;The range of the IP should be the resolved private link IP (usually starting with 10.x) of the hostname for the Databricks workspace URL.&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 21 Aug 2025 08:15:55 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/databricks-jdbc-driver-2-7-3-with-oauth2-m2m-on-databricks/m-p/129085#M48433</guid>
      <dc:creator>der</dc:creator>
      <dc:date>2025-08-21T08:15:55Z</dc:date>
    </item>
  </channel>
</rss>

