<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Query execution after establishing Databricks to Information Design Tool JDBC Connection in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/query-execution-after-establishing-databricks-to-information/m-p/78839#M35613</link>
    <description>&lt;P&gt;&lt;SPAN&gt;Hello all,&lt;/SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;SPAN&gt;I have created a JDBC connection from Databricks to Information Design Tool using access token generated using Databricks Service Principal.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;But it’s throwing below error while running query on top of Databricks data in Information Design Business layer.&lt;/P&gt;&lt;P&gt;Error -&amp;nbsp;&lt;/P&gt;&lt;P&gt;[Databricks][JDBCDriver](500051) ERROR processing query/statement. Error Code: 0, SQL state: 42000, Query: SELECT&lt;BR /&gt;c***, Error message from Server: org.apache.hive.service.cli.HiveSQLException: Error running query: java.lang.reflect.InvocationTargetException&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.HiveThriftServerErrors$.runningQueryError(HiveThriftServerErrors.scala:57)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.$anonfun$execute$1(SparkExecuteStatementOperation.scala:715)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:128)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:559)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.$anonfun$run$2(SparkExecuteStatementOperation.scala:403)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:420)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:418)&lt;BR /&gt;at com.databricks.spark.util.PublicDBLogging.withAttributionContext(DatabricksSparkUsageLogger.scala:27)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:472)&lt;BR 
/&gt;at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:455)&lt;BR /&gt;at com.databricks.spark.util.PublicDBLogging.withAttributionTags(DatabricksSparkUsageLogger.scala:27)&lt;BR /&gt;at com.databricks.spark.util.PublicDBLogging.withAttributionTags0(DatabricksSparkUsageLogger.scala:72)&lt;BR /&gt;at com.databricks.spark.util.DatabricksSparkUsageLogger.withAttributionTags(DatabricksSparkUsageLogger.scala:172)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging.$anonfun$withAttributionTags$1(UsageLogger.scala:491)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging$.withAttributionTags(UsageLogger.scala:603)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging$.withAttributionTags(UsageLogger.scala:612)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging.withAttributionTags(UsageLogger.scala:491)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging.withAttributionTags$(UsageLogger.scala:489)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.withAttributionTags(SparkExecuteStatementOperation.scala:67)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.ThriftLocalProperties.$anonfun$withLocalProperties$11(ThriftLocalProperties.scala:190)&lt;BR /&gt;at com.databricks.spark.util.IdentityClaim$.withClaim(IdentityClaim.scala:48)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.ThriftLocalProperties.withLocalProperties(ThriftLocalProperties.scala:185)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.ThriftLocalProperties.withLocalProperties$(ThriftLocalProperties.scala:71)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.withLocalProperties(SparkExecuteStatementOperation.scala:67)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:381)&lt;BR /&gt;at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:367)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:422)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2.run(SparkExecuteStatementOperation.scala:415)&lt;BR /&gt;at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)&lt;BR /&gt;at java.util.concurrent.FutureTask.run(FutureTask.java:266)&lt;BR /&gt;at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)&lt;BR /&gt;at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)&lt;BR /&gt;at java.lang.Thread.run(Thread.java:750)&lt;BR /&gt;Caused by: java.lang.reflect.InvocationTargetException&lt;BR /&gt;at sun.reflect.GeneratedMethodAccessor238.invoke(Unknown Source)&lt;BR /&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;BR /&gt;at java.lang.reflect.Method.invoke(Method.java:498)&lt;BR /&gt;at com.databricks.spark.util.DbfsReflectionUtils$.resolveDbfsV2Path(DbfsReflectionUtils.scala:73)&lt;BR /&gt;at com.databricks.spark.util.DbfsReflectionUtils$.getRootFileSystem(DbfsReflectionUtils.scala:137)&lt;BR /&gt;at com.databricks.spark.util.DbfsReflectionUtils$.getRootFileSystemName(DbfsReflectionUtils.scala:157)&lt;BR /&gt;at org.apache.spark.api.python.PythonSecurityUtils$.checkPathFileSystemSafety(PythonSecurityUtils.scala:136)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.apply(DeltaLog.scala:1090)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.forTable(DeltaLog.scala:990)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$deltaLog$2(DeltaTableV2.scala:117)&lt;BR /&gt;at 
com.databricks.sql.transaction.tahoe.DeltaLog$.$anonfun$withAdditionalSnapshotInitializationUsageLogData$1(DeltaLog.scala:860)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)&lt;BR /&gt;at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:105)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.withAdditionalSnapshotInitializationUsageLogData(DeltaLog.scala:861)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$deltaLog$1(DeltaTableV2.scala:117)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2$.withEnrichedUnsupportedTableException(DeltaTableV2.scala:490)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.deltaLog$lzycompute(DeltaTableV2.scala:116)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.deltaLog(DeltaTableV2.scala:113)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$initialSnapshot$7(DeltaTableV2.scala:190)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.$anonfun$withAdditionalSnapshotInitializationUsageLogData$1(DeltaLog.scala:860)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)&lt;BR /&gt;at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:105)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.withAdditionalSnapshotInitializationUsageLogData(DeltaLog.scala:861)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$initialSnapshot$6(DeltaTableV2.scala:189)&lt;BR /&gt;at scala.Option.orElse(Option.scala:447)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$initialSnapshot$1(DeltaTableV2.scala:189)&lt;BR /&gt;at 
com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2$.withEnrichedUnsupportedTableException(DeltaTableV2.scala:490)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.initialSnapshot$lzycompute(DeltaTableV2.scala:197)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.initialSnapshot(DeltaTableV2.scala:163)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$tableSchema$2(DeltaTableV2.scala:222)&lt;BR /&gt;at scala.Option.getOrElse(Option.scala:189)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.tableSchema$lzycompute(DeltaTableV2.scala:222)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.tableSchema(DeltaTableV2.scala:220)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.schema(DeltaTableV2.scala:227)&lt;BR /&gt;at org.apache.spark.sql.connector.catalog.Table.columns(Table.java:68)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.columns(DeltaTableV2.scala:66)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation$.create(DataSourceV2Relation.scala:234)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.$anonfun$createRelation$2(Analyzer.scala:1648)&lt;BR /&gt;at scala.Option.map(Option.scala:230)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.$anonfun$createRelation$1(Analyzer.scala:1599)&lt;BR /&gt;at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$record(Analyzer.scala:1902)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$createRelation(Analyzer.scala:1599)&lt;BR /&gt;at 
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anon$3.$anonfun$submit$7(Analyzer.scala:1845)&lt;BR /&gt;at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:83)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anon$3.$anonfun$submit$6(Analyzer.scala:1845)&lt;BR /&gt;at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1175)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$6(SQLExecution.scala:769)&lt;BR /&gt;at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$5(SQLExecution.scala:769)&lt;BR /&gt;at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$4(SQLExecution.scala:769)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$3(SQLExecution.scala:768)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$2(SQLExecution.scala:767)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.withOptimisticTransaction(SQLExecution.scala:789)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$1(SQLExecution.scala:766)&lt;BR /&gt;at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1604)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:134)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at 
com.databricks.spark.util.IdentityClaim$.withClaim(IdentityClaim.scala:48)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.$anonfun$runWithCaptured$4(SparkThreadLocalForwardingThreadPoolExecutor.scala:91)&lt;BR /&gt;at com.databricks.unity.UCSEphemeralState$Handle.runWith(UCSEphemeralState.scala:45)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:90)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:67)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:131)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:134)&lt;BR /&gt;... 3 more&lt;BR /&gt;Caused by: com.databricks.backend.daemon.data.common.InvalidMountException: Error while using path /mnt/databrickstowebi/client6000/pficfootnoteallocationsummary/_delta_log for resolving path '/client6000/pficfootnoteallocationsummary/_delta_log' within mount at '/mnt/databrickstowebi'.&lt;BR /&gt;at com.databricks.backend.daemon.data.common.InvalidMountException$.apply(DataMessages.scala:733)&lt;BR /&gt;at com.databricks.backend.daemon.data.filesystem.MountEntryResolver.resolve(MountEntryResolver.scala:114)&lt;BR /&gt;at com.databricks.backend.daemon.data.client.DBFSV2.resolve(DatabricksFileSystemV2.scala:104)&lt;BR /&gt;... 
73 more&lt;BR /&gt;Caused by: com.google.common.util.concurrent.UncheckedExecutionException: com.databricks.common.client.DatabricksServiceHttpClientException: RESOURCE_DOES_NOT_EXIST: Refresh token not found for userId: Some(3790767436975024)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2199)&lt;BR /&gt;at com.google.common.cache.LocalCache.get(LocalCache.java:3932)&lt;BR /&gt;at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4721)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.CachingCredentialStore.get(CachingCredentialStore.scala:60)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.refreshToken(OAuthTokenRefresherClient.scala:82)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.newToken(OAuthTokenRefresherClient.scala:131)&lt;BR /&gt;at org.apache.spark.credentials.RuntimeCredential.getOrRefresh(CredentialContext.scala:51)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.$anonfun$getCredentialFromStore$2(CredentialContext.scala:233)&lt;BR /&gt;at scala.Option.map(Option.scala:230)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.$anonfun$getCredentialFromStore$1(CredentialContext.scala:232)&lt;BR /&gt;at scala.Option.map(Option.scala:230)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.getCredentialFromStore(CredentialContext.scala:230)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.$anonfun$getCredential$6(CredentialContext.scala:225)&lt;BR /&gt;at scala.Option.flatMap(Option.scala:271)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.$anonfun$getCredential$3(CredentialContext.scala:225)&lt;BR /&gt;at scala.Option.orElse(Option.scala:447)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.getCredential(CredentialContext.scala:225)&lt;BR /&gt;at 
com.databricks.backend.daemon.data.client.adl.AdlGen2UpgradeCredentialContextTokenProvider.getToken(AdlGen2UpgradeCredentialContextTokenProvider.scala:30)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsClient.getAccessToken(AbfsClient.java:1371)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation.executeHttpOperation(AbfsRestOperation.java:306)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation.completeExecute(AbfsRestOperation.java:238)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation.lambda$execute$0(AbfsRestOperation.java:211)&lt;BR /&gt;at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.measureDurationOfInvocation(IOStatisticsBinding.java:494)&lt;BR /&gt;at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.trackDurationOfInvocation(IOStatisticsBinding.java:465)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation.execute(AbfsRestOperation.java:209)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsClient.getPathStatus(AbfsClient.java:979)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystemStore.getFileStatus(AzureBlobFileSystemStore.java:1128)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.getFileStatus(AzureBlobFileSystem.java:956)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.getFileStatus(AzureBlobFileSystem.java:946)&lt;BR /&gt;at com.databricks.common.filesystem.LokiABFS.getFileStatusNoCache(LokiABFS.scala:52)&lt;BR /&gt;at com.databricks.common.filesystem.LokiABFS.getFileStatus(LokiABFS.scala:42)&lt;BR /&gt;at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1862)&lt;BR /&gt;at 
shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.exists(AzureBlobFileSystem.java:1525)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.FallbackEncryptionContextProvider.lambda$manifestFileExists$0(FallbackEncryptionContextProvider.java:52)&lt;BR /&gt;at java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.FallbackEncryptionContextProvider.manifestFileExists(FallbackEncryptionContextProvider.java:48)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.FallbackEncryptionContextProvider.initialize(FallbackEncryptionContextProvider.java:38)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.initialize(AzureBlobFileSystem.java:238)&lt;BR /&gt;at com.databricks.common.filesystem.LokiABFS.initialize(LokiABFS.scala:36)&lt;BR /&gt;at com.databricks.common.filesystem.LokiFileSystem$.$anonfun$getLokiFS$1(LokiFileSystem.scala:149)&lt;BR /&gt;at com.databricks.common.filesystem.FileSystemCache.getOrCompute(FileSystemCache.scala:46)&lt;BR /&gt;at com.databricks.common.filesystem.LokiFileSystem$.getLokiFS(LokiFileSystem.scala:146)&lt;BR /&gt;at com.databricks.common.filesystem.LokiFileSystem.initialize(LokiFileSystem.scala:211)&lt;BR /&gt;at com.databricks.backend.common.util.HadoopFSUtil$.getLokiABFSForMount(HadoopFSUtil.scala:714)&lt;BR /&gt;at com.databricks.backend.daemon.data.client.DatabricksFileSystemV2Factory.createFileSystem(DatabricksFileSystemV2Factory.scala:113)&lt;BR /&gt;at com.databricks.backend.daemon.data.filesystem.MountEntryResolver.$anonfun$resolve$2(MountEntryResolver.scala:82)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:573)&lt;BR /&gt;at com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:669)&lt;BR /&gt;at 
com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:687)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:426)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:216)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:424)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:418)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.withAttributionContext(LoggedLock.scala:89)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:472)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:455)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.withAttributionTags(LoggedLock.scala:89)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:664)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:582)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.recordOperationWithResultTags(LoggedLock.scala:89)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:573)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:542)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.recordOperation(LoggedLock.scala:89)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.withLock(LoggedLock.scala:163)&lt;BR /&gt;at com.databricks.common.util.locks.PerKeyLock.withLock(PerKeyLock.scala:42)&lt;BR /&gt;at com.databricks.backend.daemon.data.filesystem.MountEntryResolver.resolve(MountEntryResolver.scala:79)&lt;BR /&gt;... 
74 more&lt;BR /&gt;Caused by: com.databricks.common.client.DatabricksServiceHttpClientException: RESOURCE_DOES_NOT_EXIST: Refresh token not found for userId: Some(3790767436975024)&lt;BR /&gt;at com.databricks.common.client.DatabricksServiceHttpClientException.copy(DBHttpClient.scala:1407)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.getResponseBody(DBHttpClient.scala:1253)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.$anonfun$httpRequestInternal$1(DBHttpClient.scala:1199)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:573)&lt;BR /&gt;at com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:669)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:687)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:426)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:216)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:424)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:418)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.withAttributionContext(DBHttpClient.scala:604)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:472)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:455)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.withAttributionTags(DBHttpClient.scala:604)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:664)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:582)&lt;BR /&gt;at 
com.databricks.common.client.RawDBHttpClient.recordOperationWithResultTags(DBHttpClient.scala:604)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:573)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:542)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.recordOperation(DBHttpClient.scala:604)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.httpRequestInternal(DBHttpClient.scala:1185)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.entityEnclosingRequestInternal(DBHttpClient.scala:1174)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.getInternal(DBHttpClient.scala:1123)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.get(DBHttpClient.scala:689)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.getWithHeaders(DBHttpClient.scala:717)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.get(DBHttpClient.scala:661)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.$anonfun$refreshToken$2(OAuthTokenRefresherClient.scala:91)&lt;BR /&gt;at com.databricks.common.client.DBHttpClient$.retryWithDeadline(DBHttpClient.scala:376)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.reliably(OAuthTokenRefresherClient.scala:52)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.$anonfun$refreshToken$1(OAuthTokenRefresherClient.scala:91)&lt;BR /&gt;at com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4724)&lt;BR /&gt;at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3522)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2315)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2278)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2193)&lt;BR /&gt;... 
140 more&lt;BR /&gt;.&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/9"&gt;@Retired_mod&lt;/a&gt; - any help on this issue.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;If someone is facing the same issue, please let me know how you solved it.&lt;/SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;SPAN&gt;Thanks in advance&lt;/SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;SPAN&gt;#Information Design tool #Databricksconnection #Queryexecutionerror&lt;/SPAN&gt;&lt;/P&gt;</description>
    <pubDate>Mon, 15 Jul 2024 16:42:37 GMT</pubDate>
    <dc:creator>Magesh2798</dc:creator>
    <dc:date>2024-07-15T16:42:37Z</dc:date>
    <item>
      <title>Query execution after establishing Databricks to Information Design Tool JDBC Connection</title>
      <link>https://community.databricks.com/t5/data-engineering/query-execution-after-establishing-databricks-to-information/m-p/78839#M35613</link>
      <description>&lt;P&gt;&lt;SPAN&gt;Hello all,&lt;/SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;SPAN&gt;I have created a JDBC connection from Databricks to Information Design Tool using access token generated using Databricks Service Principal.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;But it’s throwing below error while running query on top of Databricks data in Information Design Business layer.&lt;/P&gt;&lt;P&gt;Error -&amp;nbsp;&lt;/P&gt;&lt;P&gt;[Databricks][JDBCDriver](500051) ERROR processing query/statement. Error Code: 0, SQL state: 42000, Query: SELECT&lt;BR /&gt;c***, Error message from Server: org.apache.hive.service.cli.HiveSQLException: Error running query: java.lang.reflect.InvocationTargetException&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.HiveThriftServerErrors$.runningQueryError(HiveThriftServerErrors.scala:57)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.$anonfun$execute$1(SparkExecuteStatementOperation.scala:715)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:128)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:559)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.$anonfun$run$2(SparkExecuteStatementOperation.scala:403)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:420)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:418)&lt;BR /&gt;at com.databricks.spark.util.PublicDBLogging.withAttributionContext(DatabricksSparkUsageLogger.scala:27)&lt;BR /&gt;at 
com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:472)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:455)&lt;BR /&gt;at com.databricks.spark.util.PublicDBLogging.withAttributionTags(DatabricksSparkUsageLogger.scala:27)&lt;BR /&gt;at com.databricks.spark.util.PublicDBLogging.withAttributionTags0(DatabricksSparkUsageLogger.scala:72)&lt;BR /&gt;at com.databricks.spark.util.DatabricksSparkUsageLogger.withAttributionTags(DatabricksSparkUsageLogger.scala:172)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging.$anonfun$withAttributionTags$1(UsageLogger.scala:491)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging$.withAttributionTags(UsageLogger.scala:603)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging$.withAttributionTags(UsageLogger.scala:612)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging.withAttributionTags(UsageLogger.scala:491)&lt;BR /&gt;at com.databricks.spark.util.UsageLogging.withAttributionTags$(UsageLogger.scala:489)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.withAttributionTags(SparkExecuteStatementOperation.scala:67)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.ThriftLocalProperties.$anonfun$withLocalProperties$11(ThriftLocalProperties.scala:190)&lt;BR /&gt;at com.databricks.spark.util.IdentityClaim$.withClaim(IdentityClaim.scala:48)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.ThriftLocalProperties.withLocalProperties(ThriftLocalProperties.scala:185)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.ThriftLocalProperties.withLocalProperties$(ThriftLocalProperties.scala:71)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.withLocalProperties(SparkExecuteStatementOperation.scala:67)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:381)&lt;BR /&gt;at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:367)&lt;BR /&gt;at java.security.AccessController.doPrivileged(Native Method)&lt;BR /&gt;at javax.security.auth.Subject.doAs(Subject.java:422)&lt;BR /&gt;at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)&lt;BR /&gt;at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2.run(SparkExecuteStatementOperation.scala:415)&lt;BR /&gt;at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)&lt;BR /&gt;at java.util.concurrent.FutureTask.run(FutureTask.java:266)&lt;BR /&gt;at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)&lt;BR /&gt;at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)&lt;BR /&gt;at java.lang.Thread.run(Thread.java:750)&lt;BR /&gt;Caused by: java.lang.reflect.InvocationTargetException&lt;BR /&gt;at sun.reflect.GeneratedMethodAccessor238.invoke(Unknown Source)&lt;BR /&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;BR /&gt;at java.lang.reflect.Method.invoke(Method.java:498)&lt;BR /&gt;at com.databricks.spark.util.DbfsReflectionUtils$.resolveDbfsV2Path(DbfsReflectionUtils.scala:73)&lt;BR /&gt;at com.databricks.spark.util.DbfsReflectionUtils$.getRootFileSystem(DbfsReflectionUtils.scala:137)&lt;BR /&gt;at com.databricks.spark.util.DbfsReflectionUtils$.getRootFileSystemName(DbfsReflectionUtils.scala:157)&lt;BR /&gt;at org.apache.spark.api.python.PythonSecurityUtils$.checkPathFileSystemSafety(PythonSecurityUtils.scala:136)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.apply(DeltaLog.scala:1090)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.forTable(DeltaLog.scala:990)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$deltaLog$2(DeltaTableV2.scala:117)&lt;BR /&gt;at 
com.databricks.sql.transaction.tahoe.DeltaLog$.$anonfun$withAdditionalSnapshotInitializationUsageLogData$1(DeltaLog.scala:860)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)&lt;BR /&gt;at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:105)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.withAdditionalSnapshotInitializationUsageLogData(DeltaLog.scala:861)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$deltaLog$1(DeltaTableV2.scala:117)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2$.withEnrichedUnsupportedTableException(DeltaTableV2.scala:490)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.deltaLog$lzycompute(DeltaTableV2.scala:116)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.deltaLog(DeltaTableV2.scala:113)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$initialSnapshot$7(DeltaTableV2.scala:190)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.$anonfun$withAdditionalSnapshotInitializationUsageLogData$1(DeltaLog.scala:860)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally(SparkErrorUtils.scala:64)&lt;BR /&gt;at org.apache.spark.util.SparkErrorUtils.tryWithSafeFinally$(SparkErrorUtils.scala:61)&lt;BR /&gt;at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:105)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.DeltaLog$.withAdditionalSnapshotInitializationUsageLogData(DeltaLog.scala:861)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$initialSnapshot$6(DeltaTableV2.scala:189)&lt;BR /&gt;at scala.Option.orElse(Option.scala:447)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$initialSnapshot$1(DeltaTableV2.scala:189)&lt;BR /&gt;at 
com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2$.withEnrichedUnsupportedTableException(DeltaTableV2.scala:490)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.initialSnapshot$lzycompute(DeltaTableV2.scala:197)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.initialSnapshot(DeltaTableV2.scala:163)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.$anonfun$tableSchema$2(DeltaTableV2.scala:222)&lt;BR /&gt;at scala.Option.getOrElse(Option.scala:189)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.tableSchema$lzycompute(DeltaTableV2.scala:222)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.tableSchema(DeltaTableV2.scala:220)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.schema(DeltaTableV2.scala:227)&lt;BR /&gt;at org.apache.spark.sql.connector.catalog.Table.columns(Table.java:68)&lt;BR /&gt;at com.databricks.sql.transaction.tahoe.catalog.DeltaTableV2.columns(DeltaTableV2.scala:66)&lt;BR /&gt;at org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation$.create(DataSourceV2Relation.scala:234)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.$anonfun$createRelation$2(Analyzer.scala:1648)&lt;BR /&gt;at scala.Option.map(Option.scala:230)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.$anonfun$createRelation$1(Analyzer.scala:1599)&lt;BR /&gt;at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:94)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$record(Analyzer.scala:1902)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.org$apache$spark$sql$catalyst$analysis$Analyzer$ResolveRelations$$createRelation(Analyzer.scala:1599)&lt;BR /&gt;at 
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anon$3.$anonfun$submit$7(Analyzer.scala:1845)&lt;BR /&gt;at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:83)&lt;BR /&gt;at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anon$3.$anonfun$submit$6(Analyzer.scala:1845)&lt;BR /&gt;at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1175)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$6(SQLExecution.scala:769)&lt;BR /&gt;at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$5(SQLExecution.scala:769)&lt;BR /&gt;at com.databricks.util.LexicalThreadLocal$Handle.runWith(LexicalThreadLocal.scala:63)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$4(SQLExecution.scala:769)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$3(SQLExecution.scala:768)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$2(SQLExecution.scala:767)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.withOptimisticTransaction(SQLExecution.scala:789)&lt;BR /&gt;at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$1(SQLExecution.scala:766)&lt;BR /&gt;at java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1604)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.$anonfun$run$1(SparkThreadLocalForwardingThreadPoolExecutor.scala:134)&lt;BR /&gt;at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)&lt;BR /&gt;at 
com.databricks.spark.util.IdentityClaim$.withClaim(IdentityClaim.scala:48)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.$anonfun$runWithCaptured$4(SparkThreadLocalForwardingThreadPoolExecutor.scala:91)&lt;BR /&gt;at com.databricks.unity.UCSEphemeralState$Handle.runWith(UCSEphemeralState.scala:45)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:90)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingHelper.runWithCaptured$(SparkThreadLocalForwardingThreadPoolExecutor.scala:67)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.runWithCaptured(SparkThreadLocalForwardingThreadPoolExecutor.scala:131)&lt;BR /&gt;at org.apache.spark.util.threads.SparkThreadLocalCapturingRunnable.run(SparkThreadLocalForwardingThreadPoolExecutor.scala:134)&lt;BR /&gt;... 3 more&lt;BR /&gt;Caused by: com.databricks.backend.daemon.data.common.InvalidMountException: Error while using path /mnt/databrickstowebi/client6000/pficfootnoteallocationsummary/_delta_log for resolving path '/client6000/pficfootnoteallocationsummary/_delta_log' within mount at '/mnt/databrickstowebi'.&lt;BR /&gt;at com.databricks.backend.daemon.data.common.InvalidMountException$.apply(DataMessages.scala:733)&lt;BR /&gt;at com.databricks.backend.daemon.data.filesystem.MountEntryResolver.resolve(MountEntryResolver.scala:114)&lt;BR /&gt;at com.databricks.backend.daemon.data.client.DBFSV2.resolve(DatabricksFileSystemV2.scala:104)&lt;BR /&gt;... 
73 more&lt;BR /&gt;Caused by: com.google.common.util.concurrent.UncheckedExecutionException: com.databricks.common.client.DatabricksServiceHttpClientException: RESOURCE_DOES_NOT_EXIST: Refresh token not found for userId: Some(3790767436975024)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2199)&lt;BR /&gt;at com.google.common.cache.LocalCache.get(LocalCache.java:3932)&lt;BR /&gt;at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4721)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.CachingCredentialStore.get(CachingCredentialStore.scala:60)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.refreshToken(OAuthTokenRefresherClient.scala:82)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.newToken(OAuthTokenRefresherClient.scala:131)&lt;BR /&gt;at org.apache.spark.credentials.RuntimeCredential.getOrRefresh(CredentialContext.scala:51)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.$anonfun$getCredentialFromStore$2(CredentialContext.scala:233)&lt;BR /&gt;at scala.Option.map(Option.scala:230)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.$anonfun$getCredentialFromStore$1(CredentialContext.scala:232)&lt;BR /&gt;at scala.Option.map(Option.scala:230)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.getCredentialFromStore(CredentialContext.scala:230)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.$anonfun$getCredential$6(CredentialContext.scala:225)&lt;BR /&gt;at scala.Option.flatMap(Option.scala:271)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.$anonfun$getCredential$3(CredentialContext.scala:225)&lt;BR /&gt;at scala.Option.orElse(Option.scala:447)&lt;BR /&gt;at org.apache.spark.credentials.CredentialContext$.getCredential(CredentialContext.scala:225)&lt;BR /&gt;at 
com.databricks.backend.daemon.data.client.adl.AdlGen2UpgradeCredentialContextTokenProvider.getToken(AdlGen2UpgradeCredentialContextTokenProvider.scala:30)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsClient.getAccessToken(AbfsClient.java:1371)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation.executeHttpOperation(AbfsRestOperation.java:306)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation.completeExecute(AbfsRestOperation.java:238)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation.lambda$execute$0(AbfsRestOperation.java:211)&lt;BR /&gt;at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.measureDurationOfInvocation(IOStatisticsBinding.java:494)&lt;BR /&gt;at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.trackDurationOfInvocation(IOStatisticsBinding.java:465)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsRestOperation.execute(AbfsRestOperation.java:209)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.AbfsClient.getPathStatus(AbfsClient.java:979)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystemStore.getFileStatus(AzureBlobFileSystemStore.java:1128)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.getFileStatus(AzureBlobFileSystem.java:956)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.getFileStatus(AzureBlobFileSystem.java:946)&lt;BR /&gt;at com.databricks.common.filesystem.LokiABFS.getFileStatusNoCache(LokiABFS.scala:52)&lt;BR /&gt;at com.databricks.common.filesystem.LokiABFS.getFileStatus(LokiABFS.scala:42)&lt;BR /&gt;at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1862)&lt;BR /&gt;at 
shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.exists(AzureBlobFileSystem.java:1525)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.FallbackEncryptionContextProvider.lambda$manifestFileExists$0(FallbackEncryptionContextProvider.java:52)&lt;BR /&gt;at java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.FallbackEncryptionContextProvider.manifestFileExists(FallbackEncryptionContextProvider.java:48)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.FallbackEncryptionContextProvider.initialize(FallbackEncryptionContextProvider.java:38)&lt;BR /&gt;at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.initialize(AzureBlobFileSystem.java:238)&lt;BR /&gt;at com.databricks.common.filesystem.LokiABFS.initialize(LokiABFS.scala:36)&lt;BR /&gt;at com.databricks.common.filesystem.LokiFileSystem$.$anonfun$getLokiFS$1(LokiFileSystem.scala:149)&lt;BR /&gt;at com.databricks.common.filesystem.FileSystemCache.getOrCompute(FileSystemCache.scala:46)&lt;BR /&gt;at com.databricks.common.filesystem.LokiFileSystem$.getLokiFS(LokiFileSystem.scala:146)&lt;BR /&gt;at com.databricks.common.filesystem.LokiFileSystem.initialize(LokiFileSystem.scala:211)&lt;BR /&gt;at com.databricks.backend.common.util.HadoopFSUtil$.getLokiABFSForMount(HadoopFSUtil.scala:714)&lt;BR /&gt;at com.databricks.backend.daemon.data.client.DatabricksFileSystemV2Factory.createFileSystem(DatabricksFileSystemV2Factory.scala:113)&lt;BR /&gt;at com.databricks.backend.daemon.data.filesystem.MountEntryResolver.$anonfun$resolve$2(MountEntryResolver.scala:82)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:573)&lt;BR /&gt;at com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:669)&lt;BR /&gt;at 
com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:687)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:426)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:216)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:424)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:418)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.withAttributionContext(LoggedLock.scala:89)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:472)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:455)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.withAttributionTags(LoggedLock.scala:89)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:664)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:582)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.recordOperationWithResultTags(LoggedLock.scala:89)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:573)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:542)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.recordOperation(LoggedLock.scala:89)&lt;BR /&gt;at com.databricks.common.util.locks.LoggedLock$.withLock(LoggedLock.scala:163)&lt;BR /&gt;at com.databricks.common.util.locks.PerKeyLock.withLock(PerKeyLock.scala:42)&lt;BR /&gt;at com.databricks.backend.daemon.data.filesystem.MountEntryResolver.resolve(MountEntryResolver.scala:79)&lt;BR /&gt;... 
74 more&lt;BR /&gt;Caused by: com.databricks.common.client.DatabricksServiceHttpClientException: RESOURCE_DOES_NOT_EXIST: Refresh token not found for userId: Some(3790767436975024)&lt;BR /&gt;at com.databricks.common.client.DatabricksServiceHttpClientException.copy(DBHttpClient.scala:1407)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.getResponseBody(DBHttpClient.scala:1253)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.$anonfun$httpRequestInternal$1(DBHttpClient.scala:1199)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$recordOperation$1(UsageLogging.scala:573)&lt;BR /&gt;at com.databricks.logging.UsageLogging.executeThunkAndCaptureResultTags$1(UsageLogging.scala:669)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$recordOperationWithResultTags$4(UsageLogging.scala:687)&lt;BR /&gt;at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:426)&lt;BR /&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;BR /&gt;at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:216)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:424)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:418)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.withAttributionContext(DBHttpClient.scala:604)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:472)&lt;BR /&gt;at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:455)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.withAttributionTags(DBHttpClient.scala:604)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperationWithResultTags(UsageLogging.scala:664)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperationWithResultTags$(UsageLogging.scala:582)&lt;BR /&gt;at 
com.databricks.common.client.RawDBHttpClient.recordOperationWithResultTags(DBHttpClient.scala:604)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperation(UsageLogging.scala:573)&lt;BR /&gt;at com.databricks.logging.UsageLogging.recordOperation$(UsageLogging.scala:542)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.recordOperation(DBHttpClient.scala:604)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.httpRequestInternal(DBHttpClient.scala:1185)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.entityEnclosingRequestInternal(DBHttpClient.scala:1174)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.getInternal(DBHttpClient.scala:1123)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.get(DBHttpClient.scala:689)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.getWithHeaders(DBHttpClient.scala:717)&lt;BR /&gt;at com.databricks.common.client.RawDBHttpClient.get(DBHttpClient.scala:661)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.$anonfun$refreshToken$2(OAuthTokenRefresherClient.scala:91)&lt;BR /&gt;at com.databricks.common.client.DBHttpClient$.retryWithDeadline(DBHttpClient.scala:376)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.reliably(OAuthTokenRefresherClient.scala:52)&lt;BR /&gt;at com.databricks.backend.daemon.driver.credentials.OAuthTokenRefresherClient.$anonfun$refreshToken$1(OAuthTokenRefresherClient.scala:91)&lt;BR /&gt;at com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4724)&lt;BR /&gt;at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3522)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2315)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2278)&lt;BR /&gt;at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2193)&lt;BR /&gt;... 
140 more&lt;BR /&gt;.&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/9"&gt;@Retired_mod&lt;/a&gt; - any help on this issue.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;If someone facing the same issue let me know how you solved this issue?&lt;/SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;SPAN&gt;Thanks in advance&lt;/SPAN&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;SPAN&gt;#Information Design tool #Databricksconnection #Queryexecutionerror&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 15 Jul 2024 16:42:37 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/query-execution-after-establishing-databricks-to-information/m-p/78839#M35613</guid>
      <dc:creator>Magesh2798</dc:creator>
      <dc:date>2024-07-15T16:42:37Z</dc:date>
  </channel>
</rss>

