<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Reading Athena table created on top of s3 in databricks in Data Governance</title>
    <link>https://community.databricks.com/t5/data-governance/reading-athen-table-created-on-top-of-s3-in-databricks/m-p/12690#M490</link>
    <description>&lt;P&gt;HI,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;we have databricks that use aws glue catalog as metastore, I am trying to read athena table which is created on top s3, I am getting following error&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;com.databricks.backend.common.rpc.SparkDriverExceptions$SQLExecutionException: java.lang.RuntimeException: java.lang.ClassNotFoundException: org.openx.data.jsonserde.JsonSerDe&lt;/P&gt;</description>
    <pubDate>Wed, 11 Jan 2023 09:33:20 GMT</pubDate>
    <dc:creator>HemanthRatakond</dc:creator>
    <dc:date>2023-01-11T09:33:20Z</dc:date>
    <item>
      <title>Reading Athena table created on top of s3 in databricks</title>
      <link>https://community.databricks.com/t5/data-governance/reading-athen-table-created-on-top-of-s3-in-databricks/m-p/12690#M490</link>
      <description>&lt;P&gt;HI,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;we have databricks that use aws glue catalog as metastore, I am trying to read athena table which is created on top s3, I am getting following error&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;com.databricks.backend.common.rpc.SparkDriverExceptions$SQLExecutionException: java.lang.RuntimeException: java.lang.ClassNotFoundException: org.openx.data.jsonserde.JsonSerDe&lt;/P&gt;</description>
      <pubDate>Wed, 11 Jan 2023 09:33:20 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-governance/reading-athen-table-created-on-top-of-s3-in-databricks/m-p/12690#M490</guid>
      <dc:creator>HemanthRatakond</dc:creator>
      <dc:date>2023-01-11T09:33:20Z</dc:date>
    </item>
    <item>
      <title>Re: Reading Athena table created on top of s3 in databricks</title>
      <link>https://community.databricks.com/t5/data-governance/reading-athen-table-created-on-top-of-s3-in-databricks/m-p/12691#M491</link>
      <description>&lt;P&gt;@Hemanth Ratakonda​&amp;nbsp;Can you paste full error message? It looks like the one you've pasted is cut in half.&lt;/P&gt;</description>
      <pubDate>Wed, 11 Jan 2023 09:42:31 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-governance/reading-athen-table-created-on-top-of-s3-in-databricks/m-p/12691#M491</guid>
      <dc:creator>daniel_sahal</dc:creator>
      <dc:date>2023-01-11T09:42:31Z</dc:date>
    </item>
    <item>
      <title>Re: Reading Athena table created on top of s3 in databricks</title>
      <link>https://community.databricks.com/t5/data-governance/reading-athen-table-created-on-top-of-s3-in-databricks/m-p/12692#M492</link>
      <description>&lt;P&gt;@Daniel Sahal​&amp;nbsp;&lt;/P&gt;&lt;P&gt;at org.apache.hadoop.hive.ql.plan.TableDesc.getDeserializerClass(TableDesc.java:79)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.hive.execution.HiveTableScanExec.addColumnMetadataToConf(HiveTableScanExec.scala:127)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopConf$lzycompute(HiveTableScanExec.scala:104)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopConf(HiveTableScanExec.scala:101)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader$lzycompute(HiveTableScanExec.scala:113)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.hive.execution.HiveTableScanExec.hadoopReader(HiveTableScanExec.scala:108)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.hive.execution.HiveTableScanExec.$anonfun$doExecute$2(HiveTableScanExec.scala:214)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.util.Utils$.withDummyCallSite(Utils.scala:2952)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.hive.execution.HiveTableScanExec.doExecute(HiveTableScanExec.scala:214)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:232)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:276)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:165)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:272)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:228)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.collect.Collector$.collect(Collector.scala:121)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.collect.Collector$.collect(Collector.scala:133)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.qrc.InternalRowFormat$.collect(cachedSparkResults.scala:120)&lt;/P&gt;&lt;P&gt;	at 
org.apache.spark.sql.execution.qrc.InternalRowFormat$.collect(cachedSparkResults.scala:108)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.qrc.InternalRowFormat$.collect(cachedSparkResults.scala:90)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.qrc.ResultCacheManager.$anonfun$computeResult$1(ResultCacheManager.scala:528)&lt;/P&gt;&lt;P&gt;	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.qrc.ResultCacheManager.collectResult$1(ResultCacheManager.scala:520)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.qrc.ResultCacheManager.computeResult(ResultCacheManager.scala:540)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.qrc.ResultCacheManager.$anonfun$getOrComputeResultInternal$1(ResultCacheManager.scala:395)&lt;/P&gt;&lt;P&gt;	at scala.Option.getOrElse(Option.scala:189)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.qrc.ResultCacheManager.getOrComputeResultInternal(ResultCacheManager.scala:388)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.qrc.ResultCacheManager.getOrComputeResult(ResultCacheManager.scala:286)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeCollectResult$1(SparkPlan.scala:438)&lt;/P&gt;&lt;P&gt;	at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SparkPlan.executeCollectResult(SparkPlan.scala:435)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.Dataset.collectResult(Dataset.scala:3471)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.Dataset.$anonfun$collectResult$1(Dataset.scala:3462)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.Dataset.$anonfun$withAction$3(Dataset.scala:4344)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:789)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.Dataset.$anonfun$withAction$2(Dataset.scala:4342)&lt;/P&gt;&lt;P&gt;	at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$8(SQLExecution.scala:245)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:414)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$1(SQLExecution.scala:190)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1003)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:144)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:364)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.Dataset.withAction(Dataset.scala:4342)&lt;/P&gt;&lt;P&gt;	at org.apache.spark.sql.Dataset.collectResult(Dataset.scala:3461)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.OutputAggregator$.withOutputAggregation0(OutputAggregator.scala:267)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.OutputAggregator$.withOutputAggregation(OutputAggregator.scala:101)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.SQLDriverLocal.executeSql(SQLDriverLocal.scala:115)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.SQLDriverLocal.repl(SQLDriverLocal.scala:145)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$23(DriverLocal.scala:731)&lt;/P&gt;&lt;P&gt;	at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:103)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$20(DriverLocal.scala:714)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:401)&lt;/P&gt;&lt;P&gt;	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:158)&lt;/P&gt;&lt;P&gt;	at 
com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:399)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:396)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:64)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:444)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:429)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:64)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:691)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:622)&lt;/P&gt;&lt;P&gt;	at scala.util.Try$.apply(Try.scala:213)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:614)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:533)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:568)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:438)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:381)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:232)&lt;/P&gt;&lt;P&gt;	at java.lang.Thread.run(Thread.java:750)&lt;/P&gt;&lt;P&gt;Caused by: java.lang.ClassNotFoundException: org.openx.data.jsonserde.JsonSerDe&lt;/P&gt;&lt;P&gt;	at java.net.URLClassLoader.findClass(URLClassLoader.java:387)&lt;/P&gt;&lt;P&gt;	at java.lang.ClassLoader.loadClass(ClassLoader.java:419)&lt;/P&gt;&lt;P&gt;	at java.lang.ClassLoader.loadClass(ClassLoader.java:352)&lt;/P&gt;&lt;P&gt;	
at java.lang.Class.forName0(Native Method)&lt;/P&gt;&lt;P&gt;	at java.lang.Class.forName(Class.java:348)&lt;/P&gt;&lt;P&gt;	at org.apache.hadoop.hive.ql.plan.TableDesc.getDeserializerClass(TableDesc.java:76)&lt;/P&gt;&lt;P&gt;	... 68 more&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.SQLDriverLocal.executeSql(SQLDriverLocal.scala:130)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.SQLDriverLocal.repl(SQLDriverLocal.scala:145)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$23(DriverLocal.scala:731)&lt;/P&gt;&lt;P&gt;	at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:103)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$20(DriverLocal.scala:714)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:401)&lt;/P&gt;&lt;P&gt;	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:158)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:399)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:396)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:64)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:444)&lt;/P&gt;&lt;P&gt;	at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:429)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:64)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:691)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:622)&lt;/P&gt;&lt;P&gt;	at 
scala.util.Try$.apply(Try.scala:213)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:614)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:533)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:568)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:438)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:381)&lt;/P&gt;&lt;P&gt;	at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:232)&lt;/P&gt;&lt;P&gt;	at java.lang.Thread.run(Thread.java:750)&lt;/P&gt;</description>
      <pubDate>Wed, 11 Jan 2023 09:45:14 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-governance/reading-athen-table-created-on-top-of-s3-in-databricks/m-p/12692#M492</guid>
      <dc:creator>HemanthRatakond</dc:creator>
      <dc:date>2023-01-11T09:45:14Z</dc:date>
    </item>
  </channel>
</rss>

