Same issue here, and the AI Assistant cannot help... From the traces below, the failure is triggered by `from dbacademy import dbgems` on a Spark Connect (serverless) cluster: the import reads a Spark config that the server refuses to expose to clients (`assertConfigAllowedForRead`), which comes back as an `AnalysisException`.
JVM stacktrace:
org.apache.spark.sql.AnalysisException
at com.databricks.sql.connect.SparkConnectConfig$.assertConfigAllowedForRead(SparkConnectConfig.scala:203)
at org.apache.spark.sql.connect.service.SparkConnectConfigHandler$RuntimeConfigWrapper.get(SparkConnectConfigHandler.scala:106)
at org.apache.spark.sql.connect.service.SparkConnectConfigHandler.transform(SparkConnectConfigHandler.scala:241)
at org.apache.spark.sql.connect.service.SparkConnectConfigHandler.$anonfun$handleGetWithDefault$1(SparkConnectConfigHandler.scala:282)
at org.apache.spark.sql.connect.service.SparkConnectConfigHandler.$anonfun$handleGetWithDefault$1$adapted(SparkConnectConfigHandler.scala:280)
at scala.collection.Iterator.foreach(Iterator.scala:943)
at scala.collection.Iterator.foreach$(Iterator.scala:943)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
at org.apache.spark.sql.connect.service.SparkConnectConfigHandler.handleGetWithDefault(SparkConnectConfigHandler.scala:280)
at org.apache.spark.sql.connect.service.SparkConnectConfigHandler.handle(SparkConnectConfigHandler.scala:199)
at org.apache.spark.sql.connect.service.SparkConnectService.config(SparkConnectService.scala:122)
at org.apache.spark.connect.proto.SparkConnectServiceGrpc$MethodHandlers.invoke(SparkConnectServiceGrpc.java:805)
at grpc_shaded.io.grpc.stub.ServerCalls$UnaryServerCallHandler$UnaryServerCallListener.onHalfClose(ServerCalls.java:182)
at com.databricks.spark.connect.service.AuthenticationInterceptor$AuthenticatedServerCallListener.$anonfun$onHalfClose$1(AuthenticationInterceptor.scala:312)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at com.databricks.unity.UCSEphemeralState$Handle.runWith(UCSEphemeralState.scala:45)
at com.databricks.unity.HandleImpl.runWith(UCSHandle.scala:103)
at com.databricks.spark.connect.service.RequestContext.$anonfun$runWith$3(RequestContext.scala:265)
at com.databricks.spark.connect.service.RequestContext$.com$databricks$spark$connect$service$RequestContext$$withLocalProperties(RequestContext.scala:525)
at com.databricks.spark.connect.service.RequestContext.$anonfun$runWith$2(RequestContext.scala:265)
at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:435)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:216)
at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:433)
at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:427)
at com.databricks.spark.util.PublicDBLogging.withAttributionContext(DatabricksSparkUsageLogger.scala:27)
at com.databricks.spark.util.UniverseAttributionContextWrapper.withValue(AttributionContextUtils.scala:225)
at com.databricks.spark.connect.service.RequestContext.$anonfun$runWith$1(RequestContext.scala:264)
at com.databricks.spark.connect.service.RequestContext.withContext(RequestContext.scala:277)
at com.databricks.spark.connect.service.RequestContext.runWith(RequestContext.scala:257)
at com.databricks.spark.connect.service.AuthenticationInterceptor$AuthenticatedServerCallListener.onHalfClose(AuthenticationInterceptor.scala:312)
at grpc_shaded.io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.halfClosed(ServerCallImpl.java:340)
at grpc_shaded.io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1HalfClosed.runInContext(ServerImpl.java:866)
at grpc_shaded.io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
at grpc_shaded.io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:133)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.lang.Thread.run(Thread.java:840)
Python traceback (notebook cell):
File <command-2080937168644028>, line 2
1 import pyspark.sql.functions as F
----> 2 from dbacademy import dbgems
3 from dbacademy.dbhelper import DBAcademyHelper, Paths, CourseConfig, LessonConfig
5 # The following attributes are externalized to make them easy
6 # for content developers to update with every new course.
File /databricks/python/lib/python3.10/site-packages/pyspark/sql/connect/client/core.py:1988, in SparkConnectClient._handle_rpc_error(self, rpc_error)
1985 info = error_details_pb2.ErrorInfo()
1986 d.Unpack(info)
-> 1988 raise convert_exception(
1989 info,
1990 status.message,
1991 self._fetch_enriched_error(info),
1992 self._display_server_stack_trace(),
1993 ) from None
1995 raise SparkConnectGrpcException(status.message) from None
1996 else:
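For anyone hitting this while we wait for a fix: the server-side frames (`handleGetWithDefault` / `assertConfigAllowedForRead`) suggest the library is calling `spark.conf.get(...)` on a restricted key, and on serverless the server raises even when a default is supplied. A minimal sketch of the guard I'd try in my own setup code, assuming that's the failing call (`safe_conf_get` is a hypothetical helper of mine, not part of dbacademy or pyspark):

```python
from pyspark.errors import AnalysisException

def safe_conf_get(spark, key, default=None):
    """Read a Spark conf, falling back to `default` when the server
    refuses the read instead of honoring the supplied default."""
    try:
        return spark.conf.get(key, default)
    except AnalysisException:
        # Serverless/Spark Connect disallows reading certain configs;
        # treat a refused read the same as an unset key.
        return default
```

This only helps for conf reads in your own notebook code, of course; if the read happens inside dbgems itself, the library would need the equivalent guard (or a version check) upstream.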