<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Got Failure:   com.databricks.backend.common.rpc.SparkDriverExceptions$ReplFatalException  error in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/got-failure-com-databricks-backend-common-rpc/m-p/43904#M27573</link>
    <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/44632"&gt;@JKR&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Could you try setting the configurations below at the cluster level and retry the job?&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;spark.databricks.python.defaultPythonRepl pythonshell&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;spark.databricks.pyspark.py4j.pinnedThread false&lt;/SPAN&gt;&lt;/P&gt;</description>
    <pubDate>Thu, 07 Sep 2023 05:19:18 GMT</pubDate>
    <dc:creator>Tharun-Kumar</dc:creator>
    <dc:date>2023-09-07T05:19:18Z</dc:date>
    <item>
      <title>Got Failure:   com.databricks.backend.common.rpc.SparkDriverExceptions$ReplFatalException  error</title>
      <link>https://community.databricks.com/t5/data-engineering/got-failure-com-databricks-backend-common-rpc/m-p/43140#M27494</link>
      <description>&lt;P&gt;Job is scheduled on interactive cluster, and it failed with below error and in the next scheduled run it ran fine.&amp;nbsp;&lt;BR /&gt;I want to why this error occurred and how can I prevent from occurring this again.&lt;BR /&gt;&lt;BR /&gt;How to debug these types of error?&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;DIV&gt;com.databricks.backend.common.rpc.SparkDriverExceptions$ReplFatalException&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.JupyterKernelListener.waitForExecution(JupyterKernelListener.scala:811)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.JupyterKernelListener.executeCommand(JupyterKernelListener.scala:857)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.JupyterDriverLocal.executePython(JupyterDriverLocal.scala:578)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.JupyterDriverLocal.repl(JupyterDriverLocal.scala:535)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$24(DriverLocal.scala:879)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:124)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$21(DriverLocal.scala:862)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:412)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:158)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at 
com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:410)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:407)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:69)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:455)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:440)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:69)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:839)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:660)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at scala.util.Try$.apply(Try.scala:213)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:652)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:571)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:606)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:448)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:389)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at 
com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:247)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at java.lang.Thread.run(Thread.java:750)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&amp;nbsp;&lt;/DIV&gt;&lt;DIV&gt;23/09/02 03:28:46 INFO WorkflowDriver: Workflow run exited with error&lt;/DIV&gt;&lt;DIV&gt;com.databricks.NotebookExecutionException: FAILED&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.workflow.WorkflowDriver.run0(WorkflowDriver.scala:147)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:94)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.dbutils_v1.impl.NotebookUtilsImpl.run(NotebookUtilsImpl.scala:130)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.dbutils_v1.impl.NotebookUtilsImpl._run(NotebookUtilsImpl.scala:92)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at sun.reflect.GeneratedMethodAccessor810.invoke(Unknown Source)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at java.lang.reflect.Method.invoke(Method.java:498)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.Gateway.invoke(Gateway.java:306)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.commands.CallCommand.execute(CallCommand.java:79)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:195)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at 
py4j.ClientServerConnection.run(ClientServerConnection.java:115)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at java.lang.Thread.run(Thread.java:750)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;23/09/02 03:28:47 WARN JupyterDriverLocal: User code returned error with traceback: &amp;#27;[0;31m---------------------------------------------------------------------------&amp;#27;[0m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[0;31mPy4JJavaError&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;Traceback (most recent call last)&lt;/DIV&gt;&lt;DIV&gt;File &amp;#27;[0;32m&amp;lt;command-92643869439321&amp;gt;:4&amp;#27;[0m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; &amp;nbsp; 2&amp;#27;[0m Cutoff_time&amp;#27;[38;5;241m=&amp;#27;[39mdatetime&amp;#27;[38;5;241m.&amp;#27;[39mnow()&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; &amp;nbsp; 3&amp;#27;[0m &amp;#27;[38;5;28;01mwhile&amp;#27;[39;00m&amp;nbsp; end_time1&amp;#27;[38;5;241m&amp;lt;&amp;#27;[39m&amp;#27;[38;5;241m=&amp;#27;[39m Cutoff_time:&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[0;32m----&amp;gt; 4&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; status&amp;#27;[38;5;241m=&amp;#27;[39mdbutils&amp;#27;[38;5;241m.&amp;#27;[39mnotebook&amp;#27;[38;5;241m.&amp;#27;[39mrun(&amp;#27;[38;5;124m"&amp;#27;[39m&amp;#27;[38;5;124m/Repos/Master/ADB/OKC/Cov/02-okc-silver-5min-agg-batch&amp;#27;[39m&amp;#27;[38;5;124m"&amp;#27;[39m,&amp;#27;[38;5;241m1000&amp;#27;[39m, {&amp;#27;[38;5;124m'&amp;#27;[39m&amp;#27;[38;5;124mStart time&amp;#27;[39m&amp;#27;[38;5;124m'&amp;#27;[39m:start_time,&amp;#27;[38;5;124m'&amp;#27;[39m&amp;#27;[38;5;124mEnd time&amp;#27;[39m&amp;#27;[38;5;124m'&amp;#27;[39m:end_time})&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; &amp;nbsp; 5&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; start_time&amp;#27;[38;5;241m=&amp;#27;[39m 
dt&amp;#27;[38;5;241m.&amp;#27;[39mstrptime(start_time,&amp;#27;[38;5;124m'&amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mY-&amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mm-&amp;#27;[39m&amp;#27;[38;5;132;01m%d&amp;#27;[39;00m&amp;#27;[38;5;124m &amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mH:&amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mM:&amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mS&amp;#27;[39m&amp;#27;[38;5;124m'&amp;#27;[39m)&amp;#27;[38;5;241m+&amp;#27;[39mtimedelta(minutes&amp;#27;[38;5;241m=&amp;#27;[39m&amp;#27;[38;5;241m5&amp;#27;[39m)&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; &amp;nbsp; 6&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; start_time&amp;#27;[38;5;241m=&amp;#27;[39mstart_time&amp;#27;[38;5;241m.&amp;#27;[39mstrftime(&amp;#27;[38;5;124m"&amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mY-&amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mm-&amp;#27;[39m&amp;#27;[38;5;132;01m%d&amp;#27;[39;00m&amp;#27;[38;5;124m &amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mH:&amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mM:&amp;#27;[39m&amp;#27;[38;5;124m%&amp;#27;[39m&amp;#27;[38;5;124mS&amp;#27;[39m&amp;#27;[38;5;124m"&amp;#27;[39m)&lt;/DIV&gt;&lt;DIV&gt;&amp;nbsp;&lt;/DIV&gt;&lt;DIV&gt;File &amp;#27;[0;32m/databricks/python_shell/dbruntime/dbutils.py:204&amp;#27;[0m, in &amp;#27;[0;36mDBUtils.NotebookHandler.run&amp;#27;[0;34m(self, path, timeout_seconds, arguments, _NotebookHandler__databricks_internal_cluster_spec)&amp;#27;[0m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 203&amp;#27;[0m &amp;#27;[38;5;28;01mdef&amp;#27;[39;00m &amp;#27;[38;5;21mrun&amp;#27;[39m(&amp;#27;[38;5;28mself&amp;#27;[39m, path, timeout_seconds, arguments&amp;#27;[38;5;241m=&amp;#27;[39m{}, 
__databricks_internal_cluster_spec&amp;#27;[38;5;241m=&amp;#27;[39m&amp;#27;[38;5;28;01mNone&amp;#27;[39;00m):&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[0;32m--&amp;gt; 204&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28;01mreturn&amp;#27;[39;00m &amp;#27;[38;5;28;43mself&amp;#27;[39;49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43mentry_point&amp;#27;[49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43mgetDbutils&amp;#27;[49m&amp;#27;[43m(&amp;#27;[49m&amp;#27;[43m)&amp;#27;[49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43mnotebook&amp;#27;[49m&amp;#27;[43m(&amp;#27;[49m&amp;#27;[43m)&amp;#27;[49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43m_run&amp;#27;[49m&amp;#27;[43m(&amp;#27;[49m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 205&amp;#27;[0m &amp;#27;[43m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;#27;[49m&amp;#27;[43mpath&amp;#27;[49m&amp;#27;[43m,&amp;#27;[49m&amp;#27;[43m &amp;#27;[49m&amp;#27;[43mtimeout_seconds&amp;#27;[49m&amp;#27;[43m,&amp;#27;[49m&amp;#27;[43m &amp;#27;[49m&amp;#27;[43marguments&amp;#27;[49m&amp;#27;[43m,&amp;#27;[49m&amp;#27;[43m &amp;#27;[49m&amp;#27;[43m__databricks_internal_cluster_spec&amp;#27;[49m&amp;#27;[43m,&amp;#27;[49m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 206&amp;#27;[0m &amp;#27;[43m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;#27;[49m&amp;#27;[38;5;28;43mself&amp;#27;[39;49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43mentry_point&amp;#27;[49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43mgetJobGroupId&amp;#27;[49m&amp;#27;[43m(&amp;#27;[49m&amp;#27;[43m)&amp;#27;[49m&amp;#27;[43m)&amp;#27;[49m&lt;/DIV&gt;&lt;DIV&gt;&amp;nbsp;&lt;/DIV&gt;&lt;DIV&gt;File &amp;#27;[0;32m/databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/java_gateway.py:1321&amp;#27;[0m, in &amp;#27;[0;36mJavaMember.__call__&amp;#27;[0;34m(self, *args)&amp;#27;[0m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp;1315&amp;#27;[0m command 
&amp;#27;[38;5;241m=&amp;#27;[39m proto&amp;#27;[38;5;241m.&amp;#27;[39mCALL_COMMAND_NAME &amp;#27;[38;5;241m+&amp;#27;[39m\&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp;1316&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28mself&amp;#27;[39m&amp;#27;[38;5;241m.&amp;#27;[39mcommand_header &amp;#27;[38;5;241m+&amp;#27;[39m\&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp;1317&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;args_command &amp;#27;[38;5;241m+&amp;#27;[39m\&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp;1318&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;proto&amp;#27;[38;5;241m.&amp;#27;[39mEND_COMMAND_PART&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp;1320&amp;#27;[0m answer &amp;#27;[38;5;241m=&amp;#27;[39m &amp;#27;[38;5;28mself&amp;#27;[39m&amp;#27;[38;5;241m.&amp;#27;[39mgateway_client&amp;#27;[38;5;241m.&amp;#27;[39msend_command(command)&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[0;32m-&amp;gt; 1321&amp;#27;[0m return_value &amp;#27;[38;5;241m=&amp;#27;[39m &amp;#27;[43mget_return_value&amp;#27;[49m&amp;#27;[43m(&amp;#27;[49m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp;1322&amp;#27;[0m &amp;#27;[43m&amp;nbsp; &amp;nbsp; &amp;#27;[49m&amp;#27;[43manswer&amp;#27;[49m&amp;#27;[43m,&amp;#27;[49m&amp;#27;[43m &amp;#27;[49m&amp;#27;[38;5;28;43mself&amp;#27;[39;49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43mgateway_client&amp;#27;[49m&amp;#27;[43m,&amp;#27;[49m&amp;#27;[43m &amp;#27;[49m&amp;#27;[38;5;28;43mself&amp;#27;[39;49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43mtarget_id&amp;#27;[49m&amp;#27;[43m,&amp;#27;[49m&amp;#27;[43m &amp;#27;[49m&amp;#27;[38;5;28;43mself&amp;#27;[39;49m&amp;#27;[38;5;241;43m.&amp;#27;[39;49m&amp;#27;[43mname&amp;#27;[49m&amp;#27;[43m)&amp;#27;[49m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp;1324&amp;#27;[0m &amp;#27;[38;5;28;01mfor&amp;#27;[39;00m temp_arg &amp;#27;[38;5;129;01min&amp;#27;[39;00m 
temp_args:&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp;1325&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;temp_arg&amp;#27;[38;5;241m.&amp;#27;[39m_detach()&lt;/DIV&gt;&lt;DIV&gt;&amp;nbsp;&lt;/DIV&gt;&lt;DIV&gt;File &amp;#27;[0;32m/databricks/spark/python/pyspark/errors/exceptions.py:228&amp;#27;[0m, in &amp;#27;[0;36mcapture_sql_exception.&amp;lt;locals&amp;gt;.deco&amp;#27;[0;34m(*a, **kw)&amp;#27;[0m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 226&amp;#27;[0m &amp;#27;[38;5;28;01mdef&amp;#27;[39;00m &amp;#27;[38;5;21mdeco&amp;#27;[39m(&amp;#27;[38;5;241m*&amp;#27;[39ma: Any, &amp;#27;[38;5;241m*&amp;#27;[39m&amp;#27;[38;5;241m*&amp;#27;[39mkw: Any) &amp;#27;[38;5;241m-&amp;#27;[39m&amp;#27;[38;5;241m&amp;gt;&amp;#27;[39m Any:&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 227&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28;01mtry&amp;#27;[39;00m:&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[0;32m--&amp;gt; 228&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28;01mreturn&amp;#27;[39;00m &amp;#27;[43mf&amp;#27;[49m&amp;#27;[43m(&amp;#27;[49m&amp;#27;[38;5;241;43m*&amp;#27;[39;49m&amp;#27;[43ma&amp;#27;[49m&amp;#27;[43m,&amp;#27;[49m&amp;#27;[43m &amp;#27;[49m&amp;#27;[38;5;241;43m*&amp;#27;[39;49m&amp;#27;[38;5;241;43m*&amp;#27;[39;49m&amp;#27;[43mkw&amp;#27;[49m&amp;#27;[43m)&amp;#27;[49m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 229&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28;01mexcept&amp;#27;[39;00m Py4JJavaError &amp;#27;[38;5;28;01mas&amp;#27;[39;00m e:&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 230&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;converted &amp;#27;[38;5;241m=&amp;#27;[39m convert_exception(e&amp;#27;[38;5;241m.&amp;#27;[39mjava_exception)&lt;/DIV&gt;&lt;DIV&gt;&amp;nbsp;&lt;/DIV&gt;&lt;DIV&gt;File &amp;#27;[0;32m/databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/protocol.py:326&amp;#27;[0m, in 
&amp;#27;[0;36mget_return_value&amp;#27;[0;34m(answer, gateway_client, target_id, name)&amp;#27;[0m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 324&amp;#27;[0m value &amp;#27;[38;5;241m=&amp;#27;[39m OUTPUT_CONVERTER[&amp;#27;[38;5;28mtype&amp;#27;[39m](answer[&amp;#27;[38;5;241m2&amp;#27;[39m:], gateway_client)&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 325&amp;#27;[0m &amp;#27;[38;5;28;01mif&amp;#27;[39;00m answer[&amp;#27;[38;5;241m1&amp;#27;[39m] &amp;#27;[38;5;241m==&amp;#27;[39m REFERENCE_TYPE:&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[0;32m--&amp;gt; 326&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28;01mraise&amp;#27;[39;00m Py4JJavaError(&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 327&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;124m"&amp;#27;[39m&amp;#27;[38;5;124mAn error occurred while calling &amp;#27;[39m&amp;#27;[38;5;132;01m{0}&amp;#27;[39;00m&amp;#27;[38;5;132;01m{1}&amp;#27;[39;00m&amp;#27;[38;5;132;01m{2}&amp;#27;[39;00m&amp;#27;[38;5;124m.&amp;#27;[39m&amp;#27;[38;5;130;01m\n&amp;#27;[39;00m&amp;#27;[38;5;124m"&amp;#27;[39m&amp;#27;[38;5;241m.&amp;#27;[39m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 328&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28mformat&amp;#27;[39m(target_id, &amp;#27;[38;5;124m"&amp;#27;[39m&amp;#27;[38;5;124m.&amp;#27;[39m&amp;#27;[38;5;124m"&amp;#27;[39m, name), value)&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 329&amp;#27;[0m &amp;#27;[38;5;28;01melse&amp;#27;[39;00m:&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 330&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28;01mraise&amp;#27;[39;00m Py4JError(&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 331&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;124m"&amp;#27;[39m&amp;#27;[38;5;124mAn error occurred while calling 
&amp;#27;[39m&amp;#27;[38;5;132;01m{0}&amp;#27;[39;00m&amp;#27;[38;5;132;01m{1}&amp;#27;[39;00m&amp;#27;[38;5;132;01m{2}&amp;#27;[39;00m&amp;#27;[38;5;124m. Trace:&amp;#27;[39m&amp;#27;[38;5;130;01m\n&amp;#27;[39;00m&amp;#27;[38;5;132;01m{3}&amp;#27;[39;00m&amp;#27;[38;5;130;01m\n&amp;#27;[39;00m&amp;#27;[38;5;124m"&amp;#27;[39m&amp;#27;[38;5;241m.&amp;#27;[39m&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[1;32m&amp;nbsp; &amp;nbsp; 332&amp;#27;[0m&amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp; &amp;nbsp;&amp;#27;[38;5;28mformat&amp;#27;[39m(target_id, &amp;#27;[38;5;124m"&amp;#27;[39m&amp;#27;[38;5;124m.&amp;#27;[39m&amp;#27;[38;5;124m"&amp;#27;[39m, name, value))&lt;/DIV&gt;&lt;DIV&gt;&amp;nbsp;&lt;/DIV&gt;&lt;DIV&gt;&amp;#27;[0;31mPy4JJavaError&amp;#27;[0m: An error occurred while calling o445._run.&lt;/DIV&gt;&lt;DIV&gt;: com.databricks.WorkflowException: com.databricks.NotebookExecutionException: FAILED&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:99)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.dbutils_v1.impl.NotebookUtilsImpl.run(NotebookUtilsImpl.scala:130)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.dbutils_v1.impl.NotebookUtilsImpl._run(NotebookUtilsImpl.scala:92)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at sun.reflect.GeneratedMethodAccessor810.invoke(Unknown Source)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at java.lang.reflect.Method.invoke(Method.java:498)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.Gateway.invoke(Gateway.java:306)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at 
py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.commands.CallCommand.execute(CallCommand.java:79)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:195)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at py4j.ClientServerConnection.run(ClientServerConnection.java:115)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at java.lang.Thread.run(Thread.java:750)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;Caused by: com.databricks.NotebookExecutionException: FAILED&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.workflow.WorkflowDriver.run0(WorkflowDriver.scala:147)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:94)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;... 13 more&lt;/SPAN&gt;&lt;/DIV&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Sat, 02 Sep 2023 04:44:07 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/got-failure-com-databricks-backend-common-rpc/m-p/43140#M27494</guid>
      <dc:creator>JKR</dc:creator>
      <dc:date>2023-09-02T04:44:07Z</dc:date>
    </item>
    <item>
      <title>Re: Got Failure:   com.databricks.backend.common.rpc.SparkDriverExceptions$ReplFatalException  error</title>
      <link>https://community.databricks.com/t5/data-engineering/got-failure-com-databricks-backend-common-rpc/m-p/43904#M27573</link>
      <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/44632"&gt;@JKR&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Could you try setting the configurations below at the cluster level and retry the job?&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;spark.databricks.python.defaultPythonRepl pythonshell&lt;/SPAN&gt;&lt;BR /&gt;&lt;SPAN&gt;spark.databricks.pyspark.py4j.pinnedThread false&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 07 Sep 2023 05:19:18 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/got-failure-com-databricks-backend-common-rpc/m-p/43904#M27573</guid>
      <dc:creator>Tharun-Kumar</dc:creator>
      <dc:date>2023-09-07T05:19:18Z</dc:date>
    </item>
    <item>
      <title>Re: Got Failure:   com.databricks.backend.common.rpc.SparkDriverExceptions$ReplFatalException  error</title>
      <link>https://community.databricks.com/t5/data-engineering/got-failure-com-databricks-backend-common-rpc/m-p/53532#M29809</link>
      <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/39403"&gt;@Tharun-Kumar&lt;/a&gt;&amp;nbsp;My job was scheduled for every 5 mins and the next scheduled job run executed fine and even job runs after this error one mentioned executed fine without adding these configs, so how will adding the above configurations help me to identify the root cause? I really want to know how I can debug this kind of intermittent issue?&lt;/P&gt;</description>
      <pubDate>Wed, 22 Nov 2023 20:27:00 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/got-failure-com-databricks-backend-common-rpc/m-p/53532#M29809</guid>
      <dc:creator>JKR</dc:creator>
      <dc:date>2023-11-22T20:27:00Z</dc:date>
    </item>
  </channel>
</rss>

