A job scheduled on an interactive cluster failed with the error below, but the next scheduled run completed successfully.
I want to know why this error occurred and how I can prevent it from occurring again.
How do I debug these types of errors?
com.databricks.backend.common.rpc.SparkDriverExceptions$ReplFatalException
at com.databricks.backend.daemon.driver.JupyterKernelListener.waitForExecution(JupyterKernelListener.scala:811)
at com.databricks.backend.daemon.driver.JupyterKernelListener.executeCommand(JupyterKernelListener.scala:857)
at com.databricks.backend.daemon.driver.JupyterDriverLocal.executePython(JupyterDriverLocal.scala:578)
at com.databricks.backend.daemon.driver.JupyterDriverLocal.repl(JupyterDriverLocal.scala:535)
at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$24(DriverLocal.scala:879)
at com.databricks.unity.EmptyHandle$.runWith(UCSHandle.scala:124)
at com.databricks.backend.daemon.driver.DriverLocal.$anonfun$execute$21(DriverLocal.scala:862)
at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:412)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:158)
at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:410)
at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:407)
at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:69)
at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:455)
at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:440)
at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:69)
at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:839)
at com.databricks.backend.daemon.driver.DriverWrapper.$anonfun$tryExecutingCommand$1(DriverWrapper.scala:660)
at scala.util.Try$.apply(Try.scala:213)
at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:652)
at com.databricks.backend.daemon.driver.DriverWrapper.executeCommandAndGetError(DriverWrapper.scala:571)
at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:606)
at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:448)
at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:389)
at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:247)
at java.lang.Thread.run(Thread.java:750)
23/09/02 03:28:46 INFO WorkflowDriver: Workflow run exited with error
com.databricks.NotebookExecutionException: FAILED
at com.databricks.workflow.WorkflowDriver.run0(WorkflowDriver.scala:147)
at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:94)
at com.databricks.dbutils_v1.impl.NotebookUtilsImpl.run(NotebookUtilsImpl.scala:130)
at com.databricks.dbutils_v1.impl.NotebookUtilsImpl._run(NotebookUtilsImpl.scala:92)
at sun.reflect.GeneratedMethodAccessor810.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
at py4j.Gateway.invoke(Gateway.java:306)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:195)
at py4j.ClientServerConnection.run(ClientServerConnection.java:115)
at java.lang.Thread.run(Thread.java:750)
23/09/02 03:28:47 WARN JupyterDriverLocal: User code returned error with traceback: [0;31m---------------------------------------------------------------------------[0m
[0;31mPy4JJavaError[0m Traceback (most recent call last)
File [0;32m<command-92643869439321>:4[0m
[1;32m 2[0m Cutoff_time[38;5;241m=[39mdatetime[38;5;241m.[39mnow()
[1;32m 3[0m [38;5;28;01mwhile[39;00m end_time1[38;5;241m<[39m[38;5;241m=[39m Cutoff_time:
[0;32m----> 4[0m status[38;5;241m=[39mdbutils[38;5;241m.[39mnotebook[38;5;241m.[39mrun([38;5;124m"[39m[38;5;124m/Repos/Master/ADB/OKC/Cov/02-okc-silver-5min-agg-batch[39m[38;5;124m"[39m,[38;5;241m1000[39m, {[38;5;124m'[39m[38;5;124mStart time[39m[38;5;124m'[39m:start_time,[38;5;124m'[39m[38;5;124mEnd time[39m[38;5;124m'[39m:end_time})
[1;32m 5[0m start_time[38;5;241m=[39m dt[38;5;241m.[39mstrptime(start_time,[38;5;124m'[39m[38;5;124m%[39m[38;5;124mY-[39m[38;5;124m%[39m[38;5;124mm-[39m[38;5;132;01m%d[39;00m[38;5;124m [39m[38;5;124m%[39m[38;5;124mH:[39m[38;5;124m%[39m[38;5;124mM:[39m[38;5;124m%[39m[38;5;124mS[39m[38;5;124m'[39m)[38;5;241m+[39mtimedelta(minutes[38;5;241m=[39m[38;5;241m5[39m)
[1;32m 6[0m start_time[38;5;241m=[39mstart_time[38;5;241m.[39mstrftime([38;5;124m"[39m[38;5;124m%[39m[38;5;124mY-[39m[38;5;124m%[39m[38;5;124mm-[39m[38;5;132;01m%d[39;00m[38;5;124m [39m[38;5;124m%[39m[38;5;124mH:[39m[38;5;124m%[39m[38;5;124mM:[39m[38;5;124m%[39m[38;5;124mS[39m[38;5;124m"[39m)
File [0;32m/databricks/python_shell/dbruntime/dbutils.py:204[0m, in [0;36mDBUtils.NotebookHandler.run[0;34m(self, path, timeout_seconds, arguments, _NotebookHandler__databricks_internal_cluster_spec)[0m
[1;32m 203[0m [38;5;28;01mdef[39;00m [38;5;21mrun[39m([38;5;28mself[39m, path, timeout_seconds, arguments[38;5;241m=[39m{}, __databricks_internal_cluster_spec[38;5;241m=[39m[38;5;28;01mNone[39;00m):
[0;32m--> 204[0m [38;5;28;01mreturn[39;00m [38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43mentry_point[49m[38;5;241;43m.[39;49m[43mgetDbutils[49m[43m([49m[43m)[49m[38;5;241;43m.[39;49m[43mnotebook[49m[43m([49m[43m)[49m[38;5;241;43m.[39;49m[43m_run[49m[43m([49m
[1;32m 205[0m [43m [49m[43mpath[49m[43m,[49m[43m [49m[43mtimeout_seconds[49m[43m,[49m[43m [49m[43marguments[49m[43m,[49m[43m [49m[43m__databricks_internal_cluster_spec[49m[43m,[49m
[1;32m 206[0m [43m [49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43mentry_point[49m[38;5;241;43m.[39;49m[43mgetJobGroupId[49m[43m([49m[43m)[49m[43m)[49m
File [0;32m/databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/java_gateway.py:1321[0m, in [0;36mJavaMember.__call__[0;34m(self, *args)[0m
[1;32m 1315[0m command [38;5;241m=[39m proto[38;5;241m.[39mCALL_COMMAND_NAME [38;5;241m+[39m\
[1;32m 1316[0m [38;5;28mself[39m[38;5;241m.[39mcommand_header [38;5;241m+[39m\
[1;32m 1317[0m args_command [38;5;241m+[39m\
[1;32m 1318[0m proto[38;5;241m.[39mEND_COMMAND_PART
[1;32m 1320[0m answer [38;5;241m=[39m [38;5;28mself[39m[38;5;241m.[39mgateway_client[38;5;241m.[39msend_command(command)
[0;32m-> 1321[0m return_value [38;5;241m=[39m [43mget_return_value[49m[43m([49m
[1;32m 1322[0m [43m [49m[43manswer[49m[43m,[49m[43m [49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43mgateway_client[49m[43m,[49m[43m [49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43mtarget_id[49m[43m,[49m[43m [49m[38;5;28;43mself[39;49m[38;5;241;43m.[39;49m[43mname[49m[43m)[49m
[1;32m 1324[0m [38;5;28;01mfor[39;00m temp_arg [38;5;129;01min[39;00m temp_args:
[1;32m 1325[0m temp_arg[38;5;241m.[39m_detach()
File [0;32m/databricks/spark/python/pyspark/errors/exceptions.py:228[0m, in [0;36mcapture_sql_exception.<locals>.deco[0;34m(*a, **kw)[0m
[1;32m 226[0m [38;5;28;01mdef[39;00m [38;5;21mdeco[39m([38;5;241m*[39ma: Any, [38;5;241m*[39m[38;5;241m*[39mkw: Any) [38;5;241m-[39m[38;5;241m>[39m Any:
[1;32m 227[0m [38;5;28;01mtry[39;00m:
[0;32m--> 228[0m [38;5;28;01mreturn[39;00m [43mf[49m[43m([49m[38;5;241;43m*[39;49m[43ma[49m[43m,[49m[43m [49m[38;5;241;43m*[39;49m[38;5;241;43m*[39;49m[43mkw[49m[43m)[49m
[1;32m 229[0m [38;5;28;01mexcept[39;00m Py4JJavaError [38;5;28;01mas[39;00m e:
[1;32m 230[0m converted [38;5;241m=[39m convert_exception(e[38;5;241m.[39mjava_exception)
File [0;32m/databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/protocol.py:326[0m, in [0;36mget_return_value[0;34m(answer, gateway_client, target_id, name)[0m
[1;32m 324[0m value [38;5;241m=[39m OUTPUT_CONVERTER[[38;5;28mtype[39m](answer[[38;5;241m2[39m:], gateway_client)
[1;32m 325[0m [38;5;28;01mif[39;00m answer[[38;5;241m1[39m] [38;5;241m==[39m REFERENCE_TYPE:
[0;32m--> 326[0m [38;5;28;01mraise[39;00m Py4JJavaError(
[1;32m 327[0m [38;5;124m"[39m[38;5;124mAn error occurred while calling [39m[38;5;132;01m{0}[39;00m[38;5;132;01m{1}[39;00m[38;5;132;01m{2}[39;00m[38;5;124m.[39m[38;5;130;01m\n[39;00m[38;5;124m"[39m[38;5;241m.[39m
[1;32m 328[0m [38;5;28mformat[39m(target_id, [38;5;124m"[39m[38;5;124m.[39m[38;5;124m"[39m, name), value)
[1;32m 329[0m [38;5;28;01melse[39;00m:
[1;32m 330[0m [38;5;28;01mraise[39;00m Py4JError(
[1;32m 331[0m [38;5;124m"[39m[38;5;124mAn error occurred while calling [39m[38;5;132;01m{0}[39;00m[38;5;132;01m{1}[39;00m[38;5;132;01m{2}[39;00m[38;5;124m. Trace:[39m[38;5;130;01m\n[39;00m[38;5;132;01m{3}[39;00m[38;5;130;01m\n[39;00m[38;5;124m"[39m[38;5;241m.[39m
[1;32m 332[0m [38;5;28mformat[39m(target_id, [38;5;124m"[39m[38;5;124m.[39m[38;5;124m"[39m, name, value))
[0;31mPy4JJavaError[0m: An error occurred while calling o445._run.
: com.databricks.WorkflowException: com.databricks.NotebookExecutionException: FAILED
at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:99)
at com.databricks.dbutils_v1.impl.NotebookUtilsImpl.run(NotebookUtilsImpl.scala:130)
at com.databricks.dbutils_v1.impl.NotebookUtilsImpl._run(NotebookUtilsImpl.scala:92)
at sun.reflect.GeneratedMethodAccessor810.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
at py4j.Gateway.invoke(Gateway.java:306)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:195)
at py4j.ClientServerConnection.run(ClientServerConnection.java:115)
at java.lang.Thread.run(Thread.java:750)
Caused by: com.databricks.NotebookExecutionException: FAILED
at com.databricks.workflow.WorkflowDriver.run0(WorkflowDriver.scala:147)
at com.databricks.workflow.WorkflowDriver.run(WorkflowDriver.scala:94)
... 13 more