I am running a Structured Streaming job on Databricks and getting the error below. The source Delta table already has two versions (0 and 1), but the stream still fails to read from it.
Query details:
    _id: fe7a563e-f487-4d0e-beb0-efe794ab4708
    _runId: bf0e94b5-b6ce-42bb-9bc7-15725f16ba1a
    _errorClassOnException: DELTA_INVALID_SOURCE_VERSION

Exception:
com.databricks.sql.transaction.tahoe.DeltaIllegalStateException: [DELTA_INVALID_SOURCE_VERSION] sourceVersion(0) is invalid
    at com.databricks.sql.transaction.tahoe.DeltaErrorsBase.invalidSourceVersion(DeltaErrors.scala:2491)
    at com.databricks.sql.transaction.tahoe.DeltaErrorsBase.invalidSourceVersion$(DeltaErrors.scala:2490)
    at com.databricks.sql.transaction.tahoe.DeltaErrors$.invalidSourceVersion(DeltaErrors.scala:3323)
    at com.databricks.sql.transaction.tahoe.sources.DeltaSourceOffset$Deserializer.deserialize(DeltaSourceOffset.scala:237)
    at com.databricks.sql.transaction.tahoe.sources.DeltaSourceOffset$Deserializer.deserialize(DeltaSourceOffset.scala:222)
    at com.fasterxml.jackson.databind.deser.DefaultDeserializationContext.readRootValue(DefaultDeserializationContext.java:323)
    at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4825)
    at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3772)
    at com.fasterxml.jackson.module.scala.ScalaObjectMapper.readValue(ScalaObjectMapper.scala:211)
    at com.fasterxml.jackson.module.scala.ScalaObjectMapper.readValue$(ScalaObjectMapper.scala:210)
    at com.databricks.sql.transaction.tahoe.util.JsonUtils$$anon$1.readValue(JsonUtils.scala:31)
    at com.databricks.sql.transaction.tahoe.sources.DeltaSourceOffset$.apply(DeltaSourceOffset.scala:165)
    at com.databricks.sql.transaction.tahoe.sources.DeltaSource.toDeltaSourceOffset(DeltaSource.scala:951)
    at com.databricks.sql.transaction.tahoe.sources.DeltaSource.$anonfun$init$2(DeltaSource.scala:1475)
    at scala.Option.map(Option.scala:230)
    at com.databricks.sql.transaction.tahoe.sources.DeltaSource.init(DeltaSource.scala:1475)
    at org.apache.spark.sql.execution.streaming.SourceInitializationHandler$.$anonfun$initSources$4(SupportsSourceInitialization.scala:81)
    at org.apache.spark.sql.execution.streaming.SourceInitializationHandler$.$anonfun$initSources$4$adapted(SupportsSourceInitialization.scala:60)
    at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
    at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
    at org.apache.spark.sql.execution.streaming.SourceInitializationHandler$.initSources(SupportsSourceInitialization.scala:60)
    at org.apache.spark.sql.execution.streaming.MicroBatchExecution.initializeExecution(MicroBatchExecution.scala:489)
    at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStreamWithListener(MicroBatchExecution.scala:642)
    at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:432)
    at org.apache.spark.sql.execution.streaming.StreamExecution.$anonfun$runStream$2(StreamExecution.scala:450)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:1175)
    at org.apache.spark.sql.execution.streaming.StreamExecution.$anonfun$runStream$1(StreamExecution.scala:398)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at com.databricks.logging.UsageLogging.$anonfun$withAttributionContext$1(UsageLogging.scala:426)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62)
    at com.databricks.logging.AttributionContext$.withValue(AttributionContext.scala:216)
    at com.databricks.logging.UsageLogging.withAttributionContext(UsageLogging.scala:424)
    at com.databricks.logging.UsageLogging.withAttributionContext$(UsageLogging.scala:418)
    at com.databricks.spark.util.PublicDBLogging.withAttributionContext(DatabricksSparkUsageLogger.scala:27)
    at com.databricks.logging.UsageLogging.withAttributionTags(UsageLogging.scala:472)
    at com.databricks.logging.UsageLogging.withAttributionTags$(UsageLogging.scala:455)
    at com.databricks.spark.util.PublicDBLogging.withAttributionTags(DatabricksSparkUsageLogger.scala:27)
    at com.databricks.spark.util.PublicDBLogging.withAttributionTags0(DatabricksSparkUsageLogger.scala:72)
    at com.databricks.spark.util.DatabricksSparkUsageLogger.withAttributionTags(DatabricksSparkUsageLogger.scala:172)
    at com.databricks.spark.util.UsageLogging.$anonfun$withAttributionTags$1(UsageLogger.scala:491)
    at com.databricks.spark.util.UsageLogging$.withAttributionTags(UsageLogger.scala:603)
    at com.databricks.spark.util.UsageLogging$.withAttributionTags(UsageLogger.scala:612)
    at com.databricks.spark.util.UsageLogging.withAttributionTags(UsageLogger.scala:491)
    at com.databricks.spark.util.UsageLogging.withAttributionTags$(UsageLogger.scala:489)
    at org.apache.spark.sql.execution.streaming.StreamExecution.withAttributionTags(StreamExecution.scala:84)
    at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:378)
    at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.$anonfun$run$3(StreamExecution.scala:282)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:97)
    at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.$anonfun$run$2(StreamExecution.scala:282)
    at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
    at com.databricks.unity.UCSEphemeralState$Handle.runWith(UCSEphemeralState.scala:45)
    at com.databricks.unity.HandleImpl.runWith(UCSHandle.scala:103)
    at com.databricks.unity.HandleImpl.$anonfun$runWithAndClose$1(UCSHandle.scala:108)
    at scala.util.Using$.resource(Using.scala:269)
    at com.databricks.unity.HandleImpl.runWithAndClose(UCSHandle.scala:107)
    at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:281)
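For context, the stream is set up roughly like the sketch below (a minimal example; the table names and checkpoint path are placeholders, not my real ones):

from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

# Read the Delta source table as a stream (placeholder table name).
source_df = (
    spark.readStream
        .format("delta")
        .table("catalog.schema.source_table")
)

# Write to a Delta target table, tracking progress in a checkpoint location
# (placeholder target name and checkpoint path).
query = (
    source_df.writeStream
        .format("delta")
        .option("checkpointLocation", "/tmp/checkpoints/source_table_stream")
        .outputMode("append")
        .toTable("catalog.schema.target_table")
)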
Can someone help me with this?
#StructuredStreaming