00007160: 2023-01-30T14:22:06 [TARGET_LOAD ]E: Failed (retcode -1) to execute statement: 'COPY INTO `e2underwriting_dbo`.`product` FROM(SELECT cast(_c0 as INT) as `ProductID`, _c1 as `ShortName`, cast(_c2 as INT) as `Status`, cast(_c3 as TIMESTAMP) as `StatusDate`, cast(_c4 as INT) as `ProductComponentID`, cast(_c5 as INT) as `ProductFamilyPAndCID`, cast(_c6 as BOOLEAN) as `IsReinsurance`, cast(_c7 as BOOLEAN) as `IsNonAdmitted`, _c8 as `BinderSectionTemplate`, _c9 as `QuoteSectionTemplate`, cast(_c10 as INT) as `SupercededID`, cast(_c11 as BOOLEAN) as `IsTemplate`, cast(_c12 as BOOLEAN) as `IsAdmitted`, cast(_c13 as BOOLEAN) as `IsSurplus`, cast(_c14 as BOOLEAN) as `IsExcess`, _c15 as `ASLOB`, _c16 as `LockedBy`, _c17 as `MaxUID`, cast(_c18 as BOOLEAN) as `IsCover`, cast(_c19 as INT) as `RiskTypeID`, _c20 as `UpdatedBy`, cast(_c21 as TIMESTAMP) as `UpdatedOn`, _c22 as `ProductXML`, cast(_c23 as BOOLEAN) as `IsReportable`, cast(_c24 as INT) as `ProductTemplateID`, cast(_c25 as INT) as `ProductOrder`, cast(_c26 as BOOLEAN) as `IsObsolete` from 'abfss://<filesystem name>.dfs.core.windows.net')
FILEFORMAT = CSV FILES = ('//delta_test/1/LOAD00000001.csv.gz') FORMAT_OPTIONS('nullValue' = 'attrep_null', 'multiLine'='true') COPY_OPTIONS('force' = 'true')' [1022502] (ar_odbc_stmt.c:4985)
RetCode: SQL_ERROR SqlState: HY000 NativeError: 35 Message: [Simba][Hardy] (35) Error from server: error code: '0' error message: 'org.apache.hive.service.cli.HiveSQLException: Error running query: com.databricks.sql.managedcatalog.UnityCatalogServiceException: [RequestId=25b8c822-3052-46b6-90b0-d1945b0df14d ErrorClass=INVALID_PARAMETER_VALUE] Input path <file system name>.dfs.core.windows.net overlaps with other external tables
at org.apache.spark.sql.hive.thriftserver.HiveThriftServerErrors$.runningQueryError(HiveThriftServerErrors.scala:48)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.$anonfun$execute$1(SparkExecuteStatementOperation.scala:498)
at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
at com.databricks.unity.UCSEphemeralState$Handle.runWith(UCSEphemeralState.scala:41)
at com.databricks.unity.HandleImpl.runWith(UCSHandle.scala:99)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOp
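In the statement above, COPY INTO is reading from the container root (abfss://<filesystem name>.dfs.core.windows.net), and Unity Catalog rejects it because that path overlaps (contains, or is contained by) the storage location already registered for one or more external tables in the metastore. A minimal diagnostic sketch to locate the conflicting location is shown below; it assumes access to a Databricks SQL warehouse attached to the same Unity Catalog metastore, and the table name is taken from the failing statement, while everything else is standard Databricks SQL:

  -- List the storage locations Unity Catalog already has registered.
  SHOW EXTERNAL LOCATIONS;

  -- Inspect the target table from the failing COPY INTO; the "Location"
  -- row in the output shows the path Unity Catalog holds for it, which
  -- must not overlap the path COPY INTO reads from.
  DESCRIBE TABLE EXTENDED `e2underwriting_dbo`.`product`;

Comparing those locations against the abfss:// path used as the COPY INTO source shows which registration causes the INVALID_PARAMETER_VALUE overlap reported in the log.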