<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: AnalysisException : when attempting to save a spark DataFrame as delta table in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37557#M26387</link>
    <description>&lt;P&gt;Use the overwrite schema option:&lt;/P&gt;&lt;LI-CODE lang="python"&gt;(df.write
    .mode('overwrite')
    .format('delta')
    .option('overwriteSchema', 'true')
    .saveAsTable('schema.k_adhoc.df'))&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Thu, 13 Jul 2023 11:37:57 GMT</pubDate>
    <dc:creator>dream</dc:creator>
    <dc:date>2023-07-13T11:37:57Z</dc:date>
    <item>
      <title>AnalysisException : when attempting to save a spark DataFrame as delta table</title>
      <link>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37122#M26267</link>
      <description>&lt;P&gt;&lt;SPAN class=""&gt;I get an,&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;&lt;PRE&gt;&lt;SPAN class=""&gt;`AnalysisException&amp;nbsp;&lt;SPAN&gt;Failed to merge incompatible data types LongType and StringType&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/PRE&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;when attempting to run the below command,&amp;nbsp;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;`df&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;write&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;format&lt;/SPAN&gt;&lt;SPAN class=""&gt;(&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;delta&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;)&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;saveAsTable&lt;/SPAN&gt;&lt;SPAN class=""&gt;(&lt;/SPAN&gt;&lt;SPAN class=""&gt;"schema&lt;/SPAN&gt;&lt;SPAN class=""&gt;.k_adhoc.df&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;,&lt;/SPAN&gt; &lt;SPAN class=""&gt;mode&lt;/SPAN&gt;&lt;SPAN class=""&gt;=&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;overwrite&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;)`&amp;nbsp;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;I am casting the column before saving:&amp;nbsp;&lt;/P&gt;&lt;P&gt;```&lt;/P&gt;&lt;DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;# Cast datatype&lt;/SPAN&gt;&lt;/DIV&gt;&lt;BR /&gt;&lt;DIV&gt;&lt;SPAN&gt;df = df.withColumn(&lt;/SPAN&gt;&lt;SPAN&gt;"h_id"&lt;/SPAN&gt;&lt;SPAN&gt;, df4[&lt;/SPAN&gt;&lt;SPAN&gt;"h_id"&lt;/SPAN&gt;&lt;SPAN&gt;].cast(StringType()))&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;df.show()&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;```&lt;/SPAN&gt;&lt;/DIV&gt;&lt;/DIV&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;The schema of the `df` dataframe is:&amp;nbsp;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN 
class=""&gt;```&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;&lt;SPAN&gt;|-- id: string (nullable = false) &lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;&lt;SPAN&gt;|-- year: integer (nullable = true) &lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;&lt;SPAN&gt;|-- month: integer (nullable = true) &lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;&lt;SPAN&gt;|-- month_year: string (nullable = true) &lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;&lt;SPAN&gt;|-- mm-yyyy: date (nullable = true) &lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;&lt;SPAN&gt;|-- h_id: string (nullable = true)&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;&lt;SPAN class=""&gt;&lt;SPAN&gt;```&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;```&lt;/P&gt;&lt;P&gt;&lt;SPAN class=""&gt;---------------------------------------------------------------------------&lt;/SPAN&gt; &lt;SPAN class=""&gt;AnalysisException&lt;/SPAN&gt;&lt;SPAN&gt; Traceback (most recent call last) File &lt;/SPAN&gt;&lt;SPAN class=""&gt;&amp;lt;&lt;A&gt;command-3186243284832184&lt;/A&gt;&amp;gt;:2&lt;/SPAN&gt; &lt;SPAN&gt;1&lt;/SPAN&gt; &lt;SPAN&gt;# Save Table&lt;/SPAN&gt; &lt;SPAN class=""&gt;----&amp;gt; 2&lt;/SPAN&gt; &lt;SPAN class=""&gt;df&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;write&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;format&lt;/SPAN&gt;&lt;SPAN class=""&gt;(&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;delta&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;)&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN 
class=""&gt;saveAsTable&lt;/SPAN&gt;&lt;SPAN class=""&gt;(&lt;/SPAN&gt;&lt;SPAN class=""&gt;"schema&lt;/SPAN&gt;&lt;SPAN class=""&gt;.k_adhoc.df&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;,&lt;/SPAN&gt; &lt;SPAN class=""&gt;mode&lt;/SPAN&gt;&lt;SPAN class=""&gt;=&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;overwrite&lt;/SPAN&gt;&lt;SPAN class=""&gt;"&lt;/SPAN&gt;&lt;SPAN class=""&gt;)&lt;/SPAN&gt;&lt;SPAN&gt; File &lt;/SPAN&gt;&lt;SPAN class=""&gt;/databricks/spark/python/pyspark/instrumentation_utils.py:48&lt;/SPAN&gt;&lt;SPAN&gt;, in &lt;/SPAN&gt;&lt;SPAN class=""&gt;_wrap_function.&amp;lt;locals&amp;gt;.wrapper&lt;/SPAN&gt;&lt;SPAN class=""&gt;(*args, **kwargs)&lt;/SPAN&gt; &lt;SPAN&gt;46&lt;/SPAN&gt;&lt;SPAN&gt; start &lt;/SPAN&gt;&lt;SPAN&gt;=&lt;/SPAN&gt;&lt;SPAN&gt; time&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;perf_counter() &lt;/SPAN&gt;&lt;SPAN&gt;47&lt;/SPAN&gt; &lt;SPAN class=""&gt;try&lt;/SPAN&gt;&lt;SPAN&gt;: &lt;/SPAN&gt;&lt;SPAN class=""&gt;---&amp;gt; 48&lt;/SPAN&gt;&lt;SPAN&gt; res &lt;/SPAN&gt;&lt;SPAN&gt;=&lt;/SPAN&gt; &lt;SPAN class=""&gt;func&lt;/SPAN&gt;&lt;SPAN class=""&gt;(&lt;/SPAN&gt;&lt;SPAN class=""&gt;*&lt;/SPAN&gt;&lt;SPAN class=""&gt;args&lt;/SPAN&gt;&lt;SPAN class=""&gt;,&lt;/SPAN&gt; &lt;SPAN class=""&gt;*&lt;/SPAN&gt;&lt;SPAN class=""&gt;*&lt;/SPAN&gt;&lt;SPAN class=""&gt;kwargs&lt;/SPAN&gt;&lt;SPAN class=""&gt;)&lt;/SPAN&gt; &lt;SPAN&gt;49&lt;/SPAN&gt;&lt;SPAN&gt; logger&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;log_success( &lt;/SPAN&gt;&lt;SPAN&gt;50&lt;/SPAN&gt;&lt;SPAN&gt; module_name, class_name, function_name, time&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;perf_counter() &lt;/SPAN&gt;&lt;SPAN&gt;-&lt;/SPAN&gt;&lt;SPAN&gt; start, signature &lt;/SPAN&gt;&lt;SPAN&gt;51&lt;/SPAN&gt;&lt;SPAN&gt; ) &lt;/SPAN&gt;&lt;SPAN&gt;52&lt;/SPAN&gt; &lt;SPAN class=""&gt;return&lt;/SPAN&gt;&lt;SPAN&gt; res File &lt;/SPAN&gt;&lt;SPAN 
class=""&gt;/databricks/spark/python/pyspark/sql/readwriter.py:1520&lt;/SPAN&gt;&lt;SPAN&gt;, in &lt;/SPAN&gt;&lt;SPAN class=""&gt;DataFrameWriter.saveAsTable&lt;/SPAN&gt;&lt;SPAN class=""&gt;(self, name, format, mode, partitionBy, **options)&lt;/SPAN&gt; &lt;SPAN&gt;1518&lt;/SPAN&gt; &lt;SPAN class=""&gt;if&lt;/SPAN&gt; &lt;SPAN&gt;format&lt;/SPAN&gt; &lt;SPAN class=""&gt;is&lt;/SPAN&gt; &lt;SPAN class=""&gt;not&lt;/SPAN&gt; &lt;SPAN class=""&gt;None&lt;/SPAN&gt;&lt;SPAN&gt;: &lt;/SPAN&gt;&lt;SPAN&gt;1519&lt;/SPAN&gt; &lt;SPAN&gt;self&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;format(&lt;/SPAN&gt;&lt;SPAN&gt;format&lt;/SPAN&gt;&lt;SPAN&gt;) &lt;/SPAN&gt;&lt;SPAN class=""&gt;-&amp;gt; 1520&lt;/SPAN&gt; &lt;SPAN class=""&gt;self&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;_jwrite&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;saveAsTable&lt;/SPAN&gt;&lt;SPAN class=""&gt;(&lt;/SPAN&gt;&lt;SPAN class=""&gt;name&lt;/SPAN&gt;&lt;SPAN class=""&gt;)&lt;/SPAN&gt;&lt;SPAN&gt; File &lt;/SPAN&gt;&lt;SPAN class=""&gt;/databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/java_gateway.py:1321&lt;/SPAN&gt;&lt;SPAN&gt;, in &lt;/SPAN&gt;&lt;SPAN class=""&gt;JavaMember.__call__&lt;/SPAN&gt;&lt;SPAN class=""&gt;(self, *args)&lt;/SPAN&gt; &lt;SPAN&gt;1315&lt;/SPAN&gt;&lt;SPAN&gt; command &lt;/SPAN&gt;&lt;SPAN&gt;=&lt;/SPAN&gt;&lt;SPAN&gt; proto&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;CALL_COMMAND_NAME &lt;/SPAN&gt;&lt;SPAN&gt;+&lt;/SPAN&gt;&lt;SPAN&gt;\ &lt;/SPAN&gt;&lt;SPAN&gt;1316&lt;/SPAN&gt; &lt;SPAN&gt;self&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;command_header &lt;/SPAN&gt;&lt;SPAN&gt;+&lt;/SPAN&gt;&lt;SPAN&gt;\ &lt;/SPAN&gt;&lt;SPAN&gt;1317&lt;/SPAN&gt;&lt;SPAN&gt; args_command &lt;/SPAN&gt;&lt;SPAN&gt;+&lt;/SPAN&gt;&lt;SPAN&gt;\ &lt;/SPAN&gt;&lt;SPAN&gt;1318&lt;/SPAN&gt;&lt;SPAN&gt; proto&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;END_COMMAND_PART 
&lt;/SPAN&gt;&lt;SPAN&gt;1320&lt;/SPAN&gt;&lt;SPAN&gt; answer &lt;/SPAN&gt;&lt;SPAN&gt;=&lt;/SPAN&gt; &lt;SPAN&gt;self&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;gateway_client&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;send_command(command) &lt;/SPAN&gt;&lt;SPAN class=""&gt;-&amp;gt; 1321&lt;/SPAN&gt;&lt;SPAN&gt; return_value &lt;/SPAN&gt;&lt;SPAN&gt;=&lt;/SPAN&gt; &lt;SPAN class=""&gt;get_return_value&lt;/SPAN&gt;&lt;SPAN class=""&gt;(&lt;/SPAN&gt; &lt;SPAN&gt;1322&lt;/SPAN&gt; &lt;SPAN class=""&gt;answer&lt;/SPAN&gt;&lt;SPAN class=""&gt;,&lt;/SPAN&gt; &lt;SPAN class=""&gt;self&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;gateway_client&lt;/SPAN&gt;&lt;SPAN class=""&gt;,&lt;/SPAN&gt; &lt;SPAN class=""&gt;self&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;target_id&lt;/SPAN&gt;&lt;SPAN class=""&gt;,&lt;/SPAN&gt; &lt;SPAN class=""&gt;self&lt;/SPAN&gt;&lt;SPAN class=""&gt;.&lt;/SPAN&gt;&lt;SPAN class=""&gt;name&lt;/SPAN&gt;&lt;SPAN class=""&gt;)&lt;/SPAN&gt; &lt;SPAN&gt;1324&lt;/SPAN&gt; &lt;SPAN class=""&gt;for&lt;/SPAN&gt;&lt;SPAN&gt; temp_arg &lt;/SPAN&gt;&lt;SPAN class=""&gt;in&lt;/SPAN&gt;&lt;SPAN&gt; temp_args: &lt;/SPAN&gt;&lt;SPAN&gt;1325&lt;/SPAN&gt;&lt;SPAN&gt; temp_arg&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;_detach() File &lt;/SPAN&gt;&lt;SPAN class=""&gt;/databricks/spark/python/pyspark/errors/exceptions.py:234&lt;/SPAN&gt;&lt;SPAN&gt;, in &lt;/SPAN&gt;&lt;SPAN class=""&gt;capture_sql_exception.&amp;lt;locals&amp;gt;.deco&lt;/SPAN&gt;&lt;SPAN class=""&gt;(*a, **kw)&lt;/SPAN&gt; &lt;SPAN&gt;230&lt;/SPAN&gt;&lt;SPAN&gt; converted &lt;/SPAN&gt;&lt;SPAN&gt;=&lt;/SPAN&gt;&lt;SPAN&gt; convert_exception(e&lt;/SPAN&gt;&lt;SPAN&gt;.&lt;/SPAN&gt;&lt;SPAN&gt;java_exception) &lt;/SPAN&gt;&lt;SPAN&gt;231&lt;/SPAN&gt; &lt;SPAN class=""&gt;if&lt;/SPAN&gt; &lt;SPAN class=""&gt;not&lt;/SPAN&gt; &lt;SPAN&gt;isinstance&lt;/SPAN&gt;&lt;SPAN&gt;(converted, UnknownException): 
&lt;/SPAN&gt;&lt;SPAN&gt;232&lt;/SPAN&gt; &lt;SPAN&gt;# Hide where the exception came from that shows a non-Pythonic&lt;/SPAN&gt; &lt;SPAN&gt;233&lt;/SPAN&gt; &lt;SPAN&gt;# JVM exception message.&lt;/SPAN&gt; &lt;SPAN class=""&gt;--&amp;gt; 234&lt;/SPAN&gt; &lt;SPAN class=""&gt;raise&lt;/SPAN&gt;&lt;SPAN&gt; converted &lt;/SPAN&gt;&lt;SPAN class=""&gt;from&lt;/SPAN&gt; &lt;SPAN&gt;None&lt;/SPAN&gt; &lt;SPAN&gt;235&lt;/SPAN&gt; &lt;SPAN class=""&gt;else&lt;/SPAN&gt;&lt;SPAN&gt;: &lt;/SPAN&gt;&lt;SPAN&gt;236&lt;/SPAN&gt; &lt;SPAN class=""&gt;raise&lt;/SPAN&gt; &lt;SPAN class=""&gt;AnalysisException&lt;/SPAN&gt;&lt;SPAN&gt;: Failed to merge fields 'h_id' and 'h_id'. Failed to merge incompatible data types LongType and StringType&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;```&lt;/P&gt;</description>
      <pubDate>Thu, 06 Jul 2023 23:32:43 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37122#M26267</guid>
      <dc:creator>kll</dc:creator>
      <dc:date>2023-07-06T23:32:43Z</dc:date>
    </item>
    <item>
      <title>Re: AnalysisException : when attempting to save a spark DataFrame as delta table</title>
      <link>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37125#M26269</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/58899"&gt;@kll&lt;/a&gt;&lt;/P&gt;&lt;DIV class=""&gt;&lt;SPAN&gt;T&lt;/SPAN&gt;his error is because of schema validation.&amp;nbsp;&lt;SPAN&gt;The delta table performs schema validation of every column, and the source dataframe column data types must match the column data types in the target table. If they don’t match, an exception is raised.&lt;/SPAN&gt;&lt;P&gt;For reference-&lt;/P&gt;&lt;P&gt;&lt;A href="https://docs.databricks.com/delta/delta-batch.html#schema-validation-1" target="_blank" rel="nofollow noopener noreferrer"&gt;https://docs.databricks.com/delta/delta-batch.html#schema-validation-1&lt;/A&gt;&lt;/P&gt;&lt;BR /&gt;Could you please refer to the 2 articles below and see if they help?&lt;/DIV&gt;&lt;OL class=""&gt;&lt;LI&gt;&lt;A href="https://community.databricks.com/s/question/0D58Y00008k0YO8SAM/failed-to-merge-incompatible-data-types-longtype-and-stringtype" target="_blank" rel="noopener noreferrer"&gt;https://community.databricks.com/s/question/0D58Y00008k0YO8SAM/failed-to-merge-incompatible-data-types-longtype-and-stringtype&lt;/A&gt;&lt;/LI&gt;&lt;LI&gt;&lt;A href="https://kb.databricks.com/en_US/data/wrong-schema-in-files" target="_blank" rel="noopener noreferrer"&gt;https://kb.databricks.com/en_US/data/wrong-schema-in-files&lt;/A&gt;&lt;/LI&gt;&lt;/OL&gt;</description>
      <pubDate>Fri, 07 Jul 2023 04:06:16 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37125#M26269</guid>
      <dc:creator>Priyanka_Biswas</dc:creator>
      <dc:date>2023-07-07T04:06:16Z</dc:date>
    </item>
    <item>
      <title>Re: AnalysisException : when attempting to save a spark DataFrame as delta table</title>
      <link>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37140#M26273</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/58899"&gt;@kll&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;First, drop your table and delete the underlying files by using dbutils.fs.rm('path'), and then try to run the write command; it will then work.&lt;/P&gt;</description>
      <pubDate>Fri, 07 Jul 2023 07:27:26 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37140#M26273</guid>
      <dc:creator>Ajay-Pandey</dc:creator>
      <dc:date>2023-07-07T07:27:26Z</dc:date>
    </item>
    <item>
      <title>Re: AnalysisException : when attempting to save a spark DataFrame as delta table</title>
      <link>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37551#M26385</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/58899"&gt;@kll&lt;/a&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;Hope everything is going great.&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;Just wanted to check in if you were able to resolve your issue. If yes, would you be happy to mark an answer as best so that other members can find the solution more quickly? If not, please tell us so we can help you.&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;Cheers!&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 13 Jul 2023 09:43:55 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37551#M26385</guid>
      <dc:creator>Anonymous</dc:creator>
      <dc:date>2023-07-13T09:43:55Z</dc:date>
    </item>
    <item>
      <title>Re: AnalysisException : when attempting to save a spark DataFrame as delta table</title>
      <link>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37557#M26387</link>
      <description>&lt;P&gt;Use the overwrite schema option:&lt;/P&gt;&lt;LI-CODE lang="python"&gt;(df.write
    .mode('overwrite')
    .format('delta')
    .option('overwriteSchema', 'true')
    .saveAsTable('schema.k_adhoc.df'))&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Thu, 13 Jul 2023 11:37:57 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37557#M26387</guid>
      <dc:creator>dream</dc:creator>
      <dc:date>2023-07-13T11:37:57Z</dc:date>
    </item>
    <item>
      <title>Re: AnalysisException : when attempting to save a spark DataFrame as delta table</title>
      <link>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37577#M26388</link>
      <description>&lt;P&gt;The issue seems to be because the job is trying to merge columns with different schema. Could you please make sure that the schema matches for the columns.&lt;/P&gt;</description>
      <pubDate>Thu, 13 Jul 2023 15:29:15 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/analysisexception-when-attempting-to-save-a-spark-dataframe-as/m-p/37577#M26388</guid>
      <dc:creator>Lakshay</dc:creator>
      <dc:date>2023-07-13T15:29:15Z</dc:date>
    </item>
  </channel>
</rss>

