<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
  <channel>
    <title>topic when attempting to load a large 800 MB CSV file in Databricks Free Edition Help</title>
    <link>https://community.databricks.com/t5/databricks-free-edition-help/when-attempting-to-load-a-large-800-mb-csv-file/m-p/105861#M119</link>
    <description>&lt;P&gt;Hello everyone,&lt;/P&gt;&lt;P&gt;I’m facing an issue when attempting to load a large 800 MB CSV file from ADLS using Auto Loader. Unfortunately, the notebook crashes during the loading process. Has anyone experienced something similar or have any suggestions on how to handle this?&lt;/P&gt;&lt;P&gt;Any tips to resolve or troubleshoot this issue would be greatly appreciated.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks in advance,&lt;/P&gt;&lt;P&gt;Violeta&lt;/P&gt;</description>
    <pubDate>Thu, 16 Jan 2025 10:04:07 GMT</pubDate>
    <dc:creator>violeta482yee</dc:creator>
    <dc:date>2025-01-16T10:04:07Z</dc:date>
    <item>
      <title>when attempting to load a large 800 MB CSV file</title>
      <link>https://community.databricks.com/t5/databricks-free-edition-help/when-attempting-to-load-a-large-800-mb-csv-file/m-p/105861#M119</link>
      <description>&lt;P&gt;Hello everyone,&lt;/P&gt;&lt;P&gt;I’m facing an issue when attempting to load a large 800 MB CSV file from ADLS using Auto Loader. Unfortunately, the notebook crashes during the loading process. Has anyone experienced something similar or have any suggestions on how to handle this?&lt;/P&gt;&lt;P&gt;Any tips to resolve or troubleshoot this issue would be greatly appreciated.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks in advance,&lt;/P&gt;&lt;P&gt;Violeta&lt;/P&gt;</description>
      <pubDate>Thu, 16 Jan 2025 10:04:07 GMT</pubDate>
      <guid>https://community.databricks.com/t5/databricks-free-edition-help/when-attempting-to-load-a-large-800-mb-csv-file/m-p/105861#M119</guid>
      <dc:creator>violeta482yee</dc:creator>
      <dc:date>2025-01-16T10:04:07Z</dc:date>
    </item>
    <item>
      <title>Re: when attempting to load a large 800 MB CSV file</title>
      <link>https://community.databricks.com/t5/databricks-free-edition-help/when-attempting-to-load-a-large-800-mb-csv-file/m-p/105891#M120</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/143761"&gt;@violeta482yee&lt;/a&gt;,&lt;/P&gt;
&lt;P&gt;Have you checked the resource availability of the compute attached to the notebook?&lt;/P&gt;
&lt;P&gt;One solution could be:&amp;nbsp;to use the &lt;STRONG&gt;&lt;CODE&gt;cloudFiles.maxBytesPerTrigger&lt;/CODE&gt;&lt;/STRONG&gt; option: This option allows you to control the maximum number of bytes processed in each trigger. For example, setting it to &lt;CODE&gt;100m&lt;/CODE&gt; will process up to 100 MB of data per trigger. This can help prevent the notebook from crashing due to memory overload.&lt;/P&gt;
&lt;P&gt;But it requires more investigation on the compute being used to determine the reason.&lt;/P&gt;</description>
      <pubDate>Thu, 16 Jan 2025 12:35:18 GMT</pubDate>
      <guid>https://community.databricks.com/t5/databricks-free-edition-help/when-attempting-to-load-a-large-800-mb-csv-file/m-p/105891#M120</guid>
      <dc:creator>Alberto_Umana</dc:creator>
      <dc:date>2025-01-16T12:35:18Z</dc:date>
    </item>
  </channel>
</rss>

