<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Pyspark logging - custom to Azure blob mount directory in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11010#M6051</link>
    <description>&lt;P&gt;@karthick J​&amp;nbsp;- If Jose's answer helped solve the issue, would you be happy to mark their answer as best so that others can find the solution more easily?&lt;/P&gt;</description>
    <pubDate>Sat, 13 Nov 2021 19:39:35 GMT</pubDate>
    <dc:creator>Anonymous</dc:creator>
    <dc:date>2021-11-13T19:39:35Z</dc:date>
    <item>
      <title>Pyspark logging - custom to Azure blob mount directory</title>
      <link>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11004#M6045</link>
      <description>&lt;P&gt;I'm using the logging module to log the events from the job, but it seems the log is creating the file with only 1 line. The consecutive log events are not being recorded. Is there any reference for custom logging in Databricks?&lt;/P&gt;</description>
      <pubDate>Thu, 11 Nov 2021 12:37:20 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11004#M6045</guid>
      <dc:creator>kjoth</dc:creator>
      <dc:date>2021-11-11T12:37:20Z</dc:date>
    </item>
    <item>
      <title>Re: Pyspark logging - custom to Azure blob mount directory</title>
      <link>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11007#M6048</link>
      <description>&lt;P&gt;hi @karthick J​&amp;nbsp;,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;If you want to create a custom logger, then you will need to use log4j to create your logger. The first post will show you how to do it. If you want to save your captured events, then you will need to follow the second post that Kaniz has shared. You will need to parse your data when reading it back.&lt;/P&gt;</description>
      <pubDate>Fri, 12 Nov 2021 23:33:57 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11007#M6048</guid>
      <dc:creator>jose_gonzalez</dc:creator>
      <dc:date>2021-11-12T23:33:57Z</dc:date>
    </item>
    <item>
      <title>Re: Pyspark logging - custom to Azure blob mount directory</title>
      <link>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11008#M6049</link>
      <description>&lt;P&gt;Thanks for the answer.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Instead of cluster logs, I want to log only specific program run logs to a file with rotation. Is there a way to write the logs to a mounted blob storage directory with append mode? I have read that in Databricks, file writes don't support append mode.&lt;/P&gt;</description>
      <pubDate>Sat, 13 Nov 2021 15:02:07 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11008#M6049</guid>
      <dc:creator>kjoth</dc:creator>
      <dc:date>2021-11-13T15:02:07Z</dc:date>
    </item>
    <item>
      <title>Re: Pyspark logging - custom to Azure blob mount directory</title>
      <link>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11009#M6050</link>
      <description>&lt;P&gt;Thank you for the answer&lt;/P&gt;</description>
      <pubDate>Sat, 13 Nov 2021 15:03:27 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11009#M6050</guid>
      <dc:creator>kjoth</dc:creator>
      <dc:date>2021-11-13T15:03:27Z</dc:date>
    </item>
    <item>
      <title>Re: Pyspark logging - custom to Azure blob mount directory</title>
      <link>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11010#M6051</link>
      <description>&lt;P&gt;@karthick J​&amp;nbsp;- If Jose's answer helped solve the issue, would you be happy to mark their answer as best so that others can find the solution more easily?&lt;/P&gt;</description>
      <pubDate>Sat, 13 Nov 2021 19:39:35 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pyspark-logging-custom-to-azure-blob-mount-directory/m-p/11010#M6051</guid>
      <dc:creator>Anonymous</dc:creator>
      <dc:date>2021-11-13T19:39:35Z</dc:date>
    </item>
  </channel>
</rss>

