<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Topic: Does Spark Structured streaming query require write-ahead logs for at least once delivery with ForeachBatch output sink? in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/does-spark-structured-streaming-query-require-write-ahead-logs/m-p/4040#M875</link>
    <description>&lt;P&gt;Is it sufficient to use the checkpoint directory with write-ahead logs?&lt;/P&gt;&lt;P&gt;BTW. I use Kafka connector to read data from EventHub&lt;/P&gt;</description>
    <pubDate>Tue, 23 May 2023 22:01:22 GMT</pubDate>
    <dc:creator>DimaP</dc:creator>
    <dc:date>2023-05-23T22:01:22Z</dc:date>
    <item>
      <title>Does Spark Structured streaming query require write-ahead logs for at least once delivery with ForeachBatch output sink?</title>
      <link>https://community.databricks.com/t5/data-engineering/does-spark-structured-streaming-query-require-write-ahead-logs/m-p/4040#M875</link>
      <description>&lt;P&gt;Is it sufficient to use the checkpoint directory with write-ahead logs?&lt;/P&gt;&lt;P&gt;BTW. I use Kafka connector to read data from EventHub&lt;/P&gt;</description>
      <pubDate>Tue, 23 May 2023 22:01:22 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/does-spark-structured-streaming-query-require-write-ahead-logs/m-p/4040#M875</guid>
      <dc:creator>DimaP</dc:creator>
      <dc:date>2023-05-23T22:01:22Z</dc:date>
    </item>
  </channel>
</rss>

