<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Delta Live tables stream output to Kafka in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/132547#M49539</link>
    <description>&lt;P&gt;I have the same issue, but I am using a Delta Live Tables pipeline. A DLT pipeline does not allow installing a Maven library. How can I resolve this issue?&lt;/P&gt;</description>
    <pubDate>Fri, 19 Sep 2025 10:49:14 GMT</pubDate>
    <dc:creator>Hanfo2back</dc:creator>
    <dc:date>2025-09-19T10:49:14Z</dc:date>
    <item>
      <title>Delta Live tables stream output to Kafka</title>
      <link>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/77192#M35415</link>
      <description>&lt;P&gt;Hello,&lt;/P&gt;&lt;P&gt;Wanted to know if we can write the stream output to a Kafka topic&amp;nbsp;in a DLT pipeline?&lt;/P&gt;&lt;P&gt;Please let me know.&lt;/P&gt;&lt;P&gt;Thank you.&lt;/P&gt;</description>
      <pubDate>Mon, 08 Jul 2024 20:29:22 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/77192#M35415</guid>
      <dc:creator>SharathE</dc:creator>
      <dc:date>2024-07-08T20:29:22Z</dc:date>
    </item>
    <item>
      <title>Re: Delta Live tables stream output to Kafka</title>
      <link>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/77212#M35416</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;Yes, you can write the stream output to a Kafka topic in a Databricks Delta Live Tables (DLT) pipeline. Here’s how you can do it:&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;Set Up Kafka Configuration: Ensure you have the necessary Kafka configurations such as Kafka broker URL, topic name, and security settings (if any).&lt;/LI&gt;&lt;LI&gt;Create a DLT Pipeline: Set up a Delta Live Table pipeline in Databricks.&lt;/LI&gt;&lt;LI&gt;Define the Stream: In your DLT pipeline, define the streaming source. For example, this could be a stream from a file, a Delta table, or another streaming source.&lt;/LI&gt;&lt;LI&gt;Write to Kafka: Use the writeStream method with Kafka options to send the stream to a Kafka topic.&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;Here’s a basic example in PySpark:&lt;/P&gt;&lt;P&gt;&lt;span class="lia-inline-image-display-wrapper lia-image-align-inline" image-alt="carbon.png" style="width: 720px;"&gt;&lt;img src="https://community.databricks.com/t5/image/serverpage/image-id/9289i4BE3CF91E2D2EAEE/image-size/medium/is-moderation-mode/true?v=v2&amp;amp;px=400" role="button" title="carbon.png" alt="carbon.png" /&gt;&lt;/span&gt;&lt;/P&gt;&lt;P&gt; &lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 08 Jul 2024 23:03:02 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/77212#M35416</guid>
      <dc:creator>mtajmouati</dc:creator>
      <dc:date>2024-07-08T23:03:02Z</dc:date>
    </item>
    <item>
      <title>Re: Delta Live tables stream output to Kafka</title>
      <link>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/77524#M35445</link>
      <description>&lt;P&gt;Any sample code snippet for connecting to&amp;nbsp;&lt;SPAN&gt;ScramLoginModule&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;I'm using below code to push the data to kafka topic and getting error saying&amp;nbsp;&lt;/SPAN&gt;&lt;/P&gt;&lt;DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;Job aborted due to stage failure: Task 15 in stage 879.0 failed 4 times, most recent failure: Lost task 15.3 in stage 879.0 (TID 9663) (executor 0): kafkashaded.org.apache.kafka.common.KafkaException: Failed to construct kafka producer at kafkashaded.org.apache.kafka.clients.producer.KafkaProducer.&amp;lt;init&amp;gt;(KafkaProducer.java:465) at kafkashaded.org.apache.kafka.clients.producer.KafkaProducer.&amp;lt;init&amp;gt;(KafkaProducer.java:290) at kafkashaded.org.apache.kafka.clients.producer.KafkaProducer.&amp;lt;init&amp;gt;(KafkaProducer.java:273)&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&amp;nbsp;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;Caused by: kafkashaded.org.apache.kafka.common.KafkaException: javax.security.auth.login.LoginException: unable to find LoginModule class: org.apache.kafka.common.security.scram.ScramLoginModule&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&amp;nbsp;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;code:&lt;/SPAN&gt;&lt;/DIV&gt;&lt;DIV&gt;&lt;SPAN&gt;df1 = df.selectExpr("CAST(null AS STRING) as key", "to_json(struct(*)) AS value") \&lt;BR /&gt;.writeStream \&lt;BR /&gt;.format("kafka") \&lt;BR /&gt;.option("kafka.bootstrap.servers", "broker details") \&lt;BR /&gt;.option("kafka.security.protocol", "SASL_SSL") \&lt;BR /&gt;.option("kafka.sasl.mechanism", "SCRAM-SHA-512") \&lt;BR /&gt;.option("kafka.sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username='username' password='password';") \&lt;BR /&gt;.option("topic", "topic1") \&lt;BR /&gt;.option("checkpointLocation", "checkpointname") \&lt;BR /&gt;.option("kafka.metadata.max.age.ms", "120000") \&lt;BR /&gt;.start()&lt;/SPAN&gt;&lt;/DIV&gt;&lt;/DIV&gt;</description>
      <pubDate>Tue, 09 Jul 2024 16:42:50 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/77524#M35445</guid>
      <dc:creator>SharathE</dc:creator>
      <dc:date>2024-07-09T16:42:50Z</dc:date>
    </item>
    <item>
      <title>Re: Delta Live tables stream output to Kafka</title>
      <link>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/78090#M35463</link>
      <description>&lt;P&gt;Hi !&amp;nbsp;&lt;/P&gt;&lt;P&gt;Ensure your code is set up to use these libraries. Here is the complete example:&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;&amp;nbsp; &lt;STRONG&gt;Navigate to your cluster configuration:&lt;/STRONG&gt;&lt;/LI&gt;&lt;/UL&gt;&lt;UL&gt;&lt;LI&gt;Go to your Databricks workspace.&lt;/LI&gt;&lt;LI&gt;Click on "Clusters" and select your cluster.&lt;/LI&gt;&lt;LI&gt;Go to the "Libraries" tab.&lt;/LI&gt;&lt;/UL&gt;&lt;UL&gt;&lt;LI&gt;&amp;nbsp; &lt;STRONG&gt;Install the necessary Maven libraries:&lt;/STRONG&gt;&lt;/LI&gt;&lt;/UL&gt;&lt;UL&gt;&lt;LI&gt;Click on "Install New".&lt;/LI&gt;&lt;LI&gt;Choose "Maven" as the library source.&lt;/LI&gt;&lt;LI&gt;Add the following Maven coordinates:&lt;UL&gt;&lt;LI&gt;org.apache.kafka:kafka-clients:2.8.0&lt;/LI&gt;&lt;LI&gt;org.apache.kafka:kafka_2.13:2.8.0&lt;/LI&gt;&lt;/UL&gt;&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;LI-CODE lang="python"&gt;df1 = df.selectExpr("CAST(null AS STRING) as key", "to_json(struct(*)) AS value")

df1.writeStream \
    .format("kafka") \
    .option("kafka.bootstrap.servers", "your_broker_details") \
    .option("kafka.security.protocol", "SASL_SSL") \
    .option("kafka.sasl.mechanism", "SCRAM-SHA-512") \
    .option("kafka.sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username='your_username' password='your_password';") \
    .option("topic", "your_topic1") \
    .option("checkpointLocation", "/path/to/your/checkpoint") \
    .option("kafka.metadata.max.age.ms", "120000") \
    .start()&lt;/LI-CODE&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;Mehdi TAJMOUATI&lt;/P&gt;&lt;P&gt;&lt;A href="https://www.wytasoft.com/wytasoft-group/" target="_blank" rel="noopener"&gt;WyTaSoft&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&lt;A href="https://medium.com/@mehdi.tajmouati.wytasoft" target="_blank" rel="noopener"&gt;medium&lt;/A&gt;&lt;BR /&gt;&lt;BR /&gt;&lt;A href="https://www.linkedin.com/in/mtajmouati/" target="_blank" rel="noopener"&gt;linkedin&lt;/A&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Wed, 10 Jul 2024 11:19:49 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/78090#M35463</guid>
      <dc:creator>mtajmouati</dc:creator>
      <dc:date>2024-07-10T11:19:49Z</dc:date>
    </item>
    <item>
      <title>Re: Delta Live tables stream output to Kafka</title>
      <link>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/90949#M38039</link>
      <description>&lt;P&gt;Is it possible to have 2 notebooks in a DLT pipeline, with the first notebook reading from topic1 in Kafka and writing to a DLT and the second notebook reading from this DLT, applying some data transformations and write streaming to a topic2 in Kafka? All in streaming mode?&lt;/P&gt;</description>
      <pubDate>Wed, 18 Sep 2024 16:17:06 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/90949#M38039</guid>
      <dc:creator>koarjun</dc:creator>
      <dc:date>2024-09-18T16:17:06Z</dc:date>
    </item>
    <item>
      <title>Re: Delta Live tables stream output to Kafka</title>
      <link>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/132547#M49539</link>
      <description>&lt;P&gt;I have the same issue, but I am using a Delta Live Tables pipeline. A DLT pipeline does not allow installing a Maven library. How can I resolve this issue?&lt;/P&gt;</description>
      <pubDate>Fri, 19 Sep 2025 10:49:14 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/delta-live-tables-stream-output-to-kafka/m-p/132547#M49539</guid>
      <dc:creator>Hanfo2back</dc:creator>
      <dc:date>2025-09-19T10:49:14Z</dc:date>
    </item>
  </channel>
</rss>

