<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Learning Series | DevOps Essentials for Data Engineering in Announcements</title>
    <link>https://community.databricks.com/t5/announcements/learning-series-devops-essentials-for-data-engineering/m-p/155448#M755</link>
    <description>&lt;P&gt;&lt;SPAN&gt;Databricks Academy offers the free &lt;/SPAN&gt;&lt;STRONG&gt;DevOps Essentials&lt;/STRONG&gt; &lt;STRONG&gt;for Data Engineering course&lt;/STRONG&gt;&lt;SPAN&gt;, designed to help data engineers apply &lt;/SPAN&gt;&lt;STRONG&gt;software engineering best practices and DevOps principles&lt;/STRONG&gt;&lt;SPAN&gt; on the Databricks Data Intelligence Platform. Instead of going deep into every tool, this course focuses on the &lt;/SPAN&gt;&lt;STRONG&gt;core habits&lt;/STRONG&gt;&lt;SPAN&gt; that make data pipelines easier to build, test, and maintain over time.&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;FONT size="4"&gt;&lt;STRONG&gt;You’ll learn to:&lt;/STRONG&gt;&lt;/FONT&gt;&lt;/P&gt;
&lt;UL&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Explain the &lt;/SPAN&gt;&lt;STRONG&gt;core principles of software engineering best practices&lt;/STRONG&gt;&lt;SPAN&gt; for data engineering, including code quality, version control, documentation, and testing&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Describe what &lt;/SPAN&gt;&lt;STRONG&gt;DevOps&lt;/STRONG&gt;&lt;SPAN&gt; means for data teams, including its main components, benefits, and how &lt;/SPAN&gt;&lt;STRONG&gt;CI/CD&lt;/STRONG&gt;&lt;SPAN&gt; fits into day-to-day workflows&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Apply &lt;/SPAN&gt;&lt;STRONG&gt;modularity principles in PySpark&lt;/STRONG&gt;&lt;SPAN&gt; to break code into reusable functions and components&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Design and run &lt;/SPAN&gt;&lt;STRONG&gt;unit tests for PySpark functions with pytest&lt;/STRONG&gt;&lt;SPAN&gt;, and perform &lt;/SPAN&gt;&lt;STRONG&gt;integration testing&lt;/STRONG&gt;&lt;SPAN&gt; for Databricks data pipelines using Spark Declarative Pipeline and Jobs&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Use &lt;/SPAN&gt;&lt;STRONG&gt;Git operations in Databricks&lt;/STRONG&gt;&lt;SPAN&gt; with Git Folders to support basic continuous integration workflows&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Compare different &lt;/SPAN&gt;&lt;STRONG&gt;deployment options for Databricks assets&lt;/STRONG&gt;&lt;SPAN&gt; (REST API, CLI, SDK, DABs) so you know which approaches exist and when they might be useful&lt;/SPAN&gt;&lt;/LI&gt;
&lt;/UL&gt;
&lt;P&gt;&lt;FONT size="4"&gt;&lt;STRONG&gt;Designed for:&lt;/STRONG&gt;&lt;/FONT&gt;&lt;/P&gt;
&lt;UL&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Data engineers on Databricks who want to improve the quality and reliability of their pipelines&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Practitioners with solid Databricks Platform experience (workspaces, Delta Lake, Medallion Architecture, Unity Catalog, Delta Live Tables, Workflows)&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Users comfortable with PySpark, intermediate SQL, Python, and basic Git version control&lt;/SPAN&gt;&lt;/LI&gt;
&lt;/UL&gt;
&lt;P&gt;&lt;FONT size="4"&gt;&lt;STRONG&gt;Course format &amp;amp; details:&lt;/STRONG&gt;&lt;/FONT&gt;&lt;/P&gt;
&lt;UL&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;STRONG&gt;Syllabus:&lt;/STRONG&gt;&lt;SPAN&gt; 3 sections | 24 lessons&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;STRONG&gt;Duration:&lt;/STRONG&gt;&lt;SPAN&gt;&amp;nbsp;2 hours&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;STRONG&gt;Skill level:&lt;/STRONG&gt;&lt;SPAN&gt; Associate&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;STRONG style="color: #1b3139; font-family: inherit;"&gt;Cost:&lt;/STRONG&gt;&lt;SPAN&gt; Free&lt;BR /&gt;&lt;/SPAN&gt;&lt;/LI&gt;
&lt;/UL&gt;
&lt;P class="p8i6j01 paragraph"&gt;&lt;A style="background-color: #ff3621; color: white; padding: 10px 20px; text-decoration: none; border-radius: 5px; font-weight: bold; display: inline-block;" href="https://customer-academy.databricks.com/learn/courses/3640/devops-essentials-for-data-engineering" target="_blank" rel="noopener"&gt;&lt;span class="lia-unicode-emoji" title=":link:"&gt;🔗&lt;/span&gt;&amp;nbsp;Enroll Now&amp;nbsp;&lt;span class="lia-unicode-emoji" title=":backhand_index_pointing_left:"&gt;👈&lt;/span&gt;&lt;/A&gt;&lt;/P&gt;</description>
    <pubDate>Fri, 24 Apr 2026 12:06:38 GMT</pubDate>
    <dc:creator>Tushar_Parekar</dc:creator>
    <dc:date>2026-04-24T12:06:38Z</dc:date>
    <item>
      <title>Learning Series | DevOps Essentials for Data Engineering</title>
      <link>https://community.databricks.com/t5/announcements/learning-series-devops-essentials-for-data-engineering/m-p/155448#M755</link>
      <description>&lt;P&gt;&lt;SPAN&gt;Databricks Academy offers the free &lt;/SPAN&gt;&lt;STRONG&gt;DevOps Essentials&lt;/STRONG&gt; &lt;STRONG&gt;for Data Engineering course&lt;/STRONG&gt;&lt;SPAN&gt;, designed to help data engineers apply &lt;/SPAN&gt;&lt;STRONG&gt;software engineering best practices and DevOps principles&lt;/STRONG&gt;&lt;SPAN&gt; on the Databricks Data Intelligence Platform. Instead of going deep into every tool, this course focuses on the &lt;/SPAN&gt;&lt;STRONG&gt;core habits&lt;/STRONG&gt;&lt;SPAN&gt; that make data pipelines easier to build, test, and maintain over time.&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;FONT size="4"&gt;&lt;STRONG&gt;You’ll learn to:&lt;/STRONG&gt;&lt;/FONT&gt;&lt;/P&gt;
&lt;UL&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Explain the &lt;/SPAN&gt;&lt;STRONG&gt;core principles of software engineering best practices&lt;/STRONG&gt;&lt;SPAN&gt; for data engineering, including code quality, version control, documentation, and testing&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Describe what &lt;/SPAN&gt;&lt;STRONG&gt;DevOps&lt;/STRONG&gt;&lt;SPAN&gt; means for data teams, including its main components, benefits, and how &lt;/SPAN&gt;&lt;STRONG&gt;CI/CD&lt;/STRONG&gt;&lt;SPAN&gt; fits into day-to-day workflows&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Apply &lt;/SPAN&gt;&lt;STRONG&gt;modularity principles in PySpark&lt;/STRONG&gt;&lt;SPAN&gt; to break code into reusable functions and components&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Design and run &lt;/SPAN&gt;&lt;STRONG&gt;unit tests for PySpark functions with pytest&lt;/STRONG&gt;&lt;SPAN&gt;, and perform &lt;/SPAN&gt;&lt;STRONG&gt;integration testing&lt;/STRONG&gt;&lt;SPAN&gt; for Databricks data pipelines using Spark Declarative Pipeline and Jobs&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Use &lt;/SPAN&gt;&lt;STRONG&gt;Git operations in Databricks&lt;/STRONG&gt;&lt;SPAN&gt; with Git Folders to support basic continuous integration workflows&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Compare different &lt;/SPAN&gt;&lt;STRONG&gt;deployment options for Databricks assets&lt;/STRONG&gt;&lt;SPAN&gt; (REST API, CLI, SDK, DABs) so you know which approaches exist and when they might be useful&lt;/SPAN&gt;&lt;/LI&gt;
&lt;/UL&gt;
&lt;P&gt;&lt;FONT size="4"&gt;&lt;STRONG&gt;Designed for:&lt;/STRONG&gt;&lt;/FONT&gt;&lt;/P&gt;
&lt;UL&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Data engineers on Databricks who want to improve the quality and reliability of their pipelines&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Practitioners with solid Databricks Platform experience (workspaces, Delta Lake, Medallion Architecture, Unity Catalog, Delta Live Tables, Workflows)&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;SPAN&gt;Users comfortable with PySpark, intermediate SQL, Python, and basic Git version control&lt;/SPAN&gt;&lt;/LI&gt;
&lt;/UL&gt;
&lt;P&gt;&lt;FONT size="4"&gt;&lt;STRONG&gt;Course format &amp;amp; details:&lt;/STRONG&gt;&lt;/FONT&gt;&lt;/P&gt;
&lt;UL&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;STRONG&gt;Syllabus:&lt;/STRONG&gt;&lt;SPAN&gt; 3 sections | 24 lessons&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;STRONG&gt;Duration:&lt;/STRONG&gt;&lt;SPAN&gt;&amp;nbsp;2 hours&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;STRONG&gt;Skill level:&lt;/STRONG&gt;&lt;SPAN&gt; Associate&lt;/SPAN&gt;&lt;/LI&gt;
&lt;LI style="font-weight: 400;" aria-level="1"&gt;&lt;STRONG style="color: #1b3139; font-family: inherit;"&gt;Cost:&lt;/STRONG&gt;&lt;SPAN&gt; Free&lt;BR /&gt;&lt;/SPAN&gt;&lt;/LI&gt;
&lt;/UL&gt;
&lt;P class="p8i6j01 paragraph"&gt;&lt;A style="background-color: #ff3621; color: white; padding: 10px 20px; text-decoration: none; border-radius: 5px; font-weight: bold; display: inline-block;" href="https://customer-academy.databricks.com/learn/courses/3640/devops-essentials-for-data-engineering" target="_blank" rel="noopener"&gt;&lt;span class="lia-unicode-emoji" title=":link:"&gt;🔗&lt;/span&gt;&amp;nbsp;Enroll Now&amp;nbsp;&lt;span class="lia-unicode-emoji" title=":backhand_index_pointing_left:"&gt;👈&lt;/span&gt;&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Fri, 24 Apr 2026 12:06:38 GMT</pubDate>
      <guid>https://community.databricks.com/t5/announcements/learning-series-devops-essentials-for-data-engineering/m-p/155448#M755</guid>
      <dc:creator>Tushar_Parekar</dc:creator>
      <dc:date>2026-04-24T12:06:38Z</dc:date>
    </item>
  </channel>
</rss>

