<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Re: Databricks Workflow Orchestration in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/databricks-workflow-orchestration/m-p/110180#M43513</link>
    <description>&lt;P&gt;Break up these 50 tables logically or functionally and place them in their own workflows. A good strategy would be to group tables that are dependent in the same workflow. Then use a master workflow to trigger each child workflow. So it will be like a branching workflow where a parent can trigger other workflows. A single workflow can have up to 100 tasks. But you can work around this by having 1 workflow trigger other workflows and in this way bypass that limit.&lt;/P&gt;</description>
    <pubDate>Fri, 14 Feb 2025 08:06:08 GMT</pubDate>
    <dc:creator>Edthehead</dc:creator>
    <dc:date>2025-02-14T08:06:08Z</dc:date>
    <item>
      <title>Databricks Workflow Orchestration</title>
      <link>https://community.databricks.com/t5/data-engineering/databricks-workflow-orchestration/m-p/110110#M43490</link>
      <description>&lt;P&gt;I have 50 tables and will increase gradually, so I want to create a single workflow to orchestrate the job and run it table-wise. Is there an option to do this in Databricks workflow?&lt;/P&gt;</description>
      <pubDate>Thu, 13 Feb 2025 12:18:59 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/databricks-workflow-orchestration/m-p/110110#M43490</guid>
      <dc:creator>shan-databricks</dc:creator>
      <dc:date>2025-02-13T12:18:59Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks Workflow Orchestration</title>
      <link>https://community.databricks.com/t5/data-engineering/databricks-workflow-orchestration/m-p/110180#M43513</link>
      <description>&lt;P&gt;Break up these 50 tables logically or functionally and place them in their own workflows. A good strategy would be to group tables that are dependent in the same workflow. Then use a master workflow to trigger each child workflow. So it will be like a branching workflow where a parent can trigger other workflows. A single workflow can have up to 100 tasks. But you can work around this by having 1 workflow trigger other workflows and in this way bypass that limit.&lt;/P&gt;</description>
      <pubDate>Fri, 14 Feb 2025 08:06:08 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/databricks-workflow-orchestration/m-p/110180#M43513</guid>
      <dc:creator>Edthehead</dc:creator>
      <dc:date>2025-02-14T08:06:08Z</dc:date>
    </item>
    <item>
      <title>Re: Databricks Workflow Orchestration</title>
      <link>https://community.databricks.com/t5/data-engineering/databricks-workflow-orchestration/m-p/110346#M43550</link>
      <description>&lt;P&gt;Thank you, I will try this.&lt;/P&gt;</description>
      <pubDate>Mon, 17 Feb 2025 02:21:03 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/databricks-workflow-orchestration/m-p/110346#M43550</guid>
      <dc:creator>shan-databricks</dc:creator>
      <dc:date>2025-02-17T02:21:03Z</dc:date>
    </item>
  </channel>
</rss>

