<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
  <channel>
    <title>topic Adding spark_conf tag on Jobs API in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/adding-spark-conf-tag-on-jobs-api/m-p/8806#M4337</link>
    <description>&lt;P&gt;using &lt;A href="https://docs.databricks.com/dev-tools/api/latest/jobs.html?&amp;amp;_ga=2.57382760.395717715.1676995292-443605115.1676995292#operation/JobsCreate" alt="https://docs.databricks.com/dev-tools/api/latest/jobs.html?&amp;amp;_ga=2.57382760.395717715.1676995292-443605115.1676995292#operation/JobsCreate" target="_blank"&gt;Jobs API&lt;/A&gt;, when we create a new job to run on an interactive cluster, can we add &lt;B&gt;&lt;I&gt;spark_conf &lt;/I&gt;&lt;/B&gt;tag and specify spark config tuning parameters?  &lt;/P&gt;&lt;P&gt;&lt;/P&gt;</description>
    <pubDate>Thu, 23 Feb 2023 20:31:12 GMT</pubDate>
    <dc:creator>shan_chandra</dc:creator>
    <dc:date>2023-02-23T20:31:12Z</dc:date>
    <item>
      <title>Adding spark_conf tag on Jobs API</title>
      <link>https://community.databricks.com/t5/data-engineering/adding-spark-conf-tag-on-jobs-api/m-p/8806#M4337</link>
      <description>&lt;P&gt;using &lt;A href="https://docs.databricks.com/dev-tools/api/latest/jobs.html?&amp;amp;_ga=2.57382760.395717715.1676995292-443605115.1676995292#operation/JobsCreate" alt="https://docs.databricks.com/dev-tools/api/latest/jobs.html?&amp;amp;_ga=2.57382760.395717715.1676995292-443605115.1676995292#operation/JobsCreate" target="_blank"&gt;Jobs API&lt;/A&gt;, when we create a new job to run on an interactive cluster, can we add &lt;B&gt;&lt;I&gt;spark_conf &lt;/I&gt;&lt;/B&gt;tag and specify spark config tuning parameters?  &lt;/P&gt;&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 23 Feb 2023 20:31:12 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/adding-spark-conf-tag-on-jobs-api/m-p/8806#M4337</guid>
      <dc:creator>shan_chandra</dc:creator>
      <dc:date>2023-02-23T20:31:12Z</dc:date>
    </item>
    <item>
      <title>Re: Adding spark_conf tag on Jobs API</title>
      <link>https://community.databricks.com/t5/data-engineering/adding-spark-conf-tag-on-jobs-api/m-p/8807#M4338</link>
      <description>&lt;P&gt;&amp;nbsp;&lt;B&gt;&lt;I&gt;spark_conf &lt;/I&gt;&lt;/B&gt;needs to be set prior to the start of the cluster or have to restart the existing cluster. Hence, the spark_conf tag is available only on the job_cluster. you may have to set the configs manually on the interactive cluster prior to using Jobs API. we cannot change interactive clusters from Jobs API only restart them if they have terminated. &lt;/P&gt;</description>
      <pubDate>Thu, 23 Feb 2023 20:53:26 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/adding-spark-conf-tag-on-jobs-api/m-p/8807#M4338</guid>
      <dc:creator>shan_chandra</dc:creator>
      <dc:date>2023-02-23T20:53:26Z</dc:date>
    </item>
  </channel>
</rss>

