<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Topic: Spark Configuration Parameter for Cluster Downscaling in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/spark-configuration-parameter-for-cluster-downscaling/m-p/48148#M28255</link>
    <description>&lt;P&gt;spark.databricks.aggressiveWindowDownS: This parameter is designed to determine the frequency, in seconds, at which the cluster decides to downscale.&lt;/P&gt;&lt;P&gt;By adjusting this setting, you can fine-tune how rapidly clusters release workers. A higher value will result in the cluster holding onto workers longer before releasing them. The maximum limit for this parameter is set at 600 seconds.&lt;/P&gt;</description>
    <pubDate>Wed, 04 Oct 2023 13:43:43 GMT</pubDate>
    <dc:creator>Hubert-Dudek</dc:creator>
    <dc:date>2023-10-04T13:43:43Z</dc:date>
    <item>
      <title>Spark Configuration Parameter for Cluster Downscaling</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-configuration-parameter-for-cluster-downscaling/m-p/48148#M28255</link>
      <description>&lt;P&gt;spark.databricks.aggressiveWindowDownS: This parameter is designed to determine the frequency, in seconds, at which the cluster decides to downscale.&lt;/P&gt;&lt;P&gt;By adjusting this setting, you can fine-tune how rapidly clusters release workers. A higher value will result in the cluster holding onto workers longer before releasing them. The maximum limit for this parameter is set at 600 seconds.&lt;/P&gt;</description>
      <pubDate>Wed, 04 Oct 2023 13:43:43 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-configuration-parameter-for-cluster-downscaling/m-p/48148#M28255</guid>
      <dc:creator>Hubert-Dudek</dc:creator>
      <dc:date>2023-10-04T13:43:43Z</dc:date>
    </item>
    <item>
      <title>Re: Spark Configuration Parameter for Cluster Downscaling</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-configuration-parameter-for-cluster-downscaling/m-p/48157#M28256</link>
      <description>&lt;P&gt;I wish there was a configuration to toggle upscaling behavior. I want the clusters to scale up only if the bottleneck is approaching 70% memory usage. Currently, the autoscaling is based only on CPU, not memory (RAM).&lt;/P&gt;</description>
      <pubDate>Wed, 04 Oct 2023 14:08:12 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-configuration-parameter-for-cluster-downscaling/m-p/48157#M28256</guid>
      <dc:creator>Haiyangl104</dc:creator>
      <dc:date>2023-10-04T14:08:12Z</dc:date>
    </item>
  </channel>
</rss>

