<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
  <channel>
    <title>topic Re: how to pass secrets keys using a spark_python_task in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/114378#M44796</link>
    <description>&lt;P&gt;ping, it's important &lt;span class="lia-unicode-emoji" title=":disappointed_face:"&gt;😞&lt;/span&gt;&lt;/P&gt;</description>
    <pubDate>Thu, 03 Apr 2025 08:53:30 GMT</pubDate>
    <dc:creator>jeremy98</dc:creator>
    <dc:date>2025-04-03T08:53:30Z</dc:date>
    <item>
      <title>how to pass secrets keys using a spark_python_task</title>
      <link>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/114373#M44792</link>
      <description>&lt;P&gt;Hello community,&lt;/P&gt;&lt;P&gt;I was searching a way to pass secrets to spark_python_task. Using a notebook file is easy, it's only to use dbutils.secrets.get(...) but how to do the same thing using a spark_python_task set using serveless compute?&lt;/P&gt;&lt;P&gt;Kind regards,&lt;/P&gt;</description>
      <pubDate>Thu, 03 Apr 2025 07:52:11 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/114373#M44792</guid>
      <dc:creator>jeremy98</dc:creator>
      <dc:date>2025-04-03T07:52:11Z</dc:date>
    </item>
    <item>
      <title>Re: how to pass secrets keys using a spark_python_task</title>
      <link>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/114378#M44796</link>
      <description>&lt;P&gt;ping, it's important &lt;span class="lia-unicode-emoji" title=":disappointed_face:"&gt;😞&lt;/span&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 03 Apr 2025 08:53:30 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/114378#M44796</guid>
      <dc:creator>jeremy98</dc:creator>
      <dc:date>2025-04-03T08:53:30Z</dc:date>
    </item>
    <item>
      <title>Re: how to pass secrets keys using a spark_python_task</title>
      <link>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/114388#M44799</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/133094"&gt;@jeremy98&lt;/a&gt;. To securely access secrets in a spark_python_task with serverless compute in Databricks... first create a secret scope and add secrets (refer to &lt;A href="https://docs.databricks.com/aws/en/security/secrets" target="_self"&gt;this article&lt;/A&gt;). Then, pass secrets by injecting them into environment variables in the job configuration and access them in your Python script using os.environ().&lt;/P&gt;&lt;P&gt;You can also check out &lt;A href="https://blog.ah.technology/using-secrets-from-databricks-secret-scopes-in-jobs-743aef880b9c" target="_self"&gt;this blog&lt;/A&gt; for more details.&lt;/P&gt;</description>
      <pubDate>Thu, 03 Apr 2025 10:12:20 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/114388#M44799</guid>
      <dc:creator>Renu_</dc:creator>
      <dc:date>2025-04-03T10:12:20Z</dc:date>
    </item>
    <item>
      <title>Re: how to pass secrets keys using a spark_python_task</title>
      <link>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/136428#M50565</link>
      <description>&lt;P&gt;&lt;a href="https://community.databricks.com/t5/user/viewprofilepage/user-id/151751"&gt;@Renu_&lt;/a&gt;&amp;nbsp; but passing them as spark_env will not work with serverless I guess? See also the limitations on the docs&amp;nbsp;&amp;nbsp;&lt;A href="https://docs.databricks.com/aws/en/compute/serverless/limitations#compute-specific-limitations" target="_blank"&gt;Serverless compute limitations | Databricks on AWS&lt;/A&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Tue, 28 Oct 2025 15:44:45 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/how-to-pass-secrets-keys-using-a-spark-python-task/m-p/136428#M50565</guid>
      <dc:creator>analytics_eng</dc:creator>
      <dc:date>2025-10-28T15:44:45Z</dc:date>
    </item>
  </channel>
</rss>

