<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Pass Typesafe config file to the Spark Submit Job in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14361#M8868</link>
    <description>&lt;P&gt;Hi @Praveen Kumar Bachu​&amp;nbsp;,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;There are several limitations for&amp;nbsp;spark-submit&amp;nbsp;tasks:&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;You can run spark-submit tasks only on new clusters.&lt;/LI&gt;&lt;LI&gt;Spark-submit does not support cluster autoscaling. To learn more about autoscaling, see&amp;nbsp;&lt;A href="https://docs.databricks.com/clusters/configure.html#autoscaling" alt="https://docs.databricks.com/clusters/configure.html#autoscaling" target="_blank"&gt;Cluster autoscaling&lt;/A&gt;.&lt;/LI&gt;&lt;LI&gt;Spark-submit does not support&amp;nbsp;&lt;A href="https://docs.databricks.com/dev-tools/databricks-utils.html" alt="https://docs.databricks.com/dev-tools/databricks-utils.html" target="_blank"&gt;Databricks Utilities&lt;/A&gt;. To use Databricks Utilities, use JAR tasks instead.&lt;/LI&gt;&lt;LI&gt;For more information on which parameters may be passed to a spark-submit task, see&amp;nbsp;&lt;A href="https://docs.databricks.com/dev-tools/api/latest/jobs.html#jobssparksubmittask" alt="https://docs.databricks.com/dev-tools/api/latest/jobs.html#jobssparksubmittask" target="_blank"&gt;SparkSubmitTask&lt;/A&gt;.&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;Please check the docs for more information &lt;A href="https://docs.databricks.com/dev-tools/api/latest/jobs.html#jobssparksubmittask" target="_blank"&gt;https://docs.databricks.com/dev-tools/api/latest/jobs.html#jobssparksubmittask&lt;/A&gt;&lt;/P&gt;</description>
    <pubDate>Tue, 28 Sep 2021 17:42:36 GMT</pubDate>
    <dc:creator>jose_gonzalez</dc:creator>
    <dc:date>2021-09-28T17:42:36Z</dc:date>
    <item>
      <title>Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14357#M8864</link>
      <description>&lt;P&gt;&lt;/P&gt;&lt;P&gt;Hello everyone ! &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I am trying to pass a Typesafe config file to the spark submit task and print the details in the config file. &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Code:&amp;nbsp;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;import org.slf4j.{Logger, LoggerFactory}
 
import com.typesafe.config.{Config, ConfigFactory}
 
import org.apache.spark.sql.SparkSession
 
 
 
 
 
object Bootstrap extends MyLogging {
 
 
 
 val spark: SparkSession = SparkSession.builder.enableHiveSupport().getOrCreate()
 
 
 
 val config: Config = ConfigFactory.load("application.conf")
 
 
 
 def main(args: Array[String]): Unit = {
 
   val url: String = config.getString("db.url")
 
  val user: String = config.getString("db.user")
 
   println(url)
 
  println(user)
 
 }
 
}&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;application.conf file : &lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;db {
  url = "jdbc:postgresql://localhost:5432/test"
  user = "test"
}&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I have uploaded the file to the dbfs and using the path to create the job.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;B&gt;Spark submit job json : &lt;/B&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;{
  "new_cluster": {
    "spark_version": "6.4.x-esr-scala2.11",
    "azure_attributes": {
      "availability": "ON_DEMAND_AZURE",
      "first_on_demand": 1,
      "spot_bid_max_price": -1
    },
    "node_type_id": "Standard_DS3_v2",
    "enable_elastic_disk": true,
    "num_workers": 1
  },
  "spark_submit_task": {
    "parameters": [
      "--class",
      "Bootstrap",
      "--conf",
      "spark.driver.extraClassPath=dbfs:/tmp/",
      "--conf",
      "spark.executor.extraClassPath=dbfs:/tmp/",
      "--files",
      "dbfs:/tmp/application.conf",
      "dbfs:/tmp/code-assembly-0.1.0.jar"
    ]
  },
  "email_notifications": {},
  "name": "application-conf-test",
  "max_concurrent_runs": 1
}&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;I have used above json to create the spark submit job and tried to run the spark-submit job using databricks CLI commands.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Error : &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;Exception in thread "main" com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'db'
	at com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:124)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:147)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:159)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:164)
	at com.typesafe.config.impl.SimpleConfig.getString(SimpleConfig.java:206)
	at Bootstrap$.main(Test.scala:16)
	at Bootstrap.main(Test.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:845)
	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:920)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:929)&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;I can see the below line in logs but the file is not getting loaded.&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;21/09/22 07:21:43 INFO SparkContext: Added file dbfs:/tmp/application.conf at dbfs:/tmp/application.conf with timestamp 1632295303654
21/09/22 07:21:43 INFO Utils: Fetching dbfs:/tmp/application.conf to /local_disk0/spark-20456b30-fddd-42d7-9b23-9e4c0d3c91cd/userFiles-ee199161-6f48-4c47-b1c7-763ce7c0895f/fetchFileTemp4713981355306806616.tmp&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Please help me in passing this typesafe config file to the spark-submit job using the appropriate spark submit job parameters.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 27 Sep 2021 08:22:34 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14357#M8864</guid>
      <dc:creator>Praveen</dc:creator>
      <dc:date>2021-09-27T08:22:34Z</dc:date>
    </item>
    <item>
      <title>Re: Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14359#M8866</link>
      <description>&lt;P&gt;Thank you so much @Kaniz Fatma​&amp;nbsp;, I'm looking forward to the answer!&lt;/P&gt;</description>
      <pubDate>Tue, 28 Sep 2021 12:58:06 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14359#M8866</guid>
      <dc:creator>Praveen</dc:creator>
      <dc:date>2021-09-28T12:58:06Z</dc:date>
    </item>
    <item>
      <title>Re: Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14361#M8868</link>
      <description>&lt;P&gt;Hi @Praveen Kumar Bachu​&amp;nbsp;,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;There are several limitations for&amp;nbsp;spark-submit&amp;nbsp;tasks:&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;You can run spark-submit tasks only on new clusters.&lt;/LI&gt;&lt;LI&gt;Spark-submit does not support cluster autoscaling. To learn more about autoscaling, see&amp;nbsp;&lt;A href="https://docs.databricks.com/clusters/configure.html#autoscaling" alt="https://docs.databricks.com/clusters/configure.html#autoscaling" target="_blank"&gt;Cluster autoscaling&lt;/A&gt;.&lt;/LI&gt;&lt;LI&gt;Spark-submit does not support&amp;nbsp;&lt;A href="https://docs.databricks.com/dev-tools/databricks-utils.html" alt="https://docs.databricks.com/dev-tools/databricks-utils.html" target="_blank"&gt;Databricks Utilities&lt;/A&gt;. To use Databricks Utilities, use JAR tasks instead.&lt;/LI&gt;&lt;LI&gt;For more information on which parameters may be passed to a spark-submit task, see&amp;nbsp;&lt;A href="https://docs.databricks.com/dev-tools/api/latest/jobs.html#jobssparksubmittask" alt="https://docs.databricks.com/dev-tools/api/latest/jobs.html#jobssparksubmittask" target="_blank"&gt;SparkSubmitTask&lt;/A&gt;.&lt;/LI&gt;&lt;/UL&gt;&lt;P&gt;Please check the docs for more information &lt;A href="https://docs.databricks.com/dev-tools/api/latest/jobs.html#jobssparksubmittask" target="_blank"&gt;https://docs.databricks.com/dev-tools/api/latest/jobs.html#jobssparksubmittask&lt;/A&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 28 Sep 2021 17:42:36 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14361#M8868</guid>
      <dc:creator>jose_gonzalez</dc:creator>
      <dc:date>2021-09-28T17:42:36Z</dc:date>
    </item>
    <item>
      <title>Re: Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14362#M8869</link>
      <description>&lt;P&gt;Hi @Jose Gonzalez​&amp;nbsp;,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Thanks for the reply and Yes I have gone through all the docs and steps regarding the spark-submit task and my above question is more about the passing config file  in the spark-submit task.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Please re-check the above steps and let me know if that helps , if not we will write more details about  what we are trying to do and you can tell whether it is possible in databricks or not.&lt;/P&gt;</description>
      <pubDate>Wed, 29 Sep 2021 05:53:21 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14362#M8869</guid>
      <dc:creator>Praveen</dc:creator>
      <dc:date>2021-09-29T05:53:21Z</dc:date>
    </item>
    <item>
      <title>Re: Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14363#M8870</link>
      <description>&lt;P&gt;Hi @Praveen Kumar Bachu​&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;The error shows that the job was not able to read your configuration. It means that the only way to pass your configurations is thru submit parameters. &lt;/P&gt;</description>
      <pubDate>Fri, 01 Oct 2021 19:49:16 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14363#M8870</guid>
      <dc:creator>jose_gonzalez</dc:creator>
      <dc:date>2021-10-01T19:49:16Z</dc:date>
    </item>
    <item>
      <title>Re: Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14364#M8871</link>
      <description>&lt;P&gt;Hi @Jose Gonzalez​&amp;nbsp;,&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Please see the below spark-submit json and few more examples we have tried with Spark submit parameters &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;B&gt;spark-submit json:&lt;/B&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;{
  "new_cluster": {
    "spark_version": "6.4.x-esr-scala2.11",
    "azure_attributes": {
      "availability": "ON_DEMAND_AZURE",
      "first_on_demand": 1,
      "spot_bid_max_price": -1
    },
    "node_type_id": "Standard_DS3_v2",
    "enable_elastic_disk": true,
    "num_workers": 1
  },
  "spark_submit_task": {
    "parameters": [
      "--class",
      "Bootstrap",
      "--conf",
      "spark.driver.extraClassPath=dbfs:/tmp/",
      "--conf",
      "spark.executor.extraClassPath=dbfs:/tmp/",
      "--files",
      "dbfs:/tmp/application.conf",
      "dbfs:/tmp/code-assembly-0.1.0.jar"
    ]
  },
  "email_notifications": {},
  "name": "application-conf-test",
  "max_concurrent_runs": 1
}&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;We have tried below &lt;B&gt;spark_submit_task parameters&lt;/B&gt; in the above json &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;[
  "--class",
  "Bootstrap",
  "--conf",
  "spark.driver.extraClassPath=/tmp/application.conf",
  "--files",
  "dbfs:/tmp/application.conf",
  "dbfs:/tmp/code-assembly-0.1.0.jar"
]&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;[
  "--class",
  "Bootstrap",
  "--conf",
  "spark.driver.extraClassPath=/tmp/",
  "--conf",
  "spark.executor.extraClassPath=/tmp/",
  "--files",
  "dbfs:/tmp/application.conf",
  "dbfs:/tmp/code-assembly-0.1.0.jar"
]&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;[
  "--class",
  "Bootstrap",
  "--conf",
  "spark.driver.extraClassPath=dbfs:/tmp/application.conf",
  "--conf",
  "spark.executor.extraClassPath=dbfs:/tmp/application.conf",
  "--files",
  "dbfs:/tmp/application.conf",
  "dbfs:/tmp/code-assembly-0.1.0.jar"
]&lt;/CODE&gt;&lt;/PRE&gt;&lt;PRE&gt;&lt;CODE&gt;[
  "--class",
  "Bootstrap",
  "--conf",
  "spark.driver.extraClassPath=dbfs:/tmp/",
  "--conf",
  "spark.executor.extraClassPath=dbfs:/tmp/",
  "--files",
  "dbfs:/tmp/application.conf",
  "dbfs:/tmp/code-assembly-0.1.0.jar"
]&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;[
  "--class",
  "Bootstrap",
  "--conf",
  "spark.driver.extraClassPath=dbfs:./",
  "--conf",
  "spark.executor.extraClassPath=dbfs:./",
  "--files",
  "dbfs:/tmp/application.conf",
  "dbfs:/tmp/code-assembly-0.1.0.jar"
]&lt;/CODE&gt;&lt;/PRE&gt;&lt;PRE&gt;&lt;CODE&gt;[
  "--class",
  "Bootstrap",
  "--driver-java-options",
  "-Dconfig.file=application.conf",
  "--conf",
  "spark.executor.extraJavaOptions=-Dconfig.file=application.conf",
  "--files",
  "dbfs:/tmp/application.conf",
  "dbfs:/tmp/code-assembly-0.1.0.jar"
]&lt;/CODE&gt;&lt;/PRE&gt;&lt;PRE&gt;&lt;CODE&gt;[
  "--class",
  "Bootstrap",
  "--conf",
  "spark.driver.extraJavaOptions=-Dconfig.file=application.conf",
  "--conf",
  "spark.executor.extraJavaOptions=-Dconfig.file=application.conf",
  "--files",
  "dbfs:/tmp/application.conf",
  "dbfs:/tmp/code-assembly-0.1.0.jar"
]&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;For all the above &lt;B&gt;spark_submit_task parameters , &lt;/B&gt; we are facing the same below specified error.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;Exception in thread "main" com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'db'
	at com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:124)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:147)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:159)
	at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:164)
	at com.typesafe.config.impl.SimpleConfig.getString(SimpleConfig.java:206)
	at Bootstrap.main(Test.scala:16)
	at Bootstrap.main(Test.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:845)
	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:920)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:929)&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Please can you quickly help on this as we need this implementation &lt;/P&gt;</description>
      <pubDate>Tue, 05 Oct 2021 14:58:32 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14364#M8871</guid>
      <dc:creator>Praveen</dc:creator>
      <dc:date>2021-10-05T14:58:32Z</dc:date>
    </item>
    <item>
      <title>Re: Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14365#M8872</link>
      <description>&lt;P&gt;What's the code inside your main?&lt;/P&gt;</description>
      <pubDate>Tue, 15 Mar 2022 00:38:37 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14365#M8872</guid>
      <dc:creator>jose_gonzalez</dc:creator>
      <dc:date>2022-03-15T00:38:37Z</dc:date>
    </item>
    <item>
      <title>Re: Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14366#M8873</link>
      <description>&lt;P&gt;Hi @Praveen Kumar Bachu​&amp;nbsp; Could you please try the below approach? Let me if this works for you.&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;import java.io.File
import org.apache.spark.SparkFiles
&amp;nbsp;
//use parseFile instead of load
val config: Config = ConfigFactory.parseFile( new File(SparkFiles.get("application.conf")))&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt; note: you will need to pass the file using  --files &lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;"--files",
      "dbfs:/tmp/application.conf",&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 15 Mar 2022 08:21:05 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14366#M8873</guid>
      <dc:creator>User16763506477</dc:creator>
      <dc:date>2022-03-15T08:21:05Z</dc:date>
    </item>
    <item>
      <title>Re: Pass Typesafe config file to the Spark Submit Job</title>
      <link>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14367#M8874</link>
      <description>&lt;P&gt;I've experienced similar issues; please help to answer how to get this working;&lt;/P&gt;&lt;P&gt;I've tried using below to be either /dbfs/mnt/blah path or dbfs:/mnt/blah path&lt;/P&gt;&lt;P&gt;in either &lt;B&gt;spark_submit_task&lt;/B&gt; or &lt;B&gt;spark_jar_task &lt;/B&gt;(via cluster spark_conf for java options); no success.&lt;/P&gt;&lt;P&gt;&lt;B&gt;spark.driver.extraJavaOptions&lt;/B&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;NOTE: TESTING VIA NOTEBOOK using the extraJavaOptions had no problems. (but we did notice, in the notebook, &lt;/P&gt;&lt;P&gt;below command would not succeed unless we try to ls the parent folders 1 by 1 first.&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;ls /dbfs/mnt/glue-artifacts/conf-staging-env/application.conf
cat /dbfs/mnt/glue-artifacts/conf-staging-env/application.conf&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&amp;nbsp;see below snippet; &lt;/P&gt;&lt;P&gt; spark_submit_task= {&lt;/P&gt;&lt;P&gt; &lt;B&gt;"parameters"&lt;/B&gt;: [&lt;/P&gt;&lt;P&gt; &lt;B&gt;"--class"&lt;/B&gt;, &lt;B&gt;"com.source2sea.glue.GlueMain"&lt;/B&gt;,&lt;/P&gt;&lt;P&gt; &lt;B&gt;"--conf"&lt;/B&gt;, &lt;B&gt;f"spark.driver.extraJavaOptions={&lt;/B&gt;java_option_d_config_file&lt;B&gt;}"&lt;/B&gt;,&lt;/P&gt;&lt;P&gt; &lt;B&gt;"--files"&lt;/B&gt;, conf_path,&lt;/P&gt;&lt;P&gt; jar_full_path, MY-PARAMETERS&lt;/P&gt;&lt;P&gt;&lt;I&gt; &lt;/I&gt;]&lt;/P&gt;&lt;P&gt;}&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;in my scala code i have code like this (use pureConfig, which is a wrapper of typeSafeConfig, ensured this is done: &lt;A href="https://pureconfig.github.io/docs/faq.html#how-can-i-use-pureconfig-with-spark-210-problematic-shapeless-dependency" alt="https://pureconfig.github.io/docs/faq.html#how-can-i-use-pureconfig-with-spark-210-problematic-shapeless-dependency" target="_blank"&gt;https://pureconfig.github.io/docs/faq.html#how-can-i-use-pureconfig-with-spark-210-problematic-shapeless-dependency&lt;/A&gt;), &lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;val source = defaultOverrides.withFallback(defaultApplication).withFallback(defaultReference)
&amp;nbsp;
def read(source: ConfigObjectSource): Either[Throwable, AppConfig] = {
&amp;nbsp;
  implicit def hint[A] = ProductHint[A](ConfigFieldMapping(CamelCase, CamelCase))
&amp;nbsp;
  logger.debug(s"Loading configuration ${source.config()}")
&amp;nbsp;&amp;nbsp;
  val original: Either[ConfigReaderFailures, AppConfig] = source.load[AppConfig]
&amp;nbsp;
  logger.info(s"Loaded and casted configuration ${original}")
&amp;nbsp;
  original.leftMap[Throwable](ConfigReaderException.apply)
&amp;nbsp;
}&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;error log&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;23/04/25 13:45:49 INFO AppConfig$: Loaded and casted configuration Left(ConfigReaderFailures(ThrowableFailure(shaded.com.typesafe.config.ConfigException$IO: dbfs:/mnt/glue-artifacts/conf-staging-env/application.conf: java.io.FileNotFoundException: dbfs:/mnt/glue-artifacts/conf-staging-env/application.conf (No such file or directory),Some(ConfigOrigin(dbfs:/mnt/glue-artifacts/conf-staging-env/application.conf)))))
23/04/25 13:45:49 ERROR GlueMain$: Glue failure
pureconfig.error.ConfigReaderException: Cannot convert configuration to a scala.runtime.Nothing$. Failures are:
  - (dbfs:/mnt/glue-artifacts/conf-staging-env/application.conf) dbfs:/mnt/glue-artifacts/conf-staging-env/application.conf: java.io.FileNotFoundException: dbfs:/mnt/glue-artifacts/conf-staging-env/application.conf (No such file or directory).
&amp;nbsp;
&amp;nbsp;
or
&amp;nbsp;
&amp;nbsp;
23/04/25 12:46:10 INFO AppConfig$: Loaded and casted configuration Left(ConfigReaderFailures(ThrowableFailure(shaded.com.typesafe.config.ConfigException$IO: /dbfs/mnt/glue-artifacts/conf-staging-env/application.conf: java.io.FileNotFoundException: /dbfs/mnt/glue-artifacts/conf-staging-env/application.conf (No such file or directory),Some(ConfigOrigin(/dbfs/mnt/glue-artifacts/conf-staging-env/application.conf)))))
23/04/25 12:46:10 ERROR GlueMain$: Glue failure
pureconfig.error.ConfigReaderException: Cannot convert configuration to a scala.runtime.Nothing$. Failures are:
  - (/dbfs/mnt/glue-artifacts/conf-staging-env/application.conf) /dbfs/mnt/glue-artifacts/conf-staging-env/application.conf: java.io.FileNotFoundException: /dbfs/mnt/glue-artifacts/conf-staging-env/application.conf (No such file or directory).
&amp;nbsp;
	at com.source2sea.glue.config.AppConfig$.$anonfun$read$2(AppConfig.scala:31)&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 25 Apr 2023 13:54:24 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/pass-typesafe-config-file-to-the-spark-submit-job/m-p/14367#M8874</guid>
      <dc:creator>source2sea</dc:creator>
      <dc:date>2023-04-25T13:54:24Z</dc:date>
    </item>
  </channel>
</rss>

