<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic &quot;Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key&quot; using com.databricks:spark-xml_2.12:0.12.0 in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/quot-failure-to-initialize-configurationinvalid-configuration/m-p/10790#M5875</link>
    <description>&lt;P&gt;Hi community,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I'm trying to read XML data from Azure Datalake Gen 2 using com.databricks:spark-xml_2.12:0.12.0:&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;spark.read.format('XML').load('abfss://[CONTAINER]@[storageaccount].dfs.core.windows.net/PATH/TO/FILE.xml')&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;The code above gives the following exception:&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key
&amp;nbsp;
Py4JJavaError                             Traceback (most recent call last)
&amp;lt;command-568646403925120&amp;gt; in &amp;lt;module&amp;gt;
----&amp;gt; 1 spark.read.format('XML').load('abfss://[container]@[storageaccount].dfs.core.windows.net/[PATH]/[FILE].xml')
&amp;nbsp;
/databricks/spark/python/pyspark/sql/readwriter.py in load(self, path, format, schema, **options)
    156         self.options(**options)
    157         if isinstance(path, str):
--&amp;gt; 158             return self._df(self._jreader.load(path))
    159         elif path is not None:
    160             if type(path) != list:
&amp;nbsp;
/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1302 
   1303         answer = self.gateway_client.send_command(command)
-&amp;gt; 1304         return_value = get_return_value(
   1305             answer, self.gateway_client, self.target_id, self.name)
   1306 
&amp;nbsp;
/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
    115     def deco(*a, **kw):
    116         try:
--&amp;gt; 117             return f(*a, **kw)
    118         except py4j.protocol.Py4JJavaError as e:
    119             converted = convert_exception(e.java_exception)
&amp;nbsp;
/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    324             value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
    325             if answer[1] == REFERENCE_TYPE:
--&amp;gt; 326                 raise Py4JJavaError(
    327                     "An error occurred while calling {0}{1}{2}.\n".
    328                     format(target_id, ".", name), value)
&amp;nbsp;
Py4JJavaError: An error occurred while calling o474.load.
: Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.SimpleKeyProvider.getStorageAccountKey(SimpleKeyProvider.java:51)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AbfsConfiguration.getStorageAccountKey(AbfsConfiguration.java:577)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystemStore.initializeClient(AzureBlobFileSystemStore.java:1832)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystemStore.&amp;lt;init&amp;gt;(AzureBlobFileSystemStore.java:224)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.initialize(AzureBlobFileSystem.java:142)
	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3469)
	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:537)
	at org.apache.hadoop.fs.Path.getFileSystem(Path.java:365)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(FileInputFormat.java:530)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(FileInputFormat.java:499)
	at org.apache.spark.SparkContext.$anonfun$newAPIHadoopFile$2(SparkContext.scala:1533)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:165)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:125)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.SparkContext.withScope(SparkContext.scala:1066)
	at org.apache.spark.SparkContext.newAPIHadoopFile(SparkContext.scala:1520)
	at com.databricks.spark.xml.util.XmlFile$.withCharset(XmlFile.scala:46)
	at com.databricks.spark.xml.DefaultSource.$anonfun$createRelation$1(DefaultSource.scala:71)
	at com.databricks.spark.xml.XmlRelation.$anonfun$schema$1(XmlRelation.scala:43)
	at scala.Option.getOrElse(Option.scala:189)
	at com.databricks.spark.xml.XmlRelation.&amp;lt;init&amp;gt;(XmlRelation.scala:42)
	at com.databricks.spark.xml.XmlRelation$.apply(XmlRelation.scala:29)
	at com.databricks.spark.xml.DefaultSource.createRelation(DefaultSource.scala:74)
	at com.databricks.spark.xml.DefaultSource.createRelation(DefaultSource.scala:52)
	at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:385)
	at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:356)
	at org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:323)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:323)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:236)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
	at py4j.Gateway.invoke(Gateway.java:295)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:251)
	at java.lang.Thread.run(Thread.java:750)
Caused by: Invalid configuration value detected for fs.azure.account.key
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.diagnostics.ConfigurationBasicValidator.validate(ConfigurationBasicValidator.java:49)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.diagnostics.Base64StringConfigurationBasicValidator.validate(Base64StringConfigurationBasicValidator.java:40)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.SimpleKeyProvider.validateStorageAccountKey(SimpleKeyProvider.java:70)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.SimpleKeyProvider.getStorageAccountKey(SimpleKeyProvider.java:49)
	... 40 more&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;I do this on a non-UC enabled cluster (no isolation shared), Databricks version 10.4LTS.&lt;/P&gt;&lt;P&gt;In order to connect to ADLS, we've set the following Spark config (&lt;A href="https://learn.microsoft.com/en-us/azure/databricks/external-data/azure-storage#--access-azure-data-lake-storage-gen2-or-blob-storage-using-oauth-20-with-an-azure-service-principal" alt="https://learn.microsoft.com/en-us/azure/databricks/external-data/azure-storage#--access-azure-data-lake-storage-gen2-or-blob-storage-using-oauth-20-with-an-azure-service-principal" target="_blank"&gt;&lt;U&gt;following the docs&lt;/U&gt;&lt;/A&gt;&lt;span class="lia-unicode-emoji" title=":disappointed_face:"&gt;😞&lt;/span&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;spark.databricks.cluster.profile singleNode
spark.master local[*, 4]
fs.azure.account.oauth2.client.id.nubulosdpdlsdev01.dfs.core.windows.net [SPN-ID-HERE]
fs.azure.account.auth.type.nubulosdpdlsdev01.dfs.core.windows.net OAuth
fs.azure.account.oauth2.client.secret.nubulosdpdlsdev01.dfs.core.windows.net [SPN-SECRET-HERE]
fs.azure.account.oauth.provider.type.nubulosdpdlsdev01.dfs.core.windows.net org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider
spark.databricks.delta.preview.enabled true
fs.azure.account.oauth2.client.endpoint.nubulosdpdlsdev01.dfs.core.windows.net https://login.microsoftonline.com/[TENANT-ID-HERE]/oauth2/token&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;We've chosen to use a service-principal for authentication, so we &lt;B&gt;explicitly&lt;/B&gt; &lt;B&gt;do not want to use the account key. Reading file using &lt;I&gt;delta, text or csv&lt;/I&gt; works. &lt;/B&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;My issue seems to be related to &lt;A href="https://community.databricks.com/s/question/0D58Y00009GZIsgSAH/invalid-configuration-value-detected-for-fsazureaccountkey-with-comcrealyticssparkexcel" alt="https://community.databricks.com/s/question/0D58Y00009GZIsgSAH/invalid-configuration-value-detected-for-fsazureaccountkey-with-comcrealyticssparkexcel" target="_blank"&gt;&lt;U&gt;this issue&lt;/U&gt;&lt;/A&gt;&lt;U&gt;.&lt;/U&gt;&lt;/P&gt;</description>
    <pubDate>Tue, 24 Jan 2023 09:12:04 GMT</pubDate>
    <dc:creator>powerus</dc:creator>
    <dc:date>2023-01-24T09:12:04Z</dc:date>
    <item>
      <title>"Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key" using com.databricks:spark-xml_2.12:0.12.0</title>
      <link>https://community.databricks.com/t5/data-engineering/quot-failure-to-initialize-configurationinvalid-configuration/m-p/10790#M5875</link>
      <description>&lt;P&gt;Hi community,&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;I'm trying to read XML data from Azure Datalake Gen 2 using com.databricks:spark-xml_2.12:0.12.0:&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;spark.read.format('XML').load('abfss://[CONTAINER]@[storageaccount].dfs.core.windows.net/PATH/TO/FILE.xml')&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;The code above gives the following exception:&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key
&amp;nbsp;
Py4JJavaError                             Traceback (most recent call last)
&amp;lt;command-568646403925120&amp;gt; in &amp;lt;module&amp;gt;
----&amp;gt; 1 spark.read.format('XML').load('abfss://[container]@[storageaccount].dfs.core.windows.net/[PATH]/[FILE].xml')
&amp;nbsp;
/databricks/spark/python/pyspark/sql/readwriter.py in load(self, path, format, schema, **options)
    156         self.options(**options)
    157         if isinstance(path, str):
--&amp;gt; 158             return self._df(self._jreader.load(path))
    159         elif path is not None:
    160             if type(path) != list:
&amp;nbsp;
/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1302 
   1303         answer = self.gateway_client.send_command(command)
-&amp;gt; 1304         return_value = get_return_value(
   1305             answer, self.gateway_client, self.target_id, self.name)
   1306 
&amp;nbsp;
/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
    115     def deco(*a, **kw):
    116         try:
--&amp;gt; 117             return f(*a, **kw)
    118         except py4j.protocol.Py4JJavaError as e:
    119             converted = convert_exception(e.java_exception)
&amp;nbsp;
/databricks/spark/python/lib/py4j-0.10.9.1-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    324             value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
    325             if answer[1] == REFERENCE_TYPE:
--&amp;gt; 326                 raise Py4JJavaError(
    327                     "An error occurred while calling {0}{1}{2}.\n".
    328                     format(target_id, ".", name), value)
&amp;nbsp;
Py4JJavaError: An error occurred while calling o474.load.
: Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.SimpleKeyProvider.getStorageAccountKey(SimpleKeyProvider.java:51)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AbfsConfiguration.getStorageAccountKey(AbfsConfiguration.java:577)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystemStore.initializeClient(AzureBlobFileSystemStore.java:1832)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystemStore.&amp;lt;init&amp;gt;(AzureBlobFileSystemStore.java:224)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem.initialize(AzureBlobFileSystem.java:142)
	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3469)
	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:537)
	at org.apache.hadoop.fs.Path.getFileSystem(Path.java:365)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(FileInputFormat.java:530)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(FileInputFormat.java:499)
	at org.apache.spark.SparkContext.$anonfun$newAPIHadoopFile$2(SparkContext.scala:1533)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:165)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:125)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.SparkContext.withScope(SparkContext.scala:1066)
	at org.apache.spark.SparkContext.newAPIHadoopFile(SparkContext.scala:1520)
	at com.databricks.spark.xml.util.XmlFile$.withCharset(XmlFile.scala:46)
	at com.databricks.spark.xml.DefaultSource.$anonfun$createRelation$1(DefaultSource.scala:71)
	at com.databricks.spark.xml.XmlRelation.$anonfun$schema$1(XmlRelation.scala:43)
	at scala.Option.getOrElse(Option.scala:189)
	at com.databricks.spark.xml.XmlRelation.&amp;lt;init&amp;gt;(XmlRelation.scala:42)
	at com.databricks.spark.xml.XmlRelation$.apply(XmlRelation.scala:29)
	at com.databricks.spark.xml.DefaultSource.createRelation(DefaultSource.scala:74)
	at com.databricks.spark.xml.DefaultSource.createRelation(DefaultSource.scala:52)
	at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:385)
	at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:356)
	at org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:323)
	at scala.Option.getOrElse(Option.scala:189)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:323)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:236)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
	at py4j.Gateway.invoke(Gateway.java:295)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:251)
	at java.lang.Thread.run(Thread.java:750)
Caused by: Invalid configuration value detected for fs.azure.account.key
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.diagnostics.ConfigurationBasicValidator.validate(ConfigurationBasicValidator.java:49)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.diagnostics.Base64StringConfigurationBasicValidator.validate(Base64StringConfigurationBasicValidator.java:40)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.SimpleKeyProvider.validateStorageAccountKey(SimpleKeyProvider.java:70)
	at shaded.databricks.azurebfs.org.apache.hadoop.fs.azurebfs.services.SimpleKeyProvider.getStorageAccountKey(SimpleKeyProvider.java:49)
	... 40 more&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;I do this on a non-UC enabled cluster (no isolation shared), Databricks version 10.4LTS.&lt;/P&gt;&lt;P&gt;In order to connect to ADLS, we've set the following Spark config (&lt;A href="https://learn.microsoft.com/en-us/azure/databricks/external-data/azure-storage#--access-azure-data-lake-storage-gen2-or-blob-storage-using-oauth-20-with-an-azure-service-principal" alt="https://learn.microsoft.com/en-us/azure/databricks/external-data/azure-storage#--access-azure-data-lake-storage-gen2-or-blob-storage-using-oauth-20-with-an-azure-service-principal" target="_blank"&gt;&lt;U&gt;following the docs&lt;/U&gt;&lt;/A&gt;&lt;span class="lia-unicode-emoji" title=":disappointed_face:"&gt;😞&lt;/span&gt;&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;spark.databricks.cluster.profile singleNode
spark.master local[*, 4]
fs.azure.account.oauth2.client.id.nubulosdpdlsdev01.dfs.core.windows.net [SPN-ID-HERE]
fs.azure.account.auth.type.nubulosdpdlsdev01.dfs.core.windows.net OAuth
fs.azure.account.oauth2.client.secret.nubulosdpdlsdev01.dfs.core.windows.net [SPN-SECRET-HERE]
fs.azure.account.oauth.provider.type.nubulosdpdlsdev01.dfs.core.windows.net org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider
spark.databricks.delta.preview.enabled true
fs.azure.account.oauth2.client.endpoint.nubulosdpdlsdev01.dfs.core.windows.net https://login.microsoftonline.com/[TENANT-ID-HERE]/oauth2/token&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;We've chosen to use a service-principal for authentication, so we &lt;B&gt;explicitly&lt;/B&gt; &lt;B&gt;do not want to use the account key. Reading file using &lt;I&gt;delta, text or csv&lt;/I&gt; works. &lt;/B&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;My issue seems to be related to &lt;A href="https://community.databricks.com/s/question/0D58Y00009GZIsgSAH/invalid-configuration-value-detected-for-fsazureaccountkey-with-comcrealyticssparkexcel" alt="https://community.databricks.com/s/question/0D58Y00009GZIsgSAH/invalid-configuration-value-detected-for-fsazureaccountkey-with-comcrealyticssparkexcel" target="_blank"&gt;&lt;U&gt;this issue&lt;/U&gt;&lt;/A&gt;&lt;U&gt;.&lt;/U&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 24 Jan 2023 09:12:04 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/quot-failure-to-initialize-configurationinvalid-configuration/m-p/10790#M5875</guid>
      <dc:creator>powerus</dc:creator>
      <dc:date>2023-01-24T09:12:04Z</dc:date>
    </item>
    <item>
      <title>Re: "Failure to initialize configurationInvalid configuration value detected for fs.azure.account.key" using com.databricks:spark-xml_2.12:0.12.0</title>
      <link>https://community.databricks.com/t5/data-engineering/quot-failure-to-initialize-configurationinvalid-configuration/m-p/10791#M5876</link>
      <description>&lt;P&gt;The issue was also raised here: &lt;A href="https://github.com/databricks/spark-xml/issues/591" alt="https://github.com/databricks/spark-xml/issues/591" target="_blank"&gt;https://github.com/databricks/spark-xml/issues/591&lt;/A&gt;&lt;/P&gt;&lt;P&gt;A fix is to use the "spark.hadoop" prefix in front of the fs.azure spark config keys:&lt;/P&gt;&lt;PRE&gt;&lt;CODE&gt;spark.hadoop.fs.azure.account.oauth2.client.id.nubulosdpdlsdev01.dfs.core.windows.net [SPN-ID-HERE]
spark.hadoop.fs.azure.account.auth.type.nubulosdpdlsdev01.dfs.core.windows.net OAuth
spark.hadoop.fs.azure.account.oauth2.client.secret.nubulosdpdlsdev01.dfs.core.windows.net [SPN-SECRET-HERE]
spark.hadoop.fs.azure.account.oauth.provider.type.nubulosdpdlsdev01.dfs.core.windows.net org.apache.hadoop.fs.azurebfs.oauth2.ClientCredsTokenProvider
spark.databricks.delta.preview.enabled true
spark.hadoop.fs.azure.account.oauth2.client.endpoint.nubulosdpdlsdev01.dfs.core.windows.net https://login.microsoftonline.com/[TENANT-ID-HERE]/oauth2/token&lt;/CODE&gt;&lt;/PRE&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 24 Jan 2023 12:43:25 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/quot-failure-to-initialize-configurationinvalid-configuration/m-p/10791#M5876</guid>
      <dc:creator>powerus</dc:creator>
      <dc:date>2023-01-24T12:43:25Z</dc:date>
    </item>
  </channel>
</rss>

