<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Feature store feature table location in Machine Learning</title>
    <link>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/4966#M230</link>
    <description>&lt;P&gt;Can Databricks feature tables be stored outside of DBFS?&lt;/P&gt;</description>
    <pubDate>Mon, 01 May 2023 20:27:02 GMT</pubDate>
    <dc:creator>Direo</dc:creator>
    <dc:date>2023-05-01T20:27:02Z</dc:date>
    <item>
      <title>Feature store feature table location</title>
      <link>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/4966#M230</link>
      <description>&lt;P&gt;Can Databricks feature tables be stored outside of DBFS?&lt;/P&gt;</description>
      <pubDate>Mon, 01 May 2023 20:27:02 GMT</pubDate>
      <guid>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/4966#M230</guid>
      <dc:creator>Direo</dc:creator>
      <dc:date>2023-05-01T20:27:02Z</dc:date>
    </item>
    <item>
      <title>Re: Feature store feature table location</title>
      <link>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/4967#M231</link>
      <description>&lt;P&gt;@Direo Direo&amp;nbsp;Feature tables are delta tables it can shared among different workspaces . But you are asking outside of dbfs , what exactly is requirement and where you want to store &lt;/P&gt;</description>
      <pubDate>Thu, 04 May 2023 04:09:30 GMT</pubDate>
      <guid>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/4967#M231</guid>
      <dc:creator>Priyag1</dc:creator>
      <dc:date>2023-05-04T04:09:30Z</dc:date>
    </item>
    <item>
      <title>Re: Feature store feature table location</title>
      <link>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/4968#M232</link>
      <description>&lt;P&gt;Hi @Direo Direo&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;Does @Priyadarshini G&amp;nbsp; answer help? If it does, would you be happy to mark it as best? If it doesn't, please tell us so we can help you.&lt;/P&gt;&lt;P&gt;&lt;/P&gt;&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 22 Jun 2023 07:34:16 GMT</pubDate>
      <guid>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/4968#M232</guid>
      <dc:creator>Anonymous</dc:creator>
      <dc:date>2023-06-22T07:34:16Z</dc:date>
    </item>
    <item>
      <title>Re: Feature store feature table location</title>
      <link>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/35442#M1841</link>
      <description>&lt;P&gt;Yes, Databricks feature tables can be stored outside of Databricks File System (DBFS). You can store your feature tables in external storage systems such as Amazon S3, Azure Blob Storage, Azure Data Lake Storage, or Hadoop Distributed File System (HDFS).&lt;/P&gt;&lt;P&gt;To store your feature tables in external storage, you need to configure the storage system and provide the appropriate connection information when creating your Delta table. For example, when using Amazon S3, you would specify the S3 bucket path when creating the table.&lt;/P&gt;&lt;P&gt;Here's an example of how to create a Delta table in an Amazon S3 bucket using PySpark:&lt;/P&gt;&lt;P&gt;```python&lt;BR /&gt;from pyspark.sql import SparkSession&lt;/P&gt;&lt;P&gt;# Start a Spark session&lt;BR /&gt;spark = SparkSession.builder \&lt;BR /&gt;.appName("Databricks Feature Table on S3") \&lt;BR /&gt;.getOrCreate()&lt;/P&gt;&lt;P&gt;# Define a sample DataFrame&lt;BR /&gt;data = [("Alice", 34), ("Bob", 45), ("Cathy", 29)]&lt;BR /&gt;columns = ["Name", "Age"]&lt;BR /&gt;df = spark.createDataFrame(data, columns)&lt;/P&gt;&lt;P&gt;# Write the DataFrame to a Delta table in S3&lt;BR /&gt;delta_table_path = "s3a://your-bucket-name/your-delta-table-path/"&lt;BR /&gt;df.write.format("delta").mode("overwrite").save(delta_table_path)&lt;BR /&gt;```&lt;/P&gt;&lt;P&gt;Replace `your-bucket-name` and `your-delta-table-path` with the appropriate values for your Amazon S3 bucket and desired path. Note that you need to configure your S3 authentication and ensure that you have the necessary permissions to read and write to the specified bucket.&lt;/P&gt;</description>
      <pubDate>Wed, 28 Jun 2023 00:19:43 GMT</pubDate>
      <guid>https://community.databricks.com/t5/machine-learning/feature-store-feature-table-location/m-p/35442#M1841</guid>
      <dc:creator>the-sab</dc:creator>
      <dc:date>2023-06-28T00:19:43Z</dc:date>
    </item>
  </channel>
</rss>

