@szymon_dybczak
I am experimenting to see whether there is a way to create an external table from files written into a Unity Catalog volume. I tried the following, but it did not work.
# COMMAND ----------
# DBTITLE 1, Daily Fetch and Write
# sample daily fetch from an API call, written to <catalog>.<schema>.<volume>.<folder>
data = [
    {'id': 1, 'name': 'John Doe', 'age': 20},
    {'id': 2, 'name': 'Jane Doe', 'age': 30},
]
df = spark.createDataFrame(data)
# write Delta files to the volume: <catalog> workspace, <schema> db_bronze, <volume> volume, <folder> test_01 (stores the daily fetch from the API)
(df.write
    .format('delta')
    .mode('append')
    .option('mergeSchema', 'true')
    .clusterBy('id')
    .save('/Volumes/workspace/db_bronze/volume/test_01'))
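# COMMAND ----------
# DBTITLE 1, Sanity Check: List the Volume Folder
# For reference, a quick way to confirm the append landed where expected -- listing
# the target folder (dbutils is available in any Databricks notebook) should show
# the parquet files plus the _delta_log directory:
display(dbutils.fs.ls('/Volumes/workspace/db_bronze/volume/test_01'))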
# COMMAND ----------
# DBTITLE 2, Test Read from Volume
# Reading successful
location = 'dbfs:/Volumes/workspace/db_bronze/volume/test_01/'
df = spark.read.format('delta').load(location)
display(df)
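# COMMAND ----------
# DBTITLE 1, Sanity Check: Delta Metadata at the Path
# Likewise, DESCRIBE DETAIL on the path-based Delta table should confirm the folder
# holds a valid Delta log before any attempt to register a table on top of it:
display(spark.sql("DESCRIBE DETAIL delta.`/Volumes/workspace/db_bronze/volume/test_01`"))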
# COMMAND ----------
# DBTITLE 3, Create External Table
%sql
/* not successful */
CREATE TABLE IF NOT EXISTS workspace.db_bronze.people (id LONG, name STRING, age LONG)
USING DELTA
LOCATION 'dbfs:/Volumes/workspace/db_bronze/volume/test_01/'
I am not sure what I am doing wrong here.