Have you tried something like this already?
Force Cache Invalidation (Recommended)
-- After CREATE OR REPLACE VIEW, execute (replace the <...> placeholders
-- with your actual share, schema, and view names):
-- NOTE(review): REMOVE must run before ADD — the re-add is what triggers
-- the downstream materialization refresh.
ALTER SHARE <share_name> REMOVE TABLE <schema>.<view_name>;
ALTER SHARE <share_name> ADD TABLE <schema>.<view_name>;
This forces DSFF to drop materialized views and recreate them on next access.
You could also automate this with the Databricks SDK for Python:
# Use the Databricks SDK to trigger a cache refresh by removing and
# re-adding the shared object in a single update call.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import (
    SharedDataObject,
    SharedDataObjectUpdate,
    SharedDataObjectUpdateAction,
)


def invalidate_dsff_cache(share_name, view_name, schema=None):
    """Force a Delta Sharing consumer-side cache refresh for a shared view.

    Removes and re-adds the view in the share, which invalidates any
    downstream materialization and forces re-creation on next access.

    Args:
        share_name: Name of the Delta share to update.
        view_name: Name of the view; may be fully qualified ("schema.view")
            if ``schema`` is not given.
        schema: Optional schema name. When provided, the object name is
            built as ``f"{schema}.{view_name}"``; when None, ``view_name``
            is assumed to already be qualified.
    """
    w = WorkspaceClient()
    qualified_name = f"{schema}.{view_name}" if schema is not None else view_name
    # The shares API expects SharedDataObjectUpdate entries (action +
    # data_object), not bare add/remove objects. REMOVE first, then ADD.
    w.shares.update(
        name=share_name,
        updates=[
            SharedDataObjectUpdate(
                action=SharedDataObjectUpdateAction.REMOVE,
                data_object=SharedDataObject(name=qualified_name),
            ),
            SharedDataObjectUpdate(
                action=SharedDataObjectUpdateAction.ADD,
                data_object=SharedDataObject(name=qualified_name),
            ),
        ],
    )