from mlflow.models import infer_signature
import mlflow
import langchain
import mlflow.langchain
import os
# ---------------------------------------------------------------------
# Environment + registry setup
# ---------------------------------------------------------------------
# Point MLflow at the Databricks-managed MLflow home directory, and
# target the Unity Catalog model registry (not the workspace registry).
os.environ.update({"MLFLOW_HOME": "/databricks/mlflow"})
mlflow.set_registry_uri("databricks-uc")

# NOTE(review): redacted placeholder — Unity Catalog registered-model
# names are three-level ("catalog.schema.model"); confirm before running.
model_name = "*************"
# ---------------------------------------------------------------------
# Start MLflow run: exercise the chain once, infer a signature, and log
# the LangChain model to Unity Catalog.
# (Fix: the body of this `with` block was not indented, which is a
# Python IndentationError — restored proper structure.)
# ---------------------------------------------------------------------
with mlflow.start_run(run_name="rfi_chatbot") as run:
    # Smoke-test query used both to exercise the chain and to build the
    # signature / input example below.
    question = "What was xyz's total revenue in 2024 Q3?"
    result = rfi_chain({"query": question})  # your chain must return dict {"result": ...}

    # Infer the model signature from one concrete input/output pair so
    # serving endpoints can validate request/response payloads.
    signature = infer_signature(
        {"query": question},
        {"result": result["result"]},
    )

    # -----------------------------------------------------------------
    # Log the LangChain model to Unity Catalog
    # -----------------------------------------------------------------
    # NOTE(review): `get_retriever` is defined elsewhere in this project;
    # MLflow calls it at load time to rebuild the retriever — confirm it
    # takes no required arguments at load.
    model_info = mlflow.langchain.log_model(
        lc_model=rfi_chain,
        loader_fn=get_retriever,
        artifact_path="model",
        registered_model_name=model_name,
        signature=signature,
        input_example={"query": question},
        # Pin the serving environment exactly so the registered model
        # deploys reproducibly.
        pip_requirements=[
            "mlflow[databricks]==2.14.1",
            "langchain==0.3.27",
            "langchain-community==0.3.27",
            "langchain-databricks==0.1.2",
            "databricks-vectorsearch==0.40",
            "databricks-sdk==0.58.0",
            "pydantic>=2.0.0,<3.0.0",
            "pyspark==3.5.0",
            "grpcio==1.60.0",
        ],
    )

# `run` remains bound after the context manager exits, so the run id is
# still readable here.
print("Model successfully logged to Unity Catalog as:", model_name)
print("Run ID:", run.info.run_id)