Can my code be wrapped and served in Databricks Model Serving?

import mlflow.pyfunc
import mlflow
from langchain.agents import create_sql_agent
from langchain.agents.agent_toolkits import SQLDatabaseToolkit
from langchain.sql_database import SQLDatabase
from langchain import OpenAI

llm = OpenAI(temperature=0)

class UCBot:

    def __init__(self, llm):
        self.llm = llm
        self.toolkit = SQLDatabaseToolkit(db=SQLDatabase.from_databricks(catalog="samples", schema="nyctaxi"), llm=llm)
        self.agent = create_sql_agent(llm=self.llm, toolkit=self.toolkit, verbose=True, top_k=1)

    def get_answer(self, question):
        return self.agent.run(question)
    
class MLflowUCBot(mlflow.pyfunc.PythonModel):
    def __init__(self, llm):
        self.llm = llm

    def predict(self, context, input):
        ucbot = UCBot(self.llm)
        return ucbot.get_answer(input)


# Persist model to mlflow
with mlflow.start_run():
    mlflow.pyfunc.log_model(
        python_model=MLflowUCBot(llm),
        extra_pip_requirements=['langchain', 'databricks-sql-connector', 'sqlalchemy', 'openai'],
        artifact_path='model',
        registered_model_name="my_model",
        input_example={"input":"how many tables?"}
    )
  • The code is able to log the model and run predictions.
  • When I try to create a model serving endpoint, I get this error: An error occurred while loading the model. No module named 'openai'
  • After adding openai to the dependencies, I get the following error: An error occurred while loading the model. No module named 'openai.api_resources' (a way to inspect the requirements recorded with the model is sketched after this list)
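
One way to debug these missing-module errors before hitting the serving endpoint is to check the pip requirements MLflow recorded with the model and to load the wrapper back for a test prediction. This is only a minimal sketch, assuming the run from the logging step above and the 'model' artifact path; the <run_id> placeholder is hypothetical and must be replaced with the real run ID:

import mlflow.pyfunc

# Hypothetical model URI; substitute the real run ID, or use
# "models:/my_model/1" for the registered version.
model_uri = "runs:/<run_id>/model"

# Print the pip requirements recorded at log time -- this is the list
# the serving container installs, so openai should appear here with a
# compatible version.
with open(mlflow.pyfunc.get_model_dependencies(model_uri)) as f:
    print(f.read())

# Load the wrapper back and run a prediction as a sanity check
# (same payload shape as the input_example used when logging).
loaded = mlflow.pyfunc.load_model(model_uri)
print(loaded.predict({"input": "how many tables?"}))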
Best answer, by John Francis:

I installed openai==0.27.8 and it is working now.

I also added this code:

conda_env = mlflow.pyfunc.get_default_conda_env()

# define packages required by model
packages = ['langchain', 'langchain_community', 'databricks-sql-connector', 'sqlalchemy', 'openai==0.27.8']

# add required packages to environment configuration
conda_env['dependencies'][-1]['pip'] += packages

# Persist model to mlflow
with mlflow.start_run():
    mlflow.pyfunc.log_model(
        python_model=MLflowUCBot(llm),
        artifact_path='model',
        conda_env=conda_env,
        registered_model_name="my_model",
        signature=signature
    )
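
The snippet above passes signature=signature but never defines it. A minimal sketch of how a signature could be produced with mlflow.models.infer_signature, assuming the same {"input": ...} payload shape as the question's input_example (the example strings below are hypothetical, not what the answerer necessarily used):

from mlflow.models import infer_signature

# Hypothetical example payload and response, used only to infer the
# input/output schema that the serving endpoint will enforce.
example_input = {"input": "how many tables?"}
example_output = "There is 1 table."

signature = infer_signature(example_input, example_output)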