import os
import gradio as gr
from langchain.llms import AzureOpenAI
from langchain.chat_models import AzureChatOpenAI
from langchain.chains.sql_database.base import SQLDatabaseChain
from langchain.agents import Tool, AgentType, initialize_agent
from langchain.memory import ConversationBufferMemory
from langchain.prompts.prompt import PromptTemplate
from dotenv import load_dotenv
from sqlalchemy import MetaData, create_engine, inspect, select, text

# The file we created above
from loader import load_database
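# load_database is assumed to ingest the source data into the given engine and
# return a LangChain SQLDatabase object, which is what SQLDatabaseChain expects.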

# Load the .env file to access the keys
load_dotenv()
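# The .env file is assumed to hold the Azure OpenAI settings LangChain reads from
# the environment, typically OPENAI_API_TYPE, OPENAI_API_BASE, OPENAI_API_KEY,
# plus the OPENAI_CHAT_API_VERSION value used below.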

# Set up the completion and chat LLMs (optional, experiment with this!)
llm = AzureOpenAI(deployment_name="your_deployment_name",
                  model_name="text-davinci-003")
chat_llm = AzureChatOpenAI(deployment_name="gpt-35-turbo", temperature=0.1)

# Point the OpenAI SDK at the API version used by the chat deployment
os.environ["OPENAI_API_VERSION"] = os.getenv('OPENAI_CHAT_API_VERSION')

# Create the engine and call the function to ingest the data
engine = create_engine('sqlite:///db', echo=True)
db = load_database(engine)

# OR, if the database already exists somewhere, point the engine at it instead,
# e.g. engine = create_engine("postgresql+psycopg2://scott:tiger@localhost:5432/mydatabase")
# and then: db = load_database(engine)

# Create an inspector object to inspect the database
inspector = inspect(engine)

# Get the list of table names
table_names = inspector.get_table_names()
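# e.g. print(table_names) to confirm the ingestion created the expected tables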

# Create a SQLDatabaseChain over the ingested database
sql_chain = SQLDatabaseChain.from_llm(llm, db,
                                      verbose=True, use_query_checker=True)
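
# The two domain-specific databases below are assumed to have been ingested the
# same way as db above, for example (paths are placeholders):
# car_db = load_database(create_engine('sqlite:///car_db'))
# bike_db = load_database(create_engine('sqlite:///bike_db'))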

# Create one SQLDatabaseChain per domain-specific database
one_sql_chain = SQLDatabaseChain.from_llm(llm, car_db,
                                          verbose=True, use_query_checker=True)
two_sql_chain = SQLDatabaseChain.from_llm(llm, bike_db,
                                          verbose=True, use_query_checker=True)

memory = ConversationBufferMemory(memory_key="chat_history",
                                  return_messages=True)
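
# Wrap each chain as an agent tool. The names and descriptions here are an
# assumed sketch (the originals are not shown above); adjust them to match
# the databases you actually loaded.
one_sql_tool = Tool(
    name="CarDB",
    func=one_sql_chain.run,
    description="Useful for answering questions about the car database.",
)
two_sql_tool = Tool(
    name="BikeDB",
    func=two_sql_chain.run,
    description="Useful for answering questions about the bike database.",
)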

tools = [one_sql_tool, two_sql_tool]

conversational_agent = initialize_agent(
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    tools=tools,
    llm=llm,
    verbose=True,
    max_iterations=3,
    memory=memory,
    dialect='ansi',  # not a standard initialize_agent argument; drop it if it raises a validation error
    early_stopping_method="generate",
    handle_parsing_errors=True,
)
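# Note: ZERO_SHOT_REACT_DESCRIPTION does not include chat history in its prompt,
# so the memory above may go unused; AgentType.CONVERSATIONAL_REACT_DESCRIPTION
# is the agent type designed to work with a "chat_history" memory key.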

# Define a simple query function that runs the agent and returns the response
def query_fn(input_text):
    response = conversational_agent.run(input=input_text)
    return response

# Build the UI
iface = gr.Interface(
    fn=query_fn,
    inputs=gr.Textbox(label="Enter your query"),   # gr.inputs/gr.outputs are deprecated namespaces
    outputs=gr.Textbox(label="Query Result"),
    title="Domain-specific chatbot"
)

# Launch the UI but do not share it publicly
iface.launch(share=False, server_port=8080)