Merge pull request #161 from lastmile-ai/pr161
[AIConfig Cookbook] Change prompt routing folder name to single word
Showing 5 changed files with 250 additions and 0 deletions.
@@ -0,0 +1,12 @@
# Basic Prompt Routing Demo - AI Teaching Assistant

This demo shows a simple use case of prompt routing with AIConfig.

### How does it work?

The user asks a question. The LLM classifies the topic as math, physics, or general. Based on the topic, the LLM selects a different "assistant" to respond. These assistants have different system prompts and respond with varying introductions and styles of response.

### Setup with AIConfig

1. Create an AIConfig for the prompts, models, and model parameters to be used by the different assistants: `create_config.py`.
2. Build the assistant app to handle the prompt routing logic among the prompts (uses AIConfig): `assistant_app.py`.
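The routing described in this README comes down to two `config.run` calls at run time: the `router` prompt classifies the question, and its lowercased output is then used as the name of the assistant prompt to run. A minimal sketch of that flow (it mirrors `assistant_app.py` below; the `route_question` helper name is illustrative, and it assumes `create_config.py` has already written `assistant_aiconfig.json` and that `OPENAI_API_KEY` is set in the environment):

```python
import asyncio
from aiconfig import AIConfigRuntime


async def route_question(question: str) -> str:
    # Load the AIConfig generated by create_config.py
    config = AIConfigRuntime.load("assistant_aiconfig.json")
    params = {"student_question": question}

    # Step 1: the router prompt classifies the question as Math, Physics, or General
    await config.run("router", params)
    topic = config.get_output_text("router")

    # Step 2: the lowercased topic doubles as the prompt name ("math", "physics", "general")
    await config.run(topic.lower(), params)
    return config.get_output_text(topic.lower())


print(asyncio.run(route_question("What is the derivative of x^2?")))
```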
@@ -0,0 +1,70 @@
{
  "name": "assistant_config",
  "schema_version": "latest",
  "metadata": {
    "parameters": {},
    "models": {
      "gpt-4": {
        "top_k": 40,
        "top_p": 1,
        "model": "gpt-4",
        "temperature": 0.0
      }
    }
  },
  "description": "teaching assistant config",
  "prompts": [
    {
      "name": "router",
      "input": "{{student_question}}",
      "metadata": {
        "model": {
          "name": "gpt-4",
          "settings": {
            "system_prompt": "\n You will be given a question. Classify the question as one of the following topics: \n 1. Math\n 2. Physics\n 3. General\n Output the topic name.\n "
          }
        },
        "parameters": {}
      }
    },
    {
      "name": "math",
      "input": "\n Student Question: {{router.input}}\n Topic: {{router.output}}\n ",
      "metadata": {
        "model": {
          "name": "gpt-4",
          "settings": {
            "system_prompt": "\n You are a very good mathematician. You are great at answering math questions. \n You are so good because you are able to break down hard problems into their component parts, \n answer the component parts, and then put them together to answer the broader question.\n \n Output: If topic is Math, introduce yourself as 'Hi! I'm your Math Professor' and then answer the question. \n If the topic is not Math, output 'Sorry I only answer Math questions'.\n "
          }
        },
        "parameters": {}
      }
    },
    {
      "name": "physics",
      "input": "\n Student Question: {{router.input}}\n Topic: {{router.output}}\n ",
      "metadata": {
        "model": {
          "name": "gpt-4",
          "settings": {
            "system_prompt": "\n You are a very smart physics professor. You are great at answering questions about physics in a concise and easy\n to understand manner. When you don't know the answer to a question you admit that you don't know.\n \n Output: If topic is Physics, introduce yourself as 'Hi! I'm your Physics Professor' and then answer the question. \n If the topic is not Physics, output 'Sorry I only answer Physics questions'.\n "
          }
        },
        "parameters": {}
      }
    },
    {
      "name": "general",
      "input": "\n Student Question: {{router.input}}\n Topic: {{router.output}}\n ",
      "metadata": {
        "model": {
          "name": "gpt-4",
          "settings": {
            "system_prompt": "\n You are a helpful assistant. Answer the question as accurately as you can. \n \n Introduce yourself as \"Hi I'm your general assistant\". Then answer the question. \n "
          }
        },
        "parameters": {}
      }
    }
  ]
}
@@ -0,0 +1,53 @@
from aiconfig import AIConfigRuntime
from dotenv import load_dotenv
import streamlit as st
import asyncio
import os
import openai

load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")


# Get assistant response based on user prompt (prompt routing)
async def assistant_response(prompt):
    config = AIConfigRuntime.load("assistant_aiconfig.json")

    params = {"student_question": prompt}

    router_prompt_completion = await config.run("router", params)
    topic = config.get_output_text("router")

    dest_prompt = topic.lower()

    prompt_completion = await config.run(dest_prompt, params)
    response = config.get_output_text(dest_prompt)

    return response


# Streamlit Setup
st.title("AI Teaching Assistant")
st.markdown("Ask a math, physics, or general question. Based on your question, an AI math prof, physics prof, or general assistant will respond.")
st.markdown("**This is a simple demo of prompt routing - based on your question, an LLM decides which AI teacher responds.**")

# Chat setup
if "messages" not in st.session_state:
    st.session_state.messages = []

for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Ask a math, physics, or general question"):
    st.chat_message("user").markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    chat_response = asyncio.run(assistant_response(prompt))

    response = f"AI: {chat_response}"

    with st.chat_message("assistant"):
        st.markdown(response)

    st.session_state.messages.append({"role": "assistant", "content": response})
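To try this app locally (an inferred workflow, not spelled out in this PR): put `OPENAI_API_KEY` in a `.env` file next to the app, generate the config with `python create_config.py`, and launch the chat UI with `streamlit run assistant_app.py`.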
@@ -0,0 +1,111 @@
from aiconfig import AIConfigRuntime
from aiconfig import Prompt

aiconfig = AIConfigRuntime.create("assistant_config", "teaching assistant config")

# Set GPT-4 as the default model for the Teaching Assistant prompts
model_name = "gpt-4"
model_settings = {
    "top_k": 40,
    "top_p": 1,
    "model": "gpt-4",
    "temperature": 0.0
}
aiconfig.add_model(model_name, model_settings)


# Router Prompt
router_prompt = Prompt(
    name="router",
    input="{{student_question}}",
    metadata={
        "model": {
            "name": "gpt-4",
            "settings": {
                "system_prompt": """
                    You will be given a question. Classify the question as one of the following topics:
                    1. Math
                    2. Physics
                    3. General
                    Output the topic name.
                """
            }
        },
    }
)
aiconfig.add_prompt("router", router_prompt)

# Math Assistant Prompt
math_assistant = Prompt(
    name="math",
    input="""
        Student Question: {{router.input}}
        Topic: {{router.output}}
    """,
    metadata={
        "model": {
            "name": "gpt-4",
            "settings": {
                "system_prompt": """
                    You are a very good mathematician. You are great at answering math questions.
                    You are so good because you are able to break down hard problems into their component parts,
                    answer the component parts, and then put them together to answer the broader question.
                    Output: If topic is Math, introduce yourself as 'Hi! I'm your Math Professor' and then answer the question.
                    If the topic is not Math, output 'Sorry I only answer Math questions'.
                """
            }
        },
    }
)
aiconfig.add_prompt("math", math_assistant)

# Physics Assistant Prompt
physics_assistant = Prompt(
    name="physics",
    input="""
        Student Question: {{router.input}}
        Topic: {{router.output}}
    """,
    metadata={
        "model": {
            "name": "gpt-4",
            "settings": {
                "system_prompt": """
                    You are a very smart physics professor. You are great at answering questions about physics in a concise and easy
                    to understand manner. When you don't know the answer to a question you admit that you don't know.
                    Output: If topic is Physics, introduce yourself as 'Hi! I'm your Physics Professor' and then answer the question.
                    If the topic is not Physics, output 'Sorry I only answer Physics questions'.
                """
            }
        },
    }
)
aiconfig.add_prompt("physics", physics_assistant)


# General Assistant Prompt
general_assistant = Prompt(
    name="general",
    input="""
        Student Question: {{router.input}}
        Topic: {{router.output}}
    """,
    metadata={
        "model": {
            "name": "gpt-4",
            "settings": {
                "system_prompt": """
                    You are a helpful assistant. Answer the question as accurately as you can.
                    Introduce yourself as "Hi I'm your general assistant". Then answer the question.
                """
            }
        },
    }
)
aiconfig.add_prompt("general", general_assistant)

# Save AIConfig
aiconfig.save('assistant_aiconfig.json', include_outputs=False)
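Adding another subject would follow the same pattern as the prompts above. A hypothetical sketch (the `chemistry` prompt and its wording are illustrative and are not part of this PR; the router's system prompt would also need the new topic added to its list so the classifier can emit it):

```python
# Hypothetical fourth assistant, mirroring the math/physics prompts above.
chemistry_assistant = Prompt(
    name="chemistry",
    input="""
        Student Question: {{router.input}}
        Topic: {{router.output}}
    """,
    metadata={
        "model": {
            "name": "gpt-4",
            "settings": {
                "system_prompt": """
                    You are a knowledgeable chemistry professor. Answer chemistry questions clearly and concisely.
                    Output: If topic is Chemistry, introduce yourself as 'Hi! I'm your Chemistry Professor' and then answer the question.
                    If the topic is not Chemistry, output 'Sorry I only answer Chemistry questions'.
                """
            }
        },
    }
)
aiconfig.add_prompt("chemistry", chemistry_assistant)
```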
@@ -0,0 +1,4 @@
streamlit
openai
python-aiconfig
asyncio
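Assuming this is the demo's `requirements.txt`, dependencies install with `pip install -r requirements.txt`. Note that `asyncio` ships with the Python standard library, so that entry is likely unnecessary, while `python-aiconfig` provides the `aiconfig` package imported by the scripts above.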