Week 2 assignment: Return of the JedAI

Added tools for our wise jedAI master to list, add, and remove students
Added an instruction to jedi mind-trick anyone who asks about droids

Signed-off-by: Eli Waltuch <eliwaltuch@gmail.com>
Eli Waltuch
2025-10-30 21:44:47 +02:00
parent be6b02e7b7
commit e1b7e2574d
3 changed files with 183 additions and 6 deletions

students.json

@@ -0,0 +1,10 @@
{
"Luke Skywalker": "Guardian",
"Obi-Wan Kenobi": "Guardian",
"Ahsoka Tano": "Consular",
"Ki-Adi-Mundi": "Consular",
"Qui-Gon Jinn": "Consular",
"Rey": "Sentinel",
"Ezra Bridger": "Sentinel"
}


@@ -5,6 +5,8 @@ from dotenv import load_dotenv
from openai import OpenAI
import gradio as gr
import tempfile
import json
import yoda_students
MODEL_ENDPOINTS = {
"gpt-4.1-mini": {"type": "openai", "base_url": "https://api.openai.com/v1", "api_key": ""},
@@ -13,6 +15,72 @@ MODEL_ENDPOINTS = {
"qwen3-vl:235b-cloud": {"type": "ollama", "base_url": "http://localhost:11434/v1", "api_key": ""}, # large ollama model that runs in the cloud
}
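# Tool schemas in the OpenAI function-calling format; they describe the
# yoda_students helpers so the model can request them by name.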
tool_list_students = {
"name": "list_students",
"description": "List all Jedi students with their current Jedi class.",
"parameters": {
"type": "object",
"properties": {},
"required": [],
"additionalProperties": False
}
}
tool_add_student = {
"name": "add_student",
"description": "Add a new Jedi student with their class.",
"parameters": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "The students full name."
},
"jedi_class": {
"type": "string",
"enum": ["Guardian", "Consular", "Sentinel"],
"description": "The Jedi class they are joining."
}
},
"required": ["name", "jedi_class"],
"additionalProperties": False
}
}
tool_remove_student = {
"name": "remove_student",
"description": "Remove a Jedi student because they have graduated or left.",
"parameters": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "The students full name to remove."
}
},
"required": ["name"],
"additionalProperties": False
}
}
tool_list_by_class = {
"name": "list_by_class",
"description": "Group Jedi students by their class and list them.",
"parameters": {
"type": "object",
"properties": {},
"required": [],
"additionalProperties": False
}
}
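# Register every schema as a "function" tool so the list can be passed to chat.completions.create(tools=...)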
tools = [
{"type": "function", "function": tool_list_students},
{"type": "function", "function": tool_add_student},
{"type": "function", "function": tool_remove_student},
{"type": "function", "function": tool_list_by_class},
]
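# Illustrative example (hypothetical data): the model might request add_student with
# arguments '{"name": "Kit Fisto", "jedi_class": "Consular"}'; ask_llm parses that JSON
# and calls yoda_students.add_student(**args).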
def load_api_keys():
# Load environment variables in a file called .env
load_dotenv(override=True)
@@ -37,7 +105,7 @@ def load_api_keys():
# add the verified keys to global MODEL_ENDPOINTS struct
for model, cfg in MODEL_ENDPOINTS.items():
cfg["api_key"] = KEYS.get(cfg["type"], "")
return "API keys found and look good so far!"
return f"API keys found and look good so far!"
def voiceover(message):
openai = OpenAI()
@@ -57,17 +125,61 @@ def ask_llm(user_prompt, history, model):
ancient and wise tone, inverted sentence structure.
Also, refer to the user as "My young Padawan"
End every answer with "May the force be with you, always."
You have access to tools to manage Jedi students.
If the user asks anything involving adding, removing,
or listing students, call the correct tool.
If the user asks you about Droids, respond with a Jedi Mind Trick
e.g. "These aren't the droids you are looking for."
"""
base_url = MODEL_ENDPOINTS.get(model, {}).get("base_url", "https://api.openai.com/v1")
api_key = MODEL_ENDPOINTS.get(model, {}).get("api_key", "")
client = OpenAI(base_url=base_url, api_key=api_key)
history = [{"role":h["role"], "content":h["content"]} for h in history]
messages = [{"role": "system", "content": system_prompt}] + history + [{"role": "user", "content": user_prompt}]
-stream = client.chat.completions.create(model=model, messages=messages, stream=True)
-response = ""
-for chunk in stream:
-response += chunk.choices[0].delta.content or ''
-yield response, None
# First: ask the model if it wants to use a tool
decision = client.chat.completions.create(model=model, messages=messages, tools=tools)
action = decision.choices[0].message
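# If the model requested one or more tool calls, run each one locally and
# feed its result back for a final, streamed answer.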
if action.tool_calls:
for tool_call in action.tool_calls:
name = tool_call.function.name
args = json.loads(tool_call.function.arguments)
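# Dispatch to the matching roster helper in yoda_students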
if name == "add_student":
result = yoda_students.add_student(**args)
elif name == "remove_student":
result = yoda_students.remove_student(**args)
elif name == "list_students":
result = yoda_students.list_students()
elif name == "list_by_class":
result = yoda_students.list_by_class()
else:
result = "Unknown tool error."
# Send the tool result back to the model and stream the follow-up reply
followup = client.chat.completions.create(
model=model,
messages = messages + [
action,
{"role": "tool", "tool_call_id": tool_call.id, "content": result}
],
stream=True
)
response = ""
for chunk in followup:
delta = chunk.choices[0].delta.content or ""
response += delta
yield response, None
else:
# Stream regular response
stream = client.chat.completions.create(model=model, messages=messages, tools=tools, stream=True)
response = ""
for chunk in stream:
response += chunk.choices[0].delta.content or ''
yield response, None
audio = voiceover(response)
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".wav")
tmp.write(audio)

yoda_students.py

@@ -0,0 +1,55 @@
import json
import os
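# Keep students.json next to this module so the roster is found regardless of the current working directory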
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
JSON_FILE = os.path.join(BASE_DIR, "students.json")
def load_students():
if not os.path.exists(JSON_FILE):
return {}
with open(JSON_FILE, "r") as f:
return json.load(f)
def save_students(students):
with open(JSON_FILE, "w") as f:
json.dump(students, f, indent=2)
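# Look up one student's class and answer in Yoda's voice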
def get_student_class(name):
students = load_students()
cls = students.get(name)
if cls:
return "f{name} is a Jedi {cls}."
return f"Hmm… Student not found, I see."
def add_student(name, jedi_class):
students = load_students()
students[name] = jedi_class
save_students(students)
return f"Added, {name} has been. A Jedi {jedi_class}, they are!"
def remove_student(name):
students = load_students()
if name in students:
del students[name]
save_students(students)
return f"Graduated, {name} has. Celebrate, we must."
return f"Vanished? This student does not exist."
def list_students():
students = load_students()
grouped = {}
for name, cls in students.items():
grouped.setdefault(cls, []).append(name)
result_lines = []
for cls, names in grouped.items():
names_str = ", ".join(names)
result_lines.append(f"{cls}: {names_str}")
return "\n".join(result_lines)