1. Use an LLM to simulate a ticket-reservation function via a mock API. 2. Week 2 exercise
This commit is contained in:
173
week2/community-contributions/tsungyulin/reserveTicketDemo.ipynb
Normal file
173
week2/community-contributions/tsungyulin/reserveTicketDemo.ipynb
Normal file
@@ -0,0 +1,173 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "27fa33cf",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import openai\n",
|
||||
"from dotenv import load_dotenv\n",
|
||||
"import gradio as gr\n",
|
||||
"import os\n",
|
||||
"import json\n",
|
||||
"from datetime import datetime\n",
|
||||
"\n",
|
||||
"import httpx\n",
|
||||
"from fastapi import FastAPI\n",
|
||||
"import uvicorn\n",
|
||||
"import threading\n",
|
||||
"\n",
|
||||
"load_dotenv('.env',override=True)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "e9407192",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
app = FastAPI()

@app.post('/mock/ticket')
def booking(payload: dict):
    """Mock ticket-booking endpoint.

    Expects a JSON body with 'departure', 'destination' and a 'date' in
    %Y/%m/%d format; returns a fake order confirmation.  A missing or
    malformed date now yields an explicit error payload instead of an
    unhandled exception (HTTP 500).
    """
    raw_date = payload.get('date')
    try:
        dt = datetime.strptime(raw_date, "%Y/%m/%d")
    except (TypeError, ValueError):
        # TypeError: 'date' key absent (None); ValueError: wrong format.
        return {"status": "error", "detail": f"invalid or missing date: {raw_date!r}"}
    iso_str = dt.date().isoformat()
    return {"status": "success", "order_id": f"MOCK-FLIGHT-{iso_str}-001"}

# Start the mock API server on a daemon thread so the notebook stays
# interactive and the server dies with the kernel.
def run():
    uvicorn.run(app, host="127.0.0.1", port=8000)

thread = threading.Thread(target=run, daemon=True)
thread.start()
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "2229b6db",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
# Build the OpenAI client from the key loaded out of the environment.
openai_api_key = os.getenv('OPENAI_API_KEY')
llm = openai.OpenAI(api_key=openai_api_key)

# System prompt: concise, polite airline-assistant persona.
system_message = (
    "You are a helpful assistant for an Airline called FlightAI. "
    "Give short, courteous answers, no more than 1 sentence. "
    "Always be accurate. If you don't know the answer, say so."
)
|
||||
"\n",
|
||||
async def booking_flight(departure, destination, date):
    """Book a flight through the local mock API.

    Sends the trip details as JSON and returns the response body as raw
    JSON text (also echoed to stdout for visibility in the notebook).
    """
    print(f"Book the Flight Automatically, {departure} to {destination} at {date}.")
    async with httpx.AsyncClient() as client:
        res = await client.post(
            'http://127.0.0.1:8000/mock/ticket',
            json={"departure": departure, "destination": destination, "date": date},
        )
        print(res.text)
        return res.text
|
||||
# Per-parameter descriptions for the booking tool, in declaration order.
_flight_param_descriptions = {
    "departure": "The city where the customer departure",
    "destination": "The city that the customer wants to travel to",
    "date": "The date of the flight ",
}

# JSON-schema tool definition that lets the model call booking_flight.
book_function = {
    "name": "booking_flight",
    "description": (
        "async function for booking the flight ticket for customers and it "
        "will return the status and id of flight. Call this function whenever "
        "you were asked to book the flight, and you will automatically tell "
        "the status of the order and the book number! if customers don't "
        "provide their departure or destination or date you should inquire "
        "them courteous. Note that the date format you should keep them with "
        "%Y/%m/%d. for example when a customer asks 'Please help me book the "
        "ticket from <departure> to <destination>'"
    ),
    "parameters": {
        "type": "object",
        "properties": {
            field: {"type": "string", "description": desc}
            for field, desc in _flight_param_descriptions.items()
        },
        "required": ["destination", "departure", "date"],
        "additionalProperties": False,
    },
}
|
||||
"\n",
|
||||
async def handle_tool_call(message):
    """Execute the tool call requested by the model and build the reply.

    Assumes exactly one tool call per assistant message (booking_flight is
    the only registered tool).  Returns the 'tool'-role message dict that
    reports the booking result back to the model.
    """
    tool_call = message.tool_calls[0]
    arguments = json.loads(tool_call.function.arguments)
    res = await booking_flight(
        arguments.get('departure'),
        arguments.get('destination'),
        arguments.get('date'),
    )
    # res is already a JSON-formatted string (httpx Response.text), so pass
    # it through as-is; the previous json.dumps(res) double-encoded it into
    # a quoted, escape-riddled blob.
    return {
        "role": "tool",
        "content": res,
        "tool_call_id": tool_call.id,
    }

# Tool list in OpenAI chat-completions format.
tools = [{"type": "function", "function": book_function}]
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "5bf9656f",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
# Demo invocation; the bare top-level `await` relies on Jupyter's running event loop.
"res = await booking_flight('Taiwan', \"NewYork\", \"2025/12/03\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "d2924055",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
async def chat(message, history):
    """Gradio chat handler.

    Runs one completion with the booking tool available; when the model
    decides to call the tool, executes it and runs a second completion so
    the model can phrase the confirmation.  Returns the final reply text.
    """
    convo = [{"role": "system", "content": system_message}]
    convo += history
    convo.append({"role": "user", "content": message})

    res = llm.chat.completions.create(messages=convo,
                                      model="gpt-4.1-mini",
                                      tools=tools)
    choice = res.choices[0]

    if choice.finish_reason == 'tool_calls':
        assistant_msg = choice.message
        tool_msg = await handle_tool_call(assistant_msg)
        convo += [assistant_msg, tool_msg]
        # Follow-up round: let the model report the booking outcome.
        res = llm.chat.completions.create(messages=convo,
                                          model="gpt-4.1-mini")

    return res.choices[0].message.content

gr.ChatInterface(fn=chat,type="messages").launch()
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "3.10.15",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.15"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
175
week2/community-contributions/tsungyulin/week2 EXERCISE.ipynb
Normal file
175
week2/community-contributions/tsungyulin/week2 EXERCISE.ipynb
Normal file
@@ -0,0 +1,175 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "d006b2ea-9dfe-49c7-88a9-a5a0775185fd",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Additional End of week Exercise - week 2\n",
|
||||
"\n",
|
||||
"Now use everything you've learned from Week 2 to build a full prototype for the technical question/answerer you built in Week 1 Exercise.\n",
|
||||
"\n",
|
||||
"This should include a Gradio UI, streaming, use of the system prompt to add expertise, and the ability to switch between models. Bonus points if you can demonstrate use of a tool!\n",
|
||||
"\n",
|
||||
"If you feel bold, see if you can add audio input so you can talk to it, and have it respond with audio. ChatGPT or Claude can help you, or email me if you have questions.\n",
|
||||
"\n",
|
||||
"I will publish a full solution here soon - unless someone beats me to it...\n",
|
||||
"\n",
|
||||
"There are so many commercial applications for this, from a language tutor, to a company onboarding solution, to a companion AI to a course (like this one!) I can't wait to see your results."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "05fc552b",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import openai\n",
|
||||
"import anthropic\n",
|
||||
"import gradio as gr\n",
|
||||
"import dotenv\n",
|
||||
"import os"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "a07e7793-b8f5-44f4-aded-5562f633271a",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
class Chatbot:
    """Streaming chatbot over the OpenAI or Anthropic (Claude) API with a
    Gradio ChatInterface front end.

    The conversation, including the optional system prompt at index 0, is
    kept in ``historyPrompt`` as chat-completions-style message dicts.
    """

    def __init__(self, apiKey, publisher='openai'):
        """Build the underlying client.

        apiKey: provider API key.
        publisher: 'openai' or 'claude'; anything else raises ValueError.
        """
        if publisher not in ('openai', 'claude'):
            raise ValueError(f"publisher must be openai or claude, but got {publisher}")
        self.publisher = publisher
        self.systemPrompt = None
        self.historyPrompt = []
        self.llm = openai.OpenAI(api_key=apiKey) if publisher == 'openai' else anthropic.Anthropic(api_key=apiKey)

    def setSystemPrompt(self, systemPrompt: str):
        """Set (or replace) the system prompt kept at historyPrompt[0].

        Fixes two defects of the original: the history entry now gets the
        same stripped text as the attribute, and an existing non-system
        first message (possible if chat ran before this) is no longer
        overwritten — the system entry is inserted in front of it instead.
        """
        self.systemPrompt = systemPrompt.strip()
        entry = {"role": "system", "content": self.systemPrompt}
        if self.historyPrompt and self.historyPrompt[0].get("role") == "system":
            self.historyPrompt[0] = entry
        else:
            self.historyPrompt.insert(0, entry)

    def _prompt2obj(self, role: str, prompt: str):
        """Wrap a prompt string as a chat message dict (content stripped)."""
        return {
            "role": role,
            "content": prompt.strip()
        }

    def unpackText(self, chunk):
        """Extract the incremental text from one streaming chunk.

        OpenAI chunks carry text in choices[0].delta.content; Claude only
        in 'content_block_delta' events.  Returns '' for textless chunks.
        """
        if self.publisher == 'openai':
            return chunk.choices[0].delta.content or ''
        if chunk.type == "content_block_delta":
            return chunk.delta.text or ''
        return ''

    def chat(self, message):
        """Send `message` and yield the growing reply as chunks arrive.

        Appends the user message up front and the finished assistant reply
        afterwards, so the generator must be consumed fully for the history
        to stay in sync.
        """
        self.historyPrompt.append(self._prompt2obj("user", message))
        completeReply = ""

        if self.publisher == 'openai':
            stream = self.llm.chat.completions.create(model='gpt-4o-mini',
                                                      messages=self.historyPrompt,
                                                      stream=True)
        else:
            # Claude takes the system prompt out-of-band.  Guard against no
            # system prompt ever being set — the original silently promoted
            # historyPrompt[0] (a user message) to the system prompt.
            has_system = bool(self.historyPrompt) and self.historyPrompt[0].get("role") == "system"
            stream = self.llm.messages.create(system=self.historyPrompt[0]["content"] if has_system else "",
                                              model="claude-sonnet-4-20250514",
                                              max_tokens=200,
                                              messages=self.historyPrompt[1:] if has_system else self.historyPrompt,
                                              stream=True)

        for chunk in stream:
            completeReply += self.unpackText(chunk)
            yield completeReply

        self.historyPrompt.append(self._prompt2obj("assistant", completeReply))

    def _gradioChatWrapper(self):
        """Adapt chat() to Gradio's (message, history) callback signature."""
        def gradioChatFn(message, history):
            # Gradio tracks its own transcript; ours is authoritative, so
            # the `history` argument is intentionally ignored.
            yield from self.chat(message)
        return gradioChatFn

    def getAllPrompt(self):
        """Return the full message history (the live list, not a copy)."""
        return self.historyPrompt

    def run(self):
        """Launch a Gradio ChatInterface backed by this bot."""
        gradioFn = self._gradioChatWrapper()
        gr.ChatInterface(fn=gradioFn, type="messages").launch()
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "1fca53e8",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
# Load API keys from .env and build per-provider constructor kwargs.
dotenv.load_dotenv(".env", override=True)
openaiKey = os.getenv("OPENAI_API_KEY")
claudeKey = os.getenv("ANTHROPIC_API_KEY")
openaiInfo = {
    'apiKey': openaiKey,
    'publisher': 'openai'
}
claudeInfo = {
    'apiKey': claudeKey,
    'publisher': 'claude'
}

# Grammar fixed ("a technical experts and responds" -> singular, active voice).
SYSTEM_PROMPT = """
You are a technical expert and you respond to every question I ask with an explanation.
"""

openaiChatbot = Chatbot(**openaiInfo)
openaiChatbot.setSystemPrompt(SYSTEM_PROMPT)
openaiChatbot.run()

# To use the Claude-backed bot instead, comment out the three lines above
# and uncomment these:
# claudeChatbot = Chatbot(**claudeInfo)
# claudeChatbot.setSystemPrompt(SYSTEM_PROMPT)
# claudeChatbot.run()
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "59a2ac0f",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "3.10.15",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.15"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
Reference in New Issue
Block a user