Add Copilot and weather agent notebooks
Introduces Copilot.ipynb, an adaptive AI coding assistant that streams responses from either OpenAI or Gemini behind a Gradio chat interface, and weather_agent.ipynb, a weather chat agent that answers current, historical, and forecast queries via WeatherAPI.com and OpenAI tool-calling. Both notebooks expose interactive Gradio UIs; the weather agent also accepts a spoken location through speech_recognition.
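
Both notebooks load their API keys from a .env file via python-dotenv. Below is a minimal pre-flight check, as a sketch that assumes only the environment variable names already used in these notebooks (OPENAI_API_KEY, GOOGLE_API_KEY, WEATHER_API_KEY) and the packages they import (openai, google-generativeai, gradio, python-dotenv, requests, SpeechRecognition; versions are not pinned by this commit):

    # Sketch: verify the keys both notebooks expect before running them.
    # Assumes a .env file next to the notebooks.
    import os
    from dotenv import load_dotenv

    load_dotenv(override=True)

    for name in ("OPENAI_API_KEY", "GOOGLE_API_KEY", "WEATHER_API_KEY"):
        print(f"{name}: {'set' if os.getenv(name) else 'MISSING'}")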
212
week2/community-contributions/Copilot.ipynb
Normal file
@@ -0,0 +1,212 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "1877ad68",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import requests\n",
"from openai import OpenAI\n",
"import gradio as gr\n",
"from dotenv import load_dotenv\n",
"import google.generativeai as genai\n",
"from IPython.display import Markdown, display, update_display\n",
"load_dotenv(override=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "008056a2",
"metadata": {},
"outputs": [],
"source": [
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"if openai_api_key:\n",
"    print(f'OpenAI API key exists and it starts with {openai_api_key[:3]}')\n",
"else:\n",
"    print(\"OpenAI API key doesn't exist\")\n",
"\n",
"if google_api_key:\n",
"    print('Google API key exists')\n",
"else:\n",
"    print(\"Google API key doesn't exist\")\n",
"\n",
"OPENAI_MODEL = \"gpt-4o-mini\"\n",
"GOOGLE_MODEL = \"gemini-1.5-flash\"\n",
"\n",
"openai = OpenAI()\n",
"\n",
"genai.configure(api_key=google_api_key)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5013ed7b",
"metadata": {},
"outputs": [],
"source": [
"system_msg = \"\"\"\n",
"You are CodeCopilot, an adaptive AI coding assistant that helps users solve problems in any programming language.\n",
"Always provide correct, runnable, and well-formatted code with clear explanations.\n",
"Adjust your style based on the user’s expertise: for beginners, break concepts down step by step with simple examples and commented code;\n",
"for advanced users, deliver concise, production-ready, optimized solutions with best practices and trade-off insights.\n",
"Ask clarifying questions when requirements are ambiguous, highlight pitfalls and edge cases,\n",
"and act as a collaborative pair programmer or mentor whose goal is to help users learn, build, and ship high-quality code efficiently.\n",
"\"\"\"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "35c480a1",
"metadata": {},
"outputs": [],
"source": [
"def create_prompt(prompt, history):\n",
"    messages = [{\"role\": \"system\", \"content\": system_msg}]\n",
"\n",
"    # history is a list of (user_msg, assistant_msg) tuples\n",
"    for user_msg, assistant_msg in history:\n",
"        if user_msg:\n",
"            messages.append({\"role\": \"user\", \"content\": user_msg})\n",
"        if assistant_msg:\n",
"            messages.append({\"role\": \"assistant\", \"content\": assistant_msg})\n",
"\n",
"    # new user prompt\n",
"    messages.append({\"role\": \"user\", \"content\": prompt})\n",
"    return messages"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5dfbecd0",
"metadata": {},
"outputs": [],
"source": [
"def openai_agent(prompt, history):\n",
"    openai.api_key = openai_api_key\n",
"    messages = create_prompt(prompt, history)\n",
"    response = openai.chat.completions.create(\n",
"        model=OPENAI_MODEL,\n",
"        messages=messages,\n",
"        stream=True\n",
"    )\n",
"    sent_any = False\n",
"    for chunk in response:\n",
"        delta = chunk.choices[0].delta\n",
"        if delta and delta.content:\n",
"            sent_any = True\n",
"            yield delta.content\n",
"    if not sent_any:\n",
"        yield \"(no response)\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "535f7e3d",
"metadata": {},
"outputs": [],
"source": [
"def gemini_agent(prompt, history):\n",
"    genai.configure(api_key=google_api_key)\n",
"\n",
"    # reuse OpenAI-style messages; the last entry is the new prompt\n",
"    messages = create_prompt(prompt, history)\n",
"\n",
"    gemini_history = []\n",
"    for m in messages[:-1]:\n",
"        # Gemini does NOT support a system role, and calls the assistant role \"model\"\n",
"        if m[\"role\"] == \"system\":\n",
"            continue\n",
"        gemini_history.append({\n",
"            \"role\": \"model\" if m[\"role\"] == \"assistant\" else \"user\",\n",
"            \"parts\": [m[\"content\"]]\n",
"        })\n",
"    prompt_with_system = f\"{system_msg}\\n\\n{prompt}\"\n",
"    model = genai.GenerativeModel(GOOGLE_MODEL)\n",
"    chat = model.start_chat(history=gemini_history)\n",
"\n",
"    response = chat.send_message(prompt_with_system, stream=True)\n",
"    for chunk in response:\n",
"        if chunk and getattr(chunk, \"text\", None):\n",
"            yield chunk.text\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "21f61ff0",
"metadata": {},
"outputs": [],
"source": [
"def chat_agent(prompt, history, modelType):\n",
"    if modelType == \"OpenAI\":\n",
"        for token in openai_agent(prompt, history):\n",
"            yield token\n",
"    else:\n",
"        for token in gemini_agent(prompt, history):\n",
"            yield token\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "56686c1d",
"metadata": {},
"outputs": [],
"source": [
"def chat_fn(prompt, history, model):\n",
"    assistant_response = \"\"\n",
"    for token in chat_agent(prompt, history, model):\n",
"        assistant_response += token\n",
"        yield assistant_response\n",
"\n",
"# -------------------------------------------------------------------\n",
"# UI\n",
"# -------------------------------------------------------------------\n",
"with gr.Blocks() as demo:\n",
"    model_choice = gr.Radio([\"OpenAI\", \"Gemini\"], value=\"OpenAI\", label=\"Model\")\n",
"\n",
"    chat_ui = gr.ChatInterface(\n",
"        fn=chat_fn,\n",
"        additional_inputs=[model_choice],\n",
"        title=\"CodeCopilot\",\n",
"        description=\"An adaptive AI coding assistant that helps developers build and ship high-quality code.\"\n",
"    )\n",
"\n",
"demo.launch()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "llms",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
370
week2/community-contributions/weather_agent.ipynb
Normal file
@@ -0,0 +1,370 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "60761989",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import datetime\n",
"import requests\n",
"from openai import OpenAI\n",
"import gradio as gr\n",
"import speech_recognition as sr\n",
"import json\n",
"from dotenv import load_dotenv"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "e0b6610a",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"OpenAI API Key exists and begins sk-proj-\n",
"weather API Key exists\n"
]
}
],
"source": [
"# Initialization\n",
"\n",
"load_dotenv(override=True)\n",
"\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"weather_api_key = os.getenv('WEATHER_API_KEY')\n",
"if openai_api_key:\n",
"    print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
"else:\n",
"    print(\"OpenAI API Key not set\")\n",
"if weather_api_key:\n",
"    print(\"weather API Key exists\")\n",
"else:\n",
"    print(\"weather API Key not set\")\n",
"\n",
"MODEL = \"gpt-4o-mini\"\n",
"openai = OpenAI()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "af9d2faf",
"metadata": {},
"outputs": [],
"source": [
"system_message = \"You are a helpful assistant for weather. \"\n",
"system_message += \"You fetch current, historical, and forecast weather data using the weather API and use it to answer the user's question.\""
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "2c5208d8",
"metadata": {},
"outputs": [],
"source": [
"def fetch_current_weather(location):\n",
"    url = f\"http://api.weatherapi.com/v1/current.json?key={weather_api_key}&q={location}&aqi=yes\"\n",
"    return requests.get(url).json()"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "8e6a12e5",
"metadata": {},
"outputs": [],
"source": [
"def fetch_forecast_weather(location, days=3):\n",
"    url = f\"http://api.weatherapi.com/v1/forecast.json?key={weather_api_key}&q={location}&days={days}&aqi=yes&alerts=yes\"\n",
"    return requests.get(url).json()"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "eafc468e",
"metadata": {},
"outputs": [],
"source": [
"def fetch_historical_weather(location, date):\n",
"    url = f\"http://api.weatherapi.com/v1/history.json?key={weather_api_key}&q={location}&dt={date}&aqi=yes\"\n",
"    return requests.get(url).json()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "2851ed55",
"metadata": {},
"outputs": [],
"source": [
"# Weather function used as a tool by OpenAI\n",
"def get_weatherapi_data(location, mode=\"current\", date=None, forecast_days=3):\n",
"    if mode == \"current\":\n",
"        return fetch_current_weather(location)\n",
"    elif mode == \"forecast\":\n",
"        return fetch_forecast_weather(location, days=forecast_days)\n",
"    elif mode == \"historical\":\n",
"        if not date:\n",
"            # Default: yesterday\n",
"            date = (datetime.date.today() - datetime.timedelta(days=1)).strftime(\"%Y-%m-%d\")\n",
"        return fetch_historical_weather(location, date)\n",
"    else:\n",
"        return {\"error\": \"Unknown mode.\"}"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "368176c2",
"metadata": {},
"outputs": [],
"source": [
"# Tool schema for OpenAI tool-calling\n",
"weatherapi_tool_schema = [\n",
"    {\n",
"        \"type\": \"function\",\n",
"        \"function\": {\n",
"            \"name\": \"get_weatherapi_data\",\n",
"            \"description\": \"Fetches current, forecast, or historical weather data from WeatherAPI.com for a given location.\",\n",
"            \"parameters\": {\n",
"                \"type\": \"object\",\n",
"                \"properties\": {\n",
"                    \"location\": {\n",
"                        \"type\": \"string\",\n",
"                        \"description\": \"Name of the city, region, or coordinates.\"\n",
"                    },\n",
"                    \"mode\": {\n",
"                        \"type\": \"string\",\n",
"                        \"enum\": [\"current\", \"forecast\", \"historical\"],\n",
"                        \"description\": \"Type of weather data required.\"\n",
"                    },\n",
"                    \"date\": {\n",
"                        \"type\": \"string\",\n",
"                        \"description\": \"Date for historical data in YYYY-MM-DD format. Only needed if mode is 'historical'.\"\n",
"                    },\n",
"                    \"forecast_days\": {\n",
"                        \"type\": \"integer\",\n",
"                        \"description\": \"Number of forecast days (1-10). Only needed if mode is 'forecast'.\"\n",
"                    }\n",
"                },\n",
"                \"required\": [\"location\", \"mode\"]\n",
"            }\n",
"        }\n",
"    }\n",
"]"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "bd9c4d38",
"metadata": {},
"outputs": [],
"source": [
"def audio_to_text(audio_filepath):\n",
"    if audio_filepath is None or audio_filepath == \"\":\n",
"        return \"\"\n",
"    recognizer = sr.Recognizer()\n",
"    try:\n",
"        with sr.AudioFile(audio_filepath) as source:\n",
"            audio = recognizer.record(source)\n",
"        try:\n",
"            transcript = recognizer.recognize_google(audio)\n",
"            return transcript\n",
"        except sr.UnknownValueError:\n",
"            return \"\"\n",
"        except sr.RequestError as e:\n",
"            return f\"Speech recognition service error: {e}\"\n",
"    except Exception as e:\n",
"        return f\"Error opening audio file: {str(e)}\""
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "61c5de82",
"metadata": {},
"outputs": [],
"source": [
"def chat_agent(city, mode, date, forecast_days, audio=None):\n",
"    user_query = city\n",
"    if audio:\n",
"        spoken_text = audio_to_text(audio)\n",
"        print(\"Recognized speech:\", spoken_text)\n",
"        if spoken_text and spoken_text.strip().lower() != \"flic en flac\":\n",
"            user_query = spoken_text\n",
"        else:\n",
"            if not city.strip():\n",
"                return \"Sorry, I could not recognize your speech. Please try again or type your city.\"\n",
"\n",
"    if not user_query.strip():\n",
"        return \"Please provide a location by text or speech.\"\n",
"\n",
"    # Compose tool function arguments as the LLM would\n",
"    args = {\n",
"        \"location\": user_query,\n",
"        \"mode\": mode\n",
"    }\n",
"    if mode == \"historical\" and date:\n",
"        args[\"date\"] = date\n",
"    if mode == \"forecast\":\n",
"        try:\n",
"            n_days = int(forecast_days)\n",
"        except (TypeError, ValueError):\n",
"            n_days = 3\n",
"        args[\"forecast_days\"] = n_days\n",
"\n",
"    openai.api_key = openai_api_key\n",
"\n",
"    # LLM call for tool use\n",
"    response = openai.chat.completions.create(\n",
"        model=MODEL,\n",
"        messages=[{\"role\": \"user\", \"content\": f\"Get me {mode} weather for {user_query}\"+(f' on {date}' if date and mode==\"historical\" else \"\")+(f' for {forecast_days} days' if forecast_days and mode==\"forecast\" else \"\")}],\n",
"        tools=weatherapi_tool_schema,\n",
"        tool_choice={\"type\": \"function\", \"function\": {\"name\": \"get_weatherapi_data\"}}\n",
"    )\n",
"    message = response.choices[0].message\n",
"\n",
"    if hasattr(message, \"tool_calls\") and message.tool_calls:\n",
"        tool_call = message.tool_calls[0]\n",
"        args2 = json.loads(tool_call.function.arguments)  # not really needed, already have args\n",
"        location = args2.get(\"location\", user_query)\n",
"        mode = args2.get(\"mode\", mode)\n",
"        date = args2.get(\"date\", date)\n",
"        forecast_days = args2.get(\"forecast_days\", forecast_days)\n",
"        weather_data = get_weatherapi_data(location, mode, date, forecast_days)\n",
"        tool_result = f\"Weather data (mode={mode}) for {location}:\\n{json.dumps(weather_data, indent=2)[:3000]}\"\n",
"        followup = openai.chat.completions.create(\n",
"            model=MODEL,\n",
"            messages=[\n",
"                {\"role\": \"user\", \"content\": f\"Get me {mode} weather for {location}\"},\n",
"                message,\n",
"                {\n",
"                    \"role\": \"tool\",\n",
"                    \"tool_call_id\": tool_call.id,\n",
"                    \"content\": tool_result\n",
"                }\n",
"            ]\n",
"        )\n",
"        answer = followup.choices[0].message.content.strip()\n",
"        return answer\n",
"    else:\n",
"        return getattr(message, \"content\", \"\")"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "44071389",
"metadata": {},
"outputs": [],
"source": [
"def update_date_visibility(mode):\n",
"    return gr.update(visible=(mode==\"historical\"))\n",
"\n",
"def update_days_visibility(mode):\n",
"    return gr.update(visible=(mode==\"forecast\"))"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "618a5494",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7861\n",
"* To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Recognized speech: Error opening audio file: FLAC conversion utility not available - consider installing the FLAC command line application by running `apt-get install flac` or your operating system's equivalent\n"
]
}
],
"source": [
"with gr.Blocks() as demo:\n",
"    gr.Markdown(\"## Weather Chat Agent (Current, Historical, Forecast)\")\n",
"\n",
"    with gr.Row():\n",
"        city_input = gr.Textbox(label=\"City/Location\")\n",
"        mode_input = gr.Dropdown(\n",
"            [\"current\", \"historical\", \"forecast\"],\n",
"            value=\"current\",\n",
"            label=\"Weather Mode\")\n",
"    with gr.Row():\n",
"        date_input = gr.Textbox(label=\"Date for historical (YYYY-MM-DD)\", visible=False)\n",
"        days_input = gr.Textbox(label=\"Forecast Days (for forecast)\", value=\"3\", visible=False)\n",
"    audio_input = gr.Audio(type=\"filepath\", format=\"wav\", label=\"Or Speak your City/Location (optional)\")\n",
"    output_box = gr.Textbox(label=\"Weather Info\", lines=8)\n",
"    btn = gr.Button(\"Get Weather\")\n",
"\n",
"    # Show/hide date and days inputs based on dropdown\n",
"    mode_input.change(update_date_visibility, mode_input, date_input)\n",
"    mode_input.change(update_days_visibility, mode_input, days_input)\n",
"    btn.click(\n",
"        chat_agent,\n",
"        [city_input, mode_input, date_input, days_input, audio_input],\n",
"        output_box\n",
"    )\n",
"\n",
"demo.launch()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "llms",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}