Files
KiranAyyagari 72eb3562b7 Add Copilot and weather agent notebooks
Introduces Copilot.ipynb, an adaptive AI coding assistant with OpenAI and Gemini integration via Gradio, and weather_agent.ipynb, a weather chat agent supporting current, historical, and forecast queries using WeatherAPI and OpenAI tool-calling. Both notebooks provide interactive UIs for user queries.
2025-08-18 20:16:14 +05:30

213 lines
6.5 KiB
Plaintext
Raw Permalink Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "1877ad68",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import requests\n",
"from openai import OpenAI\n",
"import gradio as gr\n",
"from dotenv import load_dotenv \n",
"import google.generativeai as genai\n",
"from IPython.display import Markdown, display, update_display\n",
"load_dotenv(override=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "008056a2",
"metadata": {},
"outputs": [],
"source": [
"# Load API keys from the environment, report their presence, and set up\n",
"# both model backends.\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"if openai_api_key:\n",
"    # Print only a short prefix so the secret is never fully revealed.\n",
"    print(f'OpenAI api key exists and it starts with {openai_api_key[:3]}')\n",
"else:\n",
"    print(\"OpenAI api key doesn't exist\")\n",
"\n",
"if google_api_key:\n",
"    print('Google api key exists')\n",
"else:\n",
"    print(\"Google api key doesn't exist\")\n",
"\n",
"OPENAI_MODEL = \"gpt-4o-mini\"\n",
"GOOGLE_MODEL = \"gemini-1.5-flash\"\n",
"\n",
"# OpenAI() reads OPENAI_API_KEY from the environment by default.\n",
"openai = OpenAI()\n",
"\n",
"# Configure Gemini explicitly with the key read above instead of relying\n",
"# on google.generativeai to find GOOGLE_API_KEY in the environment itself.\n",
"genai.configure(api_key=google_api_key)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5013ed7b",
"metadata": {},
"outputs": [],
"source": [
"# System prompt shared by both backends; defines the assistant persona.\n",
"system_msg = \"\"\"\n",
"You are CodeCopilot, an adaptive AI coding assistant that helps users solve problems in any programming language.\n",
"Always provide correct, runnable, and well-formatted code with clear explanations.\n",
"Adjust your style based on the user's expertise: for beginners, break concepts down step by step with simple examples and commented code;\n",
"for advanced users, deliver concise, production-ready, optimized solutions with best practices and trade-off insights.\n",
"Ask clarifying questions when requirements are ambiguous, highlight pitfalls and edge cases,\n",
"and act as a collaborative pair programmer or mentor whose goal is to help users learn, build, and ship high-quality code efficiently.\n",
"\"\"\"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "35c480a1",
"metadata": {},
"outputs": [],
"source": [
"def create_prompt(prompt, history):\n",
"    \"\"\"Assemble the OpenAI-style message list for one chat turn.\n",
"\n",
"    history is a list of (user_msg, assistant_msg) tuples; empty halves\n",
"    of a tuple are skipped. The new prompt is always appended last.\n",
"    \"\"\"\n",
"    messages = [{\"role\": \"system\", \"content\": system_msg}]\n",
"\n",
"    for human_turn, bot_turn in history:\n",
"        messages.extend(\n",
"            {\"role\": role, \"content\": text}\n",
"            for role, text in ((\"user\", human_turn), (\"assistant\", bot_turn))\n",
"            if text\n",
"        )\n",
"\n",
"    messages.append({\"role\": \"user\", \"content\": prompt})\n",
"    return messages"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5dfbecd0",
"metadata": {},
"outputs": [],
"source": [
"def openai_agent(prompt, history):\n",
"    \"\"\"Stream an OpenAI chat completion, yielding text fragments.\n",
"\n",
"    Yields a single \"(no response)\" token if the stream produced no\n",
"    content at all.\n",
"    \"\"\"\n",
"    openai.api_key = openai_api_key\n",
"    stream = openai.chat.completions.create(\n",
"        model=OPENAI_MODEL,\n",
"        messages=create_prompt(prompt, history),\n",
"        stream=True\n",
"    )\n",
"    produced_output = False\n",
"    for event in stream:\n",
"        piece = event.choices[0].delta\n",
"        if piece and piece.content:\n",
"            produced_output = True\n",
"            yield piece.content\n",
"    if not produced_output:\n",
"        yield \"(no response)\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "535f7e3d",
"metadata": {},
"outputs": [],
"source": [
"def gemini_agent(prompt, history):\n",
"    \"\"\"Stream a Gemini reply, yielding text fragments.\n",
"\n",
"    Gemini's chat history accepts only the roles \"user\" and \"model\"\n",
"    (not \"assistant\") and has no system role, so the system prompt is\n",
"    folded into the outgoing message instead.\n",
"    \"\"\"\n",
"    genai.configure(api_key=google_api_key)\n",
"\n",
"    # Build Gemini-format history straight from the (user, assistant)\n",
"    # tuples. The new prompt is deliberately NOT added here -- it is\n",
"    # sent via send_message below, so putting it in the history as well\n",
"    # would duplicate it.\n",
"    gemini_history = []\n",
"    for user_msg, assistant_msg in history:\n",
"        if user_msg:\n",
"            gemini_history.append({\"role\": \"user\", \"parts\": [user_msg]})\n",
"        if assistant_msg:\n",
"            # Gemini names the assistant side \"model\".\n",
"            gemini_history.append({\"role\": \"model\", \"parts\": [assistant_msg]})\n",
"\n",
"    prompt_with_system = f\"{system_msg}\\n\\n{prompt}\"\n",
"    model = genai.GenerativeModel(GOOGLE_MODEL)\n",
"    chat = model.start_chat(history=gemini_history)\n",
"\n",
"    response = chat.send_message(prompt_with_system, stream=True)\n",
"    for chunk in response:\n",
"        if chunk and getattr(chunk, \"text\", None):\n",
"            yield chunk.text\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "21f61ff0",
"metadata": {},
"outputs": [],
"source": [
"def chat_agent(prompt, history, modelType):\n",
"    \"\"\"Relay streamed tokens from whichever backend the user selected.\"\"\"\n",
"    backend = openai_agent if modelType == \"OpenAI\" else gemini_agent\n",
"    yield from backend(prompt, history)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "56686c1d",
"metadata": {},
"outputs": [],
"source": [
"def chat_fn(prompt, history, model):\n",
"    \"\"\"Gradio streaming callback: yield the reply as it grows.\"\"\"\n",
"    partial_reply = \"\"\n",
"    for token in chat_agent(prompt, history, model):\n",
"        partial_reply = partial_reply + token\n",
"        yield partial_reply\n",
"\n",
"# -------------------------------------------------------------------\n",
"# UI\n",
"# -------------------------------------------------------------------\n",
"with gr.Blocks() as demo:\n",
"    model_choice = gr.Radio([\"OpenAI\", \"Gemini\"], value=\"OpenAI\", label=\"Model\")\n",
"\n",
"    chat_ui = gr.ChatInterface(\n",
"        fn=chat_fn,\n",
"        additional_inputs=[model_choice],\n",
"        title=\"CodeCopilot\",\n",
"        description=\"An adaptive AI coding assistant that helps developers build and ship high-quality code.\"\n",
"    )\n",
"\n",
"demo.launch()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "llms",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}