Files
LLM_Engineering_OLD/week2/community-contributions/technical-question-answerer-with-gradio-v3.ipynb
2025-08-21 10:39:56 -04:00

183 lines
5.9 KiB
Plaintext

{
"cells": [
{
"cell_type": "markdown",
"id": "2b57204f-3e19-4d11-8901-c0e153ad9992",
"metadata": {},
"source": [
"## Technical Question Answerer With Gradio\n",
"- Ask a technical question to a chatbot imbued with multimodal capabilities.\n",
"- Choose between different models (e.g. OpenAI's GPT, Anthropic's Claude)."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "bd8e9bef-87ab-46d6-9393-bb308d7e5bc4",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import requests\n",
"from bs4 import BeautifulSoup\n",
"from typing import List\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import google.generativeai\n",
"import anthropic\n",
"\n",
"import gradio as gr\n",
"import base64\n",
"from io import BytesIO\n",
"from PIL import Image\n",
"from IPython.display import Audio, display\n",
"\n",
"# Load environment variables in a file called .env\n",
"# Print the key prefixes to help with any debugging\n",
"load_dotenv(override=True)\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"# Report which keys are present (prefix only, never the full secret).\n",
"if openai_api_key:\n",
"    print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
"else:\n",
"    print(\"OpenAI API Key not set\")\n",
"\n",
"if anthropic_api_key:\n",
"    print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
"else:\n",
"    print(\"Anthropic API Key not set\")\n",
"\n",
"if google_api_key:\n",
"    print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
"else:\n",
"    print(\"Google API Key not set\")\n",
"\n",
"# Connect to OpenAI, Anthropic and Google; comment out the Claude or Google lines if you're not using them\n",
"openai = OpenAI()\n",
"# NOTE(review): this native Anthropic client appears unused — the Claude chat\n",
"# below goes through the OpenAI-compatible endpoint instead; confirm and drop?\n",
"claude = anthropic.Anthropic()\n",
"# google.generativeai.configure()\n",
"\n",
"# System prompt shared by both chat functions.\n",
"system_message = \"You are a helpful assistant that explains technical contents and responds in markdown\"\n",
"\n",
"def talker(message):\n",
"    \"\"\"Convert `message` to speech with OpenAI TTS and autoplay it inline.\n",
"\n",
"    Saves the generated MP3 to output_audio.mp3 in the working directory,\n",
"    then plays it in the notebook via IPython's Audio display. Network I/O;\n",
"    requires OPENAI_API_KEY.\n",
"    \"\"\"\n",
"    response = openai.audio.speech.create(\n",
"        model=\"tts-1\",\n",
"        voice=\"onyx\",\n",
"        input=message)\n",
"\n",
"    # response.content already holds the raw MP3 bytes — write them directly\n",
"    # (the previous BytesIO round-trip added an extra copy for no benefit).\n",
"    output_filename = \"output_audio.mp3\"\n",
"    with open(output_filename, \"wb\") as f:\n",
"        f.write(response.content)\n",
"\n",
"    display(Audio(output_filename, autoplay=True))\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "20486a61-5d59-4370-b92c-3b7fec63835c",
"metadata": {},
"outputs": [],
"source": [
"# --- Chat functions ---\n",
"def chat_gpt(history):\n",
"    \"\"\"Send the conversation to OpenAI's GPT and append its reply.\n",
"\n",
"    history: list of {\"role\", \"content\"} message dicts (user/assistant turns).\n",
"    Returns a new history list ending with the assistant turn; also speaks\n",
"    the reply aloud via talker().\n",
"    \"\"\"\n",
"    convo = [{\"role\": \"system\", \"content\": system_message}, *history]\n",
"    completion = openai.chat.completions.create(\n",
"        model=\"gpt-4o-mini\",\n",
"        messages=convo\n",
"    )\n",
"    assistant_reply = completion.choices[0].message.content\n",
"    talker(assistant_reply)  # make it talk\n",
"    return [*history, {\"role\": \"assistant\", \"content\": assistant_reply}]\n",
"\n",
"\n",
"# Anthropic exposes an OpenAI-compatible endpoint, so we can reuse the\n",
"# OpenAI SDK client pointed at api.anthropic.com.\n",
"claude_via_openai_client = OpenAI(\n",
"    api_key=anthropic_api_key,\n",
"    base_url=\"https://api.anthropic.com/v1\"\n",
")\n",
"\n",
"def chat_claude(history):\n",
"    \"\"\"Send the conversation to Anthropic's Claude and append its reply.\n",
"\n",
"    Mirrors chat_gpt(): takes/returns a list of {\"role\", \"content\"} message\n",
"    dicts and speaks the reply aloud via talker().\n",
"    \"\"\"\n",
"    convo = [{\"role\": \"system\", \"content\": system_message}, *history]\n",
"    completion = claude_via_openai_client.chat.completions.create(\n",
"        model=\"claude-3-haiku-20240307\",\n",
"        messages=convo\n",
"    )\n",
"    assistant_reply = completion.choices[0].message.content\n",
"    talker(assistant_reply)  # make it talk\n",
"    return [*history, {\"role\": \"assistant\", \"content\": assistant_reply}]\n",
"\n",
"\n",
"# --- Gradio UI ---\n",
"# Layout: chat transcript, model dropdown, text entry, and a clear button.\n",
"with gr.Blocks() as ui:\n",
"    with gr.Row():\n",
"        chatbot = gr.Chatbot(height=500, type=\"messages\")\n",
"    with gr.Row():\n",
"        the_model = gr.Dropdown([\"GPT\", \"Claude\"], label=\"Select model\", value=\"GPT\")\n",
"    with gr.Row():\n",
"        entry = gr.Textbox(label=\"Chat with our AI Assistant:\")\n",
"    with gr.Row():\n",
"        clear = gr.Button(\"Clear\")\n",
"\n",
"    def do_entry(message, history, model):\n",
"        \"\"\"Handle a submitted message: append the user turn, call the\n",
"        selected model, and return (cleared textbox, updated history).\"\"\"\n",
"        # add user turn\n",
"        history = history + [{\"role\": \"user\", \"content\": message}]\n",
"        # call selected model\n",
"        if model == \"GPT\":\n",
"            history = chat_gpt(history)\n",
"        elif model == \"Claude\":\n",
"            history = chat_claude(history)\n",
"        return \"\", history\n",
"\n",
"    # Submitting the textbox clears it and refreshes the transcript.\n",
"    entry.submit(\n",
"        fn=do_entry,\n",
"        inputs=[entry, chatbot, the_model],\n",
"        outputs=[entry, chatbot]  # only 2 outputs\n",
"    )\n",
"\n",
"    # Reset the transcript to an empty message list.\n",
"    clear.click(lambda: [], None, chatbot, queue=False)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "13974664-2965-46b9-9c56-714c70d3f835",
"metadata": {},
"outputs": [],
"source": [
"# Launch the app; inbrowser=True opens it in a new browser tab.\n",
"ui.launch(inbrowser=True)\n",
"\n",
"# Example question to try in the chat:\n",
"# prompt = \"\"\"\n",
"# Please explain what this code does and why:\n",
"# yield from {book.get(\"author\") for book in books if book.get(\"author\")}\n",
"# \"\"\""
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}