224 lines
5.8 KiB
Plaintext
224 lines
5.8 KiB
Plaintext
{
|
|
"cells": [
|
|
{
|
|
"cell_type": "markdown",
|
|
"id": "057bc09f-a682-4b72-97ed-c69ddef3f03e",
|
|
"metadata": {},
|
|
"source": [
|
|
"# Gemini to Dropdown"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "d66eb067-7bae-4145-b613-6da2f40fbf27",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"import os\n",
|
|
"import requests\n",
|
|
"from bs4 import BeautifulSoup\n",
|
|
"from typing import List\n",
|
|
"from dotenv import load_dotenv\n",
|
|
"from openai import OpenAI\n",
|
|
"import google.generativeai as genai\n",
|
|
"import anthropic"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "e36f8a93-8a65-48f2-bcad-7c47dd72ef3a",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"import gradio as gr "
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "8a5ec1b0-f5b4-46d2-abb0-b28b73cc4d28",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Load keys from .env and report which providers are configured.\n",
"load_dotenv(override=True)\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"# One loop instead of three copy-pasted if/else blocks; the prefix\n",
"# lengths (8, 7, 8) match the original per-provider messages.\n",
"for provider, key, n in [(\"OpenAI\", openai_api_key, 8),\n",
"                         (\"Anthropic\", anthropic_api_key, 7),\n",
"                         (\"Google\", google_api_key, 8)]:\n",
"    if key:\n",
"        print(f\"{provider} API Key exists and begins {key[:n]}\")\n",
"    else:\n",
"        print(f\"{provider} API Key not set\")"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "26d0099c-890f-4358-8c1d-7a708abcb105",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Initialize the three API clients. OpenAI and Anthropic read their\n",
"# keys from the environment automatically.\n",
"openai = OpenAI()\n",
"\n",
"claude = anthropic.Anthropic()\n",
"\n",
"# Bug fix: the import above is `import google.generativeai as genai`,\n",
"# which binds only `genai` -- the bare name `google` was a NameError.\n",
"# Configure via the alias and pass the key loaded earlier.\n",
"genai.configure(api_key=google_api_key)"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "6606bfdb-964e-4d6f-b2a1-5017b99aa23d",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Shared system prompt used by all three streaming functions below.\n",
"system_message = \"You are a helpful assistant\""
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "e0cfb96a-2dbe-4228-8efb-75947dbc3228",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"def stream_gpt(prompt):\n",
"    \"\"\"Stream a gpt-4o-mini reply, yielding the accumulated text so far.\"\"\"\n",
"    stream = openai.chat.completions.create(\n",
"        model='gpt-4o-mini',\n",
"        messages=[\n",
"            {\"role\": \"system\", \"content\": system_message},\n",
"            {\"role\": \"user\", \"content\": prompt}\n",
"        ],\n",
"        stream=True\n",
"    )\n",
"    reply = \"\"\n",
"    for chunk in stream:\n",
"        # delta.content is None on some chunks (e.g. the final one).\n",
"        reply += chunk.choices[0].delta.content or \"\"\n",
"        yield reply"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "9008a15d-0ee8-44e0-b123-225e7148113e",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"def stream_claude(prompt):\n",
"    \"\"\"Stream a Claude reply, yielding the accumulated text so far.\"\"\"\n",
"    request = claude.messages.stream(\n",
"        model=\"claude-3-haiku-20240307\",\n",
"        max_tokens=1000,\n",
"        temperature=0.7,\n",
"        system=system_message,\n",
"        messages=[{\"role\": \"user\", \"content\": prompt}],\n",
"    )\n",
"    reply = \"\"\n",
"    # The stream helper is a context manager; text_stream yields deltas.\n",
"    with request as stream:\n",
"        for text in stream.text_stream:\n",
"            reply += text or \"\"\n",
"            yield reply"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "378ad12e-6645-4647-807c-00995e360268",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"def stream_gemini(prompt):\n",
"    \"\"\"Stream a Gemini reply, yielding the accumulated text so far.\"\"\"\n",
"    model = genai.GenerativeModel(\n",
"        model_name=\"gemini-2.0-flash\",\n",
"        system_instruction=system_message\n",
"    )\n",
"    reply = \"\"\n",
"    for chunk in model.generate_content(prompt, stream=True):\n",
"        # chunk.text can raise (e.g. blocked/empty candidates); keep the\n",
"        # stream alive on a best-effort basis, as before.\n",
"        try:\n",
"            piece = chunk.text\n",
"        except Exception as e:\n",
"            print(\"Chunk error:\", e)\n",
"            continue\n",
"        if piece:\n",
"            reply += piece\n",
"            yield reply"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "fd50e143-eead-49b1-8ea3-b440becd4bc9",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"def stream_model(prompt, model):\n",
"    \"\"\"Route the prompt to the streaming generator for the chosen model.\"\"\"\n",
"    streamers = {\n",
"        \"GPT\": stream_gpt,\n",
"        \"Claude\": stream_claude,\n",
"        \"Gemini\": stream_gemini,\n",
"    }\n",
"    if model not in streamers:\n",
"        raise ValueError(\"Unknown model\")\n",
"    yield from streamers[model](prompt)"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"execution_count": null,
|
|
"id": "c7fc9cb4-fbb8-4301-86a6-96c90f67eb3b",
|
|
"metadata": {},
|
|
"outputs": [],
|
|
"source": [
|
|
"# Two inputs (prompt text + model picker), streaming Markdown output.\n",
"view = gr.Interface(\n",
"    fn=stream_model,\n",
"    inputs=[\n",
"        gr.Textbox(label=\"Your message:\"),\n",
"        gr.Dropdown([\"GPT\", \"Claude\", \"Gemini\"], label=\"Select model\", value=\"GPT\"),\n",
"    ],\n",
"    outputs=[gr.Markdown(label=\"Response:\")],\n",
"    flagging_mode=\"never\"\n",
")\n",
"view.launch()"
|
|
]
|
|
}
|
|
],
|
|
"metadata": {
|
|
"kernelspec": {
|
|
"display_name": "Python 3 (ipykernel)",
|
|
"language": "python",
|
|
"name": "python3"
|
|
},
|
|
"language_info": {
|
|
"codemirror_mode": {
|
|
"name": "ipython",
|
|
"version": 3
|
|
},
|
|
"file_extension": ".py",
|
|
"mimetype": "text/x-python",
|
|
"name": "python",
|
|
"nbconvert_exporter": "python",
|
|
"pygments_lexer": "ipython3",
|
|
"version": "3.11.13"
|
|
}
|
|
},
|
|
"nbformat": 4,
|
|
"nbformat_minor": 5
|
|
}
|