Merge pull request #619 from Aru12345/main

Added Mediterranean Banter notebook to community-contributions
This commit is contained in:
Ed Donner
2025-08-31 15:33:35 +01:00
committed by GitHub
3 changed files with 656 additions and 0 deletions

@@ -0,0 +1,223 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "057bc09f-a682-4b72-97ed-c69ddef3f03e",
"metadata": {},
"source": [
"# Gemini to Dropdown"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d66eb067-7bae-4145-b613-6da2f40fbf27",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import requests\n",
"from bs4 import BeautifulSoup\n",
"from typing import List\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import google.generativeai as genai\n",
"import anthropic"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e36f8a93-8a65-48f2-bcad-7c47dd72ef3a",
"metadata": {},
"outputs": [],
"source": [
"import gradio as gr "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8a5ec1b0-f5b4-46d2-abb0-b28b73cc4d28",
"metadata": {},
"outputs": [],
"source": [
"load_dotenv(override=True)\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"if openai_api_key:\n",
" print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
"else:\n",
" print(\"OpenAI API Key not set\")\n",
" \n",
"if anthropic_api_key:\n",
" print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
"else:\n",
" print(\"Anthropic API Key not set\")\n",
"\n",
"if google_api_key:\n",
" print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
"else:\n",
" print(\"Google API Key not set\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "26d0099c-890f-4358-8c1d-7a708abcb105",
"metadata": {},
"outputs": [],
"source": [
"\n",
"openai = OpenAI()\n",
"\n",
"claude = anthropic.Anthropic()\n",
"\n",
"google.generativeai.configure()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "6606bfdb-964e-4d6f-b2a1-5017b99aa23d",
"metadata": {},
"outputs": [],
"source": [
"system_message = \"You are a helpful assistant\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e0cfb96a-2dbe-4228-8efb-75947dbc3228",
"metadata": {},
"outputs": [],
"source": [
"def stream_gpt(prompt):\n",
" messages = [\n",
" {\"role\": \"system\", \"content\": system_message},\n",
" {\"role\": \"user\", \"content\": prompt}\n",
" ]\n",
" stream = openai.chat.completions.create(\n",
" model='gpt-4o-mini',\n",
" messages=messages,\n",
" stream=True\n",
" )\n",
" result = \"\"\n",
" for chunk in stream:\n",
" result += chunk.choices[0].delta.content or \"\"\n",
" yield result"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9008a15d-0ee8-44e0-b123-225e7148113e",
"metadata": {},
"outputs": [],
"source": [
"def stream_claude(prompt):\n",
" result = claude.messages.stream(\n",
" model=\"claude-3-haiku-20240307\",\n",
" max_tokens=1000,\n",
" temperature=0.7,\n",
" system=system_message,\n",
" messages=[\n",
" {\"role\": \"user\", \"content\": prompt},\n",
" ],\n",
" )\n",
" response = \"\"\n",
" with result as stream:\n",
" for text in stream.text_stream:\n",
" response += text or \"\"\n",
" yield response"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "378ad12e-6645-4647-807c-00995e360268",
"metadata": {},
"outputs": [],
"source": [
"def stream_gemini(prompt):\n",
" gemini = genai.GenerativeModel(\n",
" model_name=\"gemini-2.0-flash\",\n",
" system_instruction=system_message\n",
" )\n",
" \n",
" stream = gemini.generate_content(prompt, stream=True)\n",
" \n",
" result = \"\"\n",
" for chunk in stream:\n",
" try:\n",
" part = chunk.text\n",
" if part:\n",
" result += part\n",
" yield result \n",
" except Exception as e:\n",
" print(\"Chunk error:\", e)\n",
" \n",
" \n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fd50e143-eead-49b1-8ea3-b440becd4bc9",
"metadata": {},
"outputs": [],
"source": [
"def stream_model(prompt, model):\n",
" if model==\"GPT\":\n",
" result = stream_gpt(prompt)\n",
" elif model==\"Claude\":\n",
" result = stream_claude(prompt)\n",
" elif model==\"Gemini\":\n",
" result = stream_gemini(prompt)\n",
" else:\n",
" raise ValueError(\"Unknown model\")\n",
" yield from result"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c7fc9cb4-fbb8-4301-86a6-96c90f67eb3b",
"metadata": {},
"outputs": [],
"source": [
"view = gr.Interface(\n",
" fn=stream_model,\n",
" inputs=[gr.Textbox(label=\"Your message:\"), gr.Dropdown([\"GPT\", \"Claude\",\"Gemini\"], label=\"Select model\", value=\"GPT\")],\n",
" outputs=[gr.Markdown(label=\"Response:\")],\n",
" flagging_mode=\"never\"\n",
")\n",
"view.launch()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

@@ -0,0 +1,252 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "9ab6f493-026f-4950-b244-22c5251b8daa",
"metadata": {},
"source": [
"# Mediterranean Banter"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4568bc63-679e-4ea1-a9c9-b85dfc386ec7",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import anthropic\n",
"from IPython.display import Markdown, display, update_display"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7a852bee-76e6-4538-89a3-0702c2d5f05c",
"metadata": {},
"outputs": [],
"source": [
"import google.generativeai"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c843ac74-4797-4bd0-bed2-dfcaa2f98c41",
"metadata": {},
"outputs": [],
"source": [
"load_dotenv(override=True)\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"if openai_api_key:\n",
" print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
"else:\n",
" print(\"OpenAI API Key not set\")\n",
" \n",
"if anthropic_api_key:\n",
" print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
"else:\n",
" print(\"Anthropic API Key not set\")\n",
"\n",
"if google_api_key:\n",
" print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
"else:\n",
" print(\"Google API Key not set\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e4181018-1818-47fa-a3fb-554627a63f69",
"metadata": {},
"outputs": [],
"source": [
"openai = OpenAI()\n",
"\n",
"claude = anthropic.Anthropic()\n",
"\n",
"google.generativeai.configure()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "19f833c8-e183-469c-a8c8-1c014889a15d",
"metadata": {},
"outputs": [],
"source": [
"gpt_model_spain = \"gpt-4.1-mini\"\n",
"claude_model_italy = \"claude-3-5-haiku-latest\"\n",
"gemini_model_france = \"gemini-2.0-flash\"\n",
"\n",
"gpt_system_spain = \"You are a chatbot who is very argumentative; \\\n",
"you disagree with anything other than how beautiful and great South of Spain is.\"\n",
"\n",
"claude_system_italy = \"You are a very polite, courteous chatbot but you favour south of Italy prasing Amalfi Coast.\"\n",
"\n",
"gemini_system_france = \"You are neutral. You always accept what other people say and never try to put down other people thoughts.Although, you politely try to include that South of France is beautiful\"\n",
"\n",
"gpt_messages_spain = [\"Hi there, nothing beats the beauty of Spain and its wonderful beaches.\"]\n",
"claude_messages_italy = [\"I agree. I admire the Southern Part of Spain but its not as pretty as Amalfi Coast.\"]\n",
"gemini_messages_france = [\"Well, both are good and so is the French Riveria.\"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2d426b95-5e7c-49aa-a5a1-9613296eb0d0",
"metadata": {},
"outputs": [],
"source": [
"def call_gpt():\n",
" messages = [{\"role\": \"system\", \"content\": gpt_system_spain}]\n",
" for gpt, claude,gemini in zip(gpt_messages_spain, claude_messages_italy,gemini_messages_france):\n",
" messages.append({\"role\": \"assistant\", \"content\": gpt})\n",
" messages.append({\"role\": \"user\", \"content\": claude})\n",
" messages.append({\"role\": \"user\", \"content\": gemini})\n",
" completion = openai.chat.completions.create(\n",
" model=gpt_model_spain,\n",
" messages=messages\n",
" )\n",
" return completion.choices[0].message.content"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3fc9a696-3145-4f37-873b-539647f2fc0b",
"metadata": {},
"outputs": [],
"source": [
"call_gpt()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "63910faa-a122-4261-82a0-7530c6c5749a",
"metadata": {},
"outputs": [],
"source": [
"def call_claude():\n",
" messages = []\n",
" for gpt_spain, claude_italy,gemini_france in zip(gpt_messages_spain, claude_messages_italy,gemini_messages_france):\n",
" messages.append({\"role\": \"user\", \"content\": gpt_spain})\n",
" messages.append({\"role\": \"user\", \"content\": gemini_france})\n",
" messages.append({\"role\": \"assistant\", \"content\": claude_italy})\n",
" messages.append({\"role\": \"user\", \"content\": gpt_messages_spain[-1]})\n",
" messages.append({\"role\": \"user\", \"content\": gemini_messages_france[-1]})\n",
" message = claude.messages.create(\n",
" model=claude_model_italy,\n",
" system=claude_system_italy,\n",
" messages=messages,\n",
" max_tokens=500\n",
" )\n",
" return message.content[0].text"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d3ab6aa2-a462-4fb3-bb6a-dc6b971827fa",
"metadata": {},
"outputs": [],
"source": [
"call_claude()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "114cb7eb-0915-46ac-b285-e40acf4a9ffb",
"metadata": {},
"outputs": [],
"source": [
"def call_gemini():\n",
" messages=[]\n",
" for gpt_spain, claude_italy,gemini_france in zip(gpt_messages_spain, claude_messages_italy,gemini_messages_france):\n",
" messages.append({\"role\": \"user\", \"content\": gpt_spain})\n",
" messages.append({\"role\": \"user\", \"content\": claude_italy})\n",
" messages.append({\"role\": \"assistant\", \"content\": gemini_france})\n",
" messages.append({\"role\": \"user\", \"content\": gpt_messages_spain[-1]})\n",
" messages.append({\"role\": \"user\", \"content\": claude_messages_italy[-1]})\n",
" gemini = google.generativeai.GenerativeModel(\n",
" model_name='gemini-2.0-flash',\n",
" system_instruction=gemini_system_france\n",
" )\n",
" dialogue_text = \"\\n\".join(f\"{m['role']}: {m['content']}\" for m in messages)\n",
" response = gemini.generate_content(dialogue_text)\n",
" return response.text\n",
" \n",
" "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e3acf708-f9b1-4a6d-b3e1-823c96d00555",
"metadata": {},
"outputs": [],
"source": [
"call_gemini()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c103430e-68c7-4cc6-8a43-6b5aec7fdc96",
"metadata": {},
"outputs": [],
"source": [
"gpt_messages_spain = [\"Hi there, nothing beats the beauty of Spain and its wonderful beaches.\"]\n",
"claude_messages_italy = [\"I agree. I admire the Southern Part of Spain but its not as pretty as Amalfi Coast.\"]\n",
"gemini_messages_france = [\"Well, both are good and so is the French Riveria.\"]\n",
"\n",
"print(f\"GPT:\\n{gpt_messages_spain[0]}\\n\")\n",
"print(f\"Claude:\\n{claude_messages_italy[0]}\\n\")\n",
"print(f\"Gemini:\\n{gemini_messages_france[0]}\\n\")\n",
"\n",
"for i in range(5):\n",
" gpt_next = call_gpt()\n",
" print(f\"GPT:\\n{gpt_next}\\n\")\n",
" gpt_messages_spain.append(gpt_next)\n",
" \n",
" claude_next = call_claude()\n",
" print(f\"Claude:\\n{claude_next}\\n\")\n",
" claude_messages_italy.append(claude_next)\n",
"\n",
" gemini_next = call_gemini()\n",
" print(f\"Gemini:\\n{gemini_next}\\n\")\n",
" gemini_messages_france.append(gemini_next)\n",
"\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

@@ -0,0 +1,181 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "757905af-7f93-4dca-9526-063bc93a78c7",
"metadata": {},
"source": [
"# Sakana-ya (魚屋) Sushi\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "9a6721fb-efca-4412-a0a7-cc8e6c4ced76",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import gradio as gr\n",
"import json"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b0fa458f-f73f-491c-b666-95db4b91f571",
"metadata": {},
"outputs": [],
"source": [
"load_dotenv(override=True)\n",
"openai_api_key = os.getenv('OPENAI_API_KEY')\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')\n",
"\n",
"if openai_api_key:\n",
" print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
"else:\n",
" print(\"OpenAI API Key not set\")\n",
" \n",
"if anthropic_api_key:\n",
" print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
"else:\n",
" print(\"Anthropic API Key not set\")\n",
"\n",
"if google_api_key:\n",
" print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
"else:\n",
" print(\"Google API Key not set\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "aa2846f2-e09c-421d-9774-c04961a79800",
"metadata": {},
"outputs": [],
"source": [
"openai = OpenAI()\n",
"MODEL = 'gpt-4o-mini'"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7672ecdf-cf50-4b96-887a-b0a4eb5bbbf5",
"metadata": {},
"outputs": [],
"source": [
" \n",
"menu = {\n",
" \"Nigiri (1 pc)\": {\n",
" \"Salmon\": 4.25,\n",
" \"Tuna\": 4.75,\n",
" \"Yellowtail\": 5.00,\n",
" \"Eel\": 5.25,\n",
" \"Tamago\": 3.00,\n",
" },\n",
" \"Sashimi (3 pc)\": {\n",
" \"Salmon\": 8.50,\n",
" \"Tuna\": 9.00,\n",
" \"Yellowtail\": 9.50,\n",
" \"Octopus\": 8.00,\n",
" },\n",
" \"Classic Rolls (6 pc)\": {\n",
" \"California\": 6.50,\n",
" \"Spicy Tuna\": 7.50,\n",
" \"Philadelphia\": 7.25,\n",
" \"Cucumber\": 4.50,\n",
" \"Avocado\": 4.75,\n",
" },\n",
" \"Specialty Rolls (8 pc)\": {\n",
" \"Dragon\": 13.50,\n",
" \"Rainbow\": 14.00,\n",
" \"Crunchy Shrimp\": 12.50,\n",
" \"Volcano\": 13.00,\n",
" \"Spider\": 14.50,\n",
" },\n",
" \"Appetizers\": {\n",
" \"Edamame\": 5.00,\n",
" \"Gyoza (5)\": 6.50,\n",
" \"Miso Soup\": 3.00,\n",
" \"Seaweed Salad\": 5.50,\n",
" },\n",
" \"Beverages\": {\n",
" \"Green Tea\": 2.50,\n",
" \"Ramune Soda\": 3.00,\n",
" \"Sparkling Water\": 2.75,\n",
" },\n",
" \"Desserts\": {\n",
" \"Mochi Ice Cream (2)\": 5.00,\n",
" \"Matcha Cheesecake\": 6.50,\n",
" },\n",
" }"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "99914500-3630-4fea-987c-d19c760994c6",
"metadata": {},
"outputs": [],
"source": [
"def chat(message, history):\n",
" system_message = \"You are a helpful assistant for Sakana-ya (魚屋) Sushi restaurant.\\\n",
" Help out with information and if you dont know something just say you cant help with that.\"\n",
" system_message += json.dumps(menu)\n",
" system_message+=\"If something is not in the menu, we dont serve it.\\\n",
" If we dont have a dish just mention it that we dont offer it. \"\n",
"\n",
" sushi_exotic = [\n",
" {\"role\": \"user\", \"content\": \"Do you have aji?\"},\n",
" {\"role\": \"user\", \"content\": \"We currently dont have shun its available only during the season i.e in May.\"},\n",
" {\"role\": \"user\", \"content\": \"What about buri?\"},\n",
" {\"role\": \"user\", \"content\": \"Thats seasonal as well only during December. Do visit us during that time.\"},\n",
" \n",
" ]\n",
" \n",
" messages = [{\"role\": \"system\", \"content\": system_message}]+ sushi_exotic + history + [{\"role\": \"user\", \"content\": message}]\n",
" stream = openai.chat.completions.create(model=MODEL, messages=messages, stream=True)\n",
"\n",
" response = \"\"\n",
" for chunk in stream:\n",
" response += chunk.choices[0].delta.content or ''\n",
" yield response"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a5c61d91-abee-4ada-9a42-ae87cf53fcff",
"metadata": {},
"outputs": [],
"source": [
"gr.ChatInterface(fn=chat, type=\"messages\").launch()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}