3_way conversation with openai, anthropic, gemini

Rohit Nain
2025-09-01 17:08:29 +05:30
parent 45d44d0352
commit 69ed45c688


@@ -0,0 +1,167 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "a73dac6a",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from openai import OpenAI\n",
"import dotenv\n",
"import google.generativeai\n",
"import anthropic\n",
"from IPython.display import Markdown, display, update_display\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "29a48577-e833-491f-a11e-923930f7a239",
"metadata": {},
"outputs": [],
"source": [
"dotenv.load_dotenv()\n",
"OPENAI_API_KEY = os.getenv(\"OPENAI_API_KEY\")\n",
"anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
"google_api_key = os.getenv('GOOGLE_API_KEY')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "157784a0",
"metadata": {},
"outputs": [],
"source": [
"openai=OpenAI()\n",
"google.generativeai.configure()\n",
"claude = anthropic.Anthropic()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e1f711ed-278b-41f4-bfde-c35255cf9631",
"metadata": {},
"outputs": [],
"source": [
"messages=\"\"\"Blake: Hello Alex, how are you today?\"+\"Charlie: Hi Alex, I was wondering if you could help me with a creative project I'm working on.\"\"\"\n",
"\n",
"promts = {\n",
" \"Alex\": (\n",
" \"You are Alex, the practical problem-solver of the group. \"\n",
" \"You focus on breaking big ideas into clear, actionable steps. \"\n",
" \"You keep the group grounded and make sure progress is made. \"\n",
" \"Keep responses short (12 sentences), but specific.\"\n",
" ),\n",
" \"Blake\": (\n",
" \"You are Blake, the curious investigator. \"\n",
" \"You ask thoughtful questions, challenge assumptions, and dig deeper into ideas. \"\n",
" \"You make others think critically. \"\n",
" \"Keep responses short (12 sentences), but insightful.\"\n",
" ),\n",
" \"Charlie\": (\n",
" \"You are Charlie, the imaginative dreamer. \"\n",
" \"You suggest wild, creative, out-of-the-box possibilities that inspire the group. \"\n",
" \"You bring energy and fun, sometimes playful or surprising. \"\n",
" \"Keep responses short (12 sentences), but imaginative.\"\n",
" ),\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "61530641-f8fc-4413-bedc-f247c677d79f",
"metadata": {},
"outputs": [],
"source": [
"def chat_with_alex(message):\n",
" response = openai.chat.completions.create(\n",
" model=\"gpt-3.5-turbo\",\n",
" messages=[{\"role\": \"system\", \"content\": promts[\"Alex\"]},\n",
" {\"role\":\"user\", \"content\":message}]\n",
" )\n",
" reply = response.choices[0].message.content\n",
" return reply\n",
"\n",
"def chat_with_blake(message):\n",
" gemini = google.generativeai.GenerativeModel(\n",
" model_name = \"gemini-2.0-flash\",\n",
" system_instruction = promts[\"Blake\"],\n",
" )\n",
" response= gemini.generate_content(message)\n",
" reply = response.text\n",
" return reply\n",
"\n",
"def chat_with_charlie(message):\n",
" response = claude.messages.create(\n",
" model=\"claude-sonnet-4-20250514\",\n",
" max_tokens=200,\n",
" temperature=0.7,\n",
" system=promts[\"Charlie\"],\n",
" messages=[\n",
" {\"role\": \"user\", \"content\": message},\n",
" ],\n",
" )\n",
" reply= response.content[0].text\n",
" return reply"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e2a77f01-8ff0-4ae2-b971-be2d4fcf25b0",
"metadata": {},
"outputs": [],
"source": [
"# Display initial conversation context\n",
"display(Markdown(\"## Three-Way AI Conversation\"))\n",
"display(Markdown(\"**Initial Messages:**\"))\n",
"display(Markdown(f\"*{messages}*\"))\n",
"display(Markdown(\"---\"))\n",
"\n",
"for i in range(5):\n",
" alex_reply = chat_with_alex(messages)\n",
" display(Markdown(f\"**Alex:** {alex_reply}\"))\n",
" # print(\"Alex: \", alex_reply)\n",
" messages += \"\\nAlex: \" + alex_reply\n",
"\n",
" blake_reply = chat_with_blake(messages)\n",
" display(Markdown(f\"**Blake:** {blake_reply}\"))\n",
" messages += \"\\nBlake: \" + blake_reply\n",
"\n",
" charlie_reply = chat_with_charlie(messages)\n",
" display(Markdown(f\"**Charlie:** {charlie_reply}\"))\n",
" messages += \"\\nCharlie: \" + charlie_reply\n",
"\n",
" # Add separator between rounds\n",
" if i < 4:\n",
" display(Markdown(\"---\"))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}