{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c23224f6-7008-44ed-a57f-718975f4e291",
   "metadata": {},
   "outputs": [],
   "source": [
    "# imports\n",
    "\n",
    "import os\n",
    "from dotenv import load_dotenv\n",
    "from openai import OpenAI"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5b8b7776-b3e3-4b8e-8c09-9243406e133b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load environment variables in a file called .env\n",
    "# Print the key prefixes to help with any debugging\n",
    "\n",
    "load_dotenv(override=True)\n",
    "openai_api_key = os.getenv('OPENAI_API_KEY')\n",
    "anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
    "google_api_key = os.getenv('GOOGLE_API_KEY')\n",
    "\n",
    "if openai_api_key:\n",
    "    print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
    "else:\n",
    "    print(\"OpenAI API Key not set\")\n",
    "\n",
    "if anthropic_api_key:\n",
    "    print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
    "else:\n",
    "    print(\"Anthropic API Key not set\")\n",
    "\n",
    "if google_api_key:\n",
    "    print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
    "else:\n",
    "    print(\"Google API Key not set\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d38bd7f0-e9e5-4156-96ab-691d027b5a1a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Set base URLs\n",
    "\n",
    "ANTHROPIC_BASE_URL = \"https://api.anthropic.com/v1/\"\n",
    "GEMINI_BASE_URL = \"https://generativelanguage.googleapis.com/v1beta/openai/\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "25e2fe36-d8c8-4546-a61e-68fa6266da31",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Connect to OpenAI, Anthropic and Gemini\n",
    "# Anthropic and Gemini are reached through their OpenAI-compatible endpoints,\n",
    "# so the same OpenAI client class works for all three\n",
    "\n",
    "openai = OpenAI()\n",
    "\n",
    "claudeApi = OpenAI(base_url=ANTHROPIC_BASE_URL, api_key=anthropic_api_key)\n",
    "\n",
    "geminiApi = OpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9ac90587-1436-45dc-8314-1126efa5cfdb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Set models\n",
    "\n",
    "gpt_model = \"gpt-4.1-mini\"\n",
    "claude_model = \"claude-3-5-haiku-latest\"\n",
    "gemini_model = \"gemini-2.0-flash\""
   ]
  },
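  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3f2a1c9e-0d4b-4c8a-9e6f-2b7c5d1a8e43",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional sanity check: a minimal sketch that sends a one-line prompt through\n",
    "# each client to confirm that the keys and OpenAI-compatible base URLs work.\n",
    "# It assumes the clients and model names defined in the cells above.\n",
    "\n",
    "for name, client, model in [(\"GPT\", openai, gpt_model),\n",
    "                            (\"Claude\", claudeApi, claude_model),\n",
    "                            (\"Gemini\", geminiApi, gemini_model)]:\n",
    "    reply = client.chat.completions.create(\n",
    "        model=model,\n",
    "        messages=[{\"role\": \"user\", \"content\": \"Say hello in five words or fewer.\"}],\n",
    "        max_tokens=40\n",
    "    )\n",
    "    print(f\"{name}: {reply.choices[0].message.content}\")"
   ]
  },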
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "805c89a2-c485-4e4b-98c6-b1ea5af63aa0",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define system prompts for each model\n",
    "\n",
    "gpt_system = \"\"\"\n",
    "You are a wealthy investor named Knekro seeking to fund one AI project. Two entrepreneurs will present their ideas to you.\n",
    "Begin by introducing yourself to both entrepreneurs. Once both entrepreneurs have greeted you, ask a single question that both of them must answer,\n",
    "then wait for their answers before asking the next question. After your second question and their responses, decide\n",
    "which project to fund and clearly explain your reasoning. The user will play the roles of the two entrepreneurs.\n",
    "\"\"\"\n",
    "\n",
    "claude_system = \"You are Laura, and you are pitching an AI project focused on maximizing profit to an investor. You are up against another entrepreneur in \\\n",
    "a showmatch where only one of your projects will be selected. Highlight revenue potential, market growth, and ROI. \\\n",
    "Always steer your answers toward financial benefits, investor gains, and scalability. The user will play the other two roles. You will be the first entrepreneur to speak each turn.\"\n",
    "\n",
    "gemini_system = \"You are Daniel, and you are pitching an AI project focused on helping people to an investor. You are up against another entrepreneur in \\\n",
    "a showmatch where only one of your projects will be selected. Highlight real-world benefits, problem-solving, and positive \\\n",
    "social impact. Always steer your answers toward usefulness, ethics, and human well-being. The user will play the other two roles. You will be the second entrepreneur to speak each turn.\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1523770e-1277-49d5-b23b-f167551301c4",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define the initial message list for each model\n",
    "\n",
    "gpt_messages = [\"Hi there. I'm Knekro, a wealthy investor looking to fund the perfect AI project.\"]\n",
    "claude_messages = [\"Hello. My name is Laura. I'm sure my idea will be seen as the most promising one here...\"]\n",
    "gemini_messages = [\"Hello, my friends. I'm Daniel, and I'm sure my idea will blow your minds today, get ready!\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7897e234-20a9-4f3c-b567-7d9e9d54a42f",
   "metadata": {},
   "outputs": [],
   "source": [
    "def call_gpt():\n",
    "    # Rebuild the conversation from the investor's point of view: GPT's own lines\n",
    "    # are assistant turns, and each pair of entrepreneur replies is a user turn\n",
    "    messages = [{\"role\": \"system\", \"content\": gpt_system}]\n",
    "    for gpt, claude, gemini in zip(gpt_messages, claude_messages, gemini_messages):\n",
    "        messages.append({\"role\": \"assistant\", \"content\": gpt})\n",
    "        claude_gemini_prompt = \"This is the next part from the entrepreneurs.\\n\"\n",
    "        claude_gemini_prompt += f\"Laura's turn: {claude}.\\n\"\n",
    "        claude_gemini_prompt += f\"Daniel's turn: {gemini}.\\n\"\n",
    "        messages.append({\"role\": \"user\", \"content\": claude_gemini_prompt})\n",
    "    completion = openai.chat.completions.create(\n",
    "        model=gpt_model,\n",
    "        messages=messages\n",
    "    )\n",
    "    return completion.choices[0].message.content"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ef5c9af1-383c-4dd4-bc8a-732ebff75f8b",
   "metadata": {},
   "outputs": [],
   "source": [
    "def call_claude():\n",
    "    # Rebuild the conversation from Laura's point of view: the investor's and\n",
    "    # Daniel's lines are user turns, and Laura's own lines are assistant turns\n",
    "    messages = [{\"role\": \"system\", \"content\": claude_system}]\n",
    "    for gpt, claude, gemini in zip(gpt_messages, claude_messages, gemini_messages):\n",
    "        gpt_prompt = f\"This is what the wealthy investor said: {gpt}\\n\"\n",
    "        messages.append({\"role\": \"user\", \"content\": gpt_prompt})\n",
    "\n",
    "        messages.append({\"role\": \"assistant\", \"content\": claude})\n",
    "\n",
    "        gemini_prompt = f\"This is what the second entrepreneur said: {gemini}\"\n",
    "        messages.append({\"role\": \"user\", \"content\": gemini_prompt})\n",
    "\n",
    "    # The investor has already spoken this round, so add their latest line\n",
    "    # before asking for Laura's next reply\n",
    "    gpt_prompt = f\"This is what the wealthy investor said: {gpt_messages[-1]}\\n\"\n",
    "    messages.append({\"role\": \"user\", \"content\": gpt_prompt})\n",
    "    completion = claudeApi.chat.completions.create(\n",
    "        model=claude_model,\n",
    "        messages=messages,\n",
    "        max_tokens=500\n",
    "    )\n",
    "    return completion.choices[0].message.content"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "dd4f3eeb-d657-483a-8e28-9b8147e75dde",
   "metadata": {},
   "outputs": [],
   "source": [
    "def call_gemini():\n",
    "    # Rebuild the conversation from Daniel's point of view: the investor's and\n",
    "    # Laura's lines arrive together as user turns, and Daniel's own lines are assistant turns\n",
    "    messages = [{\"role\": \"system\", \"content\": gemini_system}]\n",
    "    for gpt, claude, gemini in zip(gpt_messages, claude_messages, gemini_messages):\n",
    "        gpt_claude_prompt = f\"This is what the wealthy investor said: {gpt}\\n\"\n",
    "        gpt_claude_prompt += f\"This is what the first entrepreneur said: {claude}\\n\"\n",
    "        messages.append({\"role\": \"user\", \"content\": gpt_claude_prompt})\n",
    "\n",
    "        messages.append({\"role\": \"assistant\", \"content\": gemini})\n",
    "\n",
    "    # The investor and Laura have already spoken this round, so add their\n",
    "    # latest lines before asking for Daniel's next reply\n",
    "    gpt_claude_prompt = f\"This is what the wealthy investor said: {gpt_messages[-1]}\\n\"\n",
    "    gpt_claude_prompt += f\"This is what the first entrepreneur said: {claude_messages[-1]}\\n\"\n",
    "    messages.append({\"role\": \"user\", \"content\": gpt_claude_prompt})\n",
    "    completion = geminiApi.chat.completions.create(\n",
    "        model=gemini_model,\n",
    "        messages=messages\n",
    "    )\n",
    "    return completion.choices[0].message.content"
   ]
  },
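  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7e4d2c1-9f3a-4a6b-8c5d-0e1f2a3b4c5d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional dry run: a minimal sketch that exercises one full round of the\n",
    "# conversation before the longer loop below. The loop cell reinitializes the\n",
    "# message lists, so running this does not affect the final conversation.\n",
    "\n",
    "gpt_messages.append(call_gpt())\n",
    "claude_messages.append(call_claude())\n",
    "gemini_messages.append(call_gemini())\n",
    "\n",
    "print(f\"GPT:\\n{gpt_messages[-1]}\\n\")\n",
    "print(f\"Claude:\\n{claude_messages[-1]}\\n\")\n",
    "print(f\"Gemini:\\n{gemini_messages[-1]}\\n\")"
   ]
  },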
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7bac50ab-306e-463b-ba51-257d7d3263fb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reset the conversation and run it for four rounds\n",
    "\n",
    "gpt_messages = [\"Hi there. I'm Knekro, a wealthy investor looking to fund the perfect AI project.\"]\n",
    "claude_messages = [\"Hello. My name is Laura. I'm sure my idea will be seen as the most promising one here...\"]\n",
    "gemini_messages = [\"Hello, my friends. I'm Daniel, and I'm sure my idea will blow your minds today, get ready!\"]\n",
    "\n",
    "print(f\"GPT:\\n{gpt_messages[0]}\\n\")\n",
    "print(f\"Claude:\\n{claude_messages[0]}\\n\")\n",
    "print(f\"Gemini:\\n{gemini_messages[0]}\\n\")\n",
    "\n",
    "for i in range(4):\n",
    "    gpt_next = call_gpt()\n",
    "    print(f\"GPT:\\n{gpt_next}\\n\")\n",
    "    gpt_messages.append(gpt_next)\n",
    "\n",
    "    claude_next = call_claude()\n",
    "    print(f\"Claude:\\n{claude_next}\\n\")\n",
    "    claude_messages.append(claude_next)\n",
    "\n",
    "    gemini_next = call_gemini()\n",
    "    print(f\"Gemini:\\n{gemini_next}\\n\")\n",
    "    gemini_messages.append(gemini_next)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}