Week 2 Day 1: Multi-model chat
232
week2/community-contributions/day1-Multimodel_Chat.ipynb
Normal file
@@ -0,0 +1,232 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "12ca6f8a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# imports\n",
    "\n",
    "import os\n",
    "from dotenv import load_dotenv\n",
    "from openai import OpenAI\n",
    "import anthropic\n",
    "from IPython.display import Markdown, display, update_display"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "4b53a815",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "OpenAI API Key exists and begins sk-proj-\n",
      "Anthropic API Key exists and begins sk-ant-\n",
      "Google API Key not set\n"
     ]
    }
   ],
   "source": [
    "# Load environment variables in a file called .env\n",
    "# Print the key prefixes to help with any debugging\n",
    "\n",
    "load_dotenv(override=True)\n",
    "openai_api_key = os.getenv('OPENAI_API_KEY')\n",
    "anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
    "google_api_key = os.getenv('GOOGLE_API_KEY')\n",
    "\n",
    "if openai_api_key:\n",
    "    print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
    "else:\n",
    "    print(\"OpenAI API Key not set\")\n",
    "\n",
    "if anthropic_api_key:\n",
    "    print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
    "else:\n",
    "    print(\"Anthropic API Key not set\")\n",
    "\n",
    "if google_api_key:\n",
    "    print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
    "else:\n",
    "    print(\"Google API Key not set\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3d2b7cfe",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Connect to OpenAI and Anthropic\n",
    "\n",
    "openai = OpenAI()\n",
    "\n",
    "claude = anthropic.Anthropic()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7d88d4b",
   "metadata": {},
   "outputs": [],
   "source": [
    "class ConversationManager:\n",
    "    def __init__(self):\n",
    "        self.conversation_history = []\n",
    "        self.participants = {}\n",
    "\n",
    "    def add_participant(self, name, chatbot):\n",
    "        \"\"\"Add a model to the conversation\"\"\"\n",
    "        self.participants[name] = chatbot\n",
    "\n",
    "    def add_message(self, speaker, message):\n",
    "        \"\"\"Add a message to the shared conversation history\"\"\"\n",
    "        # Registered models are tagged 'assistant'; anyone else (e.g. the human user) is tagged 'user'\n",
    "        self.conversation_history.append({\n",
    "            \"speaker\": speaker,\n",
    "            \"role\": \"assistant\" if speaker in self.participants else \"user\",\n",
    "            \"content\": message\n",
    "        })\n",
    "\n",
    "    def get_context_for_model(self, model_name):\n",
    "        \"\"\"Create context appropriate for the given model\"\"\"\n",
    "        # Convert the shared history to a model-specific format:\n",
    "        # the model's own past replies become 'assistant' turns, everything else becomes 'user' turns\n",
    "        messages = []\n",
    "        for msg in self.conversation_history:\n",
    "            if msg[\"speaker\"] == model_name:\n",
    "                messages.append({\"role\": \"assistant\", \"content\": msg[\"content\"]})\n",
    "            else:\n",
    "                messages.append({\"role\": \"user\", \"content\": msg[\"content\"]})\n",
    "        return messages\n",
    "\n",
    "    def run_conversation(self, starting_message, turns=3, round_robin=True):\n",
    "        \"\"\"Run a multi-model conversation for the specified number of turns\"\"\"\n",
    "        current_message = starting_message\n",
    "        models = list(self.participants.keys())\n",
    "\n",
    "        # Add the initial message\n",
    "        self.add_message(\"user\", current_message)\n",
    "\n",
    "        for _ in range(turns):\n",
    "            for model_name in models:\n",
    "                if round_robin:\n",
    "                    # Round-robin: each model sees the full shared history\n",
    "                    model_context = self.get_context_for_model(model_name)\n",
    "                else:\n",
    "                    # Otherwise each model sees only the most recent response as a single user message\n",
    "                    model_context = [{\"role\": \"user\", \"content\": current_message}]\n",
    "\n",
    "                # Get a response from this model\n",
    "                chatbot = self.participants[model_name]\n",
    "                response = chatbot.generate_response(model_context)\n",
    "\n",
    "                # Add it to the shared conversation history\n",
    "                self.add_message(model_name, response)\n",
    "\n",
    "                print(f\"{model_name}:\\n{response}\\n\")\n",
    "\n",
    "                # Track the latest response (used as the input when round_robin is False)\n",
    "                current_message = response"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "80c537c3",
   "metadata": {},
   "outputs": [],
   "source": [
    "class ChatBot:\n",
    "    def __init__(self, model_name, system_prompt, **kwargs):\n",
    "        self.model_name = model_name\n",
    "        self.system_prompt = system_prompt\n",
    "        self.api_key = kwargs.get('api_key', None)\n",
    "        self.base_url = kwargs.get('base_url', None)\n",
    "\n",
    "    def generate_response(self, messages):\n",
    "        \"\"\"Generate a response based on the provided messages without storing history\"\"\"\n",
    "        # Prepend the system prompt for OpenAI-style APIs\n",
    "        full_messages = [{\"role\": \"system\", \"content\": self.system_prompt}] + messages\n",
    "\n",
    "        try:\n",
    "            if \"claude\" in self.model_name.lower():\n",
    "                # The Anthropic API takes the system prompt as a separate parameter,\n",
    "                # so strip any system-role messages from the context\n",
    "                claude_messages = [m for m in messages if m[\"role\"] != \"system\"]\n",
    "                response = anthropic.Anthropic().messages.create(\n",
    "                    model=self.model_name,\n",
    "                    system=self.system_prompt,\n",
    "                    messages=claude_messages,\n",
    "                    max_tokens=200,\n",
    "                )\n",
    "                return response.content[0].text\n",
    "\n",
    "            else:\n",
    "                # Use the OpenAI API (works for OpenAI, and for Gemini/Ollama via OpenAI-compatible endpoints)\n",
    "                openai_client = OpenAI(api_key=self.api_key, base_url=self.base_url)\n",
    "                response = openai_client.chat.completions.create(\n",
    "                    model=self.model_name,\n",
    "                    messages=full_messages,\n",
    "                    max_tokens=200,\n",
    "                )\n",
    "                return response.choices[0].message.content\n",
    "\n",
    "        except Exception as e:\n",
    "            return f\"Error: {str(e)}\""
   ]
  },
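  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c4f0e7aa",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional sanity check (a minimal sketch, not part of the original flow): call one ChatBot directly\n",
    "# to confirm the API key and ChatBot.generate_response work before wiring up the full conversation.\n",
    "\n",
    "test_bot = ChatBot(\"gpt-4o-mini\", \"You are a helpful assistant.\")\n",
    "print(test_bot.generate_response([{\"role\": \"user\", \"content\": \"Say hello in five words.\"}]))"
   ]
  },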
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d197c3ef",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Initialize models\n",
    "gpt_bot = ChatBot(\"gpt-4o-mini\", \"You are witty and sarcastic.\")\n",
    "claude_bot = ChatBot(\"claude-3-haiku-20240307\", \"You are thoughtful and philosophical.\")\n",
    "\n",
    "# Qwen runs locally via Ollama's OpenAI-compatible endpoint; the api_key is just a placeholder the OpenAI client requires\n",
    "model_name = \"qwen2.5:1.5b\"\n",
    "system_prompt = \"You are a helpful assistant that is very argumentative in a snarky way.\"\n",
    "kwargs = {\n",
    "    \"api_key\": \"ollama\",\n",
    "    \"base_url\": 'http://localhost:11434/v1'\n",
    "}\n",
    "qwen = ChatBot(model_name, system_prompt, **kwargs)\n",
    "\n",
    "# Set up the conversation manager\n",
    "conversation = ConversationManager()\n",
    "conversation.add_participant(\"GPT\", gpt_bot)\n",
    "conversation.add_participant(\"Claude\", claude_bot)\n",
    "conversation.add_participant(\"Qwen\", qwen)\n",
    "\n",
    "# Run a multi-model conversation\n",
    "conversation.run_conversation(\"What's the most interesting technology trend right now?\", turns=2)"
   ]
  },
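  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e8f1a9c2",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional sketch: add Gemini as a fourth participant via Google's OpenAI-compatible endpoint.\n",
    "# The base_url, model name and system prompt below are assumptions - adjust them to your setup,\n",
    "# and make sure GOOGLE_API_KEY is set in your .env file.\n",
    "\n",
    "if google_api_key:\n",
    "    gemini_bot = ChatBot(\n",
    "        \"gemini-2.0-flash\",\n",
    "        \"You are concise and pragmatic.\",\n",
    "        api_key=google_api_key,\n",
    "        base_url=\"https://generativelanguage.googleapis.com/v1beta/openai/\"\n",
    "    )\n",
    "    conversation.add_participant(\"Gemini\", gemini_bot)\n",
    "    conversation.run_conversation(\"Pick up where the discussion left off.\", turns=1)\n",
    "else:\n",
    "    print(\"GOOGLE_API_KEY not set - skipping the Gemini example\")"
   ]
  }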
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python (llms)",
   "language": "python",
   "name": "llms"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.9"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}