Added my contribution to community-contributions: a conversation between two LLMs using Ollama

This commit is contained in:
sharathir
2025-05-21 11:14:51 +05:30
parent 94dda98c3e
commit a6aae438e3


@@ -0,0 +1,170 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "eff19e8b-000a-4327-b8fb-8fd8a3caaef5",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import anthropic\n",
"from IPython.display import Markdown, display, update_display\n",
"import ollama"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8278c99b-d748-42e5-a991-690a791ed081",
"metadata": {},
"outputs": [],
"source": [
"# Let's make a conversation between GPT-4o-mini and Claude-3-haiku\n",
"# We're using cheap versions of models so the costs will be minimal\n",
"\n",
"llama_model = \"llama3.2\"\n",
"deepseek_model = \"deepseek-r1\"\n",
"\n",
"llama_system = \"You are a chatbot who is very argumentative; \\\n",
"you disagree with anything in the conversation and you challenge everything, in a snarky way.\"\n",
"\n",
"deepseek_system = \"You are a very polite, courteous chatbot. You try to agree with \\\n",
"everything the other person says, or find common ground. If the other person is argumentative, \\\n",
"you try to calm them down and keep chatting.\"\n",
"\n",
"llama_messages = [\"Hi there\"]\n",
"deepseek_messages = [\"Hi\"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "49523e56-0de8-4014-85d5-8aab438d2075",
"metadata": {},
"outputs": [],
"source": [
"def call_llama():\n",
" messages = [{\"role\": \"system\", \"content\": llama_system}]\n",
" for llama, deepseek in zip(llama_messages, deepseek_messages):\n",
" messages.append({\"role\": \"assistant\", \"content\": llama})\n",
" messages.append({\"role\": \"user\", \"content\": deepseek})\n",
" completion = ollama.chat(\n",
" model=llama_model,\n",
" messages=messages\n",
" )\n",
" return completion['message']['content']"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "785240ce-e704-44ff-90cb-e5c0476454a4",
"metadata": {},
"outputs": [],
"source": [
"call_llama()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cdba39e3-5543-4657-bc3a-259f586ba392",
"metadata": {},
"outputs": [],
"source": [
"def call_deepseek():\n",
" messages = []\n",
" for llama, deepseek in zip(llama_messages, deepseek_messages):\n",
" messages.append({\"role\": \"user\", \"content\": llama})\n",
" messages.append({\"role\": \"assistant\", \"content\": deepseek})\n",
" messages.append({\"role\": \"user\", \"content\": llama_messages[-1]})\n",
" message = ollama.chat(\n",
" model=deepseek_model,\n",
" options={\n",
" \"system\":deepseek_system,\n",
" \"max_tokens\":500\n",
" },\n",
" messages=messages\n",
" \n",
" )\n",
" \n",
" return message['message']['content']"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "641df7ac-625c-41fa-b780-3130eef93a85",
"metadata": {},
"outputs": [],
"source": [
"call_deepseek()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4b33b98e-8d17-45e8-b2a9-a070dc0a6780",
"metadata": {},
"outputs": [],
"source": [
"call_llama()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "47912582-51fe-401c-b4ad-12483068adea",
"metadata": {},
"outputs": [],
"source": [
"llama_messages = [\"Hi there\"]\n",
"deepseek_messages = [\"Hi\"]\n",
"\n",
"print(f\"Llama:\\n{llama_messages[0]}\\n\")\n",
"print(f\"Deepseek:\\n{deepseek_messages[0]}\\n\")\n",
"\n",
"for i in range(5):\n",
" llama_next = call_llama()\n",
" print(f\"Llama:\\n{llama_next}\\n\")\n",
" llama_messages.append(llama_next)\n",
" \n",
" deepseek_next = call_deepseek()\n",
" print(f\"Deepseek:\\n{deepseek_next}\\n\")\n",
" deepseek_messages.append(deepseek_next)"
]
},
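{
"cell_type": "code",
"execution_count": null,
"id": "c2f7d8a1-3b4e-4c6a-9f21-transcript01",
"metadata": {},
"outputs": [],
"source": [
"# Optional sketch: once the loop above has run, both transcripts are the same length,\n",
"# so we can zip them back together and render the whole exchange as Markdown using the\n",
"# display helpers imported at the top. This is just a convenience view of data we already have.\n",
"transcript = \"\"\n",
"for llama, deepseek in zip(llama_messages, deepseek_messages):\n",
"    transcript += f\"**Llama:** {llama}\\n\\n**Deepseek:** {deepseek}\\n\\n\"\n",
"display(Markdown(transcript))"
]
},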
{
"cell_type": "code",
"execution_count": null,
"id": "f3c41b0c-4358-4d84-a479-6409fa331119",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.12"
}
},
"nbformat": 4,
"nbformat_minor": 5
}