Added my contribution to community-contributions: Week 2 Day 1 telling-a-joke exercise using Ollama

This commit is contained in:
sharathir
2025-05-20 12:56:02 +05:30
parent e8f07844e5
commit 94dda98c3e


@@ -0,0 +1,148 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "d768c9b1-c5a7-417a-9fac-5fcbd6944fe6",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import anthropic\n",
"from IPython.display import Markdown, display, update_display\n",
"import ollama"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "80a4e740-75d0-4272-b02e-0b77b0a143ae",
"metadata": {},
"outputs": [],
"source": [
"system_message = \"You are an assistant that is great at telling jokes\"\n",
"user_prompt = \"Tell a light-hearted joke for an audience of Data Scientists\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f2ef28e5-5073-4065-b066-387181df063a",
"metadata": {},
"outputs": [],
"source": [
"prompts = [\n",
" {\"role\": \"system\", \"content\": system_message},\n",
" {\"role\": \"user\", \"content\": user_prompt}\n",
" ]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d54e910a-cdbf-49cb-9924-265d9845d622",
"metadata": {},
"outputs": [],
"source": [
"#direct display wihtout streaming\n",
"response = ollama.chat(\n",
" model=\"llama3.2\",\n",
" messages=prompts,\n",
" options={\n",
" \"temperature\": 0.7\n",
" }\n",
" \n",
" )\n",
"result = response['message']['content']\n",
"display(Markdown(result))\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "47dd7965-fdfc-4472-b2f6-c98f755964f1",
"metadata": {},
"outputs": [],
"source": [
"#This is with streaming \n",
"stream = ollama.chat(\n",
" model=\"llama3.2\",\n",
" messages=prompts,\n",
" stream=True\n",
" )\n",
"response = \"\"\n",
"display_handle = display(Markdown(\"\"), display_id=True)\n",
"for chunk in stream:\n",
" content = chunk.get('message', {}).get('content', '')\n",
" if content:\n",
" response += content.replace(\"```\", \"\")\n",
" update_display(Markdown(response), display_id=display_handle.display_id)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ef13e3ae-bde7-4e3a-9fcd-0a9bfd1caef0",
"metadata": {},
"outputs": [],
"source": [
"#direct display wihtout streaming, using deepseek-r1\n",
"response = ollama.chat(\n",
" model=\"deepseek-r1\",\n",
" messages=prompts\n",
" \n",
" )\n",
"result = response['message']['content']\n",
"display(Markdown(result))\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ddc4fe91-3d5b-4d45-83bf-f349597c672c",
"metadata": {},
"outputs": [],
"source": [
"#direct display wihtout streaming, using qwen3\n",
"response = ollama.chat(\n",
" model=\"qwen3\",\n",
" messages=prompts\n",
" \n",
" )\n",
"result = response['message']['content']\n",
"display(Markdown(result))\n"
]
},
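{
"cell_type": "markdown",
"id": "a3f1b2c4-5d6e-4f70-8192-0a1b2c3d4e5f",
"metadata": {},
"source": [
"Optional extra: since the `OpenAI` client is imported above, the same prompts can also be sent to the local Ollama server through its OpenAI-compatible endpoint. This is a hedged sketch, assuming Ollama is serving at `http://localhost:11434/v1`; the `api_key` value is a placeholder because Ollama does not check it."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b4c5d6e7-8f90-4a1b-92c3-d4e5f6a7b8c9",
"metadata": {},
"outputs": [],
"source": [
"# Sketch: reuse the joke prompts against Ollama's OpenAI-compatible API (assumes the default local port 11434)\n",
"ollama_via_openai = OpenAI(base_url=\"http://localhost:11434/v1\", api_key=\"ollama\")\n",
"completion = ollama_via_openai.chat.completions.create(\n",
" model=\"llama3.2\",\n",
" messages=prompts,\n",
" temperature=0.7\n",
")\n",
"display(Markdown(completion.choices[0].message.content))"
]
},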
{
"cell_type": "code",
"execution_count": null,
"id": "2beb6731-41e3-42a4-a8d3-5f0ef644f2f3",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.12"
}
},
"nbformat": 4,
"nbformat_minor": 5
}