{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "bead9c8f-5f47-4755-9858-f5c3d8bdced3",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"4"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"2+2"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "638fc220-1cf5-49d8-a3c6-d425c759cd05",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"pulling manifest\n",
"pulling dde5aa3fc5ff: 100% ▕████████████████████▏ 2.0 GB\n",
"pulling 966de95ca8a6: 100% ▕████████████████████▏ 1.4 KB\n",
"pulling fcc5a6bec9da: 100% ▕████████████████████▏ 7.7 KB\n",
"pulling a70ff7e570d9: 100% ▕████████████████████▏ 6.0 KB\n",
"pulling 56bb8bd477a5: 100% ▕████████████████████▏   96 B\n",
"pulling 34bb5ab01051: 100% ▕████████████████████▏  561 B\n",
"verifying sha256 digest\n",
"writing manifest\n",
"success\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"ChatCompletion(id='chatcmpl-238', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='', refusal=None, role='assistant', annotations=None, audio=None, function_call=None, tool_calls=None))], created=1758556881, model='llama3.2', object='chat.completion', service_tier=None, system_fingerprint='fp_ollama', usage=CompletionUsage(completion_tokens=1, prompt_tokens=36, total_tokens=37, completion_tokens_details=None, prompt_tokens_details=None))\n"
]
}
],
"source": [
"# Download the model once per machine (requires Ollama installed and running locally)\n",
"!ollama pull llama3.2\n",
"\n",
"from openai import OpenAI\n",
"\n",
"MODEL = \"llama3.2\"\n",
"# Point the OpenAI client at Ollama's local OpenAI-compatible endpoint;\n",
"# the api_key is required by the client but ignored by Ollama.\n",
"openai = OpenAI(base_url=\"http://localhost:11434/v1\", api_key=\"ollama\")\n",
"\n",
"response = openai.chat.completions.create(\n",
"    model=MODEL,\n",
"    messages=[\n",
"        {\"role\": \"system\", \"content\": \"You are a weird assistant\"},\n",
"        {\"role\": \"user\", \"content\": \"What is 2 + 2?\"}\n",
"    ]\n",
")\n",
"\n",
"# The reply text lives in response.choices[0].message.content;\n",
"# print the full object here to inspect its structure.\n",
"print(response)"
]
},
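{
"cell_type": "markdown",
"id": "ollama-list-sketch-md",
"metadata": {},
"source": [
"Optional sketch (not part of the original notebook): confirm the pull succeeded by listing the models Ollama has available locally; `llama3.2` should appear in the output."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ollama-list-sketch-code",
"metadata": {},
"outputs": [],
"source": [
"# List locally available Ollama models; llama3.2 should be present after the pull above\n",
"!ollama list"
]
},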
{
"cell_type": "code",
"execution_count": null,
"id": "1fdff8c6-6a30-4cfa-aa59-385737af9536",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"*whispers* The answer, much like my existence, is hidden in the shadows. But if I must reveal it to you... *clears throat* It's... 4.\n"
]
}
],
"source": [
"# Same request as above, but this time print only the assistant's reply text\n",
"response = openai.chat.completions.create(\n",
"    model=MODEL,\n",
"    messages=[\n",
"        {\"role\": \"system\", \"content\": \"You are a weird assistant\"},\n",
"        {\"role\": \"user\", \"content\": \"What is 2 + 2?\"}\n",
"    ]\n",
")\n",
"\n",
"print(response.choices[0].message.content)"
]
},
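{
"cell_type": "markdown",
"id": "streaming-sketch-md",
"metadata": {},
"source": [
"Minimal sketch (not part of the original notebook): Ollama's OpenAI-compatible endpoint also supports streaming via `stream=True`, reusing the `openai` client and `MODEL` defined above."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "streaming-sketch-code",
"metadata": {},
"outputs": [],
"source": [
"# Stream the reply token-by-token instead of waiting for the full response.\n",
"# Assumes the `openai` client and MODEL from the earlier cell and a running local Ollama server.\n",
"stream = openai.chat.completions.create(\n",
"    model=MODEL,\n",
"    messages=[{\"role\": \"user\", \"content\": \"What is 2 + 2?\"}],\n",
"    stream=True\n",
")\n",
"for chunk in stream:\n",
"    # delta.content may be None on some chunks, so fall back to an empty string\n",
"    print(chunk.choices[0].delta.content or \"\", end=\"\")"
]
},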
{
"cell_type": "code",
"execution_count": null,
"id": "075f490e-2a66-42b2-afa1-84e9ccaf5b77",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "6524ce52-dfbc-453b-9871-185d5f9a9d04",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.13"
}
},
"nbformat": 4,
"nbformat_minor": 5
}