Add Week 1 notebook to community contributions
week1/community-contributions/Cosmus/Cosmus_week1_EXERCISE.ipynb (new file, 253 lines)
@@ -0,0 +1,253 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "fe12c203-e6a6-452c-a655-afb8a03a4ff5",
"metadata": {},
"source": [
"# End of week 1 exercise\n",
"\n",
"To demonstrate your familiarity with the OpenAI API, and also Ollama, build a tool that takes a technical question, \n",
"and responds with an explanation. This is a tool that you will be able to use yourself during the course!"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "c1070317-3ed9-4659-abe3-828943230e03",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"import os\n",
"import json\n",
"import requests\n",
"from IPython.display import Markdown, display, update_display\n",
"from dotenv import load_dotenv\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "4a456906-915a-4bfd-bb9d-57e505c5093f",
"metadata": {},
"outputs": [],
"source": [
"\n",
"# --- Load environment ---\n",
"load_dotenv()\n",
"\n",
"MODEL_LLAMA = os.getenv(\"LOCAL_MODEL_NAME\", \"llama3.2\")\n",
"OLLAMA_BASE = os.getenv(\"OLLAMA_BASE_URL\", \"http://localhost:11434/v1\")"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "3f0d0137-52b0-47a8-81a8-11a90a010798",
"metadata": {},
"outputs": [],
"source": [
"question = \"\"\"\n",
"Please explain what this code does and why:\n",
"yield from {book.get(\"author\") for book in books if book.get(\"author\")}\n",
"\"\"\"\n"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "60ce7000-a4a5-4cce-a261-e75ef45063b4",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Getting explanation from llama3.2 using Ollama...\n"
]
},
{
"data": {
"text/markdown": [
"This piece of code is written in Python. It uses the `yield from` statement, which is a feature introduced in Python 3.3.\n",
"\n",
"Here's what it does:\n",
"\n",
"- It iterates over each `book` in the list `books`.\n",
"\n",
"- For each `book`, it tries to get the value associated with the key `\"author\"`. The `.get(\"author\")` method returns the value for the key `\"author\"` if it exists, and provides a default value (`None` by default) if the key does not exist.\n",
"\n",
"- It then yields this author value back through the generator that returned `this yield from`.\n",
"\n",
"In other words, when you use `yield from`, Python takes all values yielded from one inner iteration point and turns them into values yielded point-by-point on a containing iteration. The values are \"yielded\" in an order dictated to it by the innermost sequence.\n",
"\n",
"Here is how that would look like:\n",
"\n",
"```\n",
" for book, author in books:\n",
" for author_from_book in yield_from(book.get(\"author\") if book.get(\"author\") else []):\n",
" # Code here\n",
" pass\n",
" pass\n",
"``` \n",
"\n",
"Or a simplified version with `if-express` and `map` like so\n",
"\n",
"```python\n",
"import expression\n",
"\n",
"books = [\n",
" {\"id\": 1, \"title\": 'Blurred Horizons'},\n",
" {\"id\": 2, \"author\": 'Judy Blume'}, \n",
" {\"id\": 3},\n",
"]\n",
"authors= ['blum', *exp(expression,\" books get\")()]\n",
"\n",
"for author in authors:\n",
" pass\n",
"```\n",
"But again this is using something that would probably be written in a real application like so"
],
"text/plain": [
"<IPython.core.display.Markdown object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n",
"Final explanation:\n",
"\n"
]
},
{
"data": {
"text/markdown": [
"### Llama 3.2 Explanation\n",
"\n",
"This piece of code is written in Python. It uses the `yield from` statement, which is a feature introduced in Python 3.3.\n",
"\n",
"Here's what it does:\n",
"\n",
"- It iterates over each `book` in the list `books`.\n",
"\n",
"- For each `book`, it tries to get the value associated with the key `\"author\"`. The `.get(\"author\")` method returns the value for the key `\"author\"` if it exists, and provides a default value (`None` by default) if the key does not exist.\n",
"\n",
"- It then yields this author value back through the generator that returned `this yield from`.\n",
"\n",
"In other words, when you use `yield from`, Python takes all values yielded from one inner iteration point and turns them into values yielded point-by-point on a containing iteration. The values are \"yielded\" in an order dictated to it by the innermost sequence.\n",
"\n",
"Here is how that would look like:\n",
"\n",
"```\n",
" for book, author in books:\n",
" for author_from_book in yield_from(book.get(\"author\") if book.get(\"author\") else []):\n",
" # Code here\n",
" pass\n",
" pass\n",
"``` \n",
"\n",
"Or a simplified version with `if-express` and `map` like so\n",
"\n",
"```python\n",
"import expression\n",
"\n",
"books = [\n",
" {\"id\": 1, \"title\": 'Blurred Horizons'},\n",
" {\"id\": 2, \"author\": 'Judy Blume'}, \n",
" {\"id\": 3},\n",
"]\n",
"authors= ['blum', *exp(expression,\" books get\")()]\n",
"\n",
"for author in authors:\n",
" pass\n",
"```\n",
"But again this is using something that would probably be written in a real application like so"
],
"text/plain": [
"<IPython.core.display.Markdown object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"\n",
"\n",
"print(f\"Getting explanation from {MODEL_LLAMA} using Ollama...\")\n",
"\n",
"try:\n",
"    response = requests.post(\n",
"        f\"{OLLAMA_BASE}/completions\",\n",
"        json={\n",
"            \"model\": MODEL_LLAMA,\n",
"            \"prompt\": question,\n",
"            \"stream\": True\n",
"        },\n",
"        stream=True,\n",
"        timeout=120\n",
"    )\n",
"\n",
"    output = \"\"\n",
"    display_handle = display(Markdown(\"Generating response...\"), display_id=True)\n",
"\n",
"    for line in response.iter_lines(decode_unicode=True):\n",
"        if not line.strip():\n",
"            continue\n",
"\n",
"        # Each event line starts with \"data: \"\n",
"        if line.startswith(\"data: \"):\n",
"            line = line[len(\"data: \"):]\n",
"\n",
"        if line.strip() == \"[DONE]\":\n",
"            break\n",
"\n",
"        try:\n",
"            data = json.loads(line)\n",
"        except json.JSONDecodeError:\n",
"            continue\n",
"\n",
"        # In Ollama /v1/completions, the text comes in data['choices'][0]['text']\n",
"        text = data.get(\"choices\", [{}])[0].get(\"text\", \"\")\n",
"        if text:\n",
"            output += text\n",
"            update_display(Markdown(output), display_id=display_handle.display_id)\n",
"\n",
"    print(\"\\nFinal explanation:\\n\")\n",
"    display(Markdown(f\"### Llama 3.2 Explanation\\n\\n{output.strip()}\"))\n",
"\n",
"except requests.exceptions.ConnectionError:\n",
"    print(\"Could not connect to Ollama — make sure it’s running (run `ollama serve`).\")\n",
"except Exception as e:\n",
"    print(\"Unexpected error:\", e)\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
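
For reference, here is a minimal, runnable sketch of the generator pattern the notebook's question asks the model to explain. The sample `books` data and the `authors` helper below are illustrative only and are not part of the notebook:

```python
# Hypothetical sample data for illustration only.
books = [
    {"id": 1, "title": "Blurred Horizons"},        # no "author" key -> skipped
    {"id": 2, "title": "Forever", "author": "Judy Blume"},
    {"id": 3, "author": "Judy Blume"},              # duplicate author -> collapsed by the set
]

def authors(books):
    # Build a set of author values (skipping books without the key, removing duplicates),
    # then yield each one; `yield from` makes this function a generator.
    yield from {book.get("author") for book in books if book.get("author")}

print(list(authors(books)))  # ['Judy Blume']
```

The set comprehension drops books with no "author" key and de-duplicates the rest; `yield from` then yields each remaining value one at a time, so the caller gets a lazy generator rather than a set.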