Submission for Week 1 and Week 2 Exercises

This commit is contained in:
Tochi-Nwachukwu
2025-10-20 16:13:05 +01:00
parent ebce76585b
commit 2270d49b9c
2 changed files with 490 additions and 0 deletions

@@ -0,0 +1,253 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "84ba5ab4",
"metadata": {},
"source": [
"# Week 2 Exercise: Build a Multimodal AI Technical Assistant\n",
"## Task Objective:\n",
"This week, you will upgrade the technical question-answering tool you built in Week 1 into a full-featured prototype. The goal is to apply all the concepts from Week 2 to create a more robust and interactive AI assistant.\n",
"\n",
"Core Requirements\n",
"Your prototype must include the following features:\n",
"\n",
"Gradio UI: Build a user-friendly interface for your assistant.\n",
"\n",
"Streaming Responses: Ensure the model's answers are streamed back to the user in real-time.\n",
"\n",
"System Prompt: Use a system prompt to define the AI's expertise and personality as a technical assistant.\n",
"\n",
"Model Switching: Implement a feature that allows the user to switch between at least two different models (e.g., an OpenAI model and Llama).\n",
"\n",
"Bonus: Demonstrate the use of a tool within your application for extra credit.\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 65,
"id": "6f83cbe7",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"import os\n",
"from dotenv import load_dotenv\n",
"from openai import OpenAI\n",
"import gradio as gr\n"
]
},
{
"cell_type": "code",
"execution_count": 66,
"id": "18207ba0",
"metadata": {},
"outputs": [],
"source": [
"# Constants\n",
"MODEL_GPT = 'gpt-4o-mini'\n",
"MODEL_LLAMA = 'llama3.2'\n",
"OLLAMA_BASE_URL = \"http://localhost:11434/v1\""
]
},
{
"cell_type": "code",
"execution_count": 67,
"id": "27033ed9",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"API key found and looks good so far!\n"
]
}
],
"source": [
"# set up environment\n",
"load_dotenv(override=True)\n",
"api_key=os.getenv('OPENAI_API_KEY')\n",
"\n",
"if not api_key:\n",
" print(\"No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!\")\n",
"elif not api_key.startswith(\"sk-proj-\"):\n",
" print(\"An API key was found, but it doesn't start sk-proj-; please check you're using the right key - see troubleshooting notebook\")\n",
"elif api_key.strip() != api_key:\n",
" print(\"An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see troubleshooting notebook\")\n",
"else:\n",
" print(\"API key found and looks good so far!\")"
]
},
{
"cell_type": "code",
"execution_count": 68,
"id": "b78ae89b",
"metadata": {},
"outputs": [],
"source": [
"# initializing models\n",
"\n",
"openai = OpenAI()\n",
"ollama = OpenAI(base_url=OLLAMA_BASE_URL, api_key='ollama')"
]
},
{
"cell_type": "code",
"execution_count": 69,
"id": "1023bb5d",
"metadata": {},
"outputs": [],
"source": [
"# here is the question; type over this to ask something new\n",
"system_prompt = \"\"\"\n",
"You are a technical assistant. You are to expect technical questions. You are to respond with detailed explanations to the technical question. You are to respond in Nigerian Pidgin English\n",
"\"\"\"\n",
"\n",
"question = \"\"\"\n",
"Please explain what tokens are in LLM\n",
"\"\"\"\n",
"\n",
"\n",
"messages = [\n",
" {\"role\": \"system\", \"content\": system_prompt},\n",
" {\"role\": \"user\", \"content\": question},\n",
"]"
]
},
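{
"cell_type": "code",
"execution_count": null,
"id": "9c4e2f10",
"metadata": {},
"outputs": [],
"source": [
"# Optional sanity check (sketch): send the sample question above in a plain,\n",
"# non-streaming call before wiring up Gradio. Assumes the `openai` client and\n",
"# MODEL_GPT defined earlier; the Gradio app below does not depend on this cell.\n",
"reply = openai.chat.completions.create(model=MODEL_GPT, messages=messages)\n",
"print(reply.choices[0].message.content)"
]
},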
{
"cell_type": "code",
"execution_count": 70,
"id": "25ce65dc",
"metadata": {},
"outputs": [],
"source": [
"def chat(message, history, model_choice):\n",
" # history = [{\"role\": h[\"role\"], \"content\": h[\"content\"]} for h in history]\n",
" messages = (\n",
" [{\"role\": \"system\", \"content\": system_prompt}]\n",
" + history\n",
" + [{\"role\": \"user\", \"content\": message}]\n",
" )\n",
" if model_choice == \"openai\":\n",
" try:\n",
" stream = openai.chat.completions.create(\n",
" model=MODEL_GPT, messages=messages, stream=True\n",
" )\n",
" \n",
" response = \"\"\n",
" for chunk in stream:\n",
" response += chunk.choices[0].delta.content or \"\"\n",
" yield response\n",
" except Exception as e:\n",
" yield f\"Error with OpenAI API: {e}\"\n",
"\n",
" elif model_choice == \"llama\":\n",
" try:\n",
" stream = ollama.chat.completions.create(\n",
" model=MODEL_LLAMA, messages=messages, stream=True\n",
" )\n",
" \n",
" response = \"\"\n",
" for chunk in stream:\n",
" response += chunk.choices[0].delta.content or \"\"\n",
" yield response\n",
" except Exception as e:\n",
" yield f\"Error with OpenAI API: {e}\""
]
},
{
"cell_type": "code",
"execution_count": 71,
"id": "8f46425c",
"metadata": {},
"outputs": [],
"source": [
"model_selector = gr.Radio(\n",
" [\"openai\", \"llama\"], \n",
" label=\"Choose Model\", \n",
" value=\"openai\" \n",
")"
]
},
{
"cell_type": "code",
"execution_count": 72,
"id": "d239c424",
"metadata": {},
"outputs": [],
"source": [
"chatbot = gr.ChatInterface(fn=chat, type=\"messages\", additional_inputs=[model_selector])"
]
},
{
"cell_type": "code",
"execution_count": 73,
"id": "b00ac432",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7865\n",
"* To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7865/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": []
},
"execution_count": 73,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"chatbot.launch()"
]
},
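{
"cell_type": "markdown",
"id": "a1f2b3c4",
"metadata": {},
"source": [
"## Bonus (sketch): tool use\n",
"A minimal, hedged sketch of the bonus requirement flagged in the requirements list above. The `get_current_time` helper and its tool schema are hypothetical, invented only for this demo; the cell assumes the `openai` client, `MODEL_GPT`, and `system_prompt` defined earlier. It shows the plain (non-streaming) OpenAI tool-calling round trip, not a definitive integration into the Gradio app."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d4e5f6a7",
"metadata": {},
"outputs": [],
"source": [
"# Bonus sketch: tool use with the OpenAI client.\n",
"# `get_current_time` is a hypothetical tool defined only for this demo.\n",
"import json\n",
"from datetime import datetime, timezone\n",
"\n",
"def get_current_time():\n",
"    # Hypothetical tool: current UTC time as an ISO-8601 string\n",
"    return datetime.now(timezone.utc).isoformat()\n",
"\n",
"time_tool = {\n",
"    \"type\": \"function\",\n",
"    \"function\": {\n",
"        \"name\": \"get_current_time\",\n",
"        \"description\": \"Get the current UTC time as an ISO-8601 string\",\n",
"        \"parameters\": {\"type\": \"object\", \"properties\": {}, \"required\": []},\n",
"    },\n",
"}\n",
"\n",
"tool_messages = [\n",
"    {\"role\": \"system\", \"content\": system_prompt},\n",
"    {\"role\": \"user\", \"content\": \"What time is it right now?\"},\n",
"]\n",
"\n",
"# First call: the model may decide to call the tool\n",
"first = openai.chat.completions.create(model=MODEL_GPT, messages=tool_messages, tools=[time_tool])\n",
"message = first.choices[0].message\n",
"\n",
"if message.tool_calls:\n",
"    tool_call = message.tool_calls[0]\n",
"    tool_messages.append(message)\n",
"    tool_messages.append(\n",
"        {\"role\": \"tool\", \"tool_call_id\": tool_call.id, \"content\": json.dumps({\"time\": get_current_time()})}\n",
"    )\n",
"    # Second call: hand the tool result back so the model can answer in words\n",
"    second = openai.chat.completions.create(model=MODEL_GPT, messages=tool_messages)\n",
"    print(second.choices[0].message.content)\n",
"else:\n",
"    print(message.content)"
]
},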
{
"cell_type": "code",
"execution_count": null,
"id": "8ad21f62",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}