From 5923e3a92c14f3f67e006e4e38698f965c0ab73f Mon Sep 17 00:00:00 2001
From: Guru Deep Singh
Date: Sat, 21 Jun 2025 18:39:28 +0200
Subject: [PATCH] feat():Multishot_prompting_using_history

---
 ...rompting_via_historical_conversation.ipynb | 133 ++++++++++++++++++
 1 file changed, 133 insertions(+)
 create mode 100644 week2/community-contributions/day3_Multishot_prompting_via_historical_conversation.ipynb

diff --git a/week2/community-contributions/day3_Multishot_prompting_via_historical_conversation.ipynb b/week2/community-contributions/day3_Multishot_prompting_via_historical_conversation.ipynb
new file mode 100644
index 0000000..c6a10b3
--- /dev/null
+++ b/week2/community-contributions/day3_Multishot_prompting_via_historical_conversation.ipynb
@@ -0,0 +1,133 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "f4e0dbbb-2b3f-4c4b-8b25-642648cfe72c",
+   "metadata": {},
+   "source": [
+    "# Multishot Prompting via Learning from Historical Conversations\n",
+    "Learning from historical conversations (which could be stored in a database) lets the model reuse information from earlier exchanges in the current conversation."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c71c5ba7-d30f-4b78-abde-4ff465196256",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "from dotenv import load_dotenv\n",
+    "from openai import OpenAI\n",
+    "import gradio as gr"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8304702a-8a8d-40de-96ee-3ae911949952",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Load environment variables from a file called .env\n",
+    "# Print the key prefixes to help with any debugging\n",
+    "\n",
+    "load_dotenv(override=True)\n",
+    "openai_api_key = os.getenv('OPENAI_API_KEY')\n",
+    "anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
+    "google_api_key = os.getenv('GOOGLE_API_KEY')\n",
+    "\n",
+    "if openai_api_key:\n",
+    "    print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
+    "else:\n",
+    "    print(\"OpenAI API Key not set\")\n",
+    "    \n",
+    "if anthropic_api_key:\n",
+    "    print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
+    "else:\n",
+    "    print(\"Anthropic API Key not set\")\n",
+    "\n",
+    "if google_api_key:\n",
+    "    print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
+    "else:\n",
+    "    print(\"Google API Key not set\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5ef47f00-e0fe-45cf-a4da-f60b47fadc98",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "openai = OpenAI()\n",
+    "MODEL = 'gpt-4o-mini'\n",
+    "\n",
+    "system_message = \"You are a helpful assistant in a clothes store. You should try to gently encourage \\\n",
+    "the customer to try items that are on sale. Hats are 60% off, and most other items are 50% off. \\\n",
+    "For example, if the customer says 'I'm looking to buy a hat', \\\n",
+    "you could reply something like, 'Wonderful - we have lots of hats - including several that are part of our sales event.' \\\n",
+    "Encourage the customer to buy hats if they are unsure what to get.\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "78c29e44-c121-4af9-b70f-1b5559040829",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "archivedConversation = [{\"role\": \"user\", \"content\": \"Customer A: Hi, I am looking to buy a belt.\"},\n",
+    "                        {\"role\": \"assistant\", \"content\": \"I am sorry, but we do not sell belts in this store; you can find them in our second store. \"\n",
+    "                                                 \"Do you want me to tell you the address of that store?\"},\n",
+    "                        {\"role\": \"user\", \"content\": \"Customer A: Yes, please tell me the location.\"},\n",
+    "                        {\"role\": \"assistant\", \"content\": \"Please walk straight out of this store and take a right; the second store is three streets down, next to a burger joint.\"}]\n",
+    "\n",
+    "def chat(message, history):\n",
+    "    # Prepend the archived exchange as few-shot context when the customer mentions a belt\n",
+    "    if 'belt' in message:\n",
+    "        messages = [{\"role\": \"system\", \"content\": system_message}] + archivedConversation + history + [{\"role\": \"user\", \"content\": message}]\n",
+    "    else:\n",
+    "        messages = [{\"role\": \"system\", \"content\": system_message}] + history + [{\"role\": \"user\", \"content\": message}]\n",
+    "\n",
+    "    stream = openai.chat.completions.create(model=MODEL, messages=messages, stream=True)\n",
+    "\n",
+    "    response = \"\"\n",
+    "    for chunk in stream:\n",
+    "        response += chunk.choices[0].delta.content or ''\n",
+    "        yield response"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e48d30f8-f040-4c01-bb4f-47562bba5fa7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "gr.ChatInterface(fn=chat, type=\"messages\").launch(inbrowser=True)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
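The notebook hard-codes `archivedConversation`, while its intro notes that such history could be stored in a database. Below is a minimal sketch of that variant, assuming the notebook's `openai`, `MODEL`, and `system_message` are already defined. The SQLite file `store_history.db`, the `archived_messages` table and its columns, and the `load_archived_turns` helper are hypothetical names used only for illustration.

```python
import sqlite3

def load_archived_turns(db_path, topic):
    # Fetch prior user/assistant turns about a topic from a hypothetical SQLite table:
    #   archived_messages(id INTEGER PRIMARY KEY, topic TEXT, role TEXT, content TEXT)
    conn = sqlite3.connect(db_path)
    try:
        rows = conn.execute(
            "SELECT role, content FROM archived_messages WHERE topic = ? ORDER BY id",
            (topic,),
        ).fetchall()
    finally:
        conn.close()
    return [{"role": role, "content": content} for role, content in rows]

def chat(message, history):
    # Same shape as the notebook's chat(), but the few-shot turns come from the database
    archived = load_archived_turns("store_history.db", "belt") if "belt" in message.lower() else []
    messages = [{"role": "system", "content": system_message}] + archived + history + [{"role": "user", "content": message}]
    stream = openai.chat.completions.create(model=MODEL, messages=messages, stream=True)
    response = ""
    for chunk in stream:
        response += chunk.choices[0].delta.content or ""
        yield response
```

As in the notebook, the archived turns are injected only when the trigger keyword appears, so unrelated conversations are not padded with extra context.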