Merge pull request #280 from nfirbas/main

week 1: Michelin-star cooking assistant
Ed Donner
2025-03-29 07:49:54 -04:00
committed by GitHub

@@ -0,0 +1,87 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "44aba2a0-c6eb-4fc1-a5cc-0a8f8679dbb8",
"metadata": {},
"source": [
"## Michelin-star cook..."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d4d58124-5e9a-4f5a-9e0a-ff74f43896a8",
"metadata": {},
"outputs": [],
"source": [
"# imports\n",
"\n",
"import os\n",
"import requests\n",
"from dotenv import load_dotenv\n",
"from bs4 import BeautifulSoup\n",
"from IPython.display import Markdown, display\n",
"from openai import OpenAI\n",
"\n",
"# Load environment variables in a file called .env\n",
"\n",
"load_dotenv(override=True)\n",
"api_key = os.getenv('OPENAI_API_KEY')\n",
"\n",
"openai = OpenAI()"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "67dc3099-2ccc-4ee8-8ff2-0dbbe4ae2fcb",
"metadata": {},
"outputs": [],
"source": [
"system_prompt = \"You are a professional chef in a Michelin-star restaurant. You will help me cook restaurant-style dishes using the ingredients I have left in my refrigerator.\\\n",
"You will provide detailed instructions with precise times and measurements in grams and include calorie information for raw ingredients, not cooked ones.\\\n",
"Add the caloric information at the end. Your responses should be formatted in Markdown.\"\n",
"\n",
"user_prompt = \"\"\"\n",
"Help me with a recipe using the ingredients I have left in the refrigerator. I have spinach, eggs, pasta, rice, chicken, beef, carrots, potatoes, butter, milk, cheese, tomatoes, red peppers, and all spices in the pantry.\\n\\n\n",
"\"\"\"\n",
"\n",
"messages = [\n",
" {\"role\": \"system\", \"content\": system_prompt},\n",
" {\"role\": \"user\", \"content\": user_prompt},\n",
"]\n",
" \n",
"response = openai.chat.completions.create(\n",
" model = \"gpt-4o-mini\",\n",
" messages = messages\n",
" )\n",
"\n",
"# Step 4: print the result in markdown format\n",
"pretty_response = Markdown(response.choices[0].message.content)\n",
"display(pretty_response)"
]
},
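{
"cell_type": "markdown",
"id": "9b2f6c1e-7a3d-4e58-9c21-6f4b8d2a1c3e",
"metadata": {},
"source": [
"Optional extra - a sketch of the same call with streaming, so the recipe renders while it is being generated. The `stream=True` flag and the `delta.content` chunks are standard Chat Completions behaviour; the live-updating display handle comes from IPython."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e4a1f7b2-8c5d-4f6a-9b3e-2d7c1a5f8e90",
"metadata": {},
"outputs": [],
"source": [
"# Stream the same request and redraw the Markdown output as chunks arrive\n",
"stream = openai.chat.completions.create(\n",
"    model=\"gpt-4o-mini\",\n",
"    messages=messages,\n",
"    stream=True\n",
")\n",
"\n",
"reply = \"\"\n",
"handle = display(Markdown(\"\"), display_id=True)\n",
"for chunk in stream:\n",
"    reply += chunk.choices[0].delta.content or \"\"\n",
"    handle.update(Markdown(reply))"
]
}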
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.11"
}
},
"nbformat": 4,
"nbformat_minor": 5
}