Updates and added day1 with ollama

Edward Donner
2025-04-05 10:01:00 -04:00
parent 750f9a6062
commit 344219c9f3
9 changed files with 627 additions and 22 deletions

View File

@@ -274,12 +274,12 @@
"metadata": {},
"outputs": [],
"source": [
"# Claude 3.5 Sonnet\n",
"# Claude 3.7 Sonnet\n",
"# API needs system message provided separately from user prompt\n",
"# Also adding max_tokens\n",
"\n",
"message = claude.messages.create(\n",
" model=\"claude-3-5-sonnet-latest\",\n",
" model=\"claude-3-7-sonnet-latest\",\n",
" max_tokens=200,\n",
" temperature=0.7,\n",
" system=system_message,\n",
@@ -298,12 +298,12 @@
"metadata": {},
"outputs": [],
"source": [
"# Claude 3.5 Sonnet again\n",
"# Claude 3.7 Sonnet again\n",
"# Now let's add in streaming back results\n",
"# If the streaming looks strange, then please see the note below this cell!\n",
"\n",
"result = claude.messages.stream(\n",
" model=\"claude-3-5-sonnet-latest\",\n",
" model=\"claude-3-7-sonnet-latest\",\n",
" max_tokens=200,\n",
" temperature=0.7,\n",
" system=system_message,\n",
@@ -350,7 +350,7 @@
"# If that happens to you, please skip this cell and use the next cell instead - an alternative approach.\n",
"\n",
"gemini = google.generativeai.GenerativeModel(\n",
" model_name='gemini-2.0-flash-exp',\n",
" model_name='gemini-2.0-flash',\n",
" system_instruction=system_message\n",
")\n",
"response = gemini.generate_content(user_prompt)\n",
@@ -373,7 +373,7 @@
")\n",
"\n",
"response = gemini_via_openai_client.chat.completions.create(\n",
" model=\"gemini-2.0-flash-exp\",\n",
" model=\"gemini-2.0-flash\",\n",
" messages=prompts\n",
")\n",
"print(response.choices[0].message.content)"
@@ -521,7 +521,7 @@
"# Have it stream back results in markdown\n",
"\n",
"stream = openai.chat.completions.create(\n",
" model='gpt-4o',\n",
" model='gpt-4o-mini',\n",
" messages=prompts,\n",
" temperature=0.7,\n",
" stream=True\n",

View File

@@ -16,7 +16,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"id": "c44c5494-950d-4d2f-8d4f-b87b57c5b330",
"metadata": {},
"outputs": [],
@@ -35,7 +35,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"id": "d1715421-cead-400b-99af-986388a97aff",
"metadata": {},
"outputs": [],
@@ -45,10 +45,20 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 3,
"id": "337d5dfc-0181-4e3b-8ab9-e78e0c3f657b",
"metadata": {},
"outputs": [],
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"OpenAI API Key exists and begins sk-proj-\n",
"Anthropic API Key exists and begins sk-ant-\n",
"Google API Key exists and begins AIzaSyA5\n"
]
}
],
"source": [
"# Load environment variables in a file called .env\n",
"# Print the key prefixes to help with any debugging\n",
@@ -76,7 +86,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 4,
"id": "22586021-1795-4929-8079-63f5bb4edd4c",
"metadata": {},
"outputs": [],
@@ -92,7 +102,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 5,
"id": "b16e6021-6dc4-4397-985a-6679d6c8ffd5",
"metadata": {},
"outputs": [],
@@ -104,7 +114,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 6,
"id": "02ef9b69-ef31-427d-86d0-b8c799e1c1b1",
"metadata": {},
"outputs": [],

View File

@@ -120,6 +120,9 @@
"# Simpler than in my video - we can easily create this function that calls OpenAI\n",
"# It's now just 1 line of code to prepare the input to OpenAI!\n",
"\n",
"# Student Octavio O. has pointed out that this isn't quite as straightforward for Claude -\n",
"# see the excellent contribution in community-contributions \"Gradio_issue_with_Claude\" that handles Claude.\n",
"\n",
"def chat(message, history):\n",
" messages = [{\"role\": \"system\", \"content\": system_message}] + history + [{\"role\": \"user\", \"content\": message}]\n",
"\n",