Updated and added day1 with Ollama

This commit is contained in:
Edward Donner
2025-04-05 10:01:00 -04:00
parent 750f9a6062
commit 344219c9f3
9 changed files with 627 additions and 22 deletions

View File

@@ -274,12 +274,12 @@
"metadata": {},
"outputs": [],
"source": [
"# Claude 3.5 Sonnet\n",
"# Claude 3.7 Sonnet\n",
"# API needs system message provided separately from user prompt\n",
"# Also adding max_tokens\n",
"\n",
"message = claude.messages.create(\n",
" model=\"claude-3-5-sonnet-latest\",\n",
" model=\"claude-3-7-sonnet-latest\",\n",
" max_tokens=200,\n",
" temperature=0.7,\n",
" system=system_message,\n",
@@ -298,12 +298,12 @@
"metadata": {},
"outputs": [],
"source": [
"# Claude 3.5 Sonnet again\n",
"# Claude 3.7 Sonnet again\n",
"# Now let's add in streaming back results\n",
"# If the streaming looks strange, then please see the note below this cell!\n",
"\n",
"result = claude.messages.stream(\n",
" model=\"claude-3-5-sonnet-latest\",\n",
" model=\"claude-3-7-sonnet-latest\",\n",
" max_tokens=200,\n",
" temperature=0.7,\n",
" system=system_message,\n",
@@ -350,7 +350,7 @@
"# If that happens to you, please skip this cell and use the next cell instead - an alternative approach.\n",
"\n",
"gemini = google.generativeai.GenerativeModel(\n",
" model_name='gemini-2.0-flash-exp',\n",
" model_name='gemini-2.0-flash',\n",
" system_instruction=system_message\n",
")\n",
"response = gemini.generate_content(user_prompt)\n",
@@ -373,7 +373,7 @@
")\n",
"\n",
"response = gemini_via_openai_client.chat.completions.create(\n",
" model=\"gemini-2.0-flash-exp\",\n",
" model=\"gemini-2.0-flash\",\n",
" messages=prompts\n",
")\n",
"print(response.choices[0].message.content)"
@@ -521,7 +521,7 @@
"# Have it stream back results in markdown\n",
"\n",
"stream = openai.chat.completions.create(\n",
" model='gpt-4o',\n",
" model='gpt-4o-mini',\n",
" messages=prompts,\n",
" temperature=0.7,\n",
" stream=True\n",