From c663379828e31992e9c3d9ffcd56f4882a1313b4 Mon Sep 17 00:00:00 2001
From: Carlos Bazaga
Date: Tue, 12 Aug 2025 01:26:22 +0200
Subject: [PATCH] Update Week 6 Day 5 Notebook to make use of WandB sync.

---
 week6/day5.ipynb | 20 ++++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/week6/day5.ipynb b/week6/day5.ipynb
index 5d5619e..14abeab 100644
--- a/week6/day5.ipynb
+++ b/week6/day5.ipynb
@@ -149,7 +149,7 @@
    "source": [
     "# First let's work on a good prompt for a Frontier model\n",
     "# Notice that I'm removing the \" to the nearest dollar\"\n",
-    "# When we train our own models, we'll need to make the problem as easy as possible, \n",
+    "# When we train our own models, we'll need to make the problem as easy as possible,\n",
     "# but a Frontier model needs no such simplification.\n",
     "\n",
     "def messages_for(item):\n",
@@ -393,6 +393,22 @@
     "openai.fine_tuning.jobs.list_events(fine_tuning_job_id=job_id, limit=10).data"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b19ea9e9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import wandb\n",
+    "from wandb.integration.openai.fine_tuning import WandbLogger\n",
+    "\n",
+    "# Log in to Weights & Biases.\n",
+    "wandb.login()\n",
+    "# Sync the fine-tuning job with Weights & Biases.\n",
+    "WandbLogger.sync(fine_tune_job_id=job_id, project=\"gpt-pricer\")"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "066fef03-8338-4526-9df3-89b649ad4f0a",
@@ -490,7 +506,7 @@
    "\n",
    "def gpt_fine_tuned(item):\n",
    "    response = openai.chat.completions.create(\n",
-   "        model=fine_tuned_model_name, \n",
+   "        model=fine_tuned_model_name,\n",
    "        messages=messages_for(item),\n",
    "        seed=42,\n",
    "        max_tokens=7\n",
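
For reviewers who don't want to open the notebook: below is a minimal standalone
sketch of the flow the new cell wires in, not part of the patch itself. It assumes
`job_id` holds the ID of the OpenAI fine-tuning job created earlier in the notebook,
and that a W&B API key is available (e.g. via the WANDB_API_KEY environment variable
or an interactive login prompt; the notebook does not configure this itself).

    # Sketch only; mirrors the new notebook cell under the assumptions above.
    import wandb
    from wandb.integration.openai.fine_tuning import WandbLogger

    # Authenticate with Weights & Biases. Uses WANDB_API_KEY if set,
    # otherwise prompts for a key interactively.
    wandb.login()

    # Fetch the fine-tuning job's training metrics from OpenAI and stream
    # them into the W&B project "gpt-pricer". By default, sync waits for
    # the job to finish before returning.
    WandbLogger.sync(fine_tune_job_id=job_id, project="gpt-pricer")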