Merge pull request #585 from Carbaz/feature/new-wandb-integration

New Wandb integration to fix issues in the Week 6 Day 5 notebook.
Ed Donner
2025-08-12 09:49:30 -04:00
committed by GitHub
4 changed files with 23 additions and 2 deletions
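For context, the new notebook cell follows the Weights & Biases integration for OpenAI fine-tuning: log in to W&B, then sync the fine-tuning job so its metrics and metadata appear in a W&B project. A minimal sketch of the pattern (assuming job_id is the ID returned by an earlier openai.fine_tuning.jobs.create(...) call, as in the notebook):

import wandb
from wandb.integration.openai.fine_tuning import WandbLogger

# Placeholder: in the notebook, job_id comes from openai.fine_tuning.jobs.create(...).id
job_id = "ftjob-..."

# Authenticate with Weights & Biases (prompts for an API key if not already logged in).
wandb.login()

# Sync the OpenAI fine-tuning job's metrics and metadata into the given W&B project.
WandbLogger.sync(fine_tune_job_id=job_id, project="gpt-pricer")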

.gitignore

@@ -189,3 +189,6 @@ nohup.out
*.png
scraper_cache/
# WandB local sync data.
wandb/

@@ -44,3 +44,4 @@ dependencies:
- twilio
- pydub
- protobuf==3.20.2
- wandb

@@ -36,3 +36,4 @@ speedtest-cli
sentence_transformers
feedparser
protobuf==3.20.2
wandb

@@ -149,7 +149,7 @@
"source": [
"# First let's work on a good prompt for a Frontier model\n",
"# Notice that I'm removing the \" to the nearest dollar\"\n",
"# When we train our own models, we'll need to make the problem as easy as possible, \n",
"# When we train our own models, we'll need to make the problem as easy as possible,\n",
"# but a Frontier model needs no such simplification.\n",
"\n",
"def messages_for(item):\n",
@@ -393,6 +393,22 @@
"openai.fine_tuning.jobs.list_events(fine_tuning_job_id=job_id, limit=10).data"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b19ea9e9",
"metadata": {},
"outputs": [],
"source": [
"import wandb\n",
"from wandb.integration.openai.fine_tuning import WandbLogger\n",
"\n",
"# Log in to Weights & Biases.\n",
"wandb.login()\n",
"# Sync the fine-tuning job with Weights & Biases.\n",
"WandbLogger.sync(fine_tune_job_id=job_id, project=\"gpt-pricer\")"
]
},
{
"cell_type": "markdown",
"id": "066fef03-8338-4526-9df3-89b649ad4f0a",
@@ -490,7 +506,7 @@
"\n",
"def gpt_fine_tuned(item):\n",
" response = openai.chat.completions.create(\n",
" model=fine_tuned_model_name, \n",
" model=fine_tuned_model_name,\n",
" messages=messages_for(item),\n",
" seed=42,\n",
" max_tokens=7\n",