still working
@@ -0,0 +1,456 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# The Price is Right - Fixed Version\n",
"\n",
"This notebook fixes the issue where existing deals disappear from the table while the agent framework is searching for new ones.\n",
"\n",
"**Key Fix**: The table is continuously refreshed from the current memory during updates, so existing deals remain visible while new ones are being searched (see the sketch in the next cell).\n"
]
},
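{
"cell_type": "markdown",
"metadata": {},
"source": [
"**How the fix works**: `App.update_output` (defined below) is a generator that polls the log queue and, on every iteration, rebuilds the table from `framework.memory` instead of leaving it empty until the new search finishes. Below is an illustrative sketch of that polling loop (this cell is not executed, and the `self.` plumbing of the real method is omitted); the actual implementation is in the `App` class further down.\n",
"\n",
"```python\n",
"while True:\n",
"    try:\n",
"        message = log_queue.get_nowait()              # a new log line arrived\n",
"        log_data.append(message)\n",
"        yield log_data, html_for(log_data), table_for(framework.memory)\n",
"    except queue.Empty:\n",
"        try:\n",
"            final_result = result_queue.get_nowait()  # worker thread finished\n",
"            yield log_data, html_for(log_data), final_result\n",
"            return\n",
"        except queue.Empty:\n",
"            # still waiting: keep showing whatever is already in memory\n",
"            yield log_data, html_for(log_data), table_for(framework.memory)\n",
"            time.sleep(0.1)\n",
"```\n"
]
},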
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Imports\n",
"import sys\n",
"sys.path.append('../..')\n",
"\n",
"import logging\n",
"import queue\n",
"import threading\n",
"import time\n",
"import gradio as gr\n",
"from deal_agent_framework import DealAgentFramework\n",
"from agents.deals import Opportunity, Deal\n",
"from log_utils import reformat\n",
"import plotly.graph_objects as go\n"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"# Helper Functions\n",
"\n",
"class QueueHandler(logging.Handler):\n",
"    def __init__(self, log_queue):\n",
"        super().__init__()\n",
"        self.log_queue = log_queue\n",
"\n",
"    def emit(self, record):\n",
"        self.log_queue.put(self.format(record))\n",
"\n",
"\n",
"def html_for(log_data):\n",
"    \"\"\"Convert log data to HTML format for display\"\"\"\n",
"    output = '<br>'.join(log_data[-18:])\n",
"    return f\"\"\"\n",
"    <div id=\"scrollContent\" style=\"height: 400px; overflow-y: auto; border: 1px solid #ccc; background-color: #222229; padding: 10px;\">\n",
"    {output}\n",
"    </div>\n",
"    \"\"\"\n",
"\n",
"\n",
"def setup_logging(log_queue):\n",
"    \"\"\"Set up logging to capture messages in a queue\"\"\"\n",
"    handler = QueueHandler(log_queue)\n",
"    formatter = logging.Formatter(\n",
"        \"[%(asctime)s] %(message)s\",\n",
"        datefmt=\"%Y-%m-%d %H:%M:%S %z\",\n",
"    )\n",
"    handler.setFormatter(formatter)\n",
"    logger = logging.getLogger()\n",
"    logger.addHandler(handler)\n",
"    logger.setLevel(logging.INFO)\n",
"\n",
"\n",
"def get_plot():\n",
"    \"\"\"Generate 3D visualization of vector database - handles empty database gracefully\"\"\"\n",
"    try:\n",
"        documents, vectors, colors = DealAgentFramework.get_plot_data(max_datapoints=1000)\n",
"\n",
"        print(vectors, flush=True)\n",
"\n",
"        # Check if we have any data\n",
"        if len(vectors) == 0:\n",
"            # Return placeholder plot if database is empty\n",
"            fig = go.Figure()\n",
"            fig.update_layout(\n",
"                title='Vector Database Empty',\n",
"                height=400,\n",
"                annotations=[dict(\n",
"                    text=\"The vector database is empty.<br>Run the data loading notebook (day2.0) to populate it.\",\n",
"                    x=0.5,\n",
"                    y=0.5,\n",
"                    xref=\"paper\",\n",
"                    yref=\"paper\",\n",
"                    showarrow=False,\n",
"                    font=dict(size=14)\n",
"                )]\n",
"            )\n",
"            return fig\n",
"\n",
"        # Normal case: create 3D scatter plot\n",
"        fig = go.Figure(data=[go.Scatter3d(\n",
"            x=vectors[:, 0],\n",
"            y=vectors[:, 1],\n",
"            z=vectors[:, 2],\n",
"            mode='markers',\n",
"            marker=dict(size=2, color=colors, opacity=0.7),\n",
"        )])\n",
"\n",
"        fig.update_layout(\n",
"            scene=dict(xaxis_title='x',\n",
"                       yaxis_title='y',\n",
"                       zaxis_title='z',\n",
"                       aspectmode='manual',\n",
"                       aspectratio=dict(x=2.2, y=2.2, z=1),\n",
"                       camera=dict(\n",
"                           eye=dict(x=1.6, y=1.6, z=0.8)\n",
"                       )),\n",
"            height=400,\n",
"            margin=dict(r=5, b=1, l=5, t=2)\n",
"        )\n",
"        return fig\n",
"    except Exception as e:\n",
"        # Handle any errors gracefully\n",
"        fig = go.Figure()\n",
"        fig.update_layout(\n",
"            title='Error Loading Vector Database',\n",
"            height=400,\n",
"            annotations=[dict(\n",
"                text=f\"Error: {str(e)}<br><br>Make sure the vector database is set up correctly.<br>Run day2.0 notebook to populate it.\",\n",
"                x=0.5,\n",
"                y=0.5,\n",
"                xref=\"paper\",\n",
"                yref=\"paper\",\n",
"                showarrow=False,\n",
"                font=dict(size=12)\n",
"            )]\n",
"        )\n",
"        return fig\n",
"\n",
"\n",
"def create_opportunity_from_dict(data: dict) -> Opportunity:\n",
"    \"\"\"Helper function to create Opportunity from dictionary - uses Deal and Opportunity classes\"\"\"\n",
"    deal = Deal(**data['deal']) if isinstance(data['deal'], dict) else data['deal']\n",
"    return Opportunity(deal=deal, estimate=data['estimate'], discount=data['discount'])\n",
"\n",
"\n",
"def validate_opportunities(opportunities) -> list:\n",
"    \"\"\"Validate and ensure all items are Opportunity instances - uses Opportunity class\"\"\"\n",
"    validated = []\n",
"    for opp in opportunities:\n",
"        if not isinstance(opp, Opportunity):\n",
"            if isinstance(opp, dict):\n",
"                opp = create_opportunity_from_dict(opp)\n",
"            else:\n",
"                continue\n",
"        validated.append(opp)\n",
"    return validated\n"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"# Main App Class\n",
"\n",
"class App:\n",
"\n",
"    def __init__(self):\n",
"        self.agent_framework = None\n",
"\n",
"    def get_agent_framework(self):\n",
"        \"\"\"Get or initialize the agent framework\"\"\"\n",
"        if not self.agent_framework:\n",
"            self.agent_framework = DealAgentFramework()\n",
"            self.agent_framework.init_agents_as_needed()\n",
"        return self.agent_framework\n",
"\n",
"    def table_for(self, opps):\n",
"        \"\"\"Convert opportunities to table format - uses Opportunity and Deal classes\"\"\"\n",
"        # Validate opportunities are Opportunity instances\n",
"        validated_opps = validate_opportunities(opps)\n",
"        return [[opp.deal.product_description, f\"${opp.deal.price:.2f}\", f\"${opp.estimate:.2f}\", f\"${opp.discount:.2f}\", opp.deal.url]\n",
"                for opp in validated_opps\n",
"                if isinstance(opp, Opportunity)]\n",
"\n",
"    def update_output(self, log_data, log_queue, result_queue):\n",
"        \"\"\"Keep showing current memory during updates - fixes disappearing table issue\"\"\"\n",
"        framework = self.get_agent_framework()\n",
"        current_table = self.table_for(framework.memory)\n",
"\n",
"        while True:\n",
"            try:\n",
"                message = log_queue.get_nowait()\n",
"                log_data.append(reformat(message))\n",
"                # Always refresh table from current memory during updates\n",
"                current_table = self.table_for(framework.memory)\n",
"                yield log_data, html_for(log_data), current_table\n",
"            except queue.Empty:\n",
"                try:\n",
"                    # When result is ready, update with final result\n",
"                    final_result = result_queue.get_nowait()\n",
"                    yield log_data, html_for(log_data), final_result\n",
"                    return\n",
"                except queue.Empty:\n",
"                    # Continue showing current memory while waiting\n",
"                    current_table = self.table_for(framework.memory)\n",
"                    yield log_data, html_for(log_data), current_table\n",
"                    time.sleep(0.1)\n",
"\n",
"    def do_run(self):\n",
"        \"\"\"Run framework and return updated table\"\"\"\n",
"        framework = self.get_agent_framework()\n",
"        new_opportunities = framework.run()\n",
"        return self.table_for(new_opportunities)\n",
"\n",
"    def run_with_logging(self, initial_log_data):\n",
"        \"\"\"Run agent framework with logging in a separate thread\"\"\"\n",
"        log_queue = queue.Queue()\n",
"        result_queue = queue.Queue()\n",
"        setup_logging(log_queue)\n",
"\n",
"        def worker():\n",
"            result = self.do_run()\n",
"            result_queue.put(result)\n",
"\n",
"        thread = threading.Thread(target=worker)\n",
"        thread.start()\n",
"\n",
"        for log_data, output, final_result in self.update_output(initial_log_data, log_queue, result_queue):\n",
"            yield log_data, output, final_result\n",
"\n",
"    def do_select(self, selected_index: gr.SelectData):\n",
"        \"\"\"Handle deal selection - send alert\"\"\"\n",
"        framework = self.get_agent_framework()\n",
"        opportunities = framework.memory\n",
"        row = selected_index.index[0]\n",
"        if row < len(opportunities):\n",
"            opportunity = opportunities[row]\n",
"            framework.planner.messenger.alert(opportunity)\n",
"            return f\"Alert sent for: {opportunity.deal.product_description[:50]}...\"\n",
"        return \"No opportunity found at that index\"\n",
"\n",
"    def load_initial(self):\n",
"        \"\"\"Load initial state with existing deals - uses Opportunity and Deal classes\"\"\"\n",
"        framework = self.get_agent_framework()\n",
"        # Ensure memory contains Opportunity instances\n",
"        opportunities = validate_opportunities(framework.memory)\n",
"        initial_table = self.table_for(opportunities)\n",
"        return [], \"\", initial_table\n",
"\n",
"    def run(self):\n",
"        \"\"\"Launch the Gradio interface\"\"\"\n",
"        with gr.Blocks(title=\"The Price is Right\", fill_width=True) as ui:\n",
"\n",
"            log_data = gr.State([])\n",
"\n",
"            with gr.Row():\n",
"                gr.Markdown('<div style=\"text-align: center;font-size:24px\"><strong>The Price is Right</strong> - Autonomous Agent Framework that hunts for deals</div>')\n",
"            with gr.Row():\n",
"                gr.Markdown('<div style=\"text-align: center;font-size:14px\">A proprietary fine-tuned LLM deployed on Modal and a RAG pipeline with a frontier model collaborate to send push notifications with great online deals.</div>')\n",
"            with gr.Row():\n",
"                opportunities_dataframe = gr.Dataframe(\n",
"                    headers=[\"Deals found so far\", \"Price\", \"Estimate\", \"Discount\", \"URL\"],\n",
"                    wrap=True,\n",
"                    column_widths=[6, 1, 1, 1, 3],\n",
"                    row_count=10,\n",
"                    col_count=5,\n",
"                    max_height=400,\n",
"                )\n",
"            with gr.Row():\n",
"                with gr.Column(scale=1):\n",
"                    logs = gr.HTML()\n",
"                with gr.Column(scale=1):\n",
"                    plot = gr.Plot(value=get_plot(), show_label=False)\n",
"\n",
"            # Initial load - show existing deals\n",
"            ui.load(self.load_initial, inputs=[], outputs=[log_data, logs, opportunities_dataframe])\n",
"\n",
"            # Timer that runs every 5 minutes (300 seconds)\n",
"            timer = gr.Timer(value=300, active=True)\n",
"            timer.tick(self.run_with_logging, inputs=[log_data], outputs=[log_data, logs, opportunities_dataframe])\n",
"\n",
"            # Selection handler\n",
"            selection_feedback = gr.Textbox(visible=False)\n",
"            opportunities_dataframe.select(self.do_select, inputs=[], outputs=[selection_feedback])\n",
"\n",
"        ui.launch(share=False, inbrowser=True)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[2025-10-30 12:15:06 +0100] [Agents] [INFO] HTTP Request: GET https://api.gradio.app/pkg-version \"HTTP/1.1 200 OK\"\n",
"[2025-10-30 12:15:06 +0100] [Agents] [INFO] HTTP Request: GET https://api.gradio.app/pkg-version \"HTTP/1.1 200 OK\"\n",
"* Running on local URL: http://127.0.0.1:7862\n",
"[2025-10-30 12:15:07 +0100] [Agents] [INFO] HTTP Request: GET http://127.0.0.1:7862/gradio_api/startup-events \"HTTP/1.1 200 OK\"\n",
"[2025-10-30 12:15:07 +0100] [Agents] [INFO] HTTP Request: GET http://127.0.0.1:7862/gradio_api/startup-events \"HTTP/1.1 200 OK\"\n",
"[2025-10-30 12:15:07 +0100] [Agents] [INFO] HTTP Request: HEAD http://127.0.0.1:7862/ \"HTTP/1.1 200 OK\"\n",
"[2025-10-30 12:15:07 +0100] [Agents] [INFO] HTTP Request: HEAD http://127.0.0.1:7862/ \"HTTP/1.1 200 OK\"\n",
"* To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7862/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[44m\u001b[37m[Agent Framework] Initializing Agent Framework\u001b[0m\n",
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[44m\u001b[37m[Agent Framework] Initializing Agent Framework\u001b[0m\n",
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[44m\u001b[37m[Agent Framework] Initializing Agent Framework\u001b[0m\n",
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[40m\u001b[32m[Planning Agent] Planning Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[40m\u001b[32m[Planning Agent] Planning Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[40m\u001b[32m[Planning Agent] Planning Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[40m\u001b[36m[Scanner Agent] Scanner Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[40m\u001b[36m[Scanner Agent] Scanner Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:10 +0100] [Agents] [INFO] \u001b[40m\u001b[36m[Scanner Agent] Scanner Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[36m[Scanner Agent] Scanner Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[36m[Scanner Agent] Scanner Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[36m[Scanner Agent] Scanner Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[33m[Ensemble Agent] Initializing Ensemble Agent\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[33m[Ensemble Agent] Initializing Ensemble Agent\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[33m[Ensemble Agent] Initializing Ensemble Agent\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[31m[Specialist Agent] Specialist Agent is initializing - connecting to modal\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[31m[Specialist Agent] Specialist Agent is initializing - connecting to modal\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[31m[Specialist Agent] Specialist Agent is initializing - connecting to modal\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[31m[Specialist Agent] Specialist Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[31m[Specialist Agent] Specialist Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[31m[Specialist Agent] Specialist Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Initializing Frontier Agent\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Initializing Frontier Agent\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Initializing Frontier Agent\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Frontier Agent is set up with DeepSeek\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Frontier Agent is set up with DeepSeek\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Frontier Agent is set up with DeepSeek\u001b[0m\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] Use pytorch device_name: cpu\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] Use pytorch device_name: cpu\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] Use pytorch device_name: cpu\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] Load pretrained SentenceTransformer: sentence-transformers/all-MiniLM-L6-v2\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] Load pretrained SentenceTransformer: sentence-transformers/all-MiniLM-L6-v2\n",
"[2025-10-30 12:15:11 +0100] [Agents] [INFO] Load pretrained SentenceTransformer: sentence-transformers/all-MiniLM-L6-v2\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Frontier Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Frontier Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] \u001b[40m\u001b[34m[Frontier Agent] Frontier Agent is ready\u001b[0m\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] \u001b[40m\u001b[35m[Random Forest Agent] Random Forest Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] \u001b[40m\u001b[35m[Random Forest Agent] Random Forest Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] \u001b[40m\u001b[35m[Random Forest Agent] Random Forest Agent is initializing\u001b[0m\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] Use pytorch device_name: cpu\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] Use pytorch device_name: cpu\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] Use pytorch device_name: cpu\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] Load pretrained SentenceTransformer: sentence-transformers/all-MiniLM-L6-v2\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] Load pretrained SentenceTransformer: sentence-transformers/all-MiniLM-L6-v2\n",
"[2025-10-30 12:15:17 +0100] [Agents] [INFO] Load pretrained SentenceTransformer: sentence-transformers/all-MiniLM-L6-v2\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"Traceback (most recent call last):\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\gradio\\queueing.py\", line 745, in process_events\n",
" response = await route_utils.call_process_api(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\gradio\\route_utils.py\", line 354, in call_process_api\n",
" output = await app.get_blocks().process_api(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\gradio\\blocks.py\", line 2116, in process_api\n",
" result = await self.call_function(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\gradio\\blocks.py\", line 1623, in call_function\n",
" prediction = await anyio.to_thread.run_sync( # type: ignore\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\anyio\\to_thread.py\", line 56, in run_sync\n",
" return await get_async_backend().run_sync_in_worker_thread(\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 2485, in run_sync_in_worker_thread\n",
" return await future\n",
" ^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 976, in run\n",
" result = context.run(func, *args)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\gradio\\utils.py\", line 915, in wrapper\n",
" response = f(*args, **kwargs)\n",
" ^^^^^^^^^^^^^^^^^^\n",
" File \"C:\\Users\\hp\\AppData\\Local\\Temp\\ipykernel_560\\1866679463.py\", line 82, in load_initial\n",
" framework = self.get_agent_framework()\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"C:\\Users\\hp\\AppData\\Local\\Temp\\ipykernel_560\\1866679463.py\", line 12, in get_agent_framework\n",
" self.agent_framework.init_agents_as_needed()\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\week8\\community_contributions\\solisoma\\../..\\deal_agent_framework.py\", line 54, in init_agents_as_needed\n",
" self.log(\"Agent Framework is ready\")\n",
" ^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\week8\\community_contributions\\solisoma\\../..\\agents\\planning_agent.py\", line 21, in __init__\n",
" self.ensemble = EnsembleAgent(collection)\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\week8\\community_contributions\\solisoma\\../..\\agents\\ensemble_agent.py\", line 23, in __init__\n",
" self.random_forest = RandomForestAgent()\n",
" ^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\week8\\community_contributions\\solisoma\\../..\\agents\\random_forest_agent.py\", line 24, in __init__\n",
" self.model = joblib.load('random_forest_model.pkl')\n",
" ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n",
" File \"c:\\Users\\hp\\projects\\gen-ai\\llm_engineering\\.venv\\Lib\\site-packages\\joblib\\numpy_pickle.py\", line 735, in load\n",
" with open(filename, \"rb\") as f:\n",
" ^^^^^^^^^^^^^^^^^^^^\n",
"FileNotFoundError: [Errno 2] No such file or directory: 'random_forest_model.pkl'\n"
]
}
],
"source": [
"# Run the application\n",
"app = App()\n",
"app.run()\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
}