Merge pull request #829 from sammurira/samuel_bootcamp_wk2
Week 2 Task: Trending Coins with OpenAI and Llama, using Gradio Chat
Binary file not shown. (image added; size: 408 KiB)
Binary file not shown. (image added; size: 437 KiB)
Binary file not shown. (image added; size: 483 KiB)
@@ -0,0 +1,551 @@
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "d006b2ea-9dfe-49c7-88a9-a5a0775185fd",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Additional End of week Exercise - week 2\n",
|
||||
"\n",
|
||||
"Now use everything you've learned from Week 2 to build a full prototype for the technical question/answerer you built in Week 1 Exercise.\n",
|
||||
"\n",
|
||||
"This should include a Gradio UI, streaming, use of the system prompt to add expertise, and the ability to switch between models. Bonus points if you can demonstrate use of a tool!\n",
|
||||
"\n",
|
||||
"If you feel bold, see if you can add audio input so you can talk to it, and have it respond with audio. ChatGPT or Claude can help you, or email me if you have questions.\n",
|
||||
"\n",
|
||||
"I will publish a full solution here soon - unless someone beats me to it...\n",
|
||||
"\n",
|
||||
"There are so many commercial applications for this, from a language tutor, to a company onboarding solution, to a companion AI to a course (like this one!) I can't wait to see your results."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "f69a564870ec63b0",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T16:15:26.039019Z",
|
||||
"start_time": "2025-10-24T16:15:25.888596Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#Imports\n",
|
||||
"from IPython.display import Markdown, display\n",
|
||||
"from openai import OpenAI\n",
|
||||
"import os\n",
|
||||
"import json\n",
|
||||
"import requests\n",
|
||||
"import gradio as gr\n",
|
||||
"from dotenv import load_dotenv\n",
|
||||
"from typing import List\n",
|
||||
"import time\n",
|
||||
"from datetime import datetime, timedelta\n",
|
||||
"import requests\n",
|
||||
"from bs4 import BeautifulSoup\n",
|
||||
"from datetime import datetime\n",
|
||||
"import json\n",
|
||||
"import re\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "fa60913187dbe71d",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T16:14:27.703743Z",
|
||||
"start_time": "2025-10-24T16:14:27.677172Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"OLLAMA_BASE_URL=\"http://localhost:11434/v1/completions\"\n",
|
||||
"LOCAL_MODEL_NAME=\"llama3.2\"\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Load environment variables in a file called .env\n",
|
||||
"\n",
|
||||
"load_dotenv(override=True)\n",
|
||||
"api_key = os.getenv('OPENAI_API_KEY')\n",
|
||||
"OPENAI_API_KEY=api_key\n",
|
||||
"\n",
|
||||
"load_dotenv(override=True)\n",
|
||||
"coin_key = os.getenv('COINMARKETCAP_API_KEY')\n",
|
||||
"COINMARKETCAP_API_KEY = coin_key\n",
|
||||
"\n",
|
||||
"# Check the key\n",
|
||||
"\n",
|
||||
"if not api_key:\n",
|
||||
" print(\"No API key was found - please head over to the troubleshooting notebook in this folder to identify & fix!\")\n",
|
||||
"elif not api_key.startswith(\"sk-proj-\"):\n",
|
||||
" print(\"An API key was found, but it doesn't start sk-proj-; please check you're using the right key - see troubleshooting notebook\")\n",
|
||||
"elif api_key.strip() != api_key:\n",
|
||||
" print(\"An API key was found, but it looks like it might have space or tab characters at the start or end - please remove them - see troubleshooting notebook\")\n",
|
||||
"else:\n",
|
||||
" print(\"API key found and looks good so far!\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "1bf8ccf240e982da",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T16:14:35.695654Z",
|
||||
"start_time": "2025-10-24T16:14:35.681319Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Ollama configuration\n",
|
||||
"OLLAMA_URL = os.getenv(\"OLLAMA_BASE_URL\", \"http://localhost:11434/v1/completions\")\n",
|
||||
"OLLAMA_MODEL = os.getenv(\"LOCAL_MODEL_NAME\", \"llama3.2\")\n",
|
||||
"\n",
|
||||
"# OpenAI configuration\n",
|
||||
"OPENAI_API_KEY = os.getenv(\"OPENAI_API_KEY\")\n",
|
||||
"OPENAI_MODEL = \"gpt-4\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "98d8f6481681ed57",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T16:14:49.865353Z",
|
||||
"start_time": "2025-10-24T16:14:49.848662Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Crypto Analysis Prompt\n",
|
||||
"CRYPTO_SYSTEM_PROMPT = \"\"\"You are a specialized AI assistant with expertise in cryptocurrency markets and data analysis.\n",
|
||||
"Your role is to help users identify and understand cryptocurrencies with the strongest growth patterns over recent weeks.\n",
|
||||
"Provide clear, data-driven insights about market trends and performance metrics.\"\"\"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "7729697aa8937c3",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T16:15:37.367235Z",
|
||||
"start_time": "2025-10-24T16:15:35.409542Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"def scrape_coingecko(limit=10, debug=False):\n",
|
||||
" try:\n",
|
||||
" headers = {\n",
|
||||
" 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',\n",
|
||||
" 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n",
|
||||
" 'Accept-Language': 'en-US,en;q=0.5',\n",
|
||||
" 'Referer': 'https://www.coingecko.com/'\n",
|
||||
" }\n",
|
||||
"\n",
|
||||
" url = \"https://www.coingecko.com/en/coins/trending\"\n",
|
||||
" response = requests.get(url, headers=headers, timeout=30)\n",
|
||||
" response.raise_for_status()\n",
|
||||
"\n",
|
||||
" if debug:\n",
|
||||
" print(f\"Status: {response.status_code}\")\n",
|
||||
" with open(\"debug_coingecko.html\", \"w\", encoding=\"utf-8\") as f:\n",
|
||||
" f.write(response.text)\n",
|
||||
" print(\"HTML saved to debug_coingecko.html\")\n",
|
||||
"\n",
|
||||
" soup = BeautifulSoup(response.content, 'html.parser')\n",
|
||||
" top_performers = []\n",
|
||||
"\n",
|
||||
" # Try multiple selectors\n",
|
||||
" rows = (soup.find_all('tr', {'data-sort-by': True}) or\n",
|
||||
" soup.find_all('tr', class_=re.compile('hover')) or\n",
|
||||
" soup.select('table tbody tr'))[:limit]\n",
|
||||
"\n",
|
||||
" if debug:\n",
|
||||
" print(f\"Found {len(rows)} rows\")\n",
|
||||
"\n",
|
||||
" for row in rows:\n",
|
||||
" try:\n",
|
||||
" # Find all text in row\n",
|
||||
" texts = [t.strip() for t in row.stripped_strings]\n",
|
||||
" if debug:\n",
|
||||
" print(f\"Row texts: {texts[:5]}\")\n",
|
||||
"\n",
|
||||
" # Extract data from text list\n",
|
||||
" name = texts[1] if len(texts) > 1 else \"Unknown\"\n",
|
||||
" symbol = texts[2] if len(texts) > 2 else \"N/A\"\n",
|
||||
"\n",
|
||||
" # Find price\n",
|
||||
" price = 0\n",
|
||||
" for text in texts:\n",
|
||||
" if '$' in text:\n",
|
||||
" price_str = text.replace('$', '').replace(',', '')\n",
|
||||
" try:\n",
|
||||
" price = float(price_str)\n",
|
||||
" break\n",
|
||||
" except:\n",
|
||||
" continue\n",
|
||||
"\n",
|
||||
" # Find percentage change\n",
|
||||
" change_30d = 0\n",
|
||||
" for text in texts:\n",
|
||||
" if '%' in text:\n",
|
||||
" change_str = text.replace('%', '').replace('+', '')\n",
|
||||
" try:\n",
|
||||
" change_30d = float(change_str)\n",
|
||||
" except:\n",
|
||||
" continue\n",
|
||||
"\n",
|
||||
" if name != \"Unknown\":\n",
|
||||
" top_performers.append({\n",
|
||||
" \"name\": name,\n",
|
||||
" \"symbol\": symbol,\n",
|
||||
" \"current_price\": price,\n",
|
||||
" \"price_change_percentage_30d\": change_30d,\n",
|
||||
" \"source\": \"coingecko\"\n",
|
||||
" })\n",
|
||||
" except Exception as e:\n",
|
||||
" if debug:\n",
|
||||
" print(f\"Row error: {e}\")\n",
|
||||
" continue\n",
|
||||
"\n",
|
||||
" return {\"timeframe\": \"30d\", \"timestamp\": datetime.now().isoformat(), \"count\": len(top_performers), \"top_performers\": top_performers}\n",
|
||||
" except Exception as e:\n",
|
||||
" return {\"error\": str(e)}\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"def get_top_performers(source=\"coingecko\", limit=10, save=False, debug=False):\n",
|
||||
" sources = {\"coingecko\": scrape_coingecko, \"coinmarketcap\": scrape_coinmarketcap}\n",
|
||||
" result = sources[source](limit, debug)\n",
|
||||
"\n",
|
||||
" if save and \"error\" not in result:\n",
|
||||
" filename = f\"crypto_{source}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json\"\n",
|
||||
" with open(filename, 'w') as f:\n",
|
||||
" json.dump(result, f, indent=2)\n",
|
||||
" print(f\"Saved to {filename}\")\n",
|
||||
"\n",
|
||||
" return result\n",
|
||||
"\n",
|
||||
"if __name__ == \"__main__\":\n",
|
||||
" print(\"Testing CoinGecko with debug...\")\n",
|
||||
" result = get_top_performers(\"coingecko\", 10, True, debug=True)\n",
|
||||
" print(json.dumps(result, indent=2))\n",
|
||||
"\n",
|
||||
" print(\"\\n\" + \"=\"*60 + \"\\n\")\n",
|
||||
"\n",
|
||||
" print(\"Testing CoinMarketCap with debug...\")\n",
|
||||
" result = get_top_performers(\"coinmarketcap\", 10, True, debug=True)\n",
|
||||
" print(json.dumps(result, indent=2))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "2e3de36fa13f2dec",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def scrape_coinmarketcap(limit=10, debug=False):\n",
|
||||
" try:\n",
|
||||
" headers = {\n",
|
||||
" 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',\n",
|
||||
" 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n",
|
||||
" 'Accept-Language': 'en-US,en;q=0.5',\n",
|
||||
" }\n",
|
||||
"\n",
|
||||
" url = \"https://coinmarketcap.com/gainers-losers/\"\n",
|
||||
" response = requests.get(url, headers=headers, timeout=30)\n",
|
||||
" response.raise_for_status()\n",
|
||||
"\n",
|
||||
" if debug:\n",
|
||||
" print(f\"Status: {response.status_code}\")\n",
|
||||
" with open(\"debug_coinmarketcap.html\", \"w\", encoding=\"utf-8\") as f:\n",
|
||||
" f.write(response.text)\n",
|
||||
" print(\"HTML saved to debug_coinmarketcap.html\")\n",
|
||||
"\n",
|
||||
" soup = BeautifulSoup(response.content, 'html.parser')\n",
|
||||
" top_performers = []\n",
|
||||
"\n",
|
||||
" # Find all table rows\n",
|
||||
" rows = soup.find_all('tr')\n",
|
||||
" if debug:\n",
|
||||
" print(f\"Total rows found: {len(rows)}\")\n",
|
||||
"\n",
|
||||
" for row in rows[1:limit+1]:\n",
|
||||
" try:\n",
|
||||
" texts = [t.strip() for t in row.stripped_strings]\n",
|
||||
" if debug and len(texts) > 0:\n",
|
||||
" print(f\"Row texts: {texts[:5]}\")\n",
|
||||
"\n",
|
||||
" if len(texts) < 3:\n",
|
||||
" continue\n",
|
||||
"\n",
|
||||
" # Usually: rank, name, symbol, price, change...\n",
|
||||
" name = texts[1] if len(texts) > 1 else \"Unknown\"\n",
|
||||
" symbol = texts[2] if len(texts) > 2 else \"N/A\"\n",
|
||||
"\n",
|
||||
" price = 0\n",
|
||||
" change_30d = 0\n",
|
||||
"\n",
|
||||
" for text in texts:\n",
|
||||
" if '$' in text and price == 0:\n",
|
||||
" try:\n",
|
||||
" price = float(text.replace('$', '').replace(',', ''))\n",
|
||||
" except:\n",
|
||||
" continue\n",
|
||||
" if '%' in text:\n",
|
||||
" try:\n",
|
||||
" change_30d = float(text.replace('%', '').replace('+', ''))\n",
|
||||
" except:\n",
|
||||
" continue\n",
|
||||
"\n",
|
||||
" if name != \"Unknown\":\n",
|
||||
" top_performers.append({\n",
|
||||
" \"name\": name,\n",
|
||||
" \"symbol\": symbol,\n",
|
||||
" \"current_price\": price,\n",
|
||||
" \"price_change_percentage_30d\": change_30d,\n",
|
||||
" \"source\": \"coinmarketcap\"\n",
|
||||
" })\n",
|
||||
" except Exception as e:\n",
|
||||
" if debug:\n",
|
||||
" print(f\"Row error: {e}\")\n",
|
||||
" continue\n",
|
||||
"\n",
|
||||
" return {\"timeframe\": \"30d\", \"timestamp\": datetime.now().isoformat(), \"count\": len(top_performers), \"top_performers\": top_performers}\n",
|
||||
" except Exception as e:\n",
|
||||
" return {\"error\": str(e)}"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "4a63cbcc7ae04c7e",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T15:23:22.157803Z",
|
||||
"start_time": "2025-10-24T15:23:22.147500Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"# Tool detection and execution\n",
|
||||
"def detect_and_run_tool(user_message: str):\n",
|
||||
" user_message_lower = user_message.lower().strip()\n",
|
||||
"\n",
|
||||
" # Detect crypto growth queries\n",
|
||||
" crypto_keywords = [\"crypto growth\", \"top gainers\", \"best performing\", \"crypto performance\", \"trending coins\"]\n",
|
||||
"\n",
|
||||
" if any(keyword in user_message_lower for keyword in crypto_keywords):\n",
|
||||
" return True, get_top_performers(\"coingecko\", 10, True, debug=True)\n",
|
||||
"\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "626a022b562bf73d",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "e5c6db45fb4d53d9",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T15:23:25.205927Z",
|
||||
"start_time": "2025-10-24T15:23:25.199801Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def ask_ollama(prompt: str) -> str:\n",
|
||||
" try:\n",
|
||||
" payload = {\"model\": OLLAMA_MODEL, \"prompt\": prompt, \"stream\": False}\n",
|
||||
" r = requests.post(OLLAMA_URL, json=payload, timeout=120)\n",
|
||||
" r.raise_for_status()\n",
|
||||
" data = r.json()\n",
|
||||
" return data.get(\"choices\", [{}])[0].get(\"text\", \"\").strip()\n",
|
||||
" except Exception as e:\n",
|
||||
" return f\"[Ollama error: {e}]\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "2f81a00e9584d184",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "c2686a6503cf62a4",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T15:23:29.556036Z",
|
||||
"start_time": "2025-10-24T15:23:29.552763Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def ask_openai(prompt: str) -> str:\n",
|
||||
" try:\n",
|
||||
" from openai import OpenAI\n",
|
||||
" client = OpenAI(api_key=OPENAI_API_KEY)\n",
|
||||
"\n",
|
||||
" response = client.chat.completions.create(\n",
|
||||
" model=OPENAI_MODEL,\n",
|
||||
" messages=[\n",
|
||||
" {\"role\": \"system\", \"content\": CRYPTO_SYSTEM_PROMPT},\n",
|
||||
" {\"role\": \"user\", \"content\": prompt}\n",
|
||||
" ],\n",
|
||||
" max_tokens=512,\n",
|
||||
" )\n",
|
||||
" return response.choices[0].message.content\n",
|
||||
" except Exception as e:\n",
|
||||
" return f\"[OpenAI error: {e}]\""
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "2313e5940e9fa3da",
|
||||
"metadata": {
|
||||
"ExecuteTime": {
|
||||
"end_time": "2025-10-24T15:27:33.546418Z",
|
||||
"start_time": "2025-10-24T15:27:18.318834Z"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def chat_fn(user_message: str, history: List[List[str]], model_choice: str):\n",
|
||||
" tool_used, tool_output = detect_and_run_tool(user_message)\n",
|
||||
"\n",
|
||||
" if tool_used:\n",
|
||||
" if \"error\" in tool_output:\n",
|
||||
" reply = f\"Data fetch error: {tool_output['error']}\"\n",
|
||||
" else:\n",
|
||||
" # Format the crypto data for AI analysis\n",
|
||||
" crypto_data_str = json.dumps(tool_output, indent=2)\n",
|
||||
"\n",
|
||||
" # Create analysis prompt\n",
|
||||
" analysis_prompt = f\"\"\"\n",
|
||||
" Analyze this cryptocurrency growth data and provide insights:\n",
|
||||
"\n",
|
||||
" {crypto_data_str}\n",
|
||||
"\n",
|
||||
" Please identify:\n",
|
||||
" 1. The strongest performers and their growth patterns\n",
|
||||
" 2. Any notable trends across different timeframes\n",
|
||||
" 3. Risk considerations or notable observations\n",
|
||||
" 4. Simple, actionable insights for the user\n",
|
||||
"\n",
|
||||
" Keep the analysis clear and data-driven.\n",
|
||||
" User's original question: {user_message}\n",
|
||||
" \"\"\"\n",
|
||||
"\n",
|
||||
" # Get AI analysis\n",
|
||||
" if model_choice == \"openai\":\n",
|
||||
" analysis = ask_openai(analysis_prompt)\n",
|
||||
" else:\n",
|
||||
" ollama_prompt = f\"{CRYPTO_SYSTEM_PROMPT}\\n\\nUser: {analysis_prompt}\\nAssistant:\"\n",
|
||||
" analysis = ask_ollama(ollama_prompt)\n",
|
||||
"\n",
|
||||
" reply = f\"📊 **Crypto Growth Analysis**\\n\\n{analysis}\\n\\n*Raw data for reference:*\\n```json\\n{crypto_data_str}\\n```\"\n",
|
||||
"\n",
|
||||
" else:\n",
|
||||
" # Regular conversation\n",
|
||||
" if model_choice == \"openai\":\n",
|
||||
" reply = ask_openai(user_message)\n",
|
||||
" else:\n",
|
||||
" prompt = f\"{CRYPTO_SYSTEM_PROMPT}\\n\\nUser: {user_message}\\nAssistant:\"\n",
|
||||
" reply = ask_ollama(prompt)\n",
|
||||
"\n",
|
||||
" history.append([user_message, reply])\n",
|
||||
" return history\n",
|
||||
"\n",
|
||||
"# Enhanced Gradio UI with crypto focus\n",
|
||||
"def main():\n",
|
||||
" with gr.Blocks(title=\"Crypto Growth Analyst Chatbot\") as demo:\n",
|
||||
" gr.Markdown(\"\"\"\n",
|
||||
" # Samuel Week 2 Task: Crypto Growth Analyst Chatbot\n",
|
||||
" **Analyze cryptocurrency performance with dual AI models** (Ollama & OpenAI)\n",
|
||||
"\n",
|
||||
" *Try questions like:*\n",
|
||||
" - \"Show me cryptocurrencies with strongest growth\"\n",
|
||||
" - \"What are the top performing coins this month?\"\n",
|
||||
" - \"Analyze crypto market trends\"\n",
|
||||
" \"\"\")\n",
|
||||
"\n",
|
||||
" # Message input\n",
|
||||
" msg = gr.Textbox(\n",
|
||||
" placeholder=\"Ask about crypto growth trends or type /ticket <city>\",\n",
|
||||
" label=\"Your message\",\n",
|
||||
" lines=2,\n",
|
||||
" autofocus=True\n",
|
||||
" )\n",
|
||||
"\n",
|
||||
" # Model selection\n",
|
||||
" with gr.Row():\n",
|
||||
" model_choice = gr.Radio(\n",
|
||||
" [\"ollama\", \"openai\"],\n",
|
||||
" value=\"ollama\",\n",
|
||||
" label=\"AI Model\"\n",
|
||||
" )\n",
|
||||
" send = gr.Button(\"Analyze Crypto Data\", variant=\"primary\")\n",
|
||||
"\n",
|
||||
" # Chatbot area\n",
|
||||
" chatbot = gr.Chatbot(label=\"Crypto Analysis Conversation\", height=500, type=\"messages\")\n",
|
||||
"\n",
|
||||
" # Wrapper function\n",
|
||||
" def wrapped_chat_fn(user_message, history, model_choice):\n",
|
||||
" updated_history = chat_fn(user_message, history, model_choice)\n",
|
||||
" return updated_history, gr.update(value=\"\")\n",
|
||||
"\n",
|
||||
" # Event handlers\n",
|
||||
" send.click(wrapped_chat_fn, inputs=[msg, chatbot, model_choice], outputs=[chatbot, msg])\n",
|
||||
" msg.submit(wrapped_chat_fn, inputs=[msg, chatbot, model_choice], outputs=[chatbot, msg])\n",
|
||||
"\n",
|
||||
" demo.launch(server_name=\"0.0.0.0\", share=False)\n",
|
||||
"\n",
|
||||
"if __name__ == \"__main__\":\n",
|
||||
" main()\n",
|
||||
"\n",
|
||||
" "
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": ".venv",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.12"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
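Both scrapers depend on CoinGecko's and CoinMarketCap's HTML layout, which changes often and is the most fragile part of the notebook. Below is a minimal sketch of a sturdier alternative, assuming CoinGecko's public `/search/trending` JSON endpoint is available; the `fetch_trending_via_api` helper is illustrative (not part of the submitted notebook) and reads fields defensively because the response shape is not guaranteed here.

```python
import requests

def fetch_trending_via_api(limit=10, timeout=30):
    """Sketch: query CoinGecko's public trending endpoint instead of parsing HTML."""
    url = "https://api.coingecko.com/api/v3/search/trending"
    resp = requests.get(url, timeout=timeout)
    resp.raise_for_status()

    results = []
    for entry in resp.json().get("coins", [])[:limit]:
        item = entry.get("item", {})  # trending entries are wrapped in an "item" object
        results.append({
            "name": item.get("name", "Unknown"),
            "symbol": item.get("symbol", "N/A"),
            "market_cap_rank": item.get("market_cap_rank"),
            "source": "coingecko-api",
        })
    return results

# Usage sketch:
# print(fetch_trending_via_api(5))
```

This yields the same kind of per-coin records that `scrape_coingecko` builds; 30-day change figures would need a second call (for example `/coins/markets` with a `price_change_percentage` parameter, if the API version in use supports it), which is left out of this sketch.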
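The exercise brief asks for streaming and model switching, while `ask_openai` and `ask_ollama` return complete responses. Here is a minimal sketch of a streaming variant, assuming a recent Gradio with `ChatInterface(type="messages")` and Ollama's OpenAI-compatible endpoint at `http://localhost:11434/v1`; the `get_client` and `stream_chat` helpers and the model names are assumptions for illustration, not part of the submitted notebook.

```python
import os
import gradio as gr
from openai import OpenAI

SYSTEM_PROMPT = "You are a cryptocurrency market analyst."  # stand-in for CRYPTO_SYSTEM_PROMPT

def get_client(model_choice):
    # Ollama serves an OpenAI-compatible API locally; the api_key value is ignored there
    if model_choice == "openai":
        return OpenAI(api_key=os.getenv("OPENAI_API_KEY")), "gpt-4o-mini"  # assumed model name
    return OpenAI(base_url="http://localhost:11434/v1", api_key="ollama"), "llama3.2"

def stream_chat(message, history, model_choice):
    # With type="messages", Gradio passes history as role/content dicts; keep only those keys
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    messages += [{"role": m["role"], "content": m["content"]} for m in history]
    messages.append({"role": "user", "content": message})

    client, model = get_client(model_choice)
    stream = client.chat.completions.create(model=model, messages=messages, stream=True)

    reply = ""
    for chunk in stream:
        reply += chunk.choices[0].delta.content or ""
        yield reply  # Gradio re-renders the partial reply on every yield

demo = gr.ChatInterface(
    stream_chat,
    type="messages",
    additional_inputs=[gr.Radio(["openai", "ollama"], value="ollama", label="AI Model")],
)
# demo.launch()
```

The same pattern slots into the notebook's tool path: run `detect_and_run_tool` first, then stream the analysis prompt through whichever backend the radio button selects.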