Added my contributions to community-contributions
@@ -19,9 +19,8 @@
"outputs": [],
"source": [
"# imports\n",
"import os, re, requests, json, ollama\n",
"from typing import List\n",
"from dotenv import load_dotenv\n",
"\n",
"from bs4 import BeautifulSoup\n",
"from IPython.display import Markdown, display, update_display\n",
"from openai import OpenAI"
@@ -84,6 +83,12 @@
"# yield from {book.get(\"author\") for book in books if book.get(\"author\")}\n",
"# \"\"\"\n",
"\n",
"# question = \"\"\"\n",
"# Please explain what this code does and why:\n",
"# yield from {book.get(\"author\") for book in books if book.get(\"author\")}\n",
"# Popular dev site https://projecteuler.net/\n",
"# \"\"\"\n",
"\n",
"question = \"\"\"\n",
"How good at Software Development is Elijah Rwothoromo? \\\n",
"He has a Wordpress site https://rwothoromo.wordpress.com/. \\\n",
@@ -103,43 +108,50 @@
"\n",
"# Extract all URLs from the question string using regular expressions\n",
"urls = re.findall(r'https?://[^\\s)]+', question)\n",
"# print(urls)\n",
"\n",
"if len(urls) > 0:\n",
"\n",
"    # Fetch the content for each URL using the Website class\n",
"    scraped_content = []\n",
"    for url in urls:\n",
"        print(f\"Scraping: {url}\")\n",
"        try:\n",
"            site = Website(url)\n",
"            content = f\"Content from {url}:\\n---\\n{site.text}\\n---\\n\" # delimiter ---\n",
"            scraped_content.append(content)\n",
"        except Exception as e:\n",
"            print(f\"Could not scrape {url}: {e}\")\n",
"            scraped_content.append(f\"Could not retrieve content from {url}.\\n\")\n",
"\n",
"    # Combine all the scraped text into one string\n",
"    all_scraped_text = \"\\n\".join(scraped_content)\n",
"\n",
"    # Update the question with the scraped content\n",
"    updated_question = f\"\"\"\n",
"    Based on the following information, please answer the user's original question.\n",
"\n",
"    --- TEXT FROM WEBSITES ---\n",
"    {all_scraped_text}\n",
"    --- END TEXT FROM WEBSITES ---\n",
"\n",
"    --- ORIGINAL QUESTION ---\n",
"    {question}\n",
"    \"\"\"\n",
"else:\n",
"    updated_question = question\n",
"\n",
"# print(updated_question)\n",
"\n",
"# System prompt nudging the model to analyze only the provided text.\n",
"system_prompt = \"You are an expert assistant. \\\n",
"Analyze the user's question and the provided text from relevant websites to synthesize a comprehensive answer in markdown format. \\\n",
"Provide a short summary, ignoring text that might be navigation-related.\"\n",
"\n",
"# Create the messages list with the newly updated prompt\n",
"messages = [\n",
"    {\"role\": \"system\", \"content\": system_prompt},\n",
"    {\"role\": \"user\", \"content\": updated_question},\n",
"]\n"
]
},
@@ -153,8 +165,6 @@
"# Get gpt-4o-mini to answer, with streaming\n",
"\n",
"def get_gpt_response(question):\n",
"    # return response.choices[0].message.content\n",
"\n",
"    stream = openai.chat.completions.create(\n",
"        model=MODEL_GPT,\n",
"        messages=messages,\n",
@@ -179,6 +189,7 @@
"outputs": [],
"source": [
"# Get Llama 3.2 to answer\n",
"\n",
"def get_llama_response(question):\n",
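"    # ollama.chat takes the local model name plus the same messages list; the\n",
"    # non-streaming reply text comes back in response['message']['content'].\n",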
" response = ollama.chat(\n",
|
||||
" model=MODEL_LLAMA,\n",
|
||||
|
||||