Package updates, more Ollama, fixes

Edward Donner
2024-12-08 22:57:40 -05:00
parent 3980508170
commit 7f8697654d
54 changed files with 1003 additions and 152 deletions

@@ -50,7 +50,8 @@
"import numpy as np\n",
"import plotly.graph_objects as go\n",
"from langchain.memory import ConversationBufferMemory\n",
"from langchain.chains import ConversationalRetrievalChain"
"from langchain.chains import ConversationalRetrievalChain\n",
"from langchain.embeddings import HuggingFaceEmbeddings"
]
},
{
@@ -147,6 +148,10 @@
"\n",
"embeddings = OpenAIEmbeddings()\n",
"\n",
"# If you would rather use the free Vector Embeddings from HuggingFace sentence-transformers\n",
"# Then uncomment this line instead\n",
"# embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-MiniLM-L6-v2\")\n",
"\n",
"# Delete if already exists\n",
"\n",
"if os.path.exists(db_name):\n",
@@ -289,6 +294,9 @@
"# create a new Chat with OpenAI\n",
"llm = ChatOpenAI(temperature=0.7, model_name=MODEL)\n",
"\n",
"# Alternative - if you'd like to use Ollama locally, uncomment this line instead\n",
"# llm = ChatOpenAI(temperature=0.7, model_name='llama3.2', base_url='http://localhost:11434/v1', api_key='ollama')\n",
"\n",
"# set up the conversation memory for the chat\n",
"memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)\n",
"\n",
@@ -427,7 +435,7 @@
"metadata": {},
"outputs": [],
"source": [
"view = gr.ChatInterface(chat).launch()"
"view = gr.ChatInterface(chat, type=\"messages\").launch(inbrowser=True)"
]
},
{
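
The Gradio call now passes type="messages", so ChatInterface hands history to the chat function as a list of role/content dicts (OpenAI style) rather than the older tuple pairs, and inbrowser=True opens the UI in a browser tab automatically. A small self-contained sketch, with a stub chat function standing in for the notebook's real one:

    import gradio as gr

    def chat(message, history):
        # Stub: the notebook's real chat function calls the conversation chain;
        # with type="messages", history arrives as [{"role": ..., "content": ...}, ...]
        return f"You said: {message}"

    view = gr.ChatInterface(chat, type="messages").launch(inbrowser=True)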
@@ -465,7 +473,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.10"
"version": "3.11.11"
}
},
"nbformat": 4,