Files
LLM_Engineering_OLD/week1/community-contributions/ag-w1d2-use-local-ollama-url-py
bepeace fe8344f62d Day 2 work using py for ollama
- using url
- using library
- using openai
- using ollama to summarize website
2025-05-14 20:58:48 -07:00

23 lines
565 B
Python

import ollama
import requests
from IPython.display import Markdown, display
# Query a locally running Ollama server via its HTTP chat endpoint and
# print the model's reply. Requires `ollama serve` on the default port
# with the target model already pulled.
OLLAMA_API = "http://localhost:11434/api/chat"
HEADERS = {"Content-Type": "application/json"}
MODEL = "llama3.2"

# Create a messages list (Note that "system" role is not required)
messages = [
    {"role": "user", "content": "Describe some of the business applications of Generative AI"}
]

# stream=False asks Ollama for the complete reply in a single JSON body
# instead of a stream of partial chunks.
payload = {
    "model": MODEL,
    "messages": messages,
    "stream": False,
}

# timeout keeps a hung/unreachable server from blocking forever;
# raise_for_status surfaces HTTP errors (e.g. model not pulled -> 404)
# clearly, instead of a confusing KeyError when parsing the JSON below.
response = requests.post(OLLAMA_API, json=payload, headers=HEADERS, timeout=120)
response.raise_for_status()
print(response.json()["message"]["content"])