Zack Saadioui
8/26/2024
curl http://localhost:11434/api/generate -d '{ "model": "llama3.1", "prompt":"Why is the sky blue?" }'
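By default this endpoint streams its answer back as a series of JSON objects, one per line. If you'd rather get everything in a single response, Ollama's API accepts a stream flag in the request body. Here's a minimal Python sketch of that non-streaming call, assuming a local Ollama server on the default port 11434 with llama3.1 already pulled:

import requests

# Ask /api/generate for one complete JSON response instead of a stream.
payload = {
    "model": "llama3.1",
    "prompt": "Why is the sky blue?",
    "stream": False,
}

response = requests.post("http://localhost:11434/api/generate", json=payload, timeout=120)
response.raise_for_status()
print(response.json()["response"])  # the full generated text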
curl http://localhost:11434/api/chat -d '{ "model": "llama3.1", "messages": [{ "role": "user", "content": "Why is the sky blue?" }] }'
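The chat endpoint works the same way, but it takes a list of role-tagged messages, which is what you want for multi-turn conversations. A rough Python equivalent of the curl call above (again assuming a local server, with the reply coming back under message.content as in Ollama's chat API):

import requests

# Send a message history to /api/chat instead of a bare prompt.
payload = {
    "model": "llama3.1",
    "messages": [
        {"role": "user", "content": "Why is the sky blue?"},
    ],
    "stream": False,  # ask for a single JSON object rather than a stream
}

response = requests.post("http://localhost:11434/api/chat", json=payload, timeout=120)
response.raise_for_status()
print(response.json()["message"]["content"])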
ollama pull llama3.1
ollama list
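If you'd rather check your installed models from code instead of the CLI, the same information is available over HTTP. A small sketch, assuming the /api/tags endpoint of a default local install:

import requests

# Programmatic counterpart of `ollama list`: enumerate locally pulled models.
response = requests.get("http://localhost:11434/api/tags", timeout=30)
response.raise_for_status()
for model in response.json().get("models", []):
    print(model["name"])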
curl -fsSL https://ollama.com/install.sh | sh
ollama pull llama3.1
You can talk to the API either from the command line with curl or from Python with the requests library, as in the example below.
import requests
import json

url = 'http://localhost:11434/api/generate'
headers = {"Content-Type": "application/json"}
data = {
    "model": "llama3.1",
    "prompt": "What is the capital of France?"
}

response = requests.post(url, headers=headers, data=json.dumps(data))

if response.status_code == 200:
    print(response.text)
else:
    print("Error:", response.status_code)
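One thing to keep in mind: because the request above doesn't set a stream flag, the endpoint answers with one JSON object per generated chunk, so response.text will contain many lines rather than a single answer. Here's a sketch of how you might stitch the streamed pieces together (field names follow Ollama's generate API; treat it as an illustration rather than part of the original example):

import json
import requests

# Stream tokens from /api/generate and assemble them into one string.
payload = {"model": "llama3.1", "prompt": "What is the capital of France?"}

with requests.post("http://localhost:11434/api/generate", json=payload, stream=True, timeout=120) as response:
    response.raise_for_status()
    answer = ""
    for line in response.iter_lines():
        if not line:
            continue
        chunk = json.loads(line)
        answer += chunk.get("response", "")
        if chunk.get("done"):
            break

print(answer)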