Zack Saadioui
4/25/2025
1
ollama
Install Ollama with the official install script:
# Download and execute the official Ollama installer.
# NOTE(review): this pipes a remote script straight into sh — inspect
# https://ollama.com/install.sh first on any security-sensitive machine.
curl -fsSL https://ollama.com/install.sh | sh
Then pull and start a model:
# Downloads the llama3.3 model on first use, then opens an interactive prompt.
ollama run llama3.3
Define a small evaluation set in Python, pairing each prompt with its expected answer:
# Minimal evaluation set: each case pairs a prompt with the exact answer
# we expect the model to produce.
test_cases = [
    {
        "prompt": "What is the capital of France?",
        "expected_output": "Paris",
    },
    {
        "prompt": "Who wrote 'Pride and Prejudice'?",
        "expected_output": "Jane Austen",
    },
]
You can also query the local Ollama HTTP API directly with curl:
# POST a generation request to the local Ollama server (default port 11434).
# The JSON body names the model and supplies the prompt.
curl -X POST http://localhost:11434/api/generate \
  -d '{"model": "llama3.3", "prompt": "What is the capital of France?"}'
For machines with limited memory, run a quantized model variant:
# Runs the 7B-parameter llama2 model in 4-bit (q4_0) quantization,
# trading some accuracy for a much smaller memory footprint.
ollama run llama2:7b-q4_0
Tune performance through environment variables before starting Ollama:
# NOTE(review): neither variable below appears in current official Ollama
# docs — confirm they are honored by your Ollama version before relying
# on them (GPU use is normally automatic; see `ollama serve` env vars).
# Intended: enable CUDA GPU acceleration.
export OLLAMA_CUDA=1
# Intended: cap the number of CPU threads used for inference.
export OLLAMA_NUM_THREADS=8
Copyright © Arsturn 2025