Zack Saadioui
4/25/2025
Install Ollama with its official install script:

```bash
curl -fsSL https://ollama.com/install.sh | sh
```
Next, pull the Gemma 2B model so it is available locally:

```bash
ollama pull gemma:2b
```
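Before wiring the model into anything else, it can be worth a quick sanity check that the download succeeded. Both commands below use the standard Ollama CLI; the prompt text is just an example:

```bash
# gemma:2b should appear in the list of locally available models
ollama list

# Run a one-off prompt to confirm the model responds
ollama run gemma:2b "Explain a knowledge graph in one sentence."
```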
With a local model in place, the next piece is GraphRAG. Install the graphrag Python package (for example, with pip), then initialize a new project workspace:

```bash
python -m graphrag.index --init --root ./ragtest
```
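After initialization, the ./ragtest folder holds the project's configuration (a settings.yaml and a .env file), and source documents are expected under ./ragtest/input. To use the locally pulled Gemma model, the generated configuration also needs to point at Ollama's OpenAI-compatible endpoint (by default http://localhost:11434/v1); the exact configuration keys depend on the GraphRAG version, so treat that as an assumption to verify. With the 0.x CLI used above, the usual next steps are to run the indexer and then query the resulting graph, roughly as follows (the question text is just an example):

```bash
# Build the knowledge graph from documents placed in ./ragtest/input
python -m graphrag.index --root ./ragtest

# Ask a question against the indexed graph using global search
python -m graphrag.query --root ./ragtest --method global "What are the main themes in these documents?"
```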
Finally, you can talk to the model from Python through the ollama client library (the function for single-prompt completions is generate):

```python
import ollama

# Send a single prompt to the locally served Gemma 2B model
response = ollama.generate(model='gemma:2b', prompt='What about LLMs and their evolution?')
print(response['response'])
```
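The same request can also be sent straight to Ollama's HTTP API, which serves on port 11434 by default; this is handy when another tool (GraphRAG included) talks to the model over HTTP rather than through the Python client:

```bash
# Equivalent request against Ollama's REST endpoint (default port 11434)
curl http://localhost:11434/api/generate -d '{
  "model": "gemma:2b",
  "prompt": "What about LLMs and their evolution?",
  "stream": false
}'
```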