Zack Saadioui
8/27/2024
Pull the model you want to run locally, for example Llama 3.1:

```bash
ollama pull llama3.1
```
Then start the Ollama server, which serves the model over a local HTTP API:

```bash
ollama serve
```
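With the server running, you can confirm it is reachable and see which models have been pulled. The sketch below assumes the default base URL of `http://localhost:11434` and uses the `requests` library to call the `/api/tags` endpoint, which lists locally available models.

```python
import requests

# Ollama's server listens on localhost:11434 by default.
BASE_URL = "http://localhost:11434"

# GET /api/tags returns the models that have been pulled locally.
resp = requests.get(f"{BASE_URL}/api/tags", timeout=10)
resp.raise_for_status()

for model in resp.json().get("models", []):
    print(model["name"])
```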
More broadly, everything the Ollama CLI can do is also exposed over HTTP through routes under `/api`, so any client that can make web requests can work with a local model.
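Text generation, for example, goes through the `/api/generate` route. The sketch below sends a single non-streaming request and prints the completion; the model name and prompt are placeholders.

```python
import requests

BASE_URL = "http://localhost:11434"

# POST /api/generate returns a completion for a single prompt.
# "stream": False asks for one JSON object instead of a streamed response.
payload = {
    "model": "llama3.1",
    "prompt": "Explain what Ollama does in one sentence.",
    "stream": False,
}

resp = requests.post(f"{BASE_URL}/api/generate", json=payload, timeout=120)
resp.raise_for_status()
print(resp.json()["response"])
```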
- `ollama-list-models`
- `ollama-conversation`
- `ollama-generate`
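As a rough illustration of what a conversation-oriented command like `ollama-conversation` needs from the server, the sketch below drives Ollama's `/api/chat` route directly from Python. The model name and message content are placeholders, and this is an assumption about the flow rather than the actual implementation behind that command.

```python
import requests

BASE_URL = "http://localhost:11434"

# /api/chat keeps conversational context by receiving the full message
# history on every call, so the client appends each reply before the next turn.
messages = [{"role": "user", "content": "What can you do?"}]

resp = requests.post(
    f"{BASE_URL}/api/chat",
    json={"model": "llama3.1", "messages": messages, "stream": False},
    timeout=120,
)
resp.raise_for_status()

reply = resp.json()["message"]["content"]
messages.append({"role": "assistant", "content": reply})
print(reply)
```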
On the Python side, LlamaIndex ships an `OllamaEmbedding` class that talks to the same local server to produce embeddings:

```python
from llama_index.embeddings.ollama import OllamaEmbedding

# The embedding model must already be available locally (e.g. `ollama pull llama2`).
ollama_embedding = OllamaEmbedding(model_name="llama2")

# Embed a query string and print the resulting vector.
response = ollama_embedding.get_query_embedding("What can you do?")
print(response)
```
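The same object also exposes document-side methods inherited from LlamaIndex's base embedding class; for instance, `get_text_embedding_batch` embeds several passages in one call. A small sketch reusing the `ollama_embedding` instance from above (the passage texts are placeholders):

```python
# Embed several passages in one call; each result is a list of floats.
passages = [
    "Ollama runs large language models on your own machine.",
    "The server exposes an HTTP API on port 11434.",
]

vectors = ollama_embedding.get_text_embedding_batch(passages, show_progress=True)
print(len(vectors), len(vectors[0]))
```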