Zack Saadioui
8/27/2024
Install Ollama with Homebrew and start it as a background service:

```bash
brew install ollama
brew services start ollama
```
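Before moving on, it is worth confirming the Ollama server is actually up. A quick sanity check (Ollama listens on port 11434 by default):

```bash
# Confirm the CLI is installed and the local server responds.
ollama --version
curl http://localhost:11434   # should reply with "Ollama is running"
```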
Next, pull the llama2 model so it is available locally:

```bash
ollama pull llama2
```
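If you want to make sure the download worked, you can chat with the model straight from the terminal before involving LangChain at all:

```bash
# Optional: interactive test session (type /bye or press Ctrl+D to exit).
ollama run llama2
```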
With the model in place, install the LangChain integration package for Ollama:

```bash
pip install -U langchain-ollama
```
In Python, import the prompt template class and the Ollama LLM wrapper:

```python
from langchain_core.prompts import ChatPromptTemplate
from langchain_ollama.llms import OllamaLLM
```
Define a prompt template with a {question} placeholder; the model will be asked to reason step by step:

```python
template = """Question: {question}
Answer: Let's think step by step."""

prompt = ChatPromptTemplate.from_template(template)
```
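If you are curious what the template actually renders, you can format it with a sample question before wiring it to the model; this small check is purely illustrative:

```python
# Inspect the rendered prompt for a sample question.
print(prompt.invoke({"question": "What is LangChain?"}).to_string())
```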
Instantiate the model, pointing it at the llama2 model you pulled earlier:

```python
model = OllamaLLM(model="llama2")
```
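OllamaLLM also accepts optional parameters if you need more control. The values below are illustrative only, a sketch showing the temperature and base_url settings (the latter matters if your Ollama server is not on the default endpoint):

```python
# Illustrative configuration: tweak sampling and point at a specific server.
model = OllamaLLM(
    model="llama2",
    temperature=0.2,                    # lower = more deterministic answers
    base_url="http://localhost:11434",  # default local Ollama endpoint
)
```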
Now chain the prompt and model together with LangChain's pipe operator and run a query:

```python
chain = prompt | model
response = chain.invoke({"question": "What is LangChain?"})
print(response)
```
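For longer answers you may prefer to stream tokens as they arrive instead of waiting for the whole response. A minimal sketch using the chain's stream() method, which LangChain runnables expose:

```python
# Stream the answer chunk by chunk as the model generates it.
for chunk in chain.stream({"question": "What is LangChain?"}):
    print(chunk, end="", flush=True)
print()
```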
The same workflow applies to any other model in the Ollama library. Pull it by name, substituting the model you want for model-name:

```bash
ollama pull <model-name>
```
For example, nomic-embed-text is a popular choice when you need embeddings.
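As a sketch of how an embedding model fits into LangChain (assuming you have pulled it with ollama pull nomic-embed-text), the langchain-ollama package also ships an OllamaEmbeddings class:

```python
from langchain_ollama import OllamaEmbeddings

# Embed a query string with the locally pulled embedding model.
embeddings = OllamaEmbeddings(model="nomic-embed-text")
vector = embeddings.embed_query("What is LangChain?")
print(len(vector))  # dimensionality of the returned embedding
```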
And for multimodal (image + text) use cases, there is bakllava:

```bash
ollama pull bakllava
```
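Once pulled, a multimodal model can take an image alongside a text prompt. The snippet below is only a hedged sketch: image.jpg is a placeholder path, and it relies on the bind(images=[...]) pattern shown in LangChain's Ollama examples for passing base64-encoded images, so double-check it against the version of langchain-ollama you have installed:

```python
import base64

from langchain_ollama.llms import OllamaLLM

# Read a local image and base64-encode it (image.jpg is a placeholder).
with open("image.jpg", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode("utf-8")

# Bind the image to the model call, then ask a question about it.
llm = OllamaLLM(model="bakllava")
llm_with_image = llm.bind(images=[image_b64])
print(llm_with_image.invoke("Describe what is in this image."))
```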