Zack Saadioui
8/27/2024
Pull the Llama 3 model so it is available locally:

```bash
ollama pull llama3
```
Then start the Ollama server, which listens on http://localhost:11434 by default:

```bash
ollama serve
```
Next, install FastAPI, Uvicorn, and Requests:

```bash
pip install fastapi uvicorn requests
```
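Before wiring up FastAPI, it is worth confirming that the Ollama server responds. The snippet below is a minimal sanity check, assuming the default Ollama endpoint at `http://localhost:11434/api/generate` and the `llama3` model pulled above:

```python
import requests

# Quick sanity check against the local Ollama REST API.
# Assumes `ollama serve` is running on its default port, 11434.
resp = requests.post(
    "http://localhost:11434/api/generate",
    json={"model": "llama3", "prompt": "Say hello in one sentence.", "stream": False},
    timeout=60,
)
resp.raise_for_status()
print(resp.json()["response"])
```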
Create a file named `main.py` to hold the FastAPI application.
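The original contents of `main.py` are not reproduced here, so the following is only a sketch of what the file could look like. It assumes a `Query` request model and a `generate_text` route (the names used by the Shopify snippet later on) that forward the prompt to Ollama's local `/api/generate` endpoint; the `/generate` path and model name are placeholders:

```python
# main.py -- illustrative sketch, not the original file.
import requests
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

app = FastAPI()

OLLAMA_URL = "http://localhost:11434/api/generate"  # default Ollama endpoint
MODEL_NAME = "llama3"


class Query(BaseModel):
    prompt: str


@app.post("/generate")
async def generate_text(query: Query):
    """Forward the prompt to the local Ollama server and return its reply."""
    payload = {"model": MODEL_NAME, "prompt": query.prompt, "stream": False}
    try:
        resp = requests.post(OLLAMA_URL, json=payload, timeout=120)
        resp.raise_for_status()
    except requests.RequestException as exc:
        raise HTTPException(status_code=502, detail=f"Ollama request failed: {exc}")
    return {"response": resp.json()["response"]}
```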
Run the app with Uvicorn:

```bash
uvicorn main:app --reload
```
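Once Uvicorn is running (by default on `http://127.0.0.1:8000`), the route can be exercised with a short request. The `/generate` path below matches the sketch above and is an assumption, not a fixed part of the setup:

```python
import requests

# Call the FastAPI wrapper, which in turn calls the local Ollama server.
resp = requests.post(
    "http://127.0.0.1:8000/generate",
    json={"prompt": "Write a one-line greeting for a storefront."},
    timeout=120,
)
print(resp.json())
```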
To connect the assistant to a Shopify store, install the official Python library:

```bash
pip install ShopifyAPI
```
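If you need to call Shopify's Admin API from the same service, the library is typically initialized by activating a session first. The shop URL, API version, and access token below are placeholders, and this step is independent of the webhook handler that follows:

```python
import shopify

# Placeholder credentials -- substitute your own store domain and access token.
SHOP_URL = "your-store.myshopify.com"
API_VERSION = "2024-07"
ACCESS_TOKEN = "shpat_your_token"

session = shopify.Session(SHOP_URL, API_VERSION, ACCESS_TOKEN)
shopify.ShopifyResource.activate_session(session)

# Example Admin API call: fetch the shop record to confirm the session works.
print(shopify.Shop.current().name)
```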
Finally, incoming Shopify data can be handed to the `generate_text` route created earlier. The wrapper below assumes the handler lives in the same `main.py` as that route; the webhook path and function name are illustrative:

```python
@app.post("/shopify/inquiry")  # illustrative path; point your Shopify webhook here
async def handle_shopify_inquiry(data: dict):
    # Handle incoming data from Shopify here
    # Make a request to Ollama through the previously created route
    prompt = f'Customer inquiry: {data["inquiry"]}'
    response = await generate_text(Query(prompt=prompt))
    return response
```