test_ollama.yml (forked from letta-ai/letta)
name: Endpoint (Ollama)

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  test:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Start Ollama Server
        run: |
          curl -fsSL https://ollama.com/install.sh | sh
          ollama serve &
          sleep 10  # wait for server
          ollama pull dolphin2.2-mistral:7b-q6_K
          ollama pull mxbai-embed-large

      - name: "Setup Python, Poetry and Dependencies"
        uses: packetcoders/action-setup-cache-python-poetry@main
        with:
          python-version: "3.12"
          poetry-version: "1.8.2"
          install-args: "-E dev -E ollama"

      - name: Test LLM endpoint
        run: |
          poetry run pytest -s -vv tests/test_endpoints.py::test_llm_endpoint_ollama

      - name: Test embedding endpoint
        run: |
          poetry run pytest -s -vv tests/test_endpoints.py::test_embedding_endpoint_ollama
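The same flow can be reproduced on a local machine for debugging. The sketch below is a rough shell equivalent, assuming a Linux or macOS host with curl and Poetry already installed; it reuses the exact model tags, Poetry extras, and pytest targets from the workflow above and makes no other assumptions about the repository.

    # Install and start a local Ollama server (same installer the CI step uses)
    curl -fsSL https://ollama.com/install.sh | sh
    ollama serve &
    sleep 10  # crude wait for the server, mirroring the CI step

    # Pull the chat and embedding models exercised by the tests
    ollama pull dolphin2.2-mistral:7b-q6_K
    ollama pull mxbai-embed-large

    # Install the project with the same extras as install-args in the workflow
    poetry install -E dev -E ollama

    # Run the two endpoint tests against the local Ollama server
    poetry run pytest -s -vv tests/test_endpoints.py::test_llm_endpoint_ollama
    poetry run pytest -s -vv tests/test_endpoints.py::test_embedding_endpoint_ollama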