Skip to content

Commit

Permalink
Add a skip test with oai chat completions endpoint mocked out.
Browse files Browse the repository at this point in the history
  • Loading branch information
michaeljin committed Jan 30, 2024
1 parent 0581f0e commit 03033a2
Show file tree
Hide file tree
Showing 3 changed files with 42 additions and 2 deletions.
16 changes: 15 additions & 1 deletion poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ backoff = "^2.2.1"
anthropic = "^0.3.11"
mosaicml-cli = "^0.5.30"
together = "^0.2.7"
respx = "^0.20.2"

[tool.ruff]
# Never enforce `E501` (line length violations).
Expand Down
27 changes: 26 additions & 1 deletion tests/test_requests.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import asyncio
import os

import httpx
import pytest
import requests_mock

from log10.load import log_sync, log_async
from log10.load import log_sync, log_async, OpenAI
from log10.llm import LLM, Log10Config


Expand Down Expand Up @@ -49,3 +51,26 @@ def fake_logging():
llm.log_end(completion_id=completion_id, response=mock_resp, duration=5)

await asyncio.gather(*[loop.run_in_executor(None, fake_logging) for _ in range(simultaneous_calls)])


@pytest.mark.skip(reason="This is a very simple load test and doesn't need to be run as part of the test suite.")
@pytest.mark.asyncio
async def test_log_async_httpx_multiple_calls(respx_mock):
    """Load-test the sync OpenAI client under concurrency with the chat
    completions endpoint mocked out by respx.

    Fires ``simultaneous_calls`` requests from executor threads and checks
    that each call deserializes the mocked completion payload.
    """
    simultaneous_calls = 100

    # Shape the payload like a real /v1/chat/completions response so the
    # OpenAI SDK can parse it into a ChatCompletion object. The previous
    # bare {"role": ..., "content": ...} dict did not match the API schema,
    # so the parsed completion carried none of the mocked content.
    mock_resp = {
        "id": "chatcmpl-test",
        "object": "chat.completion",
        "created": 1706600000,
        "model": "gpt-3.5-turbo",
        "choices": [
            {
                "index": 0,
                "finish_reason": "stop",
                "message": {"role": "assistant", "content": "This is a test"},
            }
        ],
        "usage": {"prompt_tokens": 3, "completion_tokens": 4, "total_tokens": 7},
    }

    client = OpenAI()

    respx_mock.post("https://api.openai.com/v1/chat/completions").mock(
        return_value=httpx.Response(200, json=mock_resp)
    )

    def better_logging():
        # Runs in a worker thread; every call should be served by the respx mock.
        completion = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Say pong"}],
        )
        # Previously the result was silently discarded; assert that the
        # mocked content actually round-tripped through the client.
        assert completion.choices[0].message.content == "This is a test"

    loop = asyncio.get_event_loop()
    await asyncio.gather(*[loop.run_in_executor(None, better_logging) for _ in range(simultaneous_calls)])

0 comments on commit 03033a2

Please sign in to comment.