update async logging examples with finalize at the end
wenzhe-log10 committed Jun 7, 2024
1 parent 7172b7e commit 8dab25e
Showing 13 changed files with 26 additions and 2 deletions.
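
Each example file gets the same two-line change: import finalize from log10._httpx_utils and await finalize() once at the very end of the async entry point, presumably so that pending async log requests are flushed before the event loop shuts down; the two tests/test_litellm.py hunks remove finalize() calls instead. A minimal sketch of the resulting pattern, modeled on examples/logging/openai_async_logging.py (the log10(openai) patch call and the model name are assumptions, not shown in this diff):

import asyncio

import openai
from openai import AsyncOpenAI

from log10._httpx_utils import finalize
from log10.load import log10

log10(openai)  # assumed usage: patch the openai module so completions are logged to log10

client = AsyncOpenAI()


async def main():
    completion = await client.chat.completions.create(
        model="gpt-3.5-turbo",  # illustrative model name, not part of this diff
        messages=[{"role": "user", "content": "Say this is a test"}],
    )
    print(completion.choices[0].message.content)
    await finalize()  # flush pending async log requests before the loop exits


asyncio.run(main())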
2 changes: 2 additions & 0 deletions examples/logging/anthropic_async_messages.py
@@ -2,6 +2,7 @@

import anthropic

from log10._httpx_utils import finalize
from log10.load import log10


@@ -18,6 +19,7 @@ async def main() -> None:
)

print(message)
await finalize()


asyncio.run(main())
2 changes: 2 additions & 0 deletions examples/logging/anthropic_async_messages_stream.py
@@ -2,6 +2,7 @@

import anthropic

from log10._httpx_utils import finalize
from log10.load import log10


@@ -30,6 +31,7 @@ async def main() -> None:
# inside of the context manager
accumulated = await stream.get_final_message()
print("accumulated message: ", accumulated.to_json())
await finalize()


asyncio.run(main())
3 changes: 3 additions & 0 deletions examples/logging/anthropic_async_messages_stream_handler.py
@@ -5,6 +5,7 @@
from anthropic.types import MessageStreamEvent
from typing_extensions import override

from log10._httpx_utils import finalize
from log10.load import log10


@@ -34,5 +35,7 @@ async def main() -> None:
accumulated = await stream.get_final_message()
print("accumulated message: ", accumulated.to_json())

await finalize()


asyncio.run(main())
2 changes: 2 additions & 0 deletions examples/logging/anthropic_async_tools_stream.py
@@ -3,6 +3,7 @@
import anthropic
from anthropic import AsyncAnthropic

from log10._httpx_utils import finalize
from log10.load import log10


@@ -42,6 +43,7 @@ async def run_conversation():
max_tokens=1024,
) as stream:
await stream.until_done()
await finalize()


asyncio.run(run_conversation())
2 changes: 2 additions & 0 deletions examples/logging/anthropic_async_tools_stream_handler.py
@@ -5,6 +5,7 @@
from anthropic.lib.streaming.beta import AsyncToolsBetaMessageStream
from typing_extensions import override

from log10._httpx_utils import finalize
from log10.load import log10


@@ -49,6 +50,7 @@ async def main() -> None:
) as stream:
await stream.until_done()

await finalize()
print()


2 changes: 2 additions & 0 deletions examples/logging/magentic_async_chat_prompt.py
@@ -4,6 +4,7 @@
from magentic import UserMessage, chatprompt
from magentic.chat_model.anthropic_chat_model import AnthropicChatModel

from log10._httpx_utils import finalize
from log10.load import log10


@@ -18,6 +19,7 @@ async def main(topic: str) -> str:
async def tell_joke(topic: str) -> str: ...

print(await tell_joke(topic))
await finalize()


asyncio.run(main("cats"))
3 changes: 3 additions & 0 deletions examples/logging/magentic_async_multi_session_tags.py
@@ -3,6 +3,7 @@
import openai
from magentic import AsyncStreamedStr, OpenaiChatModel, prompt

from log10._httpx_utils import finalize
from log10.load import log10, log10_session


@@ -29,5 +30,7 @@ async def main():
async for chunk in result:
print(chunk, end="", flush=True)

await finalize()


asyncio.run(main())
2 changes: 2 additions & 0 deletions examples/logging/magentic_async_parallel_function_call.py
@@ -3,6 +3,7 @@
import openai
from magentic import AsyncParallelFunctionCall, prompt

from log10._httpx_utils import finalize
from log10.load import log10


@@ -25,6 +26,7 @@ async def main():
output = await plus_and_minus(2, 3)
async for chunk in output:
print(chunk)
await finalize()


asyncio.run(main())
2 changes: 2 additions & 0 deletions examples/logging/magentic_async_stream_logging.py
@@ -3,6 +3,7 @@
import openai
from magentic import AsyncStreamedStr, prompt

from log10._httpx_utils import finalize
from log10.load import log10, log10_session


@@ -19,6 +20,7 @@ async def main():
output = await tell_story("Europe.")
async for chunk in output:
print(chunk, end="", flush=True)
await finalize()


asyncio.run(main())
2 changes: 2 additions & 0 deletions examples/logging/magentic_async_widget.py
@@ -5,6 +5,7 @@
from magentic import OpenaiChatModel, prompt
from pydantic import BaseModel

from log10._httpx_utils import finalize
from log10.load import log10


@@ -31,6 +32,7 @@ async def _generate_title_and_description(query: str, widget_data: str) -> Widge
async def main():
r = await _generate_title_and_description(query="Give me a summary of AAPL", widget_data="<the summary>")
rich.print(r)
await finalize()


asyncio.run(main())
2 changes: 2 additions & 0 deletions examples/logging/openai_async_logging.py
@@ -3,6 +3,7 @@
import openai
from openai import AsyncOpenAI

from log10._httpx_utils import finalize
from log10.load import log10


@@ -17,6 +18,7 @@ async def main():
messages=[{"role": "user", "content": "Say this is a test"}],
)
print(completion.choices[0].message.content)
await finalize()


asyncio.run(main())
2 changes: 2 additions & 0 deletions examples/logging/openai_async_tools_stream.py
@@ -5,6 +5,7 @@
from openai import AsyncOpenAI
from rich import print

from log10._httpx_utils import finalize
from log10.load import log10


@@ -72,6 +73,7 @@ async def run_conversation():
else:
tool_calls[-1].function.arguments += tc[0].function.arguments
print(tool_calls)
await finalize()
return


2 changes: 0 additions & 2 deletions tests/test_litellm.py
@@ -51,7 +51,6 @@ async def test_completion_async_stream(anthropic_model):

## This test doesn't get completion_id from the session
## and logged a couple times during debug mode, punt this for now
await finalize()
assert output, "No output from the model."


@@ -153,5 +152,4 @@ async def test_image_async_stream(session, anthropic_model):
output += chunk.choices[0].delta.content

time.sleep(3)
await finalize()
_LogAssertion(completion_id=session.last_completion_id(), message_content=output).assert_chat_response()
