Commit

add examples
wenzhe-log10 committed Feb 22, 2024
1 parent a4c2eac commit 56df6f9
Showing 3 changed files with 71 additions and 0 deletions.
25 changes: 25 additions & 0 deletions examples/logging/magentic_async_stream_logging.py
@@ -0,0 +1,25 @@
import asyncio

import openai
from magentic import AsyncStreamedStr, prompt

from log10.load import log10, log10_session


# Patch the openai module so calls made through it (including magentic's) are logged to Log10.
log10(openai)


@prompt("Tell me a 200-word story about {topic}")
async def tell_story(topic: str) -> AsyncStreamedStr:
    ...


async def main():
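    # Group the streamed magentic call under a Log10 session tagged "async_tag".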
    with log10_session(tags=["async_tag"]):
        output = await tell_story("Europe.")
        async for chunk in output:
            print(chunk, end="", flush=True)


# Python 3.7+
asyncio.run(main())
22 changes: 22 additions & 0 deletions examples/logging/openai_async_logging.py
@@ -0,0 +1,22 @@
import asyncio

import openai
from openai import AsyncOpenAI

from log10.load import log10


# Patch the openai module so completions made with the async client are logged to Log10.
log10(openai)

client = AsyncOpenAI()


async def main():
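    # This completion goes through the patched async client, so Log10 records it.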
    completion = await client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Say this is a test"}],
    )
    print(completion.choices[0].message.content)


asyncio.run(main())
24 changes: 24 additions & 0 deletions examples/logging/openai_async_stream_logging.py
@@ -0,0 +1,24 @@
import asyncio

import openai
from openai import AsyncOpenAI

from log10.load import log10


# Patch the openai module so streaming completions made with the async client are logged to Log10.
log10(openai)

client = AsyncOpenAI()


async def main():
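    # Request a streaming completion; chunks are printed as they arrive and logged by Log10.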
    stream = await client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Count to 50."}],
        stream=True,
    )
    async for chunk in stream:
        print(chunk.choices[0].delta.content or "", end="", flush=True)


asyncio.run(main())
