Add anthropic async examples
kxtran committed May 23, 2024
1 parent 9563ed4 commit 2488460
Showing 7 changed files with 215 additions and 5 deletions.
35 changes: 35 additions & 0 deletions examples/logging/anthropic_async_messages_stream.py
@@ -0,0 +1,35 @@
import asyncio

import anthropic

from log10.load import log10


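# log10(anthropic) instruments the anthropic module so the calls below are captured by log10 logging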
log10(anthropic)

client = anthropic.AsyncAnthropic()


async def main() -> None:
    async with client.messages.stream(
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": "Say hello there!",
            }
        ],
        model="claude-3-haiku-20240307",
    ) as stream:
        async for text in stream.text_stream:
            print(text, end="", flush=True)
        print()

    # you can still get the accumulated final message outside of
    # the context manager, as long as the entire stream was consumed
    # inside of the context manager
    accumulated = await stream.get_final_message()
    print("accumulated message: ", accumulated.to_json())


asyncio.run(main())
38 changes: 38 additions & 0 deletions examples/logging/anthropic_async_messages_stream_handler.py
@@ -0,0 +1,38 @@
import asyncio

import anthropic
from anthropic import AsyncAnthropic, AsyncMessageStream
from anthropic.types import MessageStreamEvent
from typing_extensions import override

from log10.load import log10


log10(anthropic)

client = AsyncAnthropic()


class MyStream(AsyncMessageStream):
    @override
    async def on_stream_event(self, event: MessageStreamEvent) -> None:
        print("on_event fired with:", event)


async def main() -> None:
    async with client.messages.stream(
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": "Say hello there!",
            }
        ],
        model="claude-3-haiku-20240307",
        event_handler=MyStream,
    ) as stream:
        accumulated = await stream.get_final_message()
        print("accumulated message: ", accumulated.to_json())


asyncio.run(main())
47 changes: 47 additions & 0 deletions examples/logging/anthropic_async_tools_stream.py
@@ -0,0 +1,47 @@
import asyncio

import anthropic
from anthropic import AsyncAnthropic

from log10.load import log10


log10(anthropic)

client = AsyncAnthropic()


async def run_conversation():
    tools = [
        {
            "name": "get_weather",
            "description": "Get the weather in a given location",
            "input_schema": {
                "type": "object",
                "properties": {
                    "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": 'The unit of temperature, either "celsius" or "fahrenheit"',
                    },
                },
                "required": ["location"],
            },
        }
    ]
    async with client.beta.tools.messages.stream(
        model="claude-3-haiku-20240307",
        tools=tools,
        messages=[
            {
                "role": "user",
                "content": "What's the weather like in San Francisco?",
            }
        ],
        max_tokens=1024,
    ) as stream:
        await stream.until_done()


asyncio.run(run_conversation())
55 changes: 55 additions & 0 deletions examples/logging/anthropic_async_tools_stream_handler.py
@@ -0,0 +1,55 @@
import asyncio

import anthropic
from anthropic import AsyncAnthropic
from anthropic.lib.streaming.beta import AsyncToolsBetaMessageStream
from typing_extensions import override

from log10.load import log10


log10(anthropic)


client = AsyncAnthropic()


class MyHandler(AsyncToolsBetaMessageStream):
    @override
    async def on_input_json(self, delta: str, snapshot: object) -> None:
        print(f"delta: {repr(delta)}")
        print(f"snapshot: {snapshot}")
        print()


async def main() -> None:
    async with client.beta.tools.messages.stream(
        max_tokens=1024,
        model="claude-3-haiku-20240307",
        tools=[
            {
                "name": "get_weather",
                "description": "Get the weather at a specific location.",
                "input_schema": {
                    "type": "object",
                    "properties": {
                        "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
                        "unit": {
                            "type": "string",
                            "enum": ["celsius", "fahrenheit"],
                            "description": "Unit for the output",
                        },
                    },
                    "required": ["location"],
                },
            }
        ],
        messages=[{"role": "user", "content": "What is the weather in SF?"}],
        event_handler=MyHandler,
    ) as stream:
        await stream.until_done()

    print()


asyncio.run(main())
17 changes: 17 additions & 0 deletions examples/logging/anthropic_messages_not_given.py
@@ -0,0 +1,17 @@
from log10.load import Anthropic


client = Anthropic()

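# optional parameters (e.g. system, temperature) are omitted here, leaving them at the SDK's
# NOT_GIVEN defaults (presumably the point of this *_not_given example)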
completion = client.messages.create(
    model="claude-instant-1.2",
    messages=[
        {
            "role": "user",
            "content": "tell a short joke.",
        },
    ],
    max_tokens=1000,
)

print(completion.content[0].text)
23 changes: 23 additions & 0 deletions examples/logging/magentic_async_chat_prompt.py
@@ -0,0 +1,23 @@
import asyncio

import anthropic
from magentic import UserMessage, chatprompt
from magentic.chat_model.anthropic_chat_model import AnthropicChatModel

from log10.load import log10


log10(anthropic)


async def main(topic: str) -> None:
    @chatprompt(
        UserMessage(f"Tell me a joke about {topic}"),
        model=AnthropicChatModel("claude-3-opus-20240229"),
    )
    async def tell_joke(topic: str) -> str: ...

    print(await tell_joke(topic))


asyncio.run(main("cats"))
5 changes: 0 additions & 5 deletions log10/anthropic.py
@@ -135,11 +135,6 @@ def create_tokens_usage(prompt: str, completion: str):
def prepare_response(
    response: anthropic.types.Completion | anthropic.types.Message, input_prompt: str = ""
) -> dict:
    print(type(response))

    if isinstance(response, dict):
        response = anthropic.types.Message(**response)

    if not hasattr(response, "stop_reason"):
        return None

