ENG-784 Add anthropic async and tools stream api support (#162)
* Add sync tools stream support

* Add anthropic tools stream example

* Clean up commented code

* Update anthropic tools stream format

* Update anthropic tools example

* Add anthropic async stream function calling support

* Delegate to exit and aexit of the response

* Address feedback

* Use http hooks for anthropic async calls

* Handle async anthropic function call via httpx hook (see the sketch below the change summary)

* Add anthropic async examples

* Prepare anthropic response from tool.beta.message

* Add tests for new anthropic changes

* Fix tests

* Address Wenzhe feedback
kxtran authored May 28, 2024
1 parent 403eba8 commit 07871cd
Showing 14 changed files with 802 additions and 69 deletions.
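
The commits above mention routing async Anthropic calls through httpx event hooks ("Use http hooks for anthropic async calls", "Handle async anthropic function call via httpx hook"). The snippet below is only a minimal sketch of that pattern, not the actual log10 implementation: the log_response hook and the client wiring are illustrative assumptions.

import anthropic
import httpx


async def log_response(response: httpx.Response) -> None:
    # Hypothetical hook: httpx calls this for every response the SDK receives.
    # Only metadata is printed here; eagerly reading the body could interfere
    # with streamed (SSE) responses.
    print("anthropic call:", response.request.method, response.request.url, response.status_code)


# Register the hook on an httpx client and hand that client to the Anthropic SDK.
http_client = httpx.AsyncClient(event_hooks={"response": [log_response]})
client = anthropic.AsyncAnthropic(http_client=http_client)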
23 changes: 23 additions & 0 deletions examples/logging/anthropic_async_messages.py
@@ -0,0 +1,23 @@
import asyncio

import anthropic

from log10.load import log10


log10(anthropic)

client = anthropic.AsyncAnthropic()


async def main() -> None:
    message = await client.beta.tools.messages.create(
        model="claude-instant-1.2",
        max_tokens=1000,
        messages=[{"role": "user", "content": "Say hello!"}],
    )

    print(message)


asyncio.run(main())
35 changes: 35 additions & 0 deletions examples/logging/anthropic_async_messages_stream.py
@@ -0,0 +1,35 @@
import asyncio

import anthropic

from log10.load import log10


log10(anthropic)

client = anthropic.AsyncAnthropic()


async def main() -> None:
    async with client.messages.stream(
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": "Say hello there!",
            }
        ],
        model="claude-3-haiku-20240307",
    ) as stream:
        async for text in stream.text_stream:
            print(text, end="", flush=True)
        print()

    # You can still get the accumulated final message outside of
    # the context manager, as long as the entire stream was consumed
    # inside of the context manager.
    accumulated = await stream.get_final_message()
    print("accumulated message: ", accumulated.to_json())


asyncio.run(main())
38 changes: 38 additions & 0 deletions examples/logging/anthropic_async_messages_stream_handler.py
@@ -0,0 +1,38 @@
import asyncio

import anthropic
from anthropic import AsyncAnthropic, AsyncMessageStream
from anthropic.types import MessageStreamEvent
from typing_extensions import override

from log10.load import log10


log10(anthropic)

client = AsyncAnthropic()


class MyStream(AsyncMessageStream):
    @override
    async def on_stream_event(self, event: MessageStreamEvent) -> None:
        print("on_event fired with:", event)


async def main() -> None:
    async with client.messages.stream(
        max_tokens=1024,
        messages=[
            {
                "role": "user",
                "content": "Say hello there!",
            }
        ],
        model="claude-3-haiku-20240307",
        event_handler=MyStream,
    ) as stream:
        accumulated = await stream.get_final_message()
        print("accumulated message: ", accumulated.to_json())


asyncio.run(main())
47 changes: 47 additions & 0 deletions examples/logging/anthropic_async_tools_stream.py
@@ -0,0 +1,47 @@
import asyncio

import anthropic
from anthropic import AsyncAnthropic

from log10.load import log10


log10(anthropic)

client = AsyncAnthropic()


async def run_conversation():
    tools = [
        {
            "name": "get_weather",
            "description": "Get the weather in a given location",
            "input_schema": {
                "type": "object",
                "properties": {
                    "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": 'The unit of temperature, either "celsius" or "fahrenheit"',
                    },
                },
                "required": ["location"],
            },
        }
    ]
    async with client.beta.tools.messages.stream(
        model="claude-3-haiku-20240307",
        tools=tools,
        messages=[
            {
                "role": "user",
                "content": "What's the weather like in San Francisco?",
            }
        ],
        max_tokens=1024,
    ) as stream:
        await stream.until_done()


asyncio.run(run_conversation())
55 changes: 55 additions & 0 deletions examples/logging/anthropic_async_tools_stream_handler.py
@@ -0,0 +1,55 @@
import asyncio

import anthropic
from anthropic import AsyncAnthropic
from anthropic.lib.streaming.beta import AsyncToolsBetaMessageStream
from typing_extensions import override

from log10.load import log10


log10(anthropic)


client = AsyncAnthropic()


class MyHandler(AsyncToolsBetaMessageStream):
    @override
    async def on_input_json(self, delta: str, snapshot: object) -> None:
        print(f"delta: {repr(delta)}")
        print(f"snapshot: {snapshot}")
        print()


async def main() -> None:
    async with client.beta.tools.messages.stream(
        max_tokens=1024,
        model="claude-3-haiku-20240307",
        tools=[
            {
                "name": "get_weather",
                "description": "Get the weather at a specific location.",
                "input_schema": {
                    "type": "object",
                    "properties": {
                        "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
                        "unit": {
                            "type": "string",
                            "enum": ["celsius", "fahrenheit"],
                            "description": "Unit for the output",
                        },
                    },
                    "required": ["location"],
                },
            }
        ],
        messages=[{"role": "user", "content": "What is the weather in SF?"}],
        event_handler=MyHandler,
    ) as stream:
        await stream.until_done()

    print()


asyncio.run(main())
21 changes: 21 additions & 0 deletions examples/logging/anthropic_messages_not_given.py
@@ -0,0 +1,21 @@
from anthropic import NOT_GIVEN

from log10.load import Anthropic


client = Anthropic()

completion = client.beta.tools.messages.create(
    model="claude-3-haiku-20240307",
    messages=[
        {
            "role": "user",
            "content": "tell a short joke.",
        },
    ],
    max_tokens=1000,
    tools=NOT_GIVEN,
    tool_choice=NOT_GIVEN,
)

print(completion.content[0].text)
35 changes: 35 additions & 0 deletions examples/logging/anthropic_tools_stream.py
@@ -0,0 +1,35 @@
import anthropic

from log10.load import log10


log10(anthropic)


client = anthropic.Anthropic()

with client.beta.tools.messages.stream(
    model="claude-3-haiku-20240307",
    tools=[
        {
            "name": "get_weather",
            "description": "Get the weather at a specific location",
            "input_schema": {
                "type": "object",
                "properties": {
                    "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": "Unit for the output",
                    },
                },
                "required": ["location"],
            },
        }
    ],
    messages=[{"role": "user", "content": "What is the weather in SF?"}],
    max_tokens=1024,
) as stream:
    for message in stream:
        print(message)
23 changes: 23 additions & 0 deletions examples/logging/magentic_async_chat_prompt.py
@@ -0,0 +1,23 @@
import asyncio

import anthropic
from magentic import UserMessage, chatprompt
from magentic.chat_model.anthropic_chat_model import AnthropicChatModel

from log10.load import log10


log10(anthropic)


async def main(topic: str) -> None:
    @chatprompt(
        UserMessage(f"Tell me a joke about {topic}"),
        model=AnthropicChatModel("claude-3-opus-20240229"),
    )
    async def tell_joke(topic: str) -> str: ...

    print(await tell_joke(topic))


asyncio.run(main("cats"))