diff --git a/Makefile b/Makefile
index f709c32c..c4376953 100644
--- a/Makefile
+++ b/Makefile
@@ -80,8 +80,10 @@ logging-tags:
 	python examples/logging/tags_openai.py

 logging-magentic:
-	python examples/logging/magentic_async_stream_logging.py
+	python examples/logging/magentic_prompt.py
+	python examples/logging/magentic_prompt_stream.py
 	python examples/logging/magentic_function_logging.py
+	python examples/logging/magentic_async_stream_logging.py
 	python examples/logging/magentic_async_parallel_function_call.py
 	python examples/logging/magentic_async_multi_session_tags.py
 	python examples/logging/magentic_async_widget.py
diff --git a/examples/logging/magentic_prompt.py b/examples/logging/magentic_prompt.py
new file mode 100644
index 00000000..7d6d2459
--- /dev/null
+++ b/examples/logging/magentic_prompt.py
@@ -0,0 +1,14 @@
+import openai
+from magentic import prompt
+
+from log10.load import log10
+
+
+log10(openai, USE_ASYNC_=True)
+
+
+@prompt("Tell me a joke")
+def llm() -> str: ...
+
+
+print(llm())
diff --git a/examples/logging/magentic_prompt_stream.py b/examples/logging/magentic_prompt_stream.py
new file mode 100644
index 00000000..c5e3bd6e
--- /dev/null
+++ b/examples/logging/magentic_prompt_stream.py
@@ -0,0 +1,16 @@
+import openai
+from magentic import StreamedStr, prompt
+
+from log10.load import log10
+
+
+log10(openai, USE_ASYNC_=True)
+
+
+@prompt("Tell me a joke")
+def llm() -> StreamedStr: ...
+
+
+response = llm()
+for chunk in response:
+    print(chunk, end="", flush=True)
diff --git a/examples/logging/openai_chat_not_given.py b/examples/logging/openai_chat_not_given.py
new file mode 100644
index 00000000..323d1f7b
--- /dev/null
+++ b/examples/logging/openai_chat_not_given.py
@@ -0,0 +1,20 @@
+from openai import NOT_GIVEN
+
+from log10.load import OpenAI
+
+
+client = OpenAI()
+
+completion = client.chat.completions.create(
+    model="gpt-3.5-turbo",
+    messages=[
+        {
+            "role": "user",
+            "content": "tell a joke.",
+        },
+    ],
+    tools=NOT_GIVEN,
+    tool_choice=NOT_GIVEN,
+)
+
+print(completion.choices[0].message.content)
diff --git a/log10/load.py b/log10/load.py
index 057dbc81..eabce024 100644
--- a/log10/load.py
+++ b/log10/load.py
@@ -533,6 +533,9 @@ def _init_log_row(func, *args, **kwargs):
    elif "mistralai" in func.__module__:
        log_row["kind"] = "chat"
    elif "openai" in func.__module__:
+        from openai._utils._utils import strip_not_given
+
+        kwargs_copy = strip_not_given(kwargs_copy)
        kind = "chat" if "chat" in func.__module__ else "completion"
        log_row["kind"] = kind
    elif "google.generativeai" in func.__module__:
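
For context, a minimal sketch (not part of the patch) of what the new strip_not_given call does to the kwargs that end up in the log row, assuming openai-python's private helper drops top-level keys whose value is the NOT_GIVEN sentinel:

    # Illustrative only: NOT_GIVEN-valued kwargs are stripped before logging.
    from openai import NOT_GIVEN
    from openai._utils._utils import strip_not_given

    kwargs = {"model": "gpt-3.5-turbo", "tools": NOT_GIVEN, "tool_choice": NOT_GIVEN}
    print(strip_not_given(kwargs))  # expected: {'model': 'gpt-3.5-turbo'}

This is why the new openai_chat_not_given.py example, which passes tools=NOT_GIVEN and tool_choice=NOT_GIVEN, can be logged without those placeholder values appearing in the captured request.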