Session bug (fix with context variables) #161

Merged · 11 commits · May 23, 2024
60 changes: 60 additions & 0 deletions examples/logging/tags_nested.py
@@ -0,0 +1,60 @@
from log10.load import OpenAI, log10_session


client = OpenAI()
response = client.completions.create(
    model="gpt-3.5-turbo-instruct",
    prompt="I am demonstrating nested tags. Write a test case for this. This is the outermost call without any tags.",
    temperature=0,
    max_tokens=1024,
    top_p=1,
    frequency_penalty=0,
    presence_penalty=0,
)
print(response)

with log10_session(tags=["outer_tag"]):
    response = client.completions.create(
        model="gpt-3.5-turbo-instruct",
        prompt="I am demonstrating nested tags. Write a test case for this. This is an inner call with tags.",
        temperature=0,
        max_tokens=1024,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
    )
    print(response)

    with log10_session(tags=["inner_tag"]):
        response = client.completions.create(
            model="gpt-3.5-turbo-instruct",
            prompt="I am demonstrating nested tags. Write a test case for this. This is the innermost call with tags.",
            temperature=0,
            max_tokens=1024,
            top_p=1,
            frequency_penalty=0,
            presence_penalty=0,
        )
        print(response)

    response = client.completions.create(
        model="gpt-3.5-turbo-instruct",
        prompt="I am demonstrating nested tags. Write a test case for this. This is an inner call which should have the outer tag.",
        temperature=0,
        max_tokens=1024,
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
    )
    print(response)

response = client.completions.create(
    model="gpt-3.5-turbo-instruct",
    prompt="I am demonstrating nested tags. Write a test case for this. This is the outermost call without any tags (final call).",
    temperature=0,
    max_tokens=1024,
    top_p=1,
    frequency_penalty=0,
    presence_penalty=0,
)
print(response)
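The example relies on log10_session scoping tags to the enclosing block, including nested blocks. Below is a minimal sketch of how such tag scoping can be built on contextvars; the variable name _session_tags, the accumulate-on-nesting behavior, and the body of get_log10_session_tags are illustrative assumptions, not the actual log10.load implementation.

import contextvars
from contextlib import contextmanager
from typing import List, Optional

# Hypothetical ContextVar holding the tags contributed by the enclosing sessions.
_session_tags: contextvars.ContextVar = contextvars.ContextVar("session_tags", default=())


@contextmanager
def log10_session(tags: Optional[List[str]] = None):
    # Assumed behavior: a nested session extends its parent's tags for the
    # duration of the block.
    token = _session_tags.set(_session_tags.get() + tuple(tags or ()))
    try:
        yield
    finally:
        # Resetting the token restores the parent's tags, so calls made after
        # the block see only the outer session's tags again.
        _session_tags.reset(token)


def get_log10_session_tags() -> List[str]:
    return list(_session_tags.get())

Because the token-based reset only undoes the innermost block's contribution, the "inner call which should have the outer tag" in the example above would again see just the outer session's tags once the inner with block exits.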
8 changes: 4 additions & 4 deletions log10/_httpx_utils.py
@@ -9,7 +9,7 @@
from httpx import Request, Response

from log10.llm import Log10Config
-from log10.load import get_log10_session_tags, sessionID
+from log10.load import get_log10_session_tags, session_id_var


logger: logging.Logger = logging.getLogger("LOG10")
@@ -139,7 +139,7 @@ async def log_request(request: Request):
"orig_module": orig_module,
"orig_qualname": orig_qualname,
"request": request.content.decode("utf-8"),
"session_id": sessionID,
"session_id": session_id_var.get(),
}
if get_log10_session_tags():
log_row["tags"] = get_log10_session_tags()
@@ -230,7 +230,7 @@ async def aiter_bytes(self, *args, **kwargs):
"stacktrace": json.dumps(stacktrace),
"kind": "chat",
"request": self.request.content.decode("utf-8"),
"session_id": sessionID,
"session_id": session_id_var.get(),
}
if get_log10_session_tags():
log_row["tags"] = get_log10_session_tags()
@@ -272,7 +272,7 @@ async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
"stacktrace": json.dumps(stacktrace),
"kind": "chat",
"request": request.content.decode("utf-8"),
"session_id": sessionID,
"session_id": session_id_var.get(),
}
if get_log10_session_tags():
log_row["tags"] = get_log10_session_tags()
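The change in this file swaps the module-level sessionID global for a read from a contextvars.ContextVar, so each request logs the session id bound to its own execution context instead of whatever a shared global last held. A minimal sketch of that pattern follows, assuming session_id_var is declared in log10.load roughly as below; the new_session_id helper is hypothetical.

import uuid
from contextvars import ContextVar
from typing import Optional

# Assumed declaration in log10.load; the diff above only shows session_id_var.get().
session_id_var: ContextVar[Optional[str]] = ContextVar("session_id", default=None)


def new_session_id() -> str:
    # Hypothetical helper: bind a fresh session id to the current context only,
    # leaving other concurrent requests' contexts untouched.
    session_id = str(uuid.uuid4())
    session_id_var.set(session_id)
    return session_id


# The logging hooks above then read the id bound to the caller's context
# (or None when no session has been started):
log_row = {"session_id": session_id_var.get()}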