
Commit

Merge branch 'main' into feat/new-langchain-callback-handler
the-praxs authored Jan 24, 2025
2 parents 002bcbd + 0883c34 commit ff352cd
Showing 4 changed files with 5 additions and 279 deletions.
6 changes: 3 additions & 3 deletions agentops/host_env.py
@@ -100,9 +100,9 @@ def get_ram_details():
     try:
         ram_info = psutil.virtual_memory()
         return {
-            "Total": f"{ram_info.total / (1024 ** 3):.2f} GB",
-            "Available": f"{ram_info.available / (1024 ** 3):.2f} GB",
-            "Used": f"{ram_info.used / (1024 ** 3):.2f} GB",
+            "Total": f"{ram_info.total / (1024**3):.2f} GB",
+            "Available": f"{ram_info.available / (1024**3):.2f} GB",
+            "Used": f"{ram_info.used / (1024**3):.2f} GB",
             "Percentage": f"{ram_info.percent}%",
         }
     except:
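The host_env.py hunk is purely cosmetic: Black removes the spaces around ** in the GiB conversion. For readers outside the diff's context, a minimal standalone sketch of the pattern being touched, assuming only psutil (the empty-dict fallback is an illustration; the diff does not show the function's actual except branch):

    import psutil  # pip install psutil

    def get_ram_details() -> dict:
        # Mirror of the diffed snippet: report RAM figures in GiB plus usage percent.
        try:
            ram_info = psutil.virtual_memory()
            return {
                "Total": f"{ram_info.total / (1024**3):.2f} GB",
                "Available": f"{ram_info.available / (1024**3):.2f} GB",
                "Used": f"{ram_info.used / (1024**3):.2f} GB",
                "Percentage": f"{ram_info.percent}%",
            }
        except Exception:  # assumed fallback; the original's except body is not shown
            return {}

    if __name__ == "__main__":
        print(get_ram_details())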
4 changes: 2 additions & 2 deletions agentops/llms/providers/ollama.py
@@ -26,7 +26,7 @@ def handle_stream_chunk(chunk: dict):

if chunk.get("done"):
llm_event.end_timestamp = get_ISO_time()
llm_event.model = f'ollama/{chunk.get("model")}'
llm_event.model = f"ollama/{chunk.get('model')}"
llm_event.returns = chunk
llm_event.returns["message"] = llm_event.completion
llm_event.prompt = kwargs["messages"]
@@ -53,7 +53,7 @@ def generator():
             return generator()

         llm_event.end_timestamp = get_ISO_time()
-        llm_event.model = f'ollama/{response["model"]}'
+        llm_event.model = f"ollama/{response['model']}"
         llm_event.returns = response
         llm_event.agent_id = check_call_stack_for_agent_id()
         llm_event.prompt = kwargs["messages"]
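Both ollama.py edits are the same normalization applied to f-strings: Black puts double quotes on the outside, so the inner key lookups switch to single quotes. The two spellings are interchangeable at runtime, as this self-contained check shows (the chunk dict here is made up for illustration, shaped like the ones handle_stream_chunk receives):

    chunk = {"model": "llama3", "done": True}

    before = f'ollama/{chunk.get("model")}'  # single quotes outside (old style)
    after = f"ollama/{chunk.get('model')}"   # double quotes outside (new style)
    assert before == after == "ollama/llama3"

Note that the quotes must still alternate: before Python 3.12, reusing the outer quote character inside an f-string is a syntax error.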
167 changes: 0 additions & 167 deletions examples/anthropic_examples/anthropic-example-sync.py

This file was deleted.

107 changes: 0 additions & 107 deletions examples/openai_examples/openai_example_sync.py

This file was deleted.
