From 3f781686ccfea41d23a1ec7a733708b3768146af Mon Sep 17 00:00:00 2001
From: Kim Tran <17498395+kxtran@users.noreply.github.com>
Date: Thu, 13 Jun 2024 12:01:15 -0400
Subject: [PATCH 1/3] ENG-851 Update google-generativeai version, example and test (#185)

* Update google-generativeai to 0.6.0 version

* Update google example and test with history
---
 .../logging/google_genai_gemini_chat_w_history.py | 11 +++++++----
 poetry.lock                                       |  6 +++---
 pyproject.toml                                    |  2 +-
 tests/test_google.py                              | 11 +++++++----
 4 files changed, 18 insertions(+), 12 deletions(-)

diff --git a/examples/logging/google_genai_gemini_chat_w_history.py b/examples/logging/google_genai_gemini_chat_w_history.py
index b9481641..afa7c6f5 100644
--- a/examples/logging/google_genai_gemini_chat_w_history.py
+++ b/examples/logging/google_genai_gemini_chat_w_history.py
@@ -6,15 +6,18 @@
 log10(genai)
 
-model = genai.GenerativeModel("gemini-1.5-pro-latest", system_instruction="You are a cat. Your name is Neko.")
+model = genai.GenerativeModel(
+    "gemini-1.5-pro-latest",
+    system_instruction="You will be provided with statements, and your task is to convert them to standard English.",
+)
 
 chat = model.start_chat(
     history=[
-        {"role": "user", "parts": [{"text": "please say yes."}]},
-        {"role": "model", "parts": [{"text": "Yes yes yes?"}]},
+        {"role": "user", "parts": [{"text": "He no went to the market."}]},
+        {"role": "model", "parts": [{"text": "He did not go to the market."}]},
     ]
 )
 
-prompt = "please say no."
+prompt = "She no went to the market."
 response = chat.send_message(prompt)
 
 print(response.text)
diff --git a/poetry.lock b/poetry.lock
index 849cb219..de615332 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1126,12 +1126,12 @@ testing = ["pytest"]
 
 [[package]]
 name = "google-generativeai"
-version = "0.5.4"
+version = "0.6.0"
 description = "Google Generative AI High level API client library and tools."
 optional = true
 python-versions = ">=3.9"
 files = [
-    {file = "google_generativeai-0.5.4-py3-none-any.whl", hash = "sha256:036d63ee35e7c8aedceda4f81c390a5102808af09ff3a6e57e27ed0be0708f3c"},
+    {file = "google_generativeai-0.6.0-py3-none-any.whl", hash = "sha256:ba1d3b826b872bffe330aaac0dc6de2f0e4610df861c8ce7ec6433771611b676"},
 ]
 
 [package.dependencies]
@@ -3856,4 +3856,4 @@ together = ["together"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<4.0"
-content-hash = "6246821d7d245e3166a3759da50fbe20ce66ea6a22c0dbd09dd8f6c74d35af9a"
+content-hash = "2190ee6487f544438498170965a7719667b908a07ec2e43aaa21e7b9a34e53ad"
diff --git a/pyproject.toml b/pyproject.toml
index b492b32a..2adbe8cc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,7 +51,7 @@ mistralai = {version = "^0.1.5", optional = true}
 together = {version = "^0.2.7", optional = true}
 mosaicml-cli = {version = "^0.5.30", optional = true}
 google-cloud-bigquery = {version = "^3.11.4", optional = true}
-google-generativeai = {version = "^0.5.2", optional = true}
+google-generativeai = {version = "^0.6.0", optional = true}
 
 [tool.poetry.extras]
 autofeedback_icl = ["magentic"]
diff --git a/tests/test_google.py b/tests/test_google.py
index 83cf2f0e..329bec6c 100644
--- a/tests/test_google.py
+++ b/tests/test_google.py
@@ -27,15 +27,18 @@ def test_genai_chat(session, google_model):
 @pytest.mark.chat
 def test_genai_chat_w_history(session, google_model):
-    model = genai.GenerativeModel(google_model, system_instruction="You are a cat. Your name is Neko.")
+    model = genai.GenerativeModel(
+        google_model,
+        system_instruction="You will be provided with statements, and your task is to convert them to standard English.",
+    )
 
     chat = model.start_chat(
         history=[
-            {"role": "user", "parts": [{"text": "please say yes."}]},
-            {"role": "model", "parts": [{"text": "Yes yes yes?"}]},
+            {"role": "user", "parts": [{"text": "He no went to the market."}]},
+            {"role": "model", "parts": [{"text": "He did not go to the market."}]},
         ]
     )
 
-    prompt = "please say no."
+    prompt = "She no went to the market."
     response = chat.send_message(prompt)
 
     text = response.text
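
After this patch, the example in examples/logging/google_genai_gemini_chat_w_history.py reads roughly as follows. This is a sketch assembled from the hunks above; the two import lines are not part of the diff and are assumed from the unchanged top of the file:

    import google.generativeai as genai

    from log10.load import log10

    # Instrument the google.generativeai module so completions are logged to log10.
    log10(genai)

    model = genai.GenerativeModel(
        "gemini-1.5-pro-latest",
        system_instruction="You will be provided with statements, and your task is to convert them to standard English.",
    )

    # Seed the chat with one prior user/model exchange so the request carries history.
    chat = model.start_chat(
        history=[
            {"role": "user", "parts": [{"text": "He no went to the market."}]},
            {"role": "model", "parts": [{"text": "He did not go to the market."}]},
        ]
    )

    prompt = "She no went to the market."
    response = chat.send_message(prompt)

    print(response.text)

As with the other scripts under examples/logging, running it assumes a Google API key and log10 credentials are configured in the environment.
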
From 16544dc5b398c71cd52193166418dbf19204aa55 Mon Sep 17 00:00:00 2001
From: Kim Tran <17498395+kxtran@users.noreply.github.com>
Date: Thu, 13 Jun 2024 12:06:54 -0400
Subject: [PATCH 2/3] Update openai version to 1.33.0 (#186)

---
 poetry.lock | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index de615332..7064c62a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2150,13 +2150,13 @@ files = [
 
 [[package]]
 name = "openai"
-version = "1.30.4"
+version = "1.33.0"
 description = "The official Python library for the openai API"
 optional = false
 python-versions = ">=3.7.1"
 files = [
-    {file = "openai-1.30.4-py3-none-any.whl", hash = "sha256:fb2635efd270efaf9fac2e07558d7948373b940637d3ae3ab624c1a983d4f03f"},
-    {file = "openai-1.30.4.tar.gz", hash = "sha256:f3488d9a1c4e0d332b019377d27d7cb4b3d6103fd5d0a416c7ceac780d1d9b88"},
+    {file = "openai-1.33.0-py3-none-any.whl", hash = "sha256:621163b56570897ab8389d187f686a53d4771fd6ce95d481c0a9611fe8bc4229"},
+    {file = "openai-1.33.0.tar.gz", hash = "sha256:1169211a7b326ecbc821cafb427c29bfd0871f9a3e0947dd9e51acb3b0f1df78"},
 ]
 
 [package.dependencies]

From 2e308865969fc4b57a35852eb83bbdc65ffe9874 Mon Sep 17 00:00:00 2001
From: wenzhe <145375501+wenzhe-log10@users.noreply.github.com>
Date: Thu, 13 Jun 2024 12:13:46 -0700
Subject: [PATCH 3/3] change logging to debug instead of warning (#189)

---
 log10/_httpx_utils.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/log10/_httpx_utils.py b/log10/_httpx_utils.py
index 50a4b246..7543cecb 100644
--- a/log10/_httpx_utils.py
+++ b/log10/_httpx_utils.py
@@ -167,11 +167,11 @@ def format_anthropic_tools_request(request_content) -> str:
 async def get_completion_id(request: Request):
     host = request.headers.get("host")
     if "anthropic" in host and "/v1/messages" not in str(request.url):
-        logger.warning("Currently logging is only available for anthropic v1/messages.")
+        logger.debug("Currently logging is only available for anthropic v1/messages.")
         return
 
     if "openai" in host and "v1/chat/completions" not in str(request.url):
-        logger.warning("Currently logging is only available for openai v1/chat/completions.")
+        logger.debug("Currently logging is only available for openai v1/chat/completions.")
         return
 
     request.headers["x-log10-completion-id"] = str(uuid.uuid4())
@@ -209,7 +209,7 @@ async def log_request(request: Request):
         orig_module = "anthropic.resources.messages"
         orig_qualname = "Messages.stream"
     else:
-        logger.warning("Currently logging is only available for async openai and anthropic.")
+        logger.debug("Currently logging is only available for async openai and anthropic.")
         return
     log_row = {
         "status": "started",
@@ -318,7 +318,7 @@ def is_response_end_reached(self, text: str):
         elif "openai" in host:
             return self.is_openai_response_end_reached(text)
         else:
-            logger.warning("Currently logging is only available for async openai and anthropic.")
logger.debug("Currently logging is only available for async openai and anthropic.") return False def is_anthropic_response_end_reached(self, text: str): @@ -447,7 +447,7 @@ def parse_response_data(self, responses: list[str]): elif "anthropic" in host: return self.parse_anthropic_responses(responses) else: - logger.warning("Currently logging is only available for async openai and anthropic.") + logger.debug("Currently logging is only available for async openai and anthropic.") return None