Skip to content
This repository has been archived by the owner on Nov 13, 2024. It is now read-only.

Commit

Permalink
lint
Browse files Browse the repository at this point in the history
  • Loading branch information
miararoy committed Sep 27, 2023
1 parent 7c341b3 commit 484b632
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 21 deletions.
19 changes: 9 additions & 10 deletions resin_cli/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,14 +33,15 @@ def is_healthy(url: str):
except Exception:
return False


def validate_connection():
    """Verify connectivity to both the Pinecone index and the OpenAI API.

    If either backend is unreachable or misconfigured, print a red error
    message to stderr and exit the process with status 1.

    NOTE(review): the original span interleaved pre- and post-commit diff
    lines (duplicate ``except`` headers and message strings); this is the
    post-commit version.
    """
    try:
        # Both calls raise on missing/invalid credentials or env vars.
        KnowledgeBase._connect_pinecone()
        openai.Model.list()
    except Exception:
        msg = (
            "Failed to connect to Pinecone index and OpenAI API, please make sure"
            + " you have set the right env vars"
        )
        click.echo(click.style(msg, fg="red"), err=True)
        sys.exit(1)
Expand Down Expand Up @@ -109,8 +110,8 @@ def new(index_name, tokenizer_model):
@click.option("--tokenizer-model", default="gpt-3.5-turbo", help="Tokenizer model")
def upsert(index_name, data_path, tokenizer_model):
if index_name is None:
msg = 'Index name is not provided, please provide it with'
+ ' --index-name or set it with env var `export INDEX_NAME="MY_INDEX_NAME`'
msg = "Index name is not provided, please provide it with"
+' --index-name or set it with env var `export INDEX_NAME="MY_INDEX_NAME`'
click.echo(click.style(msg, fg="red"), err=True)
sys.exit(1)
Tokenizer.initialize(OpenAITokenizer, tokenizer_model)
Expand Down Expand Up @@ -163,7 +164,7 @@ def _chat(
debug_info = ChatDebugInfo(
id=openai_response_id,
intenal_model=intenal_model,
duration_in_sec=round(duration_in_sec, 2)
duration_in_sec=round(duration_in_sec, 2),
)
else:
intenal_model = openai_response.model
Expand Down Expand Up @@ -282,8 +283,7 @@ def stop(host, port, ssl):
if running_server_id == "":
click.echo(
click.style(
"Did not find active process for Resin service"
+ f" on {host}:{port}",
"Did not find active process for Resin service" + f" on {host}:{port}",
fg="red",
)
)
Expand All @@ -299,8 +299,7 @@ def stop(host, port, ssl):

click.confirm(
click.style(
f"Stopping Resin service on {host}:{port} with pid "
f"{running_server_id}",
f"Stopping Resin service on {host}:{port} with pid " f"{running_server_id}",
fg="red",
),
abort=True,
Expand Down
32 changes: 21 additions & 11 deletions tests/e2e/test_app.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import os
import pytest
from fastapi.testclient import TestClient

from resin.knoweldge_base import KnowledgeBase
Expand All @@ -23,13 +22,17 @@

client = TestClient(app)


def test_health():
    """The /health endpoint returns 200 with OK status for Pinecone and the LLM."""
    res = client.get("/health")
    assert res.status_code == 200
    expected = HealthStatus(pinecone_status="OK", llm_status="OK").dict()
    assert res.json() == expected

# TODO: the following test is a complete e2e test, this is not the final design
# for the e2e tests, however there were some issues with the fixtures that will be resolved

# TODO: the following test is a complete e2e test, this is not the final design
# for the e2e tests, however there were some issues
# with the fixtures that will be resolved


def test_e2e():
try:
Expand All @@ -41,7 +44,8 @@ def test_e2e():
"text": "This is a test document, the topic is red bananas",
"source": "api_tests",
"metadata": {"test": "test"},
}],
}
],
)
upsert_response = client.post("/context/upsert", json=upsert_payload.dict())
assert upsert_response.status_code == 200
Expand All @@ -62,30 +66,36 @@ def test_e2e():

# test response is as expected on /query
response_as_json = query_response.json()
assert response_as_json[0]["query"] == query_payload.dict()["queries"][0]["text"]
assert response_as_json[0]["snippets"][0]["text"] == upsert_payload.dict()["documents"][0]["text"]
assert (
response_as_json[0]["query"] == query_payload.dict()["queries"][0]["text"]
)
assert (
response_as_json[0]["snippets"][0]["text"]
== upsert_payload.dict()["documents"][0]["text"]
)
# TODO: uncomment when fix is pushed
# assert response_as_json[0]["snippets"][0]["source"] == upsert_payload.dict()["documents"][0]["source"]
# assert response_as_json[0]["snippets"][0]["source"] == \
# upsert_payload.dict()["documents"][0]["source"]

# test response is as expected on /chat
chat_payload = {
"messages": [
{
"role": "user",
"content": "what is the topic of the test document? be concise"
"content": "what is the topic of the test document? be concise",
}
]
}
chat_response = client.post("/context/chat/completions", json=chat_payload)
assert chat_response.status_code == 200
chat_response_as_json = chat_response.json()
assert chat_response_as_json["choices"][0]["message"]["role"] == "assistant"
chat_response_content = chat_response_as_json["choices"][0]["message"]["content"]
chat_response_content = chat_response_as_json["choices"][0]["message"][
"content"
]
print(chat_response_content)
assert all([kw in chat_response_content for kw in ["red", "bananas"]])
except Exception as e:
raise e
finally:
kb.delete_index()


0 comments on commit 484b632

Please sign in to comment.