Skip to content

Commit

Permalink
verbosity and error handling
Browse files Browse the repository at this point in the history
  • Loading branch information
artitw committed Sep 23, 2024
1 parent b45b399 commit 736bbcc
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 14 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

setuptools.setup(
name="text2text",
version="1.5.6",
version="1.5.7",
author="artitw",
author_email="artitw@gmail.com",
description="Text2Text: Crosslingual NLP/G toolkit",
Expand Down
29 changes: 16 additions & 13 deletions text2text/assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
import ollama
import psutil
import subprocess
import time

from llama_index.llms.ollama import Ollama
from llama_index.core.llms import ChatMessage
Expand All @@ -23,33 +22,37 @@ def __del__(self):
def load_model(self):
    """Install, start, and prime the ollama runtime for this assistant.

    Side effects only: runs privileged shell commands (apt, the official
    ollama install script, systemctl), launches ``ollama serve`` as a
    detached background process, and pulls ``self.model_name``.

    Raises:
        Exception: if any install/enable/serve step returns a non-zero
            exit code, or if the model pull does not report success.

    NOTE(review): requires sudo, network access, and an apt-based host —
    confirm the deployment environment supports this.
    """
    # lshw is installed first — presumably a dependency of the ollama
    # install script (hardware detection); confirm against ollama docs.
    if os.system("sudo apt install -q -y lshw") != 0:
        raise Exception("Cannot install lshw.")

    # Fetch and execute the official ollama installer.
    if os.system("curl -fsSL https://ollama.com/install.sh | sh") != 0:
        raise Exception("Cannot install ollama.")

    # Register the systemd service so ollama persists across reboots.
    if os.system("sudo systemctl enable ollama") != 0:
        raise Exception("Cannot enable ollama.")

    # Start the server in the background. The Popen handle is deliberately
    # not kept or waited on: the daemon must outlive this call.
    subprocess.Popen(["ollama", "serve"])

    # `ollama -v` doubles as a cheap liveness probe for the installed CLI.
    if os.system("ollama -v") != 0:
        raise Exception("Cannot serve ollama.")

    # Pull the model weights; pull() returns a status dict.
    result = ollama.pull(self.model_name)
    if result["status"] != "success":
        raise Exception(f"Did not pull {self.model_name}.")

def chat_completion(self, messages=[{"role": "user", "content": "hello"}], stream=False, schema=None, **kwargs):
try:
ollama.ps()
result = ollama.pull(self.model_name)
if result["status"] == "success":
time.sleep(10)
result = ollama.ps()
if not result:
result = ollama.pull(self.model_name)
if result["status"] == "success":
return self.chat_completion(messages=messages, stream=stream, **kwargs)
raise Exception(f"Did not pull {self.model_name}. Try restarting.")
except Exception as e:
print(str(e))
print("Retrying...")
self.load_model()
return self.chat_completion(messages=messages, stream=stream, **kwargs)

if schema:
Expand Down

0 comments on commit 736bbcc

Please sign in to comment.