From 0eeb465e651c7658eef1f73bae0b831b8e63c180 Mon Sep 17 00:00:00 2001
From: ned
Date: Mon, 6 Mar 2023 14:31:11 +0100
Subject: [PATCH] added max conversation age logic

---
 .env.example     |  3 +++
 README.md        |  2 ++
 main.py          |  1 +
 openai_helper.py | 21 +++++++++++++++++++--
 4 files changed, 25 insertions(+), 2 deletions(-)

diff --git a/.env.example b/.env.example
index e1b2a9e9..07e5c633 100644
--- a/.env.example
+++ b/.env.example
@@ -13,5 +13,8 @@ SHOW_USAGE=false
 # Max number of messages to keep in memory, after which the conversation will be summarised
 MAX_HISTORY_SIZE=10
 
+# Max minutes a conversation will live, after which the conversation will be reset to avoid excessive token usage
+MAX_CONVERSATION_AGE_MINUTES=180
+
 # Whether to answer to voice messages with the transcript or with a ChatGPT response of the transcript
 VOICE_REPLY_WITH_TRANSCRIPT_ONLY=true
\ No newline at end of file
diff --git a/README.md b/README.md
index 06553507..ceafc259 100644
--- a/README.md
+++ b/README.md
@@ -45,6 +45,7 @@ ALLOWED_TELEGRAM_USER_IDS="USER_ID_1,USER_ID_2,..." # Defaults to "*" (everyone)
 PROXY="YOUR_PROXY" # e.g. "http://localhost:8080", defaults to none
 SHOW_USAGE=true # Defaults to false
 MAX_HISTORY_SIZE=15 # Defaults to 10
+MAX_CONVERSATION_AGE_MINUTES=120 # Defaults to 180 (3h)
 VOICE_REPLY_WITH_TRANSCRIPT_ONLY=false # Defaults to true
 ```
 * `OPENAI_API_KEY`: Your OpenAI API key, you can get it from [here](https://platform.openai.com/account/api-keys)
@@ -53,6 +54,7 @@ VOICE_REPLY_WITH_TRANSCRIPT_ONLY=false # Defaults to true
 * `PROXY`: Proxy to be used for OpenAI and Telegram bot
 * `SHOW_USAGE`: Whether to show OpenAI token usage information after each response
 * `MAX_HISTORY_SIZE`: Max number of messages to keep in memory, after which the conversation will be summarised to avoid excessive token usage ([#34](https://github.com/n3d1117/chatgpt-telegram-bot/issues/34))
+* `MAX_CONVERSATION_AGE_MINUTES`: Maximum number of minutes a conversation should live, after which the conversation will be reset to avoid excessive token usage
 * `VOICE_REPLY_WITH_TRANSCRIPT_ONLY`: Whether to answer to voice messages with the transcript only or with a ChatGPT response of the transcript ([#38](https://github.com/n3d1117/chatgpt-telegram-bot/issues/38))
 
 Additional model parameters can be configured from the `main.py` file:
diff --git a/main.py b/main.py
index 766fa03e..ade54415 100644
--- a/main.py
+++ b/main.py
@@ -30,6 +30,7 @@ def main():
         'show_usage': os.environ.get('SHOW_USAGE', 'false').lower() == 'true',
         'proxy': os.environ.get('PROXY', None),
         'max_history_size': int(os.environ.get('MAX_HISTORY_SIZE', 10)),
+        'max_conversation_age_minutes': int(os.environ.get('MAX_CONVERSATION_AGE_MINUTES', 180)),
 
         # 'gpt-3.5-turbo' or 'gpt-3.5-turbo-0301'
         'model': 'gpt-3.5-turbo',
diff --git a/openai_helper.py b/openai_helper.py
index a3203943..d185ae21 100644
--- a/openai_helper.py
+++ b/openai_helper.py
@@ -1,3 +1,4 @@
+import datetime
 import logging
 
 import openai
@@ -15,7 +16,8 @@ def __init__(self, config: dict):
         openai.api_key = config['api_key']
         openai.proxy = config['proxy']
         self.config = config
-        self.conversations: dict[int: list] = {} # {chat_id: history}
+        self.conversations: dict[int: list] = {}  # {chat_id: history}
+        self.last_updated: dict[int: datetime] = {}  # {chat_id: last_update_timestamp}
 
     def get_chat_response(self, chat_id: int, query: str) -> str:
         """
@@ -25,9 +27,11 @@ def get_chat_response(self, chat_id: int, query: str) -> str:
         :return: The answer from the model
         """
         try:
-            if chat_id not in self.conversations:
+            if chat_id not in self.conversations or self.__max_age_reached(chat_id):
                 self.reset_chat_history(chat_id)
 
+            self.last_updated[chat_id] = datetime.datetime.now()
+
             # Summarize the chat history if it's too long to avoid excessive token usage
             if len(self.conversations[chat_id]) > self.config['max_history_size']:
                 logging.info(f'Chat history for chat ID {chat_id} is too long. Summarising...')
@@ -116,6 +120,19 @@ def reset_chat_history(self, chat_id):
         """
         self.conversations[chat_id] = [{"role": "system", "content": self.config['assistant_prompt']}]
 
+    def __max_age_reached(self, chat_id) -> bool:
+        """
+        Checks if the maximum conversation age has been reached.
+        :param chat_id: The chat ID
+        :return: A boolean indicating whether the maximum conversation age has been reached
+        """
+        if chat_id not in self.last_updated:
+            return False
+        last_updated = self.last_updated[chat_id]
+        now = datetime.datetime.now()
+        max_age_minutes = self.config['max_conversation_age_minutes']
+        return last_updated < now - datetime.timedelta(minutes=max_age_minutes)
+
     def __add_to_history(self, chat_id, role, content):
         """
         Adds a message to the conversation history.
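For anyone who wants to try the expiry logic outside the bot, here is a minimal, self-contained sketch of the same idea. The names `last_updated` and `max_conversation_age_minutes`, and the 180-minute default, mirror the diff above; the `ConversationTracker` class, its `touch()` helper and the public `max_age_reached()` method are illustrative stand-ins for code that lives inside `OpenAIHelper` in the actual patch, not code from the repository.

import datetime


class ConversationTracker:
    """Tracks per-chat timestamps and decides when a conversation has expired."""

    def __init__(self, max_conversation_age_minutes: int = 180):
        # 180 minutes matches the MAX_CONVERSATION_AGE_MINUTES default used in the patch
        self.max_conversation_age_minutes = max_conversation_age_minutes
        self.last_updated: dict[int, datetime.datetime] = {}  # {chat_id: last_update_timestamp}

    def touch(self, chat_id: int) -> None:
        """Record that the conversation was just used."""
        self.last_updated[chat_id] = datetime.datetime.now()

    def max_age_reached(self, chat_id: int) -> bool:
        """Return True when the last update is older than the configured maximum age."""
        if chat_id not in self.last_updated:
            # Unknown chats are not expired; the caller resets them anyway
            return False
        cutoff = datetime.datetime.now() - datetime.timedelta(minutes=self.max_conversation_age_minutes)
        return self.last_updated[chat_id] < cutoff


if __name__ == '__main__':
    tracker = ConversationTracker(max_conversation_age_minutes=180)
    tracker.touch(chat_id=1)
    print(tracker.max_age_reached(1))  # False: just touched
    # Simulate a conversation last touched four hours ago
    tracker.last_updated[1] = datetime.datetime.now() - datetime.timedelta(hours=4)
    print(tracker.max_age_reached(1))  # True: older than 180 minutes

As in the patch, the comparison `last_updated < now - timedelta(minutes=...)` treats a conversation as expired only once it is strictly older than the limit, and keeping the timestamps in a separate dict keyed by chat ID leaves the message history itself untouched until the next request triggers a reset.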