diff --git a/README.md b/README.md
index 9843ea0..07379f3 100644
--- a/README.md
+++ b/README.md
@@ -74,11 +74,15 @@ cp config-template.json config.json
 在使用时只需要更改 model 和 channel 配置块下的 type 字段,即可在任意模型和应用间完成切换,连接不同的通路。下面将依次介绍各个 模型 及 应用 的配置和运行过程。
 
-#### settings
+#### model 配置
+
++ type: chatgpt
+  + `max_history_num`[optional]: 对话最大记忆长度,超过该长度则清理前面的记忆。
+
+#### common 配置
 + `clear_memory_commands`: 对话内指令,主动清空前文记忆,字符串数组可自定义指令别名。
   + default: ["#清除记忆"]
-+ `max_history_per_session`[optional]: 对话最大记忆长度,超过该长度则清理前面的记忆。
 
 ## 二、选择模型
diff --git a/config-template.json b/config-template.json
index 3cf788b..062079b 100644
--- a/config-template.json
+++ b/config-template.json
@@ -57,7 +57,7 @@
       "port": "80"
     }
   },
-  "settings": {
+  "common": {
     "clear_memory_commands": ["#清除记忆"]
   }
 }
diff --git a/config.py b/config.py
index 3598f98..3c9ab17 100644
--- a/config.py
+++ b/config.py
@@ -54,10 +54,10 @@ def channel_conf_val(channel_type, key, default=None):
     return val
 
 
-def settings_conf():
-    return config.get('settings')
+def common_conf():
+    return config.get('common')
 
 
-def settings_conf_val(key, default=None):
-    return config.get('settings').get(key, default)
+def common_conf_val(key, default=None):
+    return config.get('common').get(key, default)
diff --git a/model/openai/chatgpt_model.py b/model/openai/chatgpt_model.py
index 7cf6cfc..2679ee3 100644
--- a/model/openai/chatgpt_model.py
+++ b/model/openai/chatgpt_model.py
@@ -1,7 +1,7 @@
 # encoding:utf-8
 
 from model.model import Model
-from config import model_conf, settings_conf_val
+from config import model_conf, common_conf_val
 from common import const
 from common import log
 import openai
@@ -22,7 +22,7 @@ class ChatGPTModel(Model):
         if not context or not context.get('type') or context.get('type') == 'TEXT':
             log.info("[CHATGPT] query={}".format(query))
             from_user_id = context['from_user_id']
-            clear_memory_commands = settings_conf_val('clear_memory_commands', ['#清除记忆'])
+            clear_memory_commands = common_conf_val('clear_memory_commands', ['#清除记忆'])
             if query in clear_memory_commands:
                 Session.clear_session(from_user_id)
                 return '记忆已清除'
@@ -196,7 +196,7 @@ class Session(object):
     @staticmethod
     def save_session(query, answer, user_id, used_tokens=0):
         max_tokens = model_conf(const.OPEN_AI).get('conversation_max_tokens')
-        max_history_per_session = model_conf(const.OPEN_AI).get('max_history_per_session', None)
+        max_history_num = model_conf(const.OPEN_AI).get('max_history_num', None)
         if not max_tokens or max_tokens > 4000:
             # default value
             max_tokens = 1000
@@ -211,8 +211,9 @@
                 session.pop(1)
                 session.pop(1)
 
-        if max_history_per_session is not None:
-            while len(session) > max_history_per_session + 1:
+        if max_history_num is not None:
+            while len(session) > max_history_num * 2 + 1:
+                session.pop(1)
                 session.pop(1)
 
     @staticmethod
diff --git a/model/openai/open_ai_model.py b/model/openai/open_ai_model.py
index 8895482..78615bf 100644
--- a/model/openai/open_ai_model.py
+++ b/model/openai/open_ai_model.py
@@ -1,7 +1,7 @@
 # encoding:utf-8
 
 from model.model import Model
-from config import model_conf, settings_conf_val
+from config import model_conf, common_conf_val
 from common import const
 from common import log
 import openai
@@ -20,7 +20,7 @@ class OpenAIModel(Model):
         if not context or not context.get('type') or context.get('type') == 'TEXT':
             log.info("[OPEN_AI] query={}".format(query))
             from_user_id = context['from_user_id']
-            clear_memory_commands = settings_conf_val('clear_memory_commands', ['#清除记忆'])
+            clear_memory_commands = common_conf_val('clear_memory_commands', ['#清除记忆'])
             if query in clear_memory_commands:
                 Session.clear_session(from_user_id)
                 return '记忆已清除'
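The behavioral change in the `chatgpt_model.py` hunk is the trimming rule for `max_history_num`: the session list keeps the system prompt at index 0 and each remembered exchange occupies two entries (one user message plus one assistant reply), which is why the new bound is `max_history_num * 2 + 1` and the loop now pops twice per iteration. The following is a minimal standalone sketch of that logic, not the project's actual `Session` class; the `trim_session` helper and the message-dict shape are illustrative assumptions.

```python
# Illustrative sketch of the max_history_num trimming rule from this diff.
# Assumption: session[0] is the system prompt, and every remembered exchange
# adds two items (a user entry followed by an assistant entry).

def trim_session(session, max_history_num):
    """Drop the oldest question/answer pairs until at most
    max_history_num exchanges remain after the system prompt."""
    if max_history_num is None:
        return session
    while len(session) > max_history_num * 2 + 1:
        session.pop(1)  # oldest user message
        session.pop(1)  # its paired assistant reply
    return session


if __name__ == '__main__':
    history = [{'role': 'system', 'content': 'prompt'}]
    for i in range(3):
        history.append({'role': 'user', 'content': 'q%d' % i})
        history.append({'role': 'assistant', 'content': 'a%d' % i})
    trim_session(history, max_history_num=2)
    # The system prompt plus the two most recent exchanges remain.
    assert len(history) == 5
```

Popping index 1 twice per iteration (rather than once, as the old `max_history_per_session + 1` check did) keeps user/assistant messages paired, so the retained history always starts with a user turn.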