From dc213d6af6f3492bc13151c6d960379227b1408e Mon Sep 17 00:00:00 2001 From: Albert Date: Fri, 24 Mar 2023 17:03:58 +1100 Subject: [PATCH 1/4] support setting clear_memory_commands --- README.md | 4 ++++ config-template.json | 3 +++ config.py | 9 +++++++++ model/openai/chatgpt_model.py | 5 +++-- model/openai/open_ai_model.py | 5 +++-- 5 files changed, 22 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 5f31afe..51f14a4 100644 --- a/README.md +++ b/README.md @@ -74,6 +74,10 @@ cp config-template.json config.json 在使用时只需要更改 model 和 channel 配置块下的 type 字段,即可在任意模型和应用间完成切换,连接不同的通路。下面将依次介绍各个 模型 及 应用 的配置和运行过程。 +#### settings + ++ `clear_memory_commands`: 对话内指令,主动清空前文记忆,字符串数组可自定义指令别名。 + + default: ["#清除记忆"] ## 二、选择模型 diff --git a/config-template.json b/config-template.json index 24c2a08..3cf788b 100644 --- a/config-template.json +++ b/config-template.json @@ -56,5 +56,8 @@ "http_auth_password": "6.67428e-11", "port": "80" } + }, + "settings": { + "clear_memory_commands": ["#清除记忆"] } } diff --git a/config.py b/config.py index 6ae2c95..3598f98 100644 --- a/config.py +++ b/config.py @@ -52,3 +52,12 @@ def channel_conf_val(channel_type, key, default=None): # common default config return config.get('channel').get(key, default) return val + + +def settings_conf(): + return config.get('settings') + + +def settings_conf_val(key, default=None): + return config.get('settings').get(key, default) + diff --git a/model/openai/chatgpt_model.py b/model/openai/chatgpt_model.py index 609e9f3..9a8f3d6 100644 --- a/model/openai/chatgpt_model.py +++ b/model/openai/chatgpt_model.py @@ -1,7 +1,7 @@ # encoding:utf-8 from model.model import Model -from config import model_conf +from config import model_conf, settings_conf_val from common import const from common import log import openai @@ -22,7 +22,8 @@ class ChatGPTModel(Model): if not context or not context.get('type') or context.get('type') == 'TEXT': log.info("[CHATGPT] query={}".format(query)) from_user_id = context['from_user_id'] - if query == '#清除记忆': + clear_memory_commands = settings_conf_val('clear_memory_commands', ['#清除记忆']) + if query in clear_memory_commands: Session.clear_session(from_user_id) return '记忆已清除' diff --git a/model/openai/open_ai_model.py b/model/openai/open_ai_model.py index 36dc3c4..8895482 100644 --- a/model/openai/open_ai_model.py +++ b/model/openai/open_ai_model.py @@ -1,7 +1,7 @@ # encoding:utf-8 from model.model import Model -from config import model_conf +from config import model_conf, settings_conf_val from common import const from common import log import openai @@ -20,7 +20,8 @@ class OpenAIModel(Model): if not context or not context.get('type') or context.get('type') == 'TEXT': log.info("[OPEN_AI] query={}".format(query)) from_user_id = context['from_user_id'] - if query == '#清除记忆': + clear_memory_commands = settings_conf_val('clear_memory_commands', ['#清除记忆']) + if query in clear_memory_commands: Session.clear_session(from_user_id) return '记忆已清除' From 5061a205f535d72137a43d21405929b5d85b8159 Mon Sep 17 00:00:00 2001 From: Xun Cai Date: Fri, 24 Mar 2023 18:51:16 +1100 Subject: [PATCH 2/4] support an optional setting `max_conversation_history_per_session` for chatgpt model --- README.md | 1 + model/openai/chatgpt_model.py | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/README.md b/README.md index 51f14a4..9843ea0 100644 --- a/README.md +++ b/README.md @@ -78,6 +78,7 @@ cp config-template.json config.json + `clear_memory_commands`: 对话内指令,主动清空前文记忆,字符串数组可自定义指令别名。 + default: ["#清除记忆"] ++ 
`max_history_per_session`[optional]: 对话最大记忆长度,超过该长度则清理前面的记忆。 ## 二、选择模型 diff --git a/model/openai/chatgpt_model.py b/model/openai/chatgpt_model.py index 9a8f3d6..7cf6cfc 100644 --- a/model/openai/chatgpt_model.py +++ b/model/openai/chatgpt_model.py @@ -196,6 +196,7 @@ class Session(object): @staticmethod def save_session(query, answer, user_id, used_tokens=0): max_tokens = model_conf(const.OPEN_AI).get('conversation_max_tokens') + max_history_per_session = model_conf(const.OPEN_AI).get('max_history_per_session', None) if not max_tokens or max_tokens > 4000: # default value max_tokens = 1000 @@ -210,6 +211,10 @@ class Session(object): session.pop(1) session.pop(1) + if max_history_per_session is not None: + while len(session) > max_history_per_session + 1: + session.pop(1) + @staticmethod def clear_session(user_id): user_session[user_id] = [] From ba4b8cc1540a5e92a88660ec5d1b7329d6c0671e Mon Sep 17 00:00:00 2001 From: Xun Cai Date: Sat, 25 Mar 2023 16:09:36 +1100 Subject: [PATCH 3/4] use "max_history_num" to limit conversation count --- README.md | 8 ++++++-- config-template.json | 2 +- config.py | 8 ++++---- model/openai/chatgpt_model.py | 11 ++++++----- model/openai/open_ai_model.py | 4 ++-- 5 files changed, 19 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 9843ea0..07379f3 100644 --- a/README.md +++ b/README.md @@ -74,11 +74,15 @@ cp config-template.json config.json 在使用时只需要更改 model 和 channel 配置块下的 type 字段,即可在任意模型和应用间完成切换,连接不同的通路。下面将依次介绍各个 模型 及 应用 的配置和运行过程。 -#### settings +#### model 配置 + ++ type: chatgpt + + `max_history_num`[optional]: 对话最大记忆长度,超过该长度则清理前面的记忆。 + +#### common 配置 + `clear_memory_commands`: 对话内指令,主动清空前文记忆,字符串数组可自定义指令别名。 + default: ["#清除记忆"] -+ `max_history_per_session`[optional]: 对话最大记忆长度,超过该长度则清理前面的记忆。 ## 二、选择模型 diff --git a/config-template.json b/config-template.json index 3cf788b..062079b 100644 --- a/config-template.json +++ b/config-template.json @@ -57,7 +57,7 @@ "port": "80" } }, - "settings": { + "common": { "clear_memory_commands": ["#清除记忆"] } } diff --git a/config.py b/config.py index 3598f98..3c9ab17 100644 --- a/config.py +++ b/config.py @@ -54,10 +54,10 @@ def channel_conf_val(channel_type, key, default=None): return val -def settings_conf(): - return config.get('settings') +def common_conf(): + return config.get('common') -def settings_conf_val(key, default=None): - return config.get('settings').get(key, default) +def common_conf_val(key, default=None): + return config.get('common').get(key, default) diff --git a/model/openai/chatgpt_model.py b/model/openai/chatgpt_model.py index 7cf6cfc..2679ee3 100644 --- a/model/openai/chatgpt_model.py +++ b/model/openai/chatgpt_model.py @@ -1,7 +1,7 @@ # encoding:utf-8 from model.model import Model -from config import model_conf, settings_conf_val +from config import model_conf, common_conf_val from common import const from common import log import openai @@ -22,7 +22,7 @@ class ChatGPTModel(Model): if not context or not context.get('type') or context.get('type') == 'TEXT': log.info("[CHATGPT] query={}".format(query)) from_user_id = context['from_user_id'] - clear_memory_commands = settings_conf_val('clear_memory_commands', ['#清除记忆']) + clear_memory_commands = common_conf_val('clear_memory_commands', ['#清除记忆']) if query in clear_memory_commands: Session.clear_session(from_user_id) return '记忆已清除' @@ -196,7 +196,7 @@ class Session(object): @staticmethod def save_session(query, answer, user_id, used_tokens=0): max_tokens = model_conf(const.OPEN_AI).get('conversation_max_tokens') - 
max_history_per_session = model_conf(const.OPEN_AI).get('max_history_per_session', None) + max_history_num = model_conf(const.OPEN_AI).get('max_history_num', None) if not max_tokens or max_tokens > 4000: # default value max_tokens = 1000 @@ -211,8 +211,9 @@ class Session(object): session.pop(1) session.pop(1) - if max_history_per_session is not None: - while len(session) > max_history_per_session + 1: + if max_history_num is not None: + while len(session) > max_history_num * 2 + 1: + session.pop(1) session.pop(1) @staticmethod diff --git a/model/openai/open_ai_model.py b/model/openai/open_ai_model.py index 8895482..78615bf 100644 --- a/model/openai/open_ai_model.py +++ b/model/openai/open_ai_model.py @@ -1,7 +1,7 @@ # encoding:utf-8 from model.model import Model -from config import model_conf, settings_conf_val +from config import model_conf, common_conf_val from common import const from common import log import openai @@ -20,7 +20,7 @@ class OpenAIModel(Model): if not context or not context.get('type') or context.get('type') == 'TEXT': log.info("[OPEN_AI] query={}".format(query)) from_user_id = context['from_user_id'] - clear_memory_commands = settings_conf_val('clear_memory_commands', ['#清除记忆']) + clear_memory_commands = common_conf_val('clear_memory_commands', ['#清除记忆']) if query in clear_memory_commands: Session.clear_session(from_user_id) return '记忆已清除' From 581eaf2befb4a977654026ccc1b41cf3ca549045 Mon Sep 17 00:00:00 2001 From: Xun Cai Date: Sat, 25 Mar 2023 16:21:15 +1100 Subject: [PATCH 4/4] update document --- README.md | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 07379f3..55b63e1 100644 --- a/README.md +++ b/README.md @@ -74,16 +74,6 @@ cp config-template.json config.json 在使用时只需要更改 model 和 channel 配置块下的 type 字段,即可在任意模型和应用间完成切换,连接不同的通路。下面将依次介绍各个 模型 及 应用 的配置和运行过程。 -#### model 配置 - -+ type: chatgpt - + `max_history_num`[optional]: 对话最大记忆长度,超过该长度则清理前面的记忆。 - -#### common 配置 - -+ `clear_memory_commands`: 对话内指令,主动清空前文记忆,字符串数组可自定义指令别名。 - + default: ["#清除记忆"] - ## 二、选择模型 ### 1. ChatGPT @@ -123,6 +113,7 @@ pip3 install --upgrade openai + `model`: 模型名称,目前支持填入 `gpt-3.5-turbo`, `gpt-4`, `gpt-4-32k` (其中gpt-4 api暂未开放) + `proxy`: 代理客户端的地址,详情参考 [#56](https://github.com/zhayujie/bot-on-anything/issues/56) + `character_desc`: 配置中保存着你对chatgpt说的一段话,他会记住这段话并作为他的设定,你可以为他定制任何人格 + + `max_history_num`[optional]: 对话最大记忆长度,超过该长度则清理前面的记忆。 ### 2.GPT-3.0 @@ -494,3 +485,9 @@ pip3 install PyJWT flask 本地运行:`python3 app.py`运行后访问 `http://127.0.0.1:80` 服务器运行:部署后访问 `http://公网域名或IP:端口` + + +### 通用配置 + ++ `clear_memory_commands`: 对话内指令,主动清空前文记忆,字符串数组可自定义指令别名。 + + default: ["#清除记忆"]
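
A minimal, self-contained sketch of how the `clear_memory_commands` setting introduced by this series is consumed: the `common` block from config-template.json is inlined here as a string, `common_conf_val()` mirrors the accessor added to config.py in PATCH 3/4, and `handle_query()` is a hypothetical stand-in for the check that the patches place in `ChatGPTModel.reply()` / `OpenAIModel.reply()`. The `"#reset"` alias and the `{}` fallback for a missing `common` block are assumptions of the sketch, not part of the patches.

```python
# Standalone sketch: how clear_memory_commands flows from the "common" config
# block into the clear-memory check. handle_query() is illustrative only; in
# the patches the equivalent logic lives in the models' reply() methods.
import json

config = json.loads("""
{
  "common": {
    "clear_memory_commands": ["#清除记忆", "#reset"]
  }
}
""")


def common_conf_val(key, default=None):
    # Mirrors config.common_conf_val() from PATCH 3/4; the extra {} fallback is
    # an assumption so the sketch also runs when the "common" block is absent.
    return config.get('common', {}).get(key, default)


def handle_query(query, session):
    clear_memory_commands = common_conf_val('clear_memory_commands', ['#清除记忆'])
    if query in clear_memory_commands:
        session.clear()      # stands in for Session.clear_session(from_user_id)
        return '记忆已清除'
    return None              # fall through to the normal completion request


session = [{'role': 'system', 'content': 'character_desc'}]
print(handle_query('#reset', session))   # -> 记忆已清除
print(len(session))                      # -> 0
```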
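
And a sketch of the `max_history_num` limit from PATCH 3/4. `trim_session()` is a hypothetical wrapper around the while-loop added to `Session.save_session()`; it assumes the session layout used by chatgpt_model.py, where `session[0]` holds the system prompt (`character_desc`) and each exchange appends one user and one assistant message, so `max_history_num * 2 + 1` keeps the prompt plus at most `max_history_num` question/answer pairs.

```python
# Standalone sketch of the count-based trim; the same while-loop is added to
# Session.save_session() in PATCH 3/4.
def trim_session(session, max_history_num):
    if max_history_num is not None:
        # Drop the oldest question/answer pair (two messages) until only the
        # system prompt plus max_history_num pairs remain.
        while len(session) > max_history_num * 2 + 1:
            session.pop(1)
            session.pop(1)
    return session


session = [{'role': 'system', 'content': 'character_desc'}]
for i in range(3):
    session.append({'role': 'user', 'content': 'q%d' % i})
    session.append({'role': 'assistant', 'content': 'a%d' % i})

trim_session(session, max_history_num=2)
print([m['content'] for m in session])
# -> ['character_desc', 'q1', 'a1', 'q2', 'a2']
```

In the patched `save_session()` the existing token-based trim (`conversation_max_tokens`) still runs first; `max_history_num` is an additional, optional cap on the number of retained exchanges.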