use "max_history_num" to limit conversation count

This commit is contained in:
Xun Cai
2023-03-25 16:09:36 +11:00
parent 5061a205f5
commit ba4b8cc154
5 changed files with 19 additions and 14 deletions

View File

@@ -74,11 +74,15 @@ cp config-template.json config.json
在使用时只需要更改 model 和 channel 配置块下的 type 字段,即可在任意模型和应用间完成切换,连接不同的通路。下面将依次介绍各个 模型 及 应用 的配置和运行过程。
#### settings
#### model 配置
+ type: chatgpt
+ `max_history_num`[optional]: 对话最大记忆轮数(一问一答为一轮),超过该数量则清理最早的记忆。
#### common 配置
+ `clear_memory_commands`: 对话内指令,主动清空前文记忆,字符串数组可自定义指令别名。
+ default: ["#清除记忆"]
+ `max_history_per_session`[optional]: 对话最大记忆长度,超过该长度则清理前面的记忆。
## 二、选择模型

View File

@@ -57,7 +57,7 @@
"port": "80"
}
},
"settings": {
"common": {
"clear_memory_commands": ["#清除记忆"]
}
}

View File

@@ -54,10 +54,10 @@ def channel_conf_val(channel_type, key, default=None):
return val
def settings_conf():
    """Return the 'settings' section of the loaded config, or None if absent."""
    section = config.get('settings')
    return section
def common_conf():
    """Return the 'common' section of the loaded config, or None if absent."""
    section = config.get('common')
    return section
def settings_conf_val(key, default=None):
    """Look up *key* inside the 'settings' config section.

    Returns *default* when the key is missing. Also returns *default*
    (instead of raising AttributeError on None) when the whole
    'settings' section is absent from the config.
    """
    return (config.get('settings') or {}).get(key, default)
def common_conf_val(key, default=None):
    """Look up *key* inside the 'common' config section.

    Returns *default* when the key is missing. Also returns *default*
    (instead of raising AttributeError on None) when the whole
    'common' section is absent from the config.
    """
    return (config.get('common') or {}).get(key, default)

View File

@@ -1,7 +1,7 @@
# encoding:utf-8
from model.model import Model
from config import model_conf, settings_conf_val
from config import model_conf, common_conf_val
from common import const
from common import log
import openai
@@ -22,7 +22,7 @@ class ChatGPTModel(Model):
if not context or not context.get('type') or context.get('type') == 'TEXT':
log.info("[CHATGPT] query={}".format(query))
from_user_id = context['from_user_id']
clear_memory_commands = settings_conf_val('clear_memory_commands', ['#清除记忆'])
clear_memory_commands = common_conf_val('clear_memory_commands', ['#清除记忆'])
if query in clear_memory_commands:
Session.clear_session(from_user_id)
return '记忆已清除'
@@ -196,7 +196,7 @@ class Session(object):
@staticmethod
def save_session(query, answer, user_id, used_tokens=0):
max_tokens = model_conf(const.OPEN_AI).get('conversation_max_tokens')
max_history_per_session = model_conf(const.OPEN_AI).get('max_history_per_session', None)
max_history_num = model_conf(const.OPEN_AI).get('max_history_num', None)
if not max_tokens or max_tokens > 4000:
# default value
max_tokens = 1000
@@ -211,8 +211,9 @@ class Session(object):
session.pop(1)
session.pop(1)
if max_history_per_session is not None:
while len(session) > max_history_per_session + 1:
if max_history_num is not None:
while len(session) > max_history_num * 2 + 1:
session.pop(1)
session.pop(1)
@staticmethod

View File

@@ -1,7 +1,7 @@
# encoding:utf-8
from model.model import Model
from config import model_conf, settings_conf_val
from config import model_conf, common_conf_val
from common import const
from common import log
import openai
@@ -20,7 +20,7 @@ class OpenAIModel(Model):
if not context or not context.get('type') or context.get('type') == 'TEXT':
log.info("[OPEN_AI] query={}".format(query))
from_user_id = context['from_user_id']
clear_memory_commands = settings_conf_val('clear_memory_commands', ['#清除记忆'])
clear_memory_commands = common_conf_val('clear_memory_commands', ['#清除记忆'])
if query in clear_memory_commands:
Session.clear_session(from_user_id)
return '记忆已清除'