Merge pull request #211 from RegimenArsenic/master

[bug fix] caused by the EdgeGPT 0.1.10 update
zhayujie
2023-03-27 01:11:43 +08:00
committed by GitHub
2 changed files with 21 additions and 8 deletions
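EdgeGPT 0.1.10 moved its ChatHub entry points to keyword arguments and threads the websocket endpoint through a wss_link parameter, which is why the positional super() calls in the Sydney subclasses below stopped lining up. A minimal before/after sketch of the call-site change, using only the keyword names that appear in this diff (the exact upstream 0.1.10 signature is an assumption):

    # before: positional arguments, no longer match EdgeGPT 0.1.10
    async for item in super().ask_stream(prompt, conversation_style):
        yield item

    # after: explicit keyword arguments, with wss_link forwarded to the parent class
    async for item in super().ask_stream(
            prompt=prompt,
            conversation_style=conversation_style,
            wss_link="wss://sydney.bing.com/sydney/ChatHub"):
        yield item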

View File

@@ -44,13 +44,16 @@ class SydneyBot(Chatbot):
return ordered_messages
def pop_last_conversation(self):
self.conversations_cache[self.conversation_key]["messages"].pop()
async def ask(
self,
prompt: str,
conversation_style: EdgeGPT.CONVERSATION_STYLE_TYPE = None,
message_id: str = None,
) -> dict:
#Start a new conversation
# Start a new conversation
self.chat_hub = SydneyHub(Conversation(
self.cookiePath, self.cookies, self.proxy))
self.parent_message_id = message_id if message_id != None else uuid.uuid4()
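The new pop_last_conversation helper simply drops the most recent turn from the bot's per-conversation message cache. A small standalone sketch of the cache shape it operates on (the conversations_cache and "messages" names come from the method body above; the key and message contents are made up for illustration):

    conversations_cache = {
        "chat-1": {  # stands in for self.conversation_key
            "messages": [
                {"text": "user prompt", "author": "user"},
                {"text": "bot reply", "author": "bot"},
            ]
        }
    }
    # what pop_last_conversation() does after a failed request:
    conversations_cache["chat-1"]["messages"].pop()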
@@ -127,9 +130,10 @@ class SydneyHub(ChatHub):
async def ask_stream(
self,
prompt: str,
wss_link: str = "wss://sydney.bing.com/sydney/ChatHub",
conversation_style: EdgeGPT.CONVERSATION_STYLE_TYPE = None,
) -> Generator[str, None, None]:
async for item in super().ask_stream(prompt, conversation_style):
async for item in super().ask_stream(prompt=prompt, conversation_style=conversation_style, wss_link=wss_link):
yield item
@@ -142,8 +146,8 @@ class SydneyHubRequest(ChatHubRequest):
conversation_id: str,
invocation_id: int = 0,
) -> None:
super().__init__(conversation_signature, client_id,
conversation_id, invocation_id)
super().__init__(conversation_signature=conversation_signature, client_id=client_id,
conversation_id=conversation_id, invocation_id=invocation_id)
self.previous_messages = ""
def update(
@@ -153,7 +157,7 @@ class SydneyHubRequest(ChatHubRequest):
options: list | None = None,
) -> None:
self.invocation_id = 0
super().update(prompt, conversation_style, options)
super().update(prompt=prompt, conversation_style=conversation_style, options=options)
self.struct["arguments"][0]["message"]["messageType"] = "SearchQuery"
self.struct["arguments"][0]["previousMessages"] = [
{"text": "N/A\n\n"+self.previous_messages, "author": 'bot', }]

View File

@@ -1,7 +1,7 @@
# encoding:utf-8
import asyncio
from model.model import Model
from config import model_conf_val
from config import model_conf_val,common_conf_val
from common import log
from EdgeGPT import Chatbot, ConversationStyle
from ImageGen import ImageGen
@@ -29,6 +29,10 @@ class BingModel(Model):
def reply(self, query: str, context=None) -> tuple[str, dict]:
if not context or not context.get('type') or context.get('type') == 'TEXT':
clear_memory_commands = common_conf_val('clear_memory_commands', ['#清除记忆'])
if query in clear_memory_commands:
user_session[context['from_user_id']]=None
return '记忆已清除'
bot = user_session.get(context['from_user_id'], None)
if (bot == None):
bot = self.bot
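The clear-memory branch above reads its trigger words from the shared config via common_conf_val and falls back to ['#清除记忆'] ("clear memory") when nothing is configured; a matching query drops the cached bot for that user so the next question starts a fresh conversation. A standalone sketch of just that lookup-and-reset step (the trigger list and the from_user_id key come from the diff; everything else is illustrative):

    user_session = {"user-1": object()}      # stand-in for the per-user bot cache
    clear_memory_commands = ['#清除记忆']     # default used when the config has no entry
    query, context = '#清除记忆', {'from_user_id': 'user-1'}

    if query in clear_memory_commands:
        user_session[context['from_user_id']] = None   # forget this user's bot
        # reply() would return '记忆已清除' ("memory cleared") here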
@@ -46,8 +50,13 @@ class BingModel(Model):
task = bot.ask(query, conversation_style=self.style,message_id=bot.user_message_id)
else:
task = bot.ask(query, conversation_style=self.style)
answer = asyncio.run(task)
try:
answer = asyncio.run(task)
except Exception as e:
bot.pop_last_conversation()
log.exception(e)
return f"AI生成内容被微软内容过滤器拦截,已删除最后一次提问的记忆,请尝试使用其他文字描述问题,若AI依然无法正常回复,请使用{clear_memory_commands[0]}命令清除全部记忆"
# The latest reply
try:
reply = answer["item"]["messages"][-1]
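Taken together, the second file now survives a content-filter rejection: the asyncio.run call is wrapped in try/except, the half-finished turn is removed from the cache via pop_last_conversation, and a plain-text hint is returned instead of an exception bubbling up. A hedged sketch of calling reply() after this change (the context keys come from the code above; building a configured BingModel instance is assumed and not shown):

    # illustrative only: `model` stands for an already-configured BingModel
    context = {'type': 'TEXT', 'from_user_id': 'user-1'}
    answer = model.reply("hello", context=context)
    # if Bing's content filter rejects the prompt, reply() now returns the
    # friendly hint string and the failed turn is already popped from the cache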