Module nonebot_plugin_marshoai.handler
class MarshoHandler
func __init__(self, client: AsyncOpenAI, context: MarshoContext)
Source code or view on GitHub
python
def __init__(self, client: AsyncOpenAI, context: MarshoContext):
    self.client = client
    self.context = context
    self.bot: Bot = current_bot.get()
    self.event: Event = current_event.get()
    self.matcher: Matcher = current_matcher.get()
    self.message_id: str = UniMessage.get_message_id(self.event)
    self.target = UniMessage.get_target(self.event)
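Since `__init__` reads the `current_bot`, `current_event`, and `current_matcher` context variables, a `MarshoHandler` can only be constructed while an event is being handled. A minimal usage sketch (not from the source; the `AsyncOpenAI` arguments and the no-argument `MarshoContext()` call are assumptions for illustration):
python
from openai import AsyncOpenAI

# Placeholder credentials; use your real endpoint and key.
client = AsyncOpenAI(api_key="sk-placeholder", base_url="https://example.com/v1")
context = MarshoContext()  # assumed constructor; check the plugin source for the real one

async def inside_some_matcher():
    # Only valid while NoneBot's current_* context vars are populated,
    # i.e. inside a running event handler.
    handler = MarshoHandler(client, context)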
async func process_user_input(self, user_input: UniMsg, model_name: str) -> Union[str, list]
Description: Processes the user input into a format the API can accept, and adds a nickname prompt
Source code or view on GitHub
python
async def process_user_input(
    self, user_input: UniMsg, model_name: str
) -> Union[str, list]:
    is_support_image_model = (
        model_name.lower()
        in SUPPORT_IMAGE_MODELS + config.marshoai_additional_image_models
    )
    usermsg = [] if is_support_image_model else ''
    user_nickname = await get_nickname_by_user_id(self.event.get_user_id())
    if user_nickname:
        nickname_prompt = f'\n此消息的说话者为: {user_nickname}'
    else:
        nickname_prompt = ''
    for i in user_input:
        if i.type == 'text':
            if is_support_image_model:
                usermsg += [
                    TextContentItem(text=i.data['text'] + nickname_prompt).as_dict()
                ]
            else:
                usermsg += str(i.data['text'] + nickname_prompt)
        elif i.type == 'image':
            if is_support_image_model:
                usermsg.append(
                    ImageContentItem(
                        image_url=ImageUrl(
                            url=str(await get_image_b64(i.data['url']))
                        )
                    ).as_dict()
                )
                logger.info(f"输入图片 {i.data['url']}")
            elif config.marshoai_enable_support_image_tip:
                await UniMessage(
                    '*此模型不支持图片处理或管理员未启用此模型的图片支持。图片将被忽略。'
                ).send()
    return usermsg
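The return shape therefore depends on the model: image-capable models get a list of content-item dicts, text-only models get one concatenated string. A hedged illustration of the two shapes (the field names follow the content-item classes used above; the exact dicts produced by `as_dict()` may differ):
python
# Hypothetical result for an image-capable model (base64 payload truncated):
image_model_result = [
    {"type": "text", "text": "你好\n此消息的说话者为: Alice"},
    {"type": "image_url", "image_url": {"url": "data:image/jpeg;base64,..."}},
]
# Hypothetical result for a text-only model: a single concatenated string.
text_model_result = "你好\n此消息的说话者为: Alice"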
async func handle_single_chat(self, user_message: Union[str, list], model_name: str, tools_list: list | None, tool_message: Optional[list] = None, stream: bool = False) -> Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]
Description: Handles a single chat completion request
Source code or view on GitHub
python
async def handle_single_chat(
    self,
    user_message: Union[str, list],
    model_name: str,
    tools_list: list | None,
    tool_message: Optional[list] = None,
    stream: bool = False,
) -> Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]:
    context_msg = get_prompt(model_name) + self.context.build(
        self.target.id, self.target.private
    )
    response = await make_chat_openai(
        client=self.client,
        msg=context_msg
        + [UserMessage(content=user_message).as_dict()]
        + (tool_message if tool_message else []),
        model_name=model_name,
        tools=tools_list if tools_list else None,
        stream=stream,
    )
    return response
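A sketch of a plain round trip through this method, assuming a `handler` built as in the `__init__` example and that `user_input` is the incoming `UniMsg`; the model name is a placeholder, and the reply location follows the OpenAI `ChatCompletion` schema:
python
usermsg = await handler.process_user_input(user_input, "gpt-4o")  # placeholder model name
completion = await handler.handle_single_chat(
    user_message=usermsg,
    model_name="gpt-4o",
    tools_list=None,  # no tools, non-streaming
)
reply_text = completion.choices[0].message.content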
async func handle_function_call(self, completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]], user_message: Union[str, list], model_name: str, tools_list: list)
Source code or view on GitHub
python
async def handle_function_call(
    self,
    completion: Union[ChatCompletion, AsyncStream[ChatCompletionChunk]],
    user_message: Union[str, list],
    model_name: str,
    tools_list: list,
):
    tool_msg = []
    if isinstance(completion, ChatCompletion):
        choice = completion.choices[0]
    else:
        raise ValueError('Unexpected completion type')
    tool_calls = choice.message.tool_calls
    tool_msg.append(choice.message)
    for tool_call in tool_calls:
        try:
            function_args = json.loads(tool_call.function.arguments)
        except json.JSONDecodeError:
            # Some models emit single-quoted pseudo-JSON; retry after swapping quotes
            function_args = json.loads(
                tool_call.function.arguments.replace("'", '"')
            )
        if 'placeholder' in function_args:
            del function_args['placeholder']
        logger.info(
            f"调用函数 {tool_call.function.name.replace('-', '.')}\n参数:"
            + '\n'.join([f'{k}={v}' for k, v in function_args.items()])
        )
        await UniMessage(
            f"调用函数 {tool_call.function.name.replace('-', '.')}\n参数:"
            + '\n'.join([f'{k}={v}' for k, v in function_args.items()])
        ).send()
        if caller := get_function_calls().get(tool_call.function.name):
            logger.debug(f'调用插件函数 {caller.full_name}')
            func_return = await caller.with_ctx(
                SessionContext(
                    bot=self.bot,
                    event=self.event,
                    matcher=self.matcher,
                    state=None,
                )
            ).call(**function_args)
        else:
            logger.error(f"未找到函数 {tool_call.function.name.replace('-', '.')}")
            func_return = f"未找到函数 {tool_call.function.name.replace('-', '.')}"
        tool_msg.append(
            ToolMessage(tool_call_id=tool_call.id, content=func_return).as_dict()
        )
    return await self.handle_common_chat(
        user_message=user_message,
        model_name=model_name,
        tools_list=tools_list,
        tool_message=tool_msg,
    )
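The `json.JSONDecodeError` fallback above exists because some models emit single-quoted pseudo-JSON for tool arguments. A standalone demo of that repair (runnable on its own; note it would mangle argument values that legitimately contain single quotes, a trade-off the handler accepts):
python
import json

raw = "{'city': 'Tokyo'}"  # invalid JSON, as some models emit it
try:
    args = json.loads(raw)
except json.JSONDecodeError:
    args = json.loads(raw.replace("'", '"'))  # naive repair, same as the handler
print(args)  # {'city': 'Tokyo'}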
async func handle_common_chat(self, user_message: Union[str, list], model_name: str, tools_list: list, stream: bool = False, tool_message: Optional[list] = None) -> Optional[Tuple[UserMessage, ChatCompletionMessage]]
Description: Handles a general chat flow
Source code or view on GitHub
python
async def handle_common_chat(
    self,
    user_message: Union[str, list],
    model_name: str,
    tools_list: list,
    stream: bool = False,
    tool_message: Optional[list] = None,
) -> Optional[Tuple[UserMessage, ChatCompletionMessage]]:
    global target_list
    if stream:
        response = await self.handle_stream_request(
            user_message=user_message,
            model_name=model_name,
            tools_list=tools_list,
            tools_message=tool_message,
        )
    else:
        response = await self.handle_single_chat(
            user_message=user_message,
            model_name=model_name,
            tools_list=tools_list,
            tool_message=tool_message,
        )
    if isinstance(response, ChatCompletion):
        choice = response.choices[0]
    else:
        raise ValueError('Unexpected response type')
    # If tool calls are present but finish_reason does not say so, optionally
    # patch it so the dispatch below still routes to the tool-call handler
    if choice.message.tool_calls is not None and config.marshoai_fix_toolcalls:
        choice.finish_reason = 'tool_calls'
    logger.info(f'完成原因:{choice.finish_reason}')
    if choice.finish_reason == CompletionsFinishReason.STOPPED:
        choice_msg_content, choice_msg_thinking, choice_msg_after = (
            extract_content_and_think(choice.message)
        )
        if choice_msg_thinking and config.marshoai_send_thinking:
            await UniMessage('思维链:\n' + choice_msg_thinking).send()
        if [self.target.id, self.target.private] not in target_list:
            target_list.append([self.target.id, self.target.private])
        if config.marshoai_enable_richtext_parse:
            await (await parse_richtext(str(choice_msg_content))).send(reply_to=True)
        else:
            await UniMessage(str(choice_msg_content)).send(reply_to=True)
        return (UserMessage(content=user_message), choice_msg_after)
    elif choice.finish_reason == CompletionsFinishReason.CONTENT_FILTERED:
        await UniMessage(
            '*已被内容过滤器过滤。请调整聊天内容后重试。'
        ).send(reply_to=True)
        return None
    elif choice.finish_reason == CompletionsFinishReason.TOOL_CALLS:
        return await self.handle_function_call(
            response, user_message, model_name, tools_list
        )
    else:
        await UniMessage(f'意外的完成原因:{choice.finish_reason}').send()
        return None
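On a normal stop, the method returns the `(UserMessage, ChatCompletionMessage)` pair, presumably so the caller can record both turns of the exchange. A hedged consumption sketch (the persistence step is an assumption for illustration, not the plugin's API):
python
result = await handler.handle_common_chat(
    user_message=usermsg,
    model_name="gpt-4o",  # placeholder model name
    tools_list=[],
)
if result is not None:
    user_msg, assistant_msg = result
    # e.g. append both turns to the conversation context here (hypothetical step)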
async func handle_stream_request(self, user_message: Union[str, list], model_name: str, tools_list: list, tools_message: Optional[list] = None) -> Union[ChatCompletion, None]
Description: Handles a streaming request
Source code or view on GitHub
python
async def handle_stream_request(
    self,
    user_message: Union[str, list],
    model_name: str,
    tools_list: list,
    tools_message: Optional[list] = None,
) -> Union[ChatCompletion, None]:
    response = await self.handle_single_chat(
        user_message=user_message,
        model_name=model_name,
        tools_list=None,  # note: the tool list is not forwarded for streaming requests
        tool_message=tools_message,
        stream=True,
    )
    if isinstance(response, AsyncStream):
        reasoning_contents = ''
        answer_contents = ''
        last_chunk = None
        is_first_token_appeared = False
        is_answering = False
        async for chunk in response:
            last_chunk = chunk
            if not is_first_token_appeared:
                logger.debug(f'{chunk.id}: 第一个 token 已出现')
                is_first_token_appeared = True
            if not chunk.choices:
                logger.info(f'Usage: {chunk.usage}')
            else:
                delta = chunk.choices[0].delta
                if (
                    hasattr(delta, 'reasoning_content')
                    and delta.reasoning_content is not None
                ):
                    reasoning_contents += delta.reasoning_content
                else:
                    if not is_answering:
                        logger.debug(
                            f'{chunk.id}: 思维链已输出完毕或无 reasoning_content 字段输出'
                        )
                        is_answering = True
                    if delta.content is not None:
                        answer_contents += delta.content
        # Rebuild a complete ChatCompletion from the accumulated deltas
        if last_chunk and last_chunk.choices:
            message = ChatCompletionMessage(
                content=answer_contents,
                role='assistant',
                tool_calls=last_chunk.choices[0].delta.tool_calls,
            )
            choice = Choice(
                finish_reason=last_chunk.choices[0].finish_reason,
                index=last_chunk.choices[0].index,
                message=message,
            )
            return ChatCompletion(
                id=last_chunk.id,
                choices=[choice],
                created=last_chunk.created,
                model=last_chunk.model,
                system_fingerprint=last_chunk.system_fingerprint,
                object='chat.completion',
                usage=last_chunk.usage,
            )
    return None
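Because the stream is collapsed back into a `ChatCompletion`, callers can treat streaming and non-streaming responses uniformly. A sketch, using a placeholder name for a model that emits `reasoning_content` deltas:
python
completion = await handler.handle_stream_request(
    user_message=usermsg,
    model_name="deepseek-reasoner",  # placeholder reasoning-capable model
    tools_list=[],
)
if completion is not None:
    print(completion.choices[0].message.content)  # the accumulated answer text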