From ae759108cfd7341dabd2d1a155eba574e6858271 Mon Sep 17 00:00:00 2001
From: zhaoyingbo
Date: Thu, 17 Oct 2024 09:40:27 +0000
Subject: [PATCH] =?UTF-8?q?feat(group-agent):=20=E5=A4=A7=E6=A8=A1?=
 =?UTF-8?q?=E5=9E=8B=E7=9B=91=E6=8E=A7=E5=A2=9E=E5=8A=A0=E5=8F=82=E6=95=B0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 routes/bot/groupAgent/index.ts | 17 +++++++++++------
 test/parseGroupAgentQuery.ts   |  4 +++-
 utils/llm.ts                   | 17 +++++++++++++----
 3 files changed, 27 insertions(+), 11 deletions(-)

diff --git a/routes/bot/groupAgent/index.ts b/routes/bot/groupAgent/index.ts
index 508d66f..7f8f637 100644
--- a/routes/bot/groupAgent/index.ts
+++ b/routes/bot/groupAgent/index.ts
@@ -93,7 +93,7 @@ const sendGroupReport = async (
     mentions?: LarkEvent.Mention[]
   }
 ) => {
-  const { larkService, logger, larkCard } = ctx
+  const { larkService, logger, larkCard, requestId } = ctx
   const cardGender = larkCard.child("groupAgent")
   const updateCard = (content: any) =>
     larkService.message.update(messageId, content)
@@ -117,10 +117,14 @@
   logger.debug(`Chat history: ${JSON.stringify(chatHistory)}`)
 
   try {
-    const llmRes = await llm.invoke(functionId, {
-      chatHistory: JSON.stringify(chatHistory),
-      time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
-    })
+    const llmRes = await llm.invoke(
+      functionId,
+      {
+        chatHistory: JSON.stringify(chatHistory),
+        time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
+      },
+      requestId
+    )
     // 记录大模型返回结果后的时间戳
     const processEnd = Date.now()
     // 计算时间差并存储在processingTime变量中,以秒为单位
@@ -173,6 +177,7 @@ const parseGroupAgentQuery = async (
     larkService,
     larkCard,
     logger,
+    requestId,
   } = ctx
 
   const cardGender = larkCard.child("groupAgent")
@@ -199,7 +204,7 @@
 
   // 使用大模型解析用户输入
   const { chatId, chatName, functionName, functionId, startTime, endTime } =
-    await llm.parseGroupAgentQuery(msgText, groupInfo, functionInfo)
+    await llm.parseGroupAgentQuery(msgText, groupInfo, functionInfo, requestId)
   logger.info(
     `Parsed group agent query: chatId: ${chatId}, chatName: ${chatName}, functionName: ${functionName}, functionId: ${functionId}, startTime: ${startTime}, endTime: ${endTime}`
   )
diff --git a/test/parseGroupAgentQuery.ts b/test/parseGroupAgentQuery.ts
index ec91f08..6f4d668 100644
--- a/test/parseGroupAgentQuery.ts
+++ b/test/parseGroupAgentQuery.ts
@@ -20,4 +20,6 @@ const functionInfo = JSON.stringify([
 
 const userInput = "你好"
 
-llm.parseGroupAgentQuery(userInput, groupInfo, functionInfo).then(console.log)
+llm
+  .parseGroupAgentQuery(userInput, groupInfo, functionInfo, "localTest")
+  .then(console.log)
diff --git a/utils/llm.ts b/utils/llm.ts
index 70d1364..be9a62d 100644
--- a/utils/llm.ts
+++ b/utils/llm.ts
@@ -9,11 +9,13 @@ import db from "../db"
  * 获取Langfuse
  * @returns
  */
-const getLangfuse = async () => {
+const getLangfuse = async (name: string, requestId: string) => {
   const langfuseParams = {
     publicKey: await db.appConfig.getLangfusePk(),
     secretKey: await db.appConfig.getLangfuseSk(),
     baseUrl: "http://langfuse.c5-cloudml.xiaomi.srv",
+    sessionId: requestId,
+    name,
   }
   return {
     langfuseHandler: new CallbackHandler(langfuseParams),
@@ -74,14 +76,19 @@ const groupAgentConfig = z.object({
  * @param userInput 用户输入
  * @param groupInfo 群聊信息
  * @param functionInfo 功能信息
+ * @param requestId 请求ID
  * @returns
  */
 const parseGroupAgentQuery = async (
   userInput: string,
   groupInfo: string,
-  functionInfo: string
+  functionInfo: string,
+  requestId: string
 ) => {
-  const { langfuseHandler } = await getLangfuse()
+  const { langfuseHandler } = await getLangfuse(
+    "parseGroupAgentQuery",
+    requestId
+  )
   const model = await getModel("qwen-72b-instruct-int4/v1")
   const structuredLlm = model.withStructuredOutput(groupAgentConfig, {
     name: "groupAgent",
@@ -121,15 +128,17 @@ const parseGroupAgentQuery = async (
  * 调用LLM模型
  * @param promptName 提示Key
  * @param variables 变量
+ * @param requestId 请求ID
  * @param temperature 温度
  * @returns
  */
 const invoke = async (
   promptName: string,
   variables: Record,
+  requestId: string,
   temperature = 0
 ) => {
-  const { langfuse, langfuseHandler } = await getLangfuse()
+  const { langfuse, langfuseHandler } = await getLangfuse("invoke", requestId)
   const prompt = await langfuse.getPrompt(promptName)
   const config = prompt.config as { modelName: keyof typeof modelMap }
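
Note on how the pieces fit together after this patch: each entry point passes its request ID down to getLangfuse, which hands it to Langfuse as the trace sessionId (plus a name identifying the calling helper, "invoke" or "parseGroupAgentQuery"), so every LLM call made while serving one bot request is grouped under a single session. The sketch below assembles the patched helper from the hunks above; the import lines and the langfuse field of the return value are not visible in this diff, so the langfuse-langchain / langfuse imports, the Langfuse prompt client, and the invokeWithTracing wrapper are assumptions for illustration, not code from the repository.

import { CallbackHandler } from "langfuse-langchain"
import { Langfuse } from "langfuse"

import db from "../db"

// Patched helper: requestId becomes the Langfuse sessionId, so traces from one
// bot request are grouped together; name records which helper opened the trace.
const getLangfuse = async (name: string, requestId: string) => {
  const langfuseParams = {
    publicKey: await db.appConfig.getLangfusePk(),
    secretKey: await db.appConfig.getLangfuseSk(),
    baseUrl: "http://langfuse.c5-cloudml.xiaomi.srv",
    sessionId: requestId,
    name,
  }
  return {
    // LangChain callback handler that reports model calls to Langfuse
    langfuseHandler: new CallbackHandler(langfuseParams),
    // Assumed: the prompt-management client that invoke() destructures as
    // `langfuse` and uses for langfuse.getPrompt(promptName)
    langfuse: new Langfuse(langfuseParams),
  }
}

// Hypothetical wrapper showing the call pattern used by invoke() above:
// the request ID is threaded through unchanged on every LLM call.
const invokeWithTracing = async (promptName: string, requestId: string) => {
  const { langfuse, langfuseHandler } = await getLangfuse("invoke", requestId)
  const prompt = await langfuse.getPrompt(promptName)
  // ...compile the prompt and run the chain with { callbacks: [langfuseHandler] }
  // so the resulting trace is attached to this request's session...
  return { prompt, langfuseHandler }
}

Reusing the per-request ID as the sessionId, rather than generating a fresh id per call, is what lets related calls (query parsing and report generation for the same group-agent request) appear together in the Langfuse UI.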