import { Context } from "../../types"
import llm from "../../utils/llm"
import { cleanLLMRes } from "../../utils/llm/base"
import getChatHistory from "./chatHistory"
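/**
 * Group-chat agent: parses a time range from the user's message, fetches the
 * chat history for that range, then asks the LLM to answer based on it,
 * updating a Lark card with progress along the way.
 */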
const agent = async (ctx: Context) => {
  const {
    logger,
    requestId,
    larkCard,
    larkService: { message },
    appInfo,
    larkBody: { messageId, msgText, chatId, mentions, rawMsgText, openId },
  } = ctx
  const cardGender = larkCard.child("groupAgent")
  const loadingMessageId = await message.updateOrReply(
    cardGender.genPendingCard("正在分析时间区间,请稍等...")
  )
  // Use the LLM to parse the time range from the user's input
  const { startTime, endTime } = await llm.timeParser(msgText, requestId)
  logger.info(`Parsed time: startTime: ${startTime}, endTime: ${endTime}`)
  // Update the progress card
  await message.updateOrReply(
    cardGender.genPendingCard("正在爬楼中,请稍等...")
  )
  // Fetch the chat history for the parsed time range
  const { messages: chatHistory, mentions: historyMentions } =
    await getChatHistory(ctx, {
      chatId,
      startTime,
      endTime,
      mentions,
      senderOpenId: openId,
      excludedMessageIds: [loadingMessageId, messageId],
      excludeMentions: [appInfo.appName],
    })
  // If there is no chat history, report an error and stop
  if (chatHistory.length === 0) {
    logger.info("No chat history found")
    await message.updateOrReply(cardGender.genErrorCard("未找到聊天记录"))
    return
  }
  logger.debug(`Chat history: ${JSON.stringify(chatHistory)}`)
  // Reassemble the original message text from its mentions
  let userInput = rawMsgText.trim()
  for (const mention of mentions ?? []) {
    if (mention.name !== appInfo.appName) {
      userInput = userInput.replace(mention.key, `@${mention.name}`)
    } else {
      // Strip the mention of the bot itself
      userInput = userInput.replace(mention.key, "")
    }
  }
  // Invoke the LLM
  try {
    await message.updateOrReply(
      cardGender.genPendingCard("LLM输出中,请稍等...")
    )
    const llmRes = (await llm.invoke(
      "groupAgent",
      {
        userName: historyMentions.get(openId || "") ?? "用户",
        userInput,
        chatHistory: JSON.stringify(chatHistory),
        time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
      },
      requestId
    )) as string
    logger.info(
      `LLM invoked successfully, see detail: http://langfuse.ai.srv/project/cm1j2tkj9001gukrgdvc1swuw/sessions/${requestId}`
    )
    const cleanedLlmRes = cleanLLMRes(llmRes)
    await message.updateOrReply(cardGender.genSuccessCard(cleanedLlmRes))
  } catch (error: any) {
    logger.error(`Failed to invoke llm: ${error.message}`)
    await message.updateOrReply(
      cardGender.genErrorCard("LLM调用失败: " + error.message)
    )
  }
}

export default agent