import { Context } from "../../types"
import llm from "../../utils/llm"
import getChatHistory from "./chatHistory"
const agent = async (ctx: Context) => {
|
||
const {
|
||
logger,
|
||
requestId,
|
||
larkCard,
|
||
larkService,
|
||
appInfo,
|
||
larkBody: { messageId, msgText, chatId, mentions, rawMsgText, openId },
|
||
} = ctx
|
||
const cardGender = larkCard.child("groupAgent")
|
||
// 回复一个loading的卡片
|
||
const {
|
||
data: { message_id },
|
||
} = await larkService.message.replyCard(
|
||
messageId,
|
||
cardGender.genPendingCard("分析中,请稍等...")
|
||
)
|
||
const updateCard = (content: any) =>
|
||
larkService.message.update(message_id, content)
|
||
|
||
// 使用大模型解析用户输入
|
||
const { startTime, endTime } = await llm.timeParser(msgText, requestId)
|
||
logger.info(`Parsed time: startTime: ${startTime}, endTime: ${endTime}`)
|
||
// 更新卡片
|
||
await updateCard(cardGender.genPendingCard("正在爬楼中,请稍等..."))
|
||
// 获取聊天记录
|
||
const { messages: chatHistory, mentions: historyMentions } =
|
||
await getChatHistory(ctx, {
|
||
chatId,
|
||
startTime,
|
||
endTime,
|
||
mentions,
|
||
senderOpenId: openId,
|
||
excludedMessageIds: [message_id, messageId],
|
||
excludeMentions: [appInfo.appName],
|
||
})
|
||
// 如果没有聊天记录,返回错误信息
|
||
if (chatHistory.length === 0) {
|
||
logger.info("No chat history found")
|
||
return await updateCard(cardGender.genErrorCard("未找到聊天记录"))
|
||
}
|
||
logger.debug(`Chat history: ${JSON.stringify(chatHistory)}`)
|
||
|
||
// 根据Mention,拼装原始消息
|
||
let userInput = rawMsgText.trim()
|
||
for (const mention of mentions ?? []) {
|
||
if (mention.name !== appInfo.appName) {
|
||
userInput = userInput.replace(mention.key, `@${mention.name}`)
|
||
} else {
|
||
userInput = userInput.replace(mention.key, "")
|
||
}
|
||
}
|
||
|
||
// 调用大模型
|
||
try {
|
||
await updateCard(cardGender.genPendingCard("LLM输出中,请稍等..."))
|
||
const llmRes = (await llm.invoke(
|
||
"groupAgent",
|
||
{
|
||
userName: historyMentions.get(openId || "") ?? "用户",
|
||
userInput,
|
||
chatHistory: JSON.stringify(chatHistory),
|
||
time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
|
||
},
|
||
requestId
|
||
)) as string
|
||
logger.info(
|
||
`LLM invoked successfully, see detail: http://langfuse.ai.srv/project/cm1j2tkj9001gukrgdvc1swuw/sessions/${requestId}`
|
||
)
|
||
const cleanedLlmRes = llmRes
|
||
.replace(/```(\w+)?\n([\s\S]*?)```/g, "$2")
|
||
.trim()
|
||
await updateCard(cardGender.genSuccessCard(cleanedLlmRes))
|
||
} catch (error: any) {
|
||
logger.error(`Failed to invoke llm: ${error.message}`)
|
||
await updateCard(cardGender.genErrorCard("LLM调用失败: " + error.message))
|
||
}
|
||
}
|
||
|
||
export default agent
|