import { Context } from "../../types"
import llm from "../../utils/llm"
import getChatHistory from "./chatHistory"
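
/**
 * Group-chat analysis agent (summary added for readability; inferred from the
 * code below): reply to the triggering message with a pending card, ask the
 * LLM to parse a time range out of the user's text, pull the chat history for
 * that range, feed it to the "groupAgent" LLM call, and finally update the
 * same card with the result, or with an error card on failure.
 */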
const agent = async (ctx: Context.Data) => {
  const {
    logger,
    requestId,
    larkCard,
    larkService,
    larkBody: { messageId, msgText, chatId, mentions },
  } = ctx
  // Card generator scoped to the "groupAgent" card templates
  const cardGender = larkCard.child("groupAgent")
  // Reply with a pending card ("analyzing, please wait...") and keep its
  // message_id so the same card can be updated as the agent progresses
  const {
    data: { message_id },
  } = await larkService.message.replyCard(
    messageId,
    cardGender.genPendingCard("分析中,请稍等...")
  )
  // Helper: update the previously replied card in place
  const updateCard = (content: any) =>
    larkService.message.update(message_id, content)

  // Use the LLM to parse the requested time range out of the user's input
  const { startTime, endTime } = await llm.timeParser(msgText, requestId)
  logger.info(`Parsed time: startTime: ${startTime}, endTime: ${endTime}`)
  // Update the card to show progress ("fetching chat history, please wait...")
  updateCard(cardGender.genPendingCard("正在爬楼中,请稍等..."))
  // Fetch the chat history for the parsed time range
  const chatHistory = await getChatHistory(ctx, {
    chatId,
    startTime,
    endTime,
    mentions,
  })
  // If no chat history was found, bail out with an error card ("no chat history found")
  if (chatHistory.length === 0) {
    logger.info("No chat history found")
    return await updateCard(cardGender.genErrorCard("未找到聊天记录"))
  }
  logger.debug(`Chat history: ${JSON.stringify(chatHistory)}`)

  // Invoke the LLM with the user's question, the collected chat history, and
  // the current time (Asia/Shanghai) as context
  try {
    const llmRes = await llm.invoke(
      "groupAgent",
      {
        userInput: msgText,
        chatHistory: JSON.stringify(chatHistory),
        time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
      },
      requestId
    )
    logger.info(
      `LLM invoked successfully, see detail: http://langfuse.ai.srv/project/cm1j2tkj9001gukrgdvc1swuw/sessions/${requestId}`
    )
    await updateCard(cardGender.genSuccessCard(llmRes))
  } catch (error: any) {
    // On failure, surface the error on the card ("LLM call failed: ...")
    logger.error(`Failed to invoke llm: ${error.message}`)
    await updateCard(cardGender.genErrorCard("LLM调用失败: " + error.message))
  }
}

export default agent
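
// Usage sketch (an assumption, not part of this module): the surrounding bot
// is expected to build a Context.Data for an incoming group message and hand
// it to this agent from its message-event handler, for example:
//
//   import agent from "./groupAgent/agent"  // hypothetical path
//
//   const onGroupMessage = async (ctx: Context.Data) => {
//     await agent(ctx)
//   }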