feat(group-agent): add request parameters to LLM monitoring
parent 4e08d68af8
commit ae759108cf
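Summary: every code path that ends up calling the LLM now carries the caller's requestId, and getLangfuse forwards it to Langfuse as the trace sessionId (together with a per-call trace name), so all model calls made while handling one request are grouped under a single Langfuse session. A condensed view of the new utils/llm.ts signatures, derived from the diff below (types only, bodies omitted; return types left as unknown here):

// Derived from the diff below; not part of the commit itself.
type GetLangfuse = (name: string, requestId: string) => Promise<unknown>
type ParseGroupAgentQuery = (
  userInput: string,
  groupInfo: string,
  functionInfo: string,
  requestId: string
) => Promise<unknown>
type Invoke = (
  promptName: string,
  variables: Record<string, any>,
  requestId: string,
  temperature?: number
) => Promise<unknown>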
@@ -93,7 +93,7 @@ const sendGroupReport = async (
     mentions?: LarkEvent.Mention[]
   }
 ) => {
-  const { larkService, logger, larkCard } = ctx
+  const { larkService, logger, larkCard, requestId } = ctx
   const cardGender = larkCard.child("groupAgent")
   const updateCard = (content: any) =>
     larkService.message.update(messageId, content)
@@ -117,10 +117,14 @@ const sendGroupReport = async (
   logger.debug(`Chat history: ${JSON.stringify(chatHistory)}`)

   try {
-    const llmRes = await llm.invoke(functionId, {
-      chatHistory: JSON.stringify(chatHistory),
-      time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
-    })
+    const llmRes = await llm.invoke(
+      functionId,
+      {
+        chatHistory: JSON.stringify(chatHistory),
+        time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
+      },
+      requestId
+    )
     // Timestamp taken after the LLM returns its result
     const processEnd = Date.now()
     // Compute the elapsed time and store it in processingTime, in seconds
@@ -173,6 +177,7 @@ const parseGroupAgentQuery = async (
     larkService,
     larkCard,
     logger,
+    requestId,
   } = ctx
   const cardGender = larkCard.child("groupAgent")

@@ -199,7 +204,7 @@ const parseGroupAgentQuery = async (

   // Use the LLM to parse the user's input
   const { chatId, chatName, functionName, functionId, startTime, endTime } =
-    await llm.parseGroupAgentQuery(msgText, groupInfo, functionInfo)
+    await llm.parseGroupAgentQuery(msgText, groupInfo, functionInfo, requestId)
   logger.info(
     `Parsed group agent query: chatId: ${chatId}, chatName: ${chatName}, functionName: ${functionName}, functionId: ${functionId}, startTime: ${startTime}, endTime: ${endTime}`
   )
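Taken together, the two hunks above mean the controller's requestId is forwarded to both LLM entry points. A condensed sketch of the flow after this commit (not part of the diff; llm, groupInfo, and functionInfo are the values already used in this handler):

const runGroupAgent = async (requestId: string, msgText: string, chatHistory: unknown[]) => {
  // Same requestId on both calls, so both traces land in one Langfuse session.
  const { functionId } = await llm.parseGroupAgentQuery(msgText, groupInfo, functionInfo, requestId)
  return llm.invoke(
    functionId,
    {
      chatHistory: JSON.stringify(chatHistory),
      time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
    },
    requestId
  )
}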
@@ -20,4 +20,6 @@ const functionInfo = JSON.stringify([

 const userInput = "你好"

-llm.parseGroupAgentQuery(userInput, groupInfo, functionInfo).then(console.log)
+llm
+  .parseGroupAgentQuery(userInput, groupInfo, functionInfo, "localTest")
+  .then(console.log)
utils/llm.ts (17 changed lines)
@@ -9,11 +9,13 @@ import db from "../db"
  * Get Langfuse
  * @returns
  */
-const getLangfuse = async () => {
+const getLangfuse = async (name: string, requestId: string) => {
   const langfuseParams = {
     publicKey: await db.appConfig.getLangfusePk(),
     secretKey: await db.appConfig.getLangfuseSk(),
     baseUrl: "http://langfuse.c5-cloudml.xiaomi.srv",
+    sessionId: requestId,
+    name,
   }
   return {
     langfuseHandler: new CallbackHandler(langfuseParams),
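For context, a minimal sketch of how the handler returned above is consumed; this is an assumption about the surrounding utils/llm.ts code (not shown in this diff), based on the usual langfuse-langchain pattern of passing CallbackHandler as a LangChain callback so each generation is recorded under sessionId = requestId with the given trace name:

const tracedCall = async (requestId: string) => {
  const { langfuseHandler } = await getLangfuse("parseGroupAgentQuery", requestId)
  const model = await getModel("qwen-72b-instruct-int4/v1")
  // The callback reports this generation to Langfuse under the request's session.
  return model.invoke("ping", { callbacks: [langfuseHandler] })
}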
@@ -74,14 +76,19 @@ const groupAgentConfig = z.object({
  * @param userInput User input
  * @param groupInfo Group chat info
  * @param functionInfo Function info
+ * @param requestId Request ID
  * @returns
  */
 const parseGroupAgentQuery = async (
   userInput: string,
   groupInfo: string,
-  functionInfo: string
+  functionInfo: string,
+  requestId: string
 ) => {
-  const { langfuseHandler } = await getLangfuse()
+  const { langfuseHandler } = await getLangfuse(
+    "parseGroupAgentQuery",
+    requestId
+  )
   const model = await getModel("qwen-72b-instruct-int4/v1")
   const structuredLlm = model.withStructuredOutput(groupAgentConfig, {
     name: "groupAgent",
@@ -121,15 +128,17 @@ const parseGroupAgentQuery = async (
  * Invoke the LLM model
  * @param promptName Prompt key
  * @param variables Variables
+ * @param requestId Request ID
  * @param temperature Temperature
  * @returns
  */
 const invoke = async (
   promptName: string,
   variables: Record<string, any>,
+  requestId: string,
   temperature = 0
 ) => {
-  const { langfuse, langfuseHandler } = await getLangfuse()
+  const { langfuse, langfuseHandler } = await getLangfuse("invoke", requestId)
   const prompt = await langfuse.getPrompt(promptName)
   const config = prompt.config as { modelName: keyof typeof modelMap }
