// egg_server/utils/llm/index.ts
// Snapshot: 2024-11-25 11:08:54 +00:00 (106 lines, 2.8 KiB, TypeScript)
// NOTE(review): the original paste included web-viewer chrome ("Raw Blame
// History") and an ambiguous-Unicode warning — the prompt strings below
// intentionally contain fullwidth CJK punctuation; do not "normalize" them.

import { PromptTemplate } from "@langchain/core/prompts"
import { z } from "zod"
import { getLangfuse, getModel } from "./base"
// Zod schema for the structured output extracted from a GroupAgent query.
// The describe() strings are runtime strings sent to the model as field
// hints, so they must stay in Chinese — only these comments are English.
const groupAgentConfig = z.object({
chatId: z.string().describe("群聊ID"), // group chat ID
chatName: z.string().describe("群聊名称"), // group chat name
functionId: z.string().describe("功能ID"), // function ID
functionName: z.string().describe("功能名称"), // function name
startTime: z.string().describe("开始时间,格式为 YYYY-MM-DD HH:mm:ss"), // start time, "YYYY-MM-DD HH:mm:ss"
endTime: z.string().describe("结束时间,格式为 YYYY-MM-DD HH:mm:ss"), // end time, "YYYY-MM-DD HH:mm:ss"
})
/**
 * Parse a free-form GroupAgent user query into structured fields
 * (chat ID/name, function ID/name, start/end time) via the LLM's
 * structured-output mode constrained by `groupAgentConfig`.
 *
 * @param userInput - Raw user query text.
 * @param groupInfo - Serialized list of all available group chats.
 * @param functionInfo - Serialized list of all supported functions.
 * @param requestId - Request ID used to tag the Langfuse trace.
 * @returns The parsed fields matching `groupAgentConfig`.
 */
const parseGroupAgentQuery = async (
userInput: string,
groupInfo: string,
functionInfo: string,
requestId: string
) => {
const { langfuseHandler } = await getLangfuse(
"parseGroupAgentQuery",
requestId
)
// Bind the zod schema so the model is forced to emit the structured fields.
const baseModel = await getModel()
const extractor = baseModel.withStructuredOutput(groupAgentConfig, {
name: "groupAgent",
})
// Strip backticks from user input so it cannot break out of the fenced block.
const sanitizedInput = userInput.replaceAll("`", " ")
const currentTime = new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" })
const prompt = `
当前时间为:${currentTime}
所有可用群组信息:${groupInfo}
所有支持功能信息:${functionInfo}
你是一个专业的语义解析工程师给定以下用户输入帮我解析出群聊ID、群聊名称、功能ID、功能名称、开始时间和结束时间。
默认功能为总结消息,时间如果用户没有输入则留空
返回格式定义为:
\`\`\`
{
"chatId": "oc_ef98c2a9229657f99d4ef573a30fe91c",
"chatName": "MIAI-FE 人工智能部-前端组",
"functionId": "summary-qwen-72b-instruct-int4",
"functionName": "总结消息",
"startTime": "2022-01-01 00:00:00",
"endTime": "2022-01-01 23:59:59"
}
\`\`\`
如果不包含对应内容,请返回空值。
用户输入:
\`\`\`
${sanitizedInput}
\`\`\`
`
return await extractor.invoke(prompt, {
callbacks: [langfuseHandler],
})
}
/**
 * Invoke the LLM with a prompt template managed in Langfuse.
 *
 * @param promptName - Key of the prompt registered in Langfuse.
 * @param variables - Template variables substituted into the prompt.
 *   Typed `unknown` (not `any`) so callers keep type safety; any value is
 *   still accepted, so this is backward-compatible.
 * @param requestId - Request ID used to tag the Langfuse trace.
 * @param temperature - Sampling temperature; defaults to 0 for
 *   deterministic output.
 * @returns The model response's `content`.
 */
const invoke = async (
promptName: string,
variables: Record<string, unknown>,
requestId: string,
temperature = 0
) => {
const { langfuse, langfuseHandler } = await getLangfuse("invoke", requestId)
// Fetch the prompt definition from Langfuse and wrap it as a LangChain template.
const prompt = await langfuse.getPrompt(promptName)
const langchainTextPrompt = PromptTemplate.fromTemplate(
prompt.getLangchainPrompt()
).withConfig({
// Links the generated trace back to the Langfuse prompt version.
metadata: { langfusePrompt: prompt },
})
const chain = langchainTextPrompt.pipe(await getModel(temperature))
const { content } = await chain.invoke(variables, {
callbacks: [langfuseHandler],
})
return content
}
// Aggregate the LLM helpers into the module's default export.
export default {
parseGroupAgentQuery,
invoke,
}