egg_server/utils/llm/base.ts

import { ChatOpenAI } from "@langchain/openai"
import CallbackHandler, { Langfuse } from "langfuse-langchain"
import { APP_CONFIG } from "../../constant/config"
/**
 * Get the chat model.
 * @param temperature Sampling temperature (defaults to 0)
 * @returns The configured ChatOpenAI instance
 */
export const getModel = (temperature = 0) => {
  return new ChatOpenAI(
    {
      temperature,
      model: APP_CONFIG.TEXT_LLM_MODEL,
      apiKey: APP_CONFIG.TEXT_LLM_API_KEY,
    },
    {
      baseURL: APP_CONFIG.TEXT_LLM_BASE_URL,
    }
  )
}
/**
 * Get the Langfuse client and LangChain callback handler for tracing.
 * @param name Trace name
 * @param requestId Request id, used as the Langfuse session id
 * @returns The Langfuse callback handler and client
 */
export const getLangfuse = async (name: string, requestId: string) => {
  const langfuseParams = {
    publicKey: APP_CONFIG.LANGFUSE_PK,
    secretKey: APP_CONFIG.LANGFUSE_SK,
    baseUrl: APP_CONFIG.LANGFUSE_BASE_URL,
    sessionId: requestId,
    name,
  }
  return {
    langfuseHandler: new CallbackHandler(langfuseParams),
    langfuse: new Langfuse(langfuseParams),
  }
}
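
/**
 * Usage sketch (added for illustration, not part of the original file): wires
 * getModel and getLangfuse together so the LLM call is traced in Langfuse.
 * The trace name, prompt, and temperature below are assumed placeholder values.
 */
export const exampleTracedCall = async (requestId: string) => {
  const { langfuseHandler, langfuse } = await getLangfuse("example-trace", requestId)
  const model = getModel(0.7)
  // Passing the handler via callbacks records this invocation as a Langfuse trace.
  const response = await model.invoke("Say hello in one sentence.", {
    callbacks: [langfuseHandler],
  })
  // Flush queued events so nothing is lost if the process exits right after.
  await langfuse.flushAsync()
  return response.content
}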