feat: 修改对话模型
All checks were successful
Egg Server MIflow / build-image (push) Successful in 48s

This commit is contained in:
zhaoyingbo 2024-09-27 01:33:41 +00:00
parent 9baaec4412
commit b225af1d3e
9 changed files with 61 additions and 46 deletions

4
.env.dev Normal file
View File

@@ -0,0 +1,4 @@
NODE_ENV=dev
http_proxy=http://proxy.pt.xiaomi.com:80
https_proxy=http://proxy.pt.xiaomi.com:80
no_proxy=localhost,.xiaomiwh.cn,.mioffice.cn,.xiaomi.com,.imoaix.cn,.deepseek.com,.gpt.ge,.xiaomi.srv

4
.env.prod Normal file
View File

@@ -0,0 +1,4 @@
NODE_ENV=production
http_proxy=http://proxy.pt.xiaomi.com:80
https_proxy=http://proxy.pt.xiaomi.com:80
no_proxy=localhost,.xiaomiwh.cn,.mioffice.cn,.xiaomi.com,.imoaix.cn,.deepseek.com,.gpt.ge,.xiaomi.srv

View File

@@ -8,6 +8,7 @@
"deepseek",
"devcontainer",
"devcontainers",
"dotenvx",
"eamodio",
"EOPK",
"esbenp",
@@ -21,6 +22,7 @@
"openai",
"openchat",
"PWTHWP",
"qwen",
"tseslint",
"userid",
"wlpbbgiky",

BIN
bun.lockb

Binary file not shown.

View File

@@ -3,8 +3,8 @@
"module": "index.ts",
"type": "module",
"scripts": {
"start": "NODE_ENV=production bun run index.ts",
"dev": "NODE_ENV=dev bun run index.ts --watch",
"start": "dotenvx run -f .env.prod -- bun run index.ts",
"dev": "dotenvx run -f .env.dev -- bun run index.ts --watch",
"lint": "eslint --fix .",
"prepare": "husky",
"prettier": "prettier --write ."
@@ -34,6 +34,7 @@
"typescript": "^5.5.4"
},
"dependencies": {
"@dotenvx/dotenvx": "^1.14.1",
"@egg/hooks": "^1.2.0",
"@egg/lark-msg-tool": "^1.4.0",
"@egg/logger": "^1.4.3",

View File

@@ -176,10 +176,10 @@ const manageGroupMsg = async (ctx: Context.Data) => {
}
logger.debug(`Chat history: ${JSON.stringify(chatHistory)}`)
try {
const llmRes = await llm.invokeLLM4ChatHistory(
functionId,
JSON.stringify(chatHistory)
)
const llmRes = await llm.invoke(functionId, {
chatHistory: JSON.stringify(chatHistory),
time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
})
// 记录大模型返回结果后的时间戳
const endTime = Date.now()
// 计算时间差并存储在processingTime变量中以秒为单位

View File

@@ -1,4 +1,5 @@
import { getIsActionMsg, getIsEventMsg } from "@egg/lark-msg-tool"
import { sleep } from "bun"
import { Context } from "../../types"
import { manageActionMsg } from "./actionMsg"
@@ -28,6 +29,8 @@ export const manageBotReq = async (ctx: Context.Data): Promise<Response> => {
// 处理Action消息
if (getIsActionMsg(body)) manageActionMsg(ctx)
// 模拟处理时间
await sleep(1000)
// 返回成功响应
return ctx.genResp.ok()
}

View File

@@ -1,10 +0,0 @@
import llm, { LlmPromptType } from "../utils/llm"
const res = await llm.invokeLLM4ChatHistory(
LlmPromptType.summary,
`
[{"body":{"content":"{"text":"1"}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727172145642","deleted":false,"message_id":"om_efafd5da2a6307056779a81ca7ab7007","msg_type":"text","sender":{"id":"ou_470ac13b8b50fc472d9d8ee71e03de26","id_type":"open_id","sender_type":"user","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727172145642","updated":false},{"body":{"content":"{"title":"🍳使 ","elements":[[{"tag":"text","text":"ChatIdoc_8c789ce8f4ecc6695bb63ca6ec4c61ea"}],[{"tag":"text","text":""},{"tag":"text","text":"n"},{"tag":"text","text":"/id"},{"tag":"text","text":" chat_idn"},{"tag":"text","text":"/ci"},{"tag":"text","text":" 线n"},{"tag":"text","text":"share "},{"tag":"text","text":" 使AI总结您的分享并收录到飞书表格中n示例share https://lacus.site 嘤博的古早博客欢迎来看hhh"}],[{"tag":"hr"}],[{"tag":"note","elements":[{"tag":"img","image_key":"img_v2_19db22c1-0030-434b-9b54-2a53b99c5f3l"},{"tag":"text","text":"有问题请及时联系zhaoyingbo"}]}]]}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727172146339","deleted":false,"message_id":"om_c9d14a9f1280f923143f39c42c504c41","msg_type":"interactive","sender":{"id":"cli_a1eff35b43b89063","id_type":"app_id","sender_type":"app","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727172146339","updated":false},{"body":{"content":"{"text":"1"}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727173483485","deleted":false,"message_id":"om_1371a30b8e9a221624a8972154341f2b","msg_type":"text","sender":{"id":"ou_470ac13b8b50fc472d9d8ee71e03de26","id_type":"open_id","sender_type":"user","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727173483485","updated":false},{"body":{"content":"{"text":"/group"}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727173655427","deleted":false,"message_id":"om_1f90551b63f5b4b3c50125fbf3b1a972","msg_type":"text","sender":{"id":"ou_470ac13b8b50fc472d9d8ee71e03de26","id_type":"open_id","sender_type":"user","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727173655427","
updated":false},{"body":{"content":"{"text":"/group"}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727173663085","deleted":false,"message_id":"om_5df8767493047efbc4f4b480befc6a3f","msg_type":"text","sender":{"id":"ou_470ac13b8b50fc472d9d8ee71e03de26","id_type":"open_id","sender_type":"user","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727173663085","updated":false},{"body":{"content":"{"text":"/group"}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727173673613","deleted":false,"message_id":"om_8ec0d495ea499ac262c8ce43fc382600","msg_type":"text","sender":{"id":"ou_470ac13b8b50fc472d9d8ee71e03de26","id_type":"open_id","sender_type":"user","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727173673613","updated":false},{"body":{"content":"{"text":"/group"}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727173681855","deleted":false,"message_id":"om_5f1363e4e0bc2fa2f055e3e4234010a0","msg_type":"text","sender":{"id":"ou_470ac13b8b50fc472d9d8ee71e03de26","id_type":"open_id","sender_type":"user","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727173681855","updated":false},{"body":{"content":"{"text":"/group"}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727173691490","deleted":false,"message_id":"om_c219de6f1c51793be785f5da1623c560","msg_type":"text","sender":{"id":"ou_470ac13b8b50fc472d9d8ee71e03de26","id_type":"open_id","sender_type":"user","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727173691490","updated":false},{"body":{"content":"{"text":"/group"}"},"chat_id":"oc_8c789ce8f4ecc6695bb63ca6ec4c61ea","create_time":"1727173696926","deleted":false,"message_id":"om_191fadbddae7e56ce5d6d659b3ce965b","msg_type":"text","sender":{"id":"ou_470ac13b8b50fc472d9d8ee71e03de26","id_type":"open_id","sender_type":"user","tenant_key":"2ee61fe50f4f1657"},"update_time":"1727173696926","updated":false}]
`
)
console.log(res)

View File

@@ -21,15 +21,32 @@ const getLangfuse = async () => {
}
}
const modelMap = {
"deepseek-chat": {
model: "deepseek-chat",
apiKey: "",
baseURL: "http://10.38.214.162:8003/v1",
},
"qwen2-72b-instruct-int4": {
model: "qwen2-72b-instruct-int4",
apiKey: "",
baseURL: "http://10.38.214.206:8000/v1",
},
}
/**
* Deepseek模型
*
* @param modelName
* @param temperature
*/
const getDeepseekModel = async (temperature = 0) => {
const model = "deepseek-chat"
const apiKey = await db.appConfig.getDeepseekApiKey()
const baseURL = "https://api.deepseek.com"
return new ChatOpenAI({ apiKey, temperature, model }, { baseURL })
const getModel = async (modelName: keyof typeof modelMap, temperature = 0) => {
const { model, apiKey, baseURL } = modelMap[modelName]
return new ChatOpenAI(
{ temperature, model, apiKey },
{
baseURL,
}
)
}
const timeConfig = z.object({
@@ -43,7 +60,7 @@ const timeConfig = z.object({
* @returns
*/
const parseTime = async (userInput: string) => {
const model = await getDeepseekModel()
const model = await getModel("deepseek-chat")
const structuredLlm = model.withStructuredOutput(timeConfig, { name: "time" })
return await structuredLlm.invoke(
`
@@ -58,22 +75,21 @@ const parseTime = async (userInput: string) => {
)
}
export enum LlmPromptType {
summary = "summary",
}
/**
* LLM模型
* @param promptType
* @param chatHistory
* @param promptName Key
* @param variables
* @param temperature
* @returns
*/
const invokeLLM4ChatHistory = async (
promptType: LlmPromptType,
chatHistory: string
const invoke = async (
promptName: string,
variables: Record<string, any>,
temperature = 0
) => {
const { langfuse, langfuseHandler } = await getLangfuse()
const prompt = await langfuse.getPrompt(promptType)
const prompt = await langfuse.getPrompt(promptName)
const config = prompt.config as { modelName: keyof typeof modelMap }
const langchainTextPrompt = PromptTemplate.fromTemplate(
prompt.getLangchainPrompt()
@@ -81,25 +97,20 @@ const invokeLLM4ChatHistory = async (
metadata: { langfusePrompt: prompt },
})
const chain = langchainTextPrompt.pipe(await getDeepseekModel(0.5))
const res = await chain.invoke(
{
chatHistory,
time: new Date().toLocaleString("zh-CN", { timeZone: "Asia/Shanghai" }),
},
{
callbacks: [langfuseHandler],
}
const chain = langchainTextPrompt.pipe(
await getModel(config.modelName, temperature)
)
return res.content
const { content } = await chain.invoke(variables, {
callbacks: [langfuseHandler],
})
return content
}
const llm = {
getDeepseekModel,
parseTime,
invokeLLM4ChatHistory,
invoke,
}
export default llm