feat: 基础建设

This commit is contained in:
zhaoyingbo 2024-11-25 11:08:54 +00:00
parent 84ab136492
commit 6792b3703f
27 changed files with 257 additions and 176 deletions

View File

@ -30,7 +30,8 @@
"Prisma.prisma",
"humao.rest-client",
"GitHub.copilot",
"GitHub.copilot-chat"
"GitHub.copilot-chat",
"oven.bun-vscode"
]
}
},

View File

@ -1,4 +0,0 @@
NODE_ENV=dev
http_proxy=http://proxy.pt.xiaomi.com:80
https_proxy=http://proxy.pt.xiaomi.com:80
no_proxy=localhost,.xiaomiwh.cn,.mioffice.cn,.xiaomi.com,.imoaix.cn,.deepseek.com,.gpt.ge,.xiaomi.srv

View File

@ -1,4 +0,0 @@
NODE_ENV=production
http_proxy=http://proxy.pt.xiaomi.com:80
https_proxy=http://proxy.pt.xiaomi.com:80
no_proxy=localhost,.xiaomiwh.cn,.mioffice.cn,.xiaomi.com,.imoaix.cn,.deepseek.com,.gpt.ge,.xiaomi.srv

2
.gitignore vendored
View File

@ -56,3 +56,5 @@ profile-*
profile*
*clinic*
*flamegraph*
.env

38
.gitlab-ci.yml Normal file
View File

@ -0,0 +1,38 @@
# CI pipeline: on master, build a Docker image and deploy it via matrix-cli.
# NOTE(review): indentation restored — the diff render had flattened it,
# which is invalid YAML; structure follows the standard GitLab CI schema.
variables:
  REGISTRY: micr.cloud.mioffice.cn
  IMAGE_NAME: micr.cloud.mioffice.cn/egg/egg_server
  MATRIX_PROJECT: 462609
  MATRIX_SPACE: egg-server-prev
  # NOTE(review): access key committed in plain text — move to a masked,
  # protected CI/CD variable instead of the repository.
  MATRIX_AK: CAKBCYGR0BI2BGMVUF8

stages:
  - build
  - deploy

# All jobs run on the fe-bj runner pool.
default:
  tags:
    - fe-bj

build:
  stage: build
  only:
    - master
  image: docker:latest
  services:
    - docker:dind # Docker-in-Docker so the job can run `docker build`
  script:
    # $ENV is a file-type CI variable holding the runtime .env contents.
    - cp $ENV .env
    - docker login -u $MATRIX_AK -p $MATRIX_SK $REGISTRY
    - docker build -t $IMAGE_NAME:$CI_COMMIT_SHA -f ./docker/deploy/Dockerfile .
    - docker push $IMAGE_NAME:$CI_COMMIT_SHA
    - docker logout $REGISTRY
    # Remove the local image so runner disk usage does not grow unbounded.
    - docker rmi $IMAGE_NAME:$CI_COMMIT_SHA

deploy:
  stage: deploy
  only:
    - master
  image: cr.d.xiaomi.net/bigdata-fe/matrix-cli
  script:
    - matrix-cli config set --access-key $MATRIX_AK --secret-key $MATRIX_SK
    # Canary-style rollout: deploy the just-pushed tag at 1% initially.
    - matrix-cli deploy --project $MATRIX_PROJECT --deploy-space $MATRIX_SPACE --reason "$CI_COMMIT_MESSAGE" --deploy-percentage 1 --tag $CI_COMMIT_SHA

View File

@ -25,6 +25,7 @@
"mindnote",
"openai",
"openchat",
"oxlint",
"pocketbase",
"PWTHWP",
"qwen",

BIN
bun.lockb

Binary file not shown.

3
cache.ts Normal file
View File

@ -0,0 +1,3 @@
// Debug script: parse the LARK_APP_MAP env var (falling back to []) and
// print the result; the trailing [] in the output is preserved as-is.
import { parseJsonString } from "@egg/hooks"

const parsedAppMap = parseJsonString(Bun.env.LARK_APP_MAP, [])
console.log(parsedAppMap, [])

View File

@ -1,5 +1,6 @@
import pluginJs from "@eslint/js"
import simpleImportSort from "eslint-plugin-simple-import-sort"
import unusedImports from "eslint-plugin-unused-imports"
import globals from "globals"
import tseslint from "typescript-eslint"
@ -11,12 +12,24 @@ export default [
{
plugins: {
"simple-import-sort": simpleImportSort,
"unused-imports": unusedImports,
},
rules: {
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-namespace": "off",
"simple-import-sort/imports": "error",
"simple-import-sort/exports": "error",
"no-unused-vars": "off", // Turn off base rule
"unused-imports/no-unused-imports": "error", // Enable plugin rule
"unused-imports/no-unused-vars": [
"warn",
{
vars: "all",
varsIgnorePattern: "^_",
args: "after-used",
argsIgnorePattern: "^_",
},
],
},
},
]

View File

@ -3,50 +3,51 @@
"module": "index.ts",
"type": "module",
"scripts": {
"start": "dotenvx run -f .env.prod -- bun run index.ts",
"dev": "dotenvx run -f .env.dev -- bun run index.ts --watch",
"lint": "eslint --fix .",
"start": "bun run index.ts",
"dev": "bun run index.ts --watch",
"lint": "oxlint --fix .",
"prepare": "husky",
"prettier": "prettier --write ."
},
"lint-staged": {
"*.{js,jsx,ts,tsx}": [
"eslint --fix",
"oxlint --fix",
"prettier --write",
"git add"
]
},
"devDependencies": {
"@commitlint/cli": "^19.5.0",
"@commitlint/config-conventional": "^19.5.0",
"@eslint/js": "^9.13.0",
"@commitlint/cli": "^19.6.0",
"@commitlint/config-conventional": "^19.6.0",
"@eslint/js": "^9.15.0",
"@types/node-schedule": "^2.1.7",
"@types/uuid": "^10.0.0",
"bun-types": "^1.1.33",
"eslint": "^9.13.0",
"bun-types": "^1.1.36",
"eslint": "^9.15.0",
"eslint-plugin-simple-import-sort": "^12.1.1",
"husky": "^9.1.6",
"eslint-plugin-unused-imports": "^4.1.4",
"husky": "^9.1.7",
"lint-staged": "^15.2.10",
"oxlint": "^0.13.1",
"prettier": "^3.3.3",
"typescript-eslint": "^8.12.1"
"typescript-eslint": "^8.15.0"
},
"peerDependencies": {
"typescript": "^5.5.4"
},
"dependencies": {
"@dotenvx/dotenvx": "^1.21.0",
"@egg/hooks": "^1.2.0",
"@egg/lark-msg-tool": "^1.14.0",
"@egg/logger": "^1.4.4",
"@egg/net-tool": "^1.9.2",
"@egg/net-tool": "^1.14.2",
"@egg/path-tool": "^1.4.1",
"@langchain/core": "^0.3.15",
"@langchain/openai": "^0.3.11",
"@langchain/core": "^0.3.18",
"@langchain/openai": "^0.3.14",
"joi": "^17.13.3",
"langfuse-langchain": "^3.28.0",
"langfuse-langchain": "^3.30.3",
"node-schedule": "^2.1.1",
"p-limit": "^6.1.0",
"pocketbase": "^0.21.5",
"uuid": "^10.0.0"
}
}
}

View File

@ -1,4 +1,5 @@
import { stringifyJson } from "@egg/hooks"
import { LarkService } from "@egg/net-tool"
import db from "../../db"
import { Context, DB, LarkServer, MsgProxy } from "../../types"
@ -40,7 +41,7 @@ const validateMessageReq = ({
export const manageMessageReq = async (
ctx: Context.Data
): Promise<Response> => {
const { body: rawBody, genResp, larkService } = ctx
const { body: rawBody, genResp, appMap, requestId } = ctx
const body = rawBody as MsgProxy.Body
// 校验参数
@ -87,6 +88,20 @@ export const manageMessageReq = async (
return genResp.notFound(error)
}
// 获取 app info
const appInfo = appMap[appName]
if (!appInfo) {
const error = "app not found"
db.log.create(LOG_COLLECTION, { ...baseLog, error })
return genResp.notFound(error)
}
const larkService = new LarkService({
appId: appInfo.app_id,
appSecret: appInfo.app_secret,
requestId,
})
// 如果有 group_id则发送给所有 group_id 中的人
if (body.group_id) {
// 获取所有接收者
@ -103,14 +118,8 @@ export const manageMessageReq = async (
const makeSendFunc = (receive_id_type: LarkServer.ReceiveIDType) => {
return (receive_id: string) => {
sendList.push(
larkService
.child(appName)
.message.send(
receive_id_type,
receive_id,
body.msg_type,
finalContent
)
larkService.message
.send(receive_id_type, receive_id, body.msg_type, finalContent)
.then((res) => {
sendRes[receive_id_type][receive_id] = res
})
@ -130,14 +139,8 @@ export const manageMessageReq = async (
if (body.receive_id && body.receive_id_type) {
body.receive_id.split(",").forEach((receive_id) => {
sendList.push(
larkService
.child(appName)
.message.send(
body.receive_id_type,
receive_id,
body.msg_type,
finalContent
)
larkService.message
.send(body.receive_id_type, receive_id, body.msg_type, finalContent)
.then((res) => {
sendRes[body.receive_id_type][receive_id] = res
})

View File

@ -1,3 +1,5 @@
import { LarkService } from "@egg/net-tool"
import { Context } from "../../types"
/**
@ -6,19 +8,34 @@ import { Context } from "../../types"
* @returns
*/
const manageLogin = async (ctx: Context.Data) => {
const { req, larkService, genResp, logger } = ctx
const { req, genResp, logger, appMap, requestId } = ctx
logger.info("micro app login")
const url = new URL(req.url)
const code = url.searchParams.get("code")
const appName = url.searchParams.get("app_name") || undefined
const appName = url.searchParams.get("app_name")
if (!code) {
return genResp.badRequest("code not found")
}
if (!appName) {
return genResp.badRequest("app_name not found")
}
// 获取 app info
const appInfo = appMap[appName]
if (!appInfo) {
return genResp.badRequest("app not found")
}
const larkService = new LarkService({
appId: appInfo.app_id,
appSecret: appInfo.app_secret,
requestId,
})
const {
code: resCode,
data,
message,
} = await larkService.child(appName).user.code2Login(code)
} = await larkService.user.code2Login(code)
logger.debug(`get user session: ${JSON.stringify(data)}`)
@ -35,7 +52,7 @@ const manageLogin = async (ctx: Context.Data) => {
* @returns
*/
const manageBatchUser = async (ctx: Context.Data) => {
const { body, genResp, larkService, logger } = ctx
const { body, genResp, logger, appMap, requestId } = ctx
logger.info("batch get user info")
if (!body) return genResp.badRequest("req body is empty")
const { user_ids, user_id_type, app_name } = body
@ -45,10 +62,25 @@ const manageBatchUser = async (ctx: Context.Data) => {
if (!user_id_type) {
return genResp.badRequest("user_id_type not found")
}
if (!app_name) {
return genResp.badRequest("app_name not found")
}
// 获取 app info
const appInfo = appMap[app_name]
if (!appInfo) {
return genResp.badRequest("app not found")
}
const { code, data, message } = await larkService
.child(app_name)
.user.batchGet(user_ids, user_id_type)
const larkService = new LarkService({
appId: appInfo.app_id,
appSecret: appInfo.app_secret,
requestId,
})
const { code, data, message } = await larkService.user.batchGet(
user_ids,
user_id_type
)
logger.debug(`batch get user info: ${JSON.stringify(data)}`)

View File

@ -1,3 +1,4 @@
import { LarkService } from "@egg/net-tool"
import Joi from "joi"
import db from "../../db"
@ -10,11 +11,10 @@ import insertSheet from "./insert"
* @param {Context.Data} ctx -
* @returns {Promise<false | Response>} false
*/
const validateSheetReq = async (
ctx: Context.Data
): Promise<false | Response> => {
const { body, genResp } = ctx
const validateSheetReq = async ({
body,
genResp,
}: Context.Data): Promise<false | Response> => {
// 定义基础的Schema
let schema = Joi.object({
api_key: Joi.string()
@ -64,7 +64,7 @@ const validateSheetReq = async (
* @returns {Promise<Response>}
*/
export const manageSheetReq = async (ctx: Context.Data): Promise<Response> => {
const { body: rawBody, genResp } = ctx
const { body: rawBody, genResp, appMap, requestId } = ctx
const body = rawBody as SheetProxy.InsertData
// 校验参数
@ -83,8 +83,21 @@ export const manageSheetReq = async (ctx: Context.Data): Promise<Response> => {
return genResp.notFound("app name not found")
}
// 获取APP信息
const appInfo = appMap[appName]
if (!appInfo) {
return genResp.notFound("app not found")
}
// 组织新的LarkService
ctx.larkService = new LarkService({
appId: appInfo.app_id,
appSecret: appInfo.app_secret,
requestId,
})
// 根据请求类型处理
if (body.type === "insert") return await insertSheet(ctx, appName)
if (body.type === "insert") return await insertSheet(ctx)
// 默认返回成功响应
return genResp.ok()

View File

@ -7,13 +7,15 @@ import { SheetProxy } from "../../types/sheetProxy"
* @param {string} appName -
* @returns {Promise<Response>}
*/
const insertSheet = async (ctx: Context.Data, appName: string) => {
const insertSheet = async (ctx: Context.Data) => {
const { genResp, larkService } = ctx
const body = ctx.body as SheetProxy.InsertData
const insertRes = await larkService
.child(appName)
.sheet.insertRows(body.sheet_token, body.range, body.values)
const insertRes = await larkService.sheet.insertRows(
body.sheet_token,
body.range,
body.values
)
if (insertRes?.code !== 0) {
return genResp.serverError(insertRes?.message)

View File

@ -1,24 +1,24 @@
import logger from "@egg/logger"
import { LarkService } from "@egg/net-tool"
import pLimit from "p-limit"
import db from "../db"
import { LarkService } from "../services"
export const resetAccessToken = async () => {
try {
const appList = await db.appInfo.getFullList()
const limit = pLimit(3)
const service = new LarkService("", "schedule")
const promiseList = appList.map((app) =>
limit(() =>
service.auth.getAk(app.app_id, app.app_secret).then((res) => {
if (res.code !== 0) return
return db.tenantAccessToken.update(
app.id,
app.name,
res.tenant_access_token
)
})
new LarkService({
appId: app.app_id,
appSecret: app.app_secret,
requestId: "schedule",
}).auth
.getAppAuth()
.then((res) => {
return db.tenantAccessToken.update(app.id, app.name, res)
})
)
)
await Promise.allSettled(promiseList)

View File

@ -1,10 +1,6 @@
import schedule from "node-schedule"
import { resetAccessToken } from "./accessToken"
export const initSchedule = async () => {
// 定时任务每15分钟刷新一次token
schedule.scheduleJob("*/15 * * * *", resetAccessToken)
// 立即执行一次
resetAccessToken()
// // 定时任务每15分钟刷新一次token
// schedule.scheduleJob("*/15 * * * *", resetAccessToken)
// // 立即执行一次
// resetAccessToken()
}

View File

@ -1,4 +1,3 @@
import AttachService from "./attach"
import LarkService from "./lark"
export { AttachService, LarkService }
export { AttachService }

View File

@ -1,6 +1,10 @@
import { LarkService } from "../../services"
import { LarkService } from "@egg/net-tool"
const service = new LarkService("egg", "")
const service = new LarkService({
appId: "cli_9f4b3b1b1b",
appSecret: "bask4IvKT61xCvTUxIkcMaWoiYf",
requestId: "123",
})
const {
data: {

View File

@ -1,8 +1,12 @@
import LarkDriveService from "../../services/lark/drive"
import { LarkService } from "@egg/net-tool"
const service = new LarkDriveService("egg", "")
const service = new LarkService({
appId: "cli_9f1e6b4e",
appSecret: "7b2f2f3a9b2c8e8a2f1a2b2b2b2b2b2b",
requestId: "schedule",
})
const res = await service.createFile(
const res = await service.drive.createFile(
"D6ETfzaU9lN08adVDz3kjLey4Bx",
"xxx 项目 KV管理器",
"bitable"

View File

@ -1,7 +0,0 @@
import { LarkService } from "../../services"
const service = new LarkService("egg", "")
const res = await service.chat.getInnerList()
console.log(JSON.stringify(res, null, 2))

View File

@ -1,13 +0,0 @@
import { test } from "bun:test"
import LarkSheetService from "../../services/lark/sheet"
const service = new LarkSheetService("egg", "")
test("getRecords", async () => {
const res = await service.getRecords(
"bask4BA989TBbnu5R7Onmdh1csb",
"tblabYZk3AYtGLSe"
)
console.log(res)
})

View File

@ -1,14 +1,19 @@
import { LarkBody, LarkCard } from "@egg/lark-msg-tool"
import { NetTool } from "@egg/net-tool"
import { LarkService, NetTool } from "@egg/net-tool"
import { PathCheckTool } from "@egg/path-tool"
import { Logger } from "winston"
import cardMap from "../constant/card"
import functionMap from "../constant/function"
import tempMap from "../constant/template"
import { AttachService, LarkService } from "../services"
import { AttachService } from "../services"
export namespace Context {
export interface APP_INFO {
app_id: string
app_secret: string
app_name: string
}
export interface Data {
req: Request
requestId: string
@ -23,5 +28,7 @@ export namespace Context {
path: PathCheckTool
searchParams: URLSearchParams
app: "michat" | "egg" | string
appInfo: APP_INFO
appMap: Record<string, APP_INFO>
}
}

View File

@ -1,15 +1,22 @@
import { parseJsonString } from "@egg/hooks"
import { LarkBody, LarkCard } from "@egg/lark-msg-tool"
import loggerIns from "@egg/logger"
import { NetTool } from "@egg/net-tool"
import { LarkService, NetTool } from "@egg/net-tool"
import { PathCheckTool } from "@egg/path-tool"
import { v4 as uuid } from "uuid"
import cardMap from "../constant/card"
import functionMap from "../constant/function"
import tempMap from "../constant/template"
import { AttachService, LarkService } from "../services"
import { AttachService } from "../services"
import { Context } from "../types"
// 获取所有应用信息
const appMap = parseJsonString(process.env.LARK_APP_MAP, {}) as Record<
string,
Context.APP_INFO
>
/**
* requestId
*
@ -41,10 +48,15 @@ const genContext = async (req: Request) => {
const larkBody = new LarkBody(body)
const searchParams = new URL(req.url).searchParams
const app = searchParams.get("app") || "egg"
const appInfo = appMap[app]
const requestId = getPreRequestId(larkBody) || uuid()
const logger = loggerIns.child({ requestId })
const genResp = new NetTool({ requestId })
const larkService = new LarkService(app, requestId)
const larkService = new LarkService({
appId: appInfo.app_id,
appSecret: appInfo.app_secret,
requestId,
})
const attachService = new AttachService({ requestId })
const path = new PathCheckTool(req.url)
const larkCard = new LarkCard(
@ -70,6 +82,8 @@ const genContext = async (req: Request) => {
attachService,
searchParams,
app,
appInfo,
appMap,
} as Context.Data
}

38
utils/llm/base.ts Normal file
View File

@ -0,0 +1,38 @@
import { ChatOpenAI } from "@langchain/openai"
import CallbackHandler, { Langfuse } from "langfuse-langchain"
/**
*
* @param temperature
* @returns
*/
/**
 * Build a ChatOpenAI client configured entirely from Bun environment
 * variables (LLM_MODEL, LLM_API_KEY, LLM_BASE_URL).
 *
 * @param temperature - sampling temperature; defaults to 0 (deterministic)
 * @returns a ChatOpenAI instance pointed at LLM_BASE_URL
 */
export const getModel = (temperature = 0) => {
  // First argument: model fields; second: client-level overrides.
  const modelFields = {
    temperature,
    model: Bun.env.LLM_MODEL,
    apiKey: Bun.env.LLM_API_KEY,
  }
  const clientOptions = {
    baseURL: Bun.env.LLM_BASE_URL,
  }
  return new ChatOpenAI(modelFields, clientOptions)
}
/**
* Langfuse
* @returns
*/
export const getLangfuse = async (name: string, requestId: string) => {
const langfuseParams = {
publicKey: Bun.env.LANGFUSE_PK,
secretKey: Bun.env.LANGFUSE_SK,
baseUrl: Bun.env.LANGFUSE_BASE_URL,
sessionId: requestId,
name,
}
return {
langfuseHandler: new CallbackHandler(langfuseParams),
langfuse: new Langfuse(langfuseParams),
}
}

View File

@ -1,66 +1,7 @@
import { PromptTemplate } from "@langchain/core/prompts"
import { ChatOpenAI } from "@langchain/openai"
import { CallbackHandler, Langfuse } from "langfuse-langchain"
import { z } from "zod"
import db from "../db"
/**
* Langfuse
* @returns
*/
const getLangfuse = async (name: string, requestId: string) => {
const langfuseParams = {
publicKey: await db.appConfig.getLangfusePk(),
secretKey: await db.appConfig.getLangfuseSk(),
baseUrl: "http://langfuse.c5-cloudml.xiaomi.srv",
sessionId: requestId,
name,
}
return {
langfuseHandler: new CallbackHandler(langfuseParams),
langfuse: new Langfuse(langfuseParams),
}
}
const modelMap = {
"deepseek-chat": {
model: "deepseek-chat",
apiKey: "xx",
baseURL: "http://10.38.214.162:8003/v1",
},
"qwen2-72b-instruct-int4": {
model: "qwen2-72b-instruct-int4",
apiKey: "xx",
baseURL: "http://10.38.214.206:8000/v1",
},
"gpt-4o": {
model: "gpt-4o",
apiKey: "sk-EhbBTR0QjhH22iLr9aCb04D2B0F44f88A07c2924Eb54CfA4",
baseURL: "https://api.gpt.ge/v1",
},
"qwen-72b-instruct-int4/v1": {
model: "qwen-72b-instruct-int4/v1",
apiKey: "xx",
baseURL:
"http://ms-13871-qwen-model-128k-9-1012195754.kscn-tj5-prod2-cloudml.xiaomi.srv/v1",
},
}
/**
*
* @param modelName
* @param temperature
*/
const getModel = async (modelName: keyof typeof modelMap, temperature = 0) => {
const { model, apiKey, baseURL } = modelMap[modelName]
return new ChatOpenAI(
{ temperature, model, apiKey },
{
baseURL,
}
)
}
import { getLangfuse, getModel } from "./base"
const groupAgentConfig = z.object({
chatId: z.string().describe("群聊ID"),
@ -89,7 +30,7 @@ const parseGroupAgentQuery = async (
"parseGroupAgentQuery",
requestId
)
const model = await getModel("qwen-72b-instruct-int4/v1")
const model = await getModel()
const structuredLlm = model.withStructuredOutput(groupAgentConfig, {
name: "groupAgent",
})
@ -140,7 +81,6 @@ const invoke = async (
) => {
const { langfuse, langfuseHandler } = await getLangfuse("invoke", requestId)
const prompt = await langfuse.getPrompt(promptName)
const config = prompt.config as { modelName: keyof typeof modelMap }
const langchainTextPrompt = PromptTemplate.fromTemplate(
prompt.getLangchainPrompt()
@ -148,9 +88,7 @@ const invoke = async (
metadata: { langfusePrompt: prompt },
})
const chain = langchainTextPrompt.pipe(
await getModel(config.modelName, temperature)
)
const chain = langchainTextPrompt.pipe(await getModel(temperature))
const { content } = await chain.invoke(variables, {
callbacks: [langfuseHandler],