chore: bump pro submodule for hydration stability (#6808)

* sandbox-sync-agent

* refactor: host pro as submodule

* chore: checkpoint host pro restructure

* refactor workspace test layout and startup init

* chore: update next turbopack setup

* chore: snapshot current work before actions fix

* chore: update pro submodule

* chore: point pro submodule url to upstream https

* fix: Dockerfile

* chore: update pro submodule

* ci: support private pro submodule token and skip fork jobs

* fix(ci): build sdk workspace deps before code-sandbox bundle

* fix(app): exclude vitest configs from production typecheck

* fix(app-image): build sdk packages before next build

* fix(ci): align dockerfiles with workspace sdk build flow

* chore(docker): upgrade node20 docker images to node24

* fix(ci): read admin coverage output path in pro test workflow

* fix(app-image): include next-i18next config and locale assets

* chore: update pro submodule

* chore: do not specify branch for submodule

* chore: remove most ts-nocheck sign

* chore: update pro submodule

* chore: remove sandbox-agent-sync package

* chore: do not modify "pushData" file logic

* fix: health check

* chore: restore dev axios proxy state

* fix: test-fastgpt report workflow

* fix: use valid vitest coverage action inputs
This commit is contained in:
Ryo
2026-04-27 17:44:12 +08:00
committed by GitHub
parent e32410b93c
commit 289da0f7b0
340 changed files with 6547 additions and 16162 deletions
@@ -0,0 +1,89 @@
name: Build fastgpt-sso-service images

on:
  workflow_dispatch:
  push:
    tags:
      - 'v*'

permissions:
  contents: read
  packages: write
  attestations: write
  id-token: write

jobs:
  build-fastgpt-sso-service-images:
    runs-on: buildjet-2vcpu-ubuntu-2204
    steps:
      - name: Checkout
        # v4 for consistency with the other workflows added in this change.
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      # Initialize the private `pro` submodule. The token is only needed
      # when the submodule repo is private; without it we still attempt a
      # plain clone so public forks keep working.
      - name: Update submodules
        env:
          PRO_SUBMODULE_TOKEN: ${{ secrets.PRO_SUBMODULE_TOKEN }}
        run: |
          if [ -f .gitmodules ]; then
            if [ -n "${PRO_SUBMODULE_TOKEN}" ]; then
              git config --global url."https://x-access-token:${PRO_SUBMODULE_TOKEN}@github.com/".insteadOf "https://github.com/"
            fi
            git submodule update --init --recursive
          fi

      - name: Install Dependencies
        run: |
          sudo apt update && sudo apt install -y nodejs npm

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: network=host

      - name: Cache Docker layers
        uses: actions/cache@v4
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: labring
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Login to Ali Hub
        uses: docker/login-action@v3
        with:
          registry: registry.cn-hangzhou.aliyuncs.com
          username: ${{ secrets.FASTGPT_ALI_IMAGE_USER }}
          password: ${{ secrets.FASTGPT_ALI_IMAGE_PSW }}

      # Tag pushes use the tag name; manual runs from main publish `latest`
      # only. `Git_Latest`/`Ali_Latest` are always set so the build step can
      # reference all four tags unconditionally.
      - name: Set image tags
        run: |
          if [[ "${{ github.ref_name }}" == "main" ]]; then
            echo "Git_Latest=ghcr.io/labring/fastgpt-sso-service:latest" >> "$GITHUB_ENV"
            echo "Git_Tag=ghcr.io/labring/fastgpt-sso-service:latest" >> "$GITHUB_ENV"
            echo "Ali_Latest=${{ secrets.FASTGPT_ALI_IMAGE_PREFIX }}/fastgpt-sso-service:latest" >> "$GITHUB_ENV"
            echo "Ali_Tag=${{ secrets.FASTGPT_ALI_IMAGE_PREFIX }}/fastgpt-sso-service:latest" >> "$GITHUB_ENV"
          else
            echo "Git_Tag=ghcr.io/labring/fastgpt-sso-service:${{ github.ref_name }}" >> "$GITHUB_ENV"
            echo "Git_Latest=ghcr.io/labring/fastgpt-sso-service:latest" >> "$GITHUB_ENV"
            echo "Ali_Tag=${{ secrets.FASTGPT_ALI_IMAGE_PREFIX }}/fastgpt-sso-service:${{ github.ref_name }}" >> "$GITHUB_ENV"
            echo "Ali_Latest=${{ secrets.FASTGPT_ALI_IMAGE_PREFIX }}/fastgpt-sso-service:latest" >> "$GITHUB_ENV"
          fi

      - name: Build and publish image
        run: |
          docker buildx build \
            -f pro/sso/Dockerfile \
            --build-arg name=sso \
            --platform linux/amd64,linux/arm64 \
            --label "org.opencontainers.image.source=https://github.com/labring/FastGPT" \
            --label "org.opencontainers.image.description=fastgpt-sso-service image" \
            --push \
            --cache-from=type=local,src=/tmp/.buildx-cache \
            --cache-to=type=local,dest=/tmp/.buildx-cache-new,mode=max \
            -t ${Git_Tag} \
            -t ${Git_Latest} \
            -t ${Ali_Tag} \
            -t ${Ali_Latest} \
            .

      # Writing the cache to a fresh directory and swapping it in prevents
      # the actions/cache entry from growing without bound across runs
      # (known limitation of buildx type=local caching).
      - name: Move cache
        run: |
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache
+54
View File
@@ -0,0 +1,54 @@
name: 'FastGPT-Pro-Test'

on:
  pull_request:
  workflow_dispatch:

concurrency:
  group: 'fastgpt-pro-test-${{ github.event.pull_request.number || github.ref }}'
  cancel-in-progress: true

jobs:
  test:
    # NOTE: the `secrets` context is NOT available in job-level `if`
    # expressions, so the previous `secrets.PRO_SUBMODULE_TOKEN != ''`
    # clause was invalid. Fork PRs never receive repository secrets on
    # `pull_request` events anyway, so gating on same-repo PRs (plus
    # manual dispatch) preserves the original intent.
    if: ${{ github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository }}
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
    steps:
      # Fall back to the current ref/repo so `workflow_dispatch` runs work.
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.ref || github.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }}

      # Initialize the private `pro` submodule (token only needed when the
      # submodule repository is private).
      - name: Update submodules
        env:
          PRO_SUBMODULE_TOKEN: ${{ secrets.PRO_SUBMODULE_TOKEN }}
        run: |
          if [ -f .gitmodules ]; then
            if [ -n "${PRO_SUBMODULE_TOKEN}" ]; then
              git config --global url."https://x-access-token:${PRO_SUBMODULE_TOKEN}@github.com/".insteadOf "https://github.com/"
            fi
            git submodule update --init --recursive
          fi

      # node-canvas compiles from source and needs these native headers.
      - name: Install system deps for node-canvas
        run: |
          sudo apt-get update
          sudo apt-get install -y libcairo2-dev libpango1.0-dev libjpeg-dev libgif-dev librsvg2-dev

      # pnpm must be installed before setup-node so `cache: pnpm` works.
      - uses: pnpm/action-setup@v4
        with:
          version: 10.33.2

      - uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'pnpm'

      - name: Install Deps
        run: pnpm install --frozen-lockfile

      - name: Test
        run: pnpm test:admin

      # `hashFiles` guards against the coverage directory being absent when
      # the test step failed before producing a report.
      - name: Report Coverage
        if: always() && hashFiles('pro/admin/coverage/coverage-summary.json') != ''
        uses: davelosert/vitest-coverage-report-action@v2
        with:
          json-final-path: pro/admin/coverage/coverage-final.json
          json-summary-path: pro/admin/coverage/coverage-summary.json
+103
View File
@@ -0,0 +1,103 @@
name: Preview Admin Image - Build & Push

# SECURITY NOTE: `pull_request_target` exposes repository secrets while the
# job below checks out and docker-builds untrusted PR code. Keep this
# workflow free of steps that execute PR-controlled scripts on the runner
# itself, and review any change to it carefully.
on:
  pull_request_target:
    types: [opened, synchronize, reopened]
  workflow_dispatch:

concurrency:
  group: 'preview-admin-build-${{ github.head_ref }}'
  cancel-in-progress: true

permissions:
  contents: read
  packages: write
  pull-requests: write
  issues: write

jobs:
  build-and-push:
    # NOTE: the `secrets` context cannot be used in job-level `if`
    # expressions, so the previous `secrets.PRO_SUBMODULE_TOKEN != ''`
    # clause was invalid. `pull_request_target` runs in the base
    # repository's context (where the token lives), so run for upstream
    # and for same-repo PRs; skip only inside forks of the whole repo.
    if: ${{ github.repository == 'labring/FastGPT' || github.event.pull_request.head.repo.full_name == github.repository }}
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout PR code
        uses: actions/checkout@v4
        with:
          ref: refs/pull/${{ github.event.pull_request.number }}/head
          fetch-depth: 0

      # Initialize the private `pro` submodule used by the admin Dockerfile.
      - name: Update submodules
        env:
          PRO_SUBMODULE_TOKEN: ${{ secrets.PRO_SUBMODULE_TOKEN }}
        run: |
          if [ -f .gitmodules ]; then
            if [ -n "${PRO_SUBMODULE_TOKEN}" ]; then
              git config --global url."https://x-access-token:${PRO_SUBMODULE_TOKEN}@github.com/".insteadOf "https://github.com/"
            fi
            git submodule update --init --recursive
          fi

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Aliyun Container Registry
        uses: docker/login-action@v3
        with:
          registry: registry.cn-hangzhou.aliyuncs.com
          username: ${{ secrets.FASTGPT_ALI_IMAGE_USER }}
          password: ${{ secrets.FASTGPT_ALI_IMAGE_PSW }}

      # Image is tagged with the PR head SHA so every push gets a unique,
      # reproducible preview tag.
      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: pro/admin/Dockerfile
          platforms: linux/amd64
          push: true
          tags: ${{ secrets.FASTGPT_ALI_IMAGE_PREFIX }}/fastgpt-pro-pr:${{ github.event.pull_request.head.sha }}
          labels: |
            org.opencontainers.image.source=https://github.com/${{ github.repository_owner }}/FastGPT
            org.opencontainers.image.description=fastgpt-pro admin image

      # Upsert a single marker-tagged PR comment with the preview image ref
      # instead of posting a new comment per push.
      - name: Add PR comment on success
        if: success()
        uses: actions/github-script@v7
        with:
          script: |
            const prNumber = ${{ github.event.pull_request.number }};
            const marker = '<!-- fastgpt-admin-preview -->';
            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: prNumber,
            });
            const existingComment = comments.find(comment =>
              comment.body.includes(marker)
            );
            const commentBody = `${marker}
            ✅ **Admin Preview Image Ready!**

            \`\`\`
            ${{ secrets.FASTGPT_ALI_IMAGE_PREFIX }}/fastgpt-pro-pr:${{ github.event.pull_request.head.sha }}
            \`\`\`
            `;
            if (existingComment) {
              await github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: existingComment.id,
                body: commentBody
              });
            } else {
              await github.rest.issues.createComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: prNumber,
                body: commentBody
              });
            }
+47 -12
View File
@@ -1,11 +1,11 @@
name: 'FastGPT-Test'
name: "FastGPT-Test"
on:
pull_request:
workflow_dispatch:
# Only one build per PR branch at a time
# Only one build per PR branch at a time
concurrency:
group: 'test-fastgpt-${{ github.head_ref }}'
group: "test-fastgpt-${{ github.event.pull_request.number || github.ref }}"
cancel-in-progress: true
permissions:
@@ -20,18 +20,53 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.event.pull_request.head.ref || github.ref }}
repository: ${{ github.event.pull_request.head.repo.full_name ||
github.repository }}
- uses: pnpm/action-setup@v4
with:
version: 9
- name: 'Install Deps'
run: pnpm install
- name: 'Test'
run: pnpm run test
- name: 'Report Coverage'
version: 10.33.2
- uses: actions/setup-node@v4
with:
node-version: "24"
cache: "pnpm"
- name: "Install Deps"
run: pnpm install --frozen-lockfile
- name: "Test Global"
run: pnpm test:global
- name: "Test Service"
run: pnpm test:service
- name: "Test App"
run: pnpm test:app
- name: "Report Coverage (Global)"
# Set if: always() to also generate the report if tests are failing
# Only works if you set `reportOnFailure: true` in your vite config as specified above
if: always()
if: always() && hashFiles('packages/global/coverage/coverage-summary.json') != ''
uses: davelosert/vitest-coverage-report-action@v2
with:
name: global
json-final-path: packages/global/coverage/coverage-final.json
json-summary-path: packages/global/coverage/coverage-summary.json
- name: "Report Coverage (Service)"
if: always() && hashFiles('packages/service/coverage/coverage-summary.json') != ''
uses: davelosert/vitest-coverage-report-action@v2
with:
name: service
json-final-path: packages/service/coverage/coverage-final.json
json-summary-path: packages/service/coverage/coverage-summary.json
- name: "Report Coverage (App)"
if: always() && hashFiles('projects/app/coverage/coverage-summary.json') != ''
uses: davelosert/vitest-coverage-report-action@v2
with:
name: app
json-final-path: projects/app/coverage/coverage-final.json
json-summary-path: projects/app/coverage/coverage-summary.json
+1 -1
View File
@@ -20,7 +20,7 @@ jobs:
- uses: pnpm/action-setup@v4
with:
version: 9
version: 10.33.2
- uses: actions/setup-node@v4
with:
+5 -1
View File
@@ -2,6 +2,7 @@
node_modules/
# next.js
.next/
.turbo/
out/
# production
build/
@@ -40,6 +41,9 @@ coverage
document/.source
projects/app/worker/
pro/admin/worker/
# Agent
.codex
.codex
.turbo
+3
View File
@@ -0,0 +1,3 @@
# Commercial (pro) code lives in a separate repository, mounted at ./pro.
# No branch is pinned on purpose: the superproject tracks an exact commit
# of the submodule, so checkouts are reproducible.
[submodule "pro"]
path = pro
url = https://github.com/labring/fastgpt-pro.git
+3 -3
View File
@@ -3,8 +3,8 @@
"editor.mouseWheelZoom": true,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"prettier.prettierPath": "node_modules/prettier",
"typescript.preferences.includePackageJsonAutoImports": "on",
"typescript.tsdk": "node_modules/typescript/lib",
"js/ts.preferences.includePackageJsonAutoImports": "on",
"js/ts.tsdk.path": "node_modules/typescript/lib",
"i18n-ally.localesPaths": [
"packages/web/i18n",
],
@@ -34,4 +34,4 @@
"files.associations": {
"*.mdx": "markdown"
}
}
}
+9 -3
View File
@@ -10,10 +10,16 @@ ifndef name
$(error name is not defined)
endif
filePath=./projects/$(name)/Dockerfile
projectDir=$(or $(wildcard ./projects/$(name)),$(wildcard ./pro/$(name)))
ifeq ($(strip $(projectDir)),)
$(error Unknown project name '$(name)'; expected ./projects/$(name) or ./pro/$(name))
endif
filePath=$(projectDir)/Dockerfile
dev:
pnpm --prefix ./projects/$(name) dev
pnpm --prefix $(projectDir) dev
build:
ifeq ($(proxy), taobao)
@@ -22,4 +28,4 @@ else ifeq ($(proxy), clash)
docker build -f $(filePath) -t $(image) . --network host --build-arg HTTP_PROXY=http://127.0.0.1:7890 --build-arg HTTPS_PROXY=http://127.0.0.1:7890
else
docker build --progress=plain -f $(filePath) -t $(image) .
endif
endif
+14
View File
@@ -60,6 +60,20 @@ docker compose up -d
- **商业版**
如果你需要更完整的功能,或深度的服务支持,可以选择我们的[商业版](https://doc.fastgpt.io/introduction/commercial)。我们除了提供完整的软件外,还提供相应的场景落地辅导,具体可提交[商业咨询](https://fael3z0zfze.feishu.cn/share/base/form/shrcnjJWtKqjOI9NbQTzhNyzljc)
## 📁 仓库结构
- `projects/app`: 开源主应用
- `pro/admin`: 商业版后台
- `pro/sso`: 商业版 SSO 服务
统一在仓库根目录执行 `pnpm i`,开发时使用:
```bash
make dev name=app
make dev name=admin
make dev name=sso
```
## 💡 核心功能
| | |
+33 -20
View File
@@ -2,10 +2,12 @@
Since FastGPT is managed in the same way as monorepo, it is recommended to install make first during development.
monorepo Project Name:
- app: main project
-......
monorepo Project Name:
- app: main project
- admin: pro admin project
- sso: pro sso service
-......
## Dev
@@ -15,13 +17,15 @@ chmod -R +x ./scripts/
# Executing under the code root directory installs all dependencies within the root package, projects, and packages
pnpm i
# Not make cmd
cd projects/app
pnpm dev
# Make cmd
make dev name=app
```
# Not make cmd
cd projects/app
pnpm dev
# Make cmd
make dev name=app
make dev name=admin
make dev name=sso
```
Note: If the Node version is >= 20, you need to pass the `--no-node-snapshot` parameter to Node when running `pnpm i`
@@ -106,13 +110,22 @@ Please fill the AuditEventEnum and audit function is added to the ts, and on the
## Build
```sh
# Docker cmd: Build image, not proxy
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app
# Make cmd: Build image, not proxy
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1
# Docker cmd: Build image with proxy
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app --build-arg proxy=taobao
# Make cmd: Build image with proxy
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 proxy=taobao
# Docker cmd: Build image, not proxy
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app
# Docker cmd: Build pro admin image, not proxy
docker build -f ./pro/admin/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-admin:v4.8.1 . --build-arg name=admin
# Docker cmd: Build pro sso image, not proxy
docker build -f ./pro/sso/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sso-service:v4.8.1 . --build-arg name=sso
# Make cmd: Build image, not proxy
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1
make build name=admin image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-admin:v4.8.1
make build name=sso image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-sso-service:v4.8.1
# Docker cmd: Build image with proxy
docker build -f ./projects/app/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 . --build-arg name=app --build-arg proxy=taobao
# Docker cmd: Build pro admin image with proxy
docker build -f ./pro/admin/Dockerfile -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-admin:v4.8.1 . --build-arg name=admin --build-arg proxy=taobao
# Make cmd: Build image with proxy
make build name=app image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 proxy=taobao
make build name=admin image=registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-admin:v4.8.1 proxy=taobao
```
+1 -1
View File
@@ -1,4 +1,4 @@
FROM node:20-alpine AS base
FROM node:24-alpine AS base
FROM base AS builder
RUN apk add --no-cache \
+1 -1
View File
@@ -1,5 +1,5 @@
{
"name": "fast",
"name": "@fastgpt/document",
"version": "0.0.0",
"private": true,
"scripts": {
+34 -17
View File
@@ -3,6 +3,9 @@
"version": "4.0",
"private": true,
"scripts": {
"dev:app": "turbo run dev --filter=@fastgpt/app",
"dev:admin": "turbo run dev --filter=@fastgpt/admin",
"dev": "turbo run dev --filter=@fastgpt/app",
"prepare": "husky install",
"gen:theme-typings": "chakra-cli tokens packages/web/styles/theme.ts --out node_modules/.pnpm/node_modules/@chakra-ui/styled-system/dist/theming.types.d.ts",
"gen:deploy": "node deploy/init.mjs",
@@ -10,36 +13,42 @@
"initIcon": "node ./scripts/icon/init.js && prettier --config \"./.prettierrc.js\" --write \"packages/web/components/common/Icon/constants.ts\"",
"previewIcon": "node ./scripts/icon/index.js",
"create:i18n": "node ./scripts/i18n/index.js",
"clean:unused:pro": "node --experimental-strip-types ./pro/scripts/cleanup-unused.ts",
"clean:unused:pro:write": "node --experimental-strip-types ./pro/scripts/cleanup-unused.ts --write",
"lint": "eslint \"**/*.{ts,tsx}\" --fix --ignore-path .eslintignore",
"test": "vitest run --coverage",
"test:vector": "vitest run --config test/integrationTest/vectorDB/vitest.config.mts"
"test": "pnpm test:workspace",
"test:all": "pnpm test:workspace && pnpm test:vector",
"test:repo": "vitest run --config vitest.config.mts --coverage --passWithNoTests",
"test:workspace": "turbo run test --filter=@fastgpt/app --filter=@fastgpt/admin --filter=@fastgpt/global --filter=@fastgpt/service",
"test:app": "turbo run test --filter=@fastgpt/app",
"test:admin": "turbo run test --filter=@fastgpt/admin",
"test:global": "turbo run test --filter=@fastgpt/global",
"test:service": "turbo run test --filter=@fastgpt/service",
"test:service:integration": "turbo run test:integration --filter=@fastgpt/service",
"test:vector": "turbo run test:integration --filter=@fastgpt/service",
"build:sdks": "pnpm -r --filter @fastgpt-sdk/storage --filter @fastgpt-sdk/logger --filter @fastgpt-sdk/otel build",
"predev": "pnpm run build:sdks"
},
"devDependencies": {
"@chakra-ui/cli": "^2.4.1",
"@typescript-eslint/eslint-plugin": "^6.21.0",
"@typescript-eslint/parser": "^6.21.0",
"@vitest/coverage-v8": "^3.0.9",
"@typescript-eslint/eslint-plugin": "catalog:",
"@typescript-eslint/parser": "catalog:",
"@vitest/coverage-v8": "catalog:",
"eslint": "catalog:",
"eslint-config-next": "catalog:",
"husky": "^8.0.3",
"i18next": "catalog:",
"js-yaml": "catalog:",
"lint-staged": "^13.3.0",
"mongodb-memory-server": "^10.1.4",
"mongodb-memory-server": "catalog:",
"next-i18next": "catalog:",
"prettier": "3.2.4",
"react-i18next": "catalog:",
"typescript": "^5.1.3",
"vitest": "^3.0.9",
"zhlint": "^0.7.4"
"turbo": "2.9.6",
"typescript": "catalog:",
"vitest": "catalog:"
},
"lint-staged": {
"./**/**/*.{ts,tsx,scss}": [
"prettier --config ./.prettierrc.js --write --ignore-unknown"
],
"./**/**/*.{ts,tsx}": [
"eslint --fix --ignore-path .eslintignore"
],
"./document/**/**/*.mdx": [
"pnpm -C ./document run format-doc",
"pnpm -C ./document run initDocTime",
@@ -47,10 +56,18 @@
"pnpm -C ./document run checkDocRefs",
"pnpm -C ./document run removeInvalidImg",
"git add ."
],
"**/*.{ts,tsx}": [
"prettier --config ./.prettierrc.js --write",
"eslint --fix --ignore-path .eslintignore"
],
"**/*.scss": [
"prettier --config ./.prettierrc.js --write"
]
},
"engines": {
"node": ">=20",
"pnpm": "9.x"
}
"pnpm": "10.x"
},
"packageManager": "pnpm@10.33.2"
}
@@ -21,6 +21,7 @@ export const APIFileServerSchema = z
})
.meta({ description: 'API 服务器配置' });
export type APIFileServerType = z.infer<typeof APIFileServerSchema>;
export type APIFileServer = APIFileServerType;
export const FeishuServerSchema = z
.object({
appId: z.string(),
@@ -29,6 +30,7 @@ export const FeishuServerSchema = z
})
.meta({ description: '飞书服务器配置' });
export type FeishuServerType = z.infer<typeof FeishuServerSchema>;
export type FeishuServer = FeishuServerType;
export const YuqueServerSchema = z
.object({
userId: z.string(),
@@ -37,6 +39,7 @@ export const YuqueServerSchema = z
})
.meta({ description: '语雀服务器配置' });
export type YuqueServerType = z.infer<typeof YuqueServerSchema>;
export type YuqueServer = YuqueServerType;
export const ApiDatasetServerSchema = z
.object({
@@ -53,10 +56,12 @@ export const ApiFileReadContentResponseSchema = z.object({
rawText: z.string()
});
export type ApiFileReadContentResponseType = z.infer<typeof ApiFileReadContentResponseSchema>;
export type ApiFileReadContentResponse = ApiFileReadContentResponseType;
export const APIFileReadResponseSchema = z.object({
url: z.string()
});
export type APIFileReadResponseType = z.infer<typeof APIFileReadResponseSchema>;
export type APIFileReadResponse = APIFileReadResponseType;
export type ApiDatasetDetailResponse = APIFileItemType;
@@ -1,5 +1,5 @@
import { AdminInformPath } from './inform';
import { AdminLoginPath } from './login';
import { AdminInformPath } from './inform';
import type { OpenAPIPath } from '../../../type';
export const AdminUserPath: OpenAPIPath = {
+3 -1
View File
@@ -30,7 +30,6 @@ export type PaginationResponseType<T = any> = {
total: number;
list: T[];
};
export type PaginationResponse<T = any> = PaginationResponseType<T>;
/* 按 cursor 分页 */
@@ -88,3 +87,6 @@ export type LinkedListResponse<T = {}, A = any> = {
hasMorePrev: boolean;
hasMoreNext: boolean;
};
// Backward-compatible alias for older callers that still import PaginationResponse.
export type PaginationResponse<T = any> = PaginationResponseType<T>;
+8 -4
View File
@@ -1,9 +1,13 @@
{
"name": "@fastgpt/global",
"version": "1.0.0",
"scripts": {
"test": "vitest run -c vitest.config.ts",
"test:watch": "vitest -c vitest.config.ts"
},
"engines": {
"node": ">=20",
"pnpm": "9.x"
"pnpm": "10.x"
},
"dependencies": {
"@fastgpt-sdk/plugin": "0.6.0",
@@ -16,7 +20,7 @@
"js-yaml": "catalog:",
"jschardet": "3.1.1",
"json5": "catalog:",
"nanoid": "^5.1.3",
"nanoid": "catalog:",
"next": "catalog:",
"openai": "4.104.0",
"openapi-types": "^12.1.3",
@@ -27,7 +31,7 @@
},
"devDependencies": {
"@types/lodash": "catalog:",
"@types/js-yaml": "^4.0.9",
"@types/node": "20.14.0"
"@types/js-yaml": "catalog:",
"@types/node": "catalog:"
}
}
+13 -1
View File
@@ -1 +1,13 @@
export * from '@fastgpt-sdk/plugin';
export {
FastGPTPluginClient,
RunToolWithStream,
ToolDetailSchema,
ToolSimpleSchema,
ToolTagsNameMap
} from '@fastgpt-sdk/plugin';
export type {
AIProxyChannelsType,
I18nStringStrictType,
ToolDetailType,
ToolSimpleType
} from '@fastgpt-sdk/plugin';
+2
View File
@@ -123,6 +123,8 @@ export type OutLinkEditType<T extends OutlinkAppType = undefined> = {
app?: T;
};
export type OutLinkSchema<T extends OutlinkAppType = undefined> = OutLinkSchemaType<T>;
export const PlaygroundVisibilityConfigSchema = z.object({
showRunningStatus: z.boolean(),
showSkillReferences: z.boolean().optional().default(true),
+3 -1
View File
@@ -1,7 +1,9 @@
import type { CollaboratorIdType, CollaboratorItemType } from './collaborator';
import { ManageRoleVal, OwnerRoleVal } from './constant';
import type { RoleValueType } from './type';
import { type PermissionValueType } from './type';
const OwnerRoleVal = ~0 >>> 0;
const ManageRoleVal = 0b001;
/**
* Sum the permission value.
* If no permission value is provided, return undefined to fallback to default value.
+19
View File
@@ -0,0 +1,19 @@
// tsconfig for the test tree: extends the parent package config and
// re-points workspace aliases at TypeScript sources, so tests type-check
// without a prior build of the SDK packages.
{
  "extends": "../tsconfig.json",
  "compilerOptions": {
    "baseUrl": "..",
    "paths": {
      // Both "@/*" and "@fastgpt/*" (below) resolve to the shared packages tree.
      "@/*": ["../../packages/*"],
      "@fastgpt-sdk/logger": ["../../sdk/logger/src/index.ts"],
      "@fastgpt-sdk/storage": ["../../sdk/storage/src/index.ts"],
      // Bare otel entry plus its explicit subpath exports.
      "@fastgpt-sdk/otel": ["../../sdk/otel/src/index.ts"],
      "@fastgpt-sdk/otel/logger": ["../../sdk/otel/src/logger-entry.ts"],
      "@fastgpt-sdk/otel/metrics": ["../../sdk/otel/src/metrics-entry.ts"],
      "@fastgpt-sdk/otel/tracing": ["../../sdk/otel/src/tracing-entry.ts"],
      "@fastgpt/*": ["../../packages/*"],
      "@test/*": ["../../test/*"]
    }
  },
  // Pull in package sources alongside the test files themselves.
  "include": ["**/*.test.ts", "../**/*.ts", "../**/*.tsx"],
  "exclude": ["node_modules"]
}
+45
View File
@@ -0,0 +1,45 @@
// Vitest configuration for this package's test suite.
// Aliases mirror the tsconfig path mappings so tests resolve workspace SDK
// sources directly instead of built artifacts.
import { resolve } from 'node:path';
import { defineConfig } from 'vitest/config';

export default defineConfig({
  resolve: {
    alias: {
      // NOTE(review): resolve('..') is relative to process.cwd(); this
      // presumably assumes vitest is launched from the package directory —
      // confirm against the workspace test scripts.
      '@': resolve('..'),
      '@fastgpt-sdk/logger': resolve('../../sdk/logger/src/index.ts'),
      '@fastgpt-sdk/storage': resolve('../../sdk/storage/src/index.ts'),
      // Subpath entries are listed before the bare '@fastgpt-sdk/otel'
      // alias so they match first — keep this ordering.
      '@fastgpt-sdk/otel/logger': resolve('../../sdk/otel/src/logger-entry.ts'),
      '@fastgpt-sdk/otel/metrics': resolve('../../sdk/otel/src/metrics-entry.ts'),
      '@fastgpt-sdk/otel/tracing': resolve('../../sdk/otel/src/tracing-entry.ts'),
      '@fastgpt-sdk/otel': resolve('../../sdk/otel/src/index.ts'),
      '@fastgpt': resolve('..'),
      '@test': resolve('../../test')
    }
  },
  test: {
    coverage: {
      enabled: true,
      // json-summary output is what the CI coverage-report action consumes.
      reporter: ['text', 'text-summary', 'html', 'json-summary', 'json'],
      // Keep generating the report even when tests fail, so CI can post it.
      reportOnFailure: true,
      include: ['common/**/*.ts', 'core/**/*.ts', 'support/**/*.ts', 'openapi/**/*.ts'],
      exclude: [
        '**/node_modules/**',
        '**/*.spec.ts',
        '**/*/*.d.ts',
        '**/test/**',
        '**/*.test.ts',
        '**/*/constants.ts',
        '**/*/*.const.ts',
        '**/*/type.ts',
        '**/*/types.ts',
        '**/*/type/*',
        '**/*/schema.ts',
        '**/*/*.schema.ts',
        // NOTE(review): this exclude overrides the 'openapi/**/*.ts' entry
        // in `include` above, so openapi is effectively not covered —
        // confirm which was intended.
        'openapi/**/*',
        'core/workflow/template/**/*'
      ],
      cleanOnRerun: false
    },
    outputFile: 'test-results.json',
    include: ['test/**/*.test.ts']
  }
});
+2 -1
View File
@@ -3,10 +3,11 @@ import { ProxyAgent } from 'proxy-agent';
import { isDevEnv } from '@fastgpt/global/common/system/constants';
export function createProxyAxios(config?: AxiosRequestConfig) {
const agent = new ProxyAgent();
if (isDevEnv) {
return _.create(config);
}
const agent = new ProxyAgent();
return _.create({
proxy: false,
+3 -1
View File
@@ -3,6 +3,7 @@ import {
type Processor,
Queue,
type QueueOptions,
UnrecoverableError,
Worker,
type WorkerOptions
} from 'bullmq';
@@ -150,4 +151,5 @@ export function getWorker<DataType, ReturnType = void>(
return newWorker;
}
export * from 'bullmq';
export { Queue, UnrecoverableError, Worker, delay };
export type { ConnectionOptions, Job, Processor, QueueOptions, WorkerOptions } from 'bullmq';
+12 -1
View File
@@ -1,7 +1,18 @@
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import type { LocationName } from './type';
export const dbPath = path.join(process.cwd(), 'data/GeoLite2-City.mmdb');
const dbFileName = 'GeoLite2-City.mmdb';
const currentDir = path.dirname(fileURLToPath(import.meta.url));
const dbPathCandidates = [
path.resolve(currentDir, '../../../../projects/app/data', dbFileName),
path.resolve(process.cwd(), 'data', dbFileName),
path.resolve(process.cwd(), '../../projects/app/data', dbFileName)
];
export const dbPath = dbPathCandidates.find((item) => fs.existsSync(item)) ?? dbPathCandidates[0];
export const privateOrOtherLocationName: LocationName = {
city: undefined,
+15 -2
View File
@@ -1,12 +1,25 @@
import { isTestEnv } from '@fastgpt/global/common/system/constants';
import { getLogger, LogCategories } from '../logger';
import type { Model } from 'mongoose';
import type {
AnyBulkWriteOperation,
ClientSession,
Model,
Mongoose as MongooseType,
PipelineStage
} from 'mongoose';
import mongoose, { Mongoose } from 'mongoose';
const logger = getLogger(LogCategories.INFRA.MONGO);
export default mongoose;
export * from 'mongoose';
export { Schema, Types } from 'mongoose';
export type {
AnyBulkWriteOperation,
ClientSession,
Model,
MongooseType as Mongoose,
PipelineStage
};
export const MONGO_URL = process.env.MONGODB_URI as string;
export const MONGO_LOG_URL = (process.env.MONGODB_LOG_URI ?? process.env.MONGODB_URI) as string;
+58 -6
View File
@@ -1,5 +1,6 @@
import { getLogger, LogCategories } from '../logger';
import Redis from 'ioredis';
import type { RedisOptions } from 'ioredis';
const logger = getLogger(LogCategories.INFRA.REDIS);
@@ -41,9 +42,60 @@ const REDIS_BASE_OPTION = {
enableOfflineQueue: true
};
const getRedisConnectionOptions = (): RedisOptions => {
if (REDIS_URL.startsWith('/')) {
return {
...REDIS_BASE_OPTION,
path: REDIS_URL
};
}
const normalizedRedisUrl = REDIS_URL.includes('://') ? REDIS_URL : `redis://${REDIS_URL}`;
try {
const redisUrl = new URL(normalizedRedisUrl);
const protocol = redisUrl.protocol.toLowerCase();
if (protocol !== 'redis:' && protocol !== 'rediss:') {
logger.warn('Unsupported Redis URL protocol, fallback to defaults', {
protocol,
redisUrl: REDIS_URL
});
return {
...REDIS_BASE_OPTION
};
}
const dbFromPath = redisUrl.pathname.replace(/^\//, '');
const parsedDb = dbFromPath ? Number(dbFromPath) : undefined;
const db = Number.isFinite(parsedDb) ? parsedDb : undefined;
const options: RedisOptions = {
...REDIS_BASE_OPTION,
host: redisUrl.hostname || 'localhost',
port: redisUrl.port ? Number(redisUrl.port) : 6379
};
if (redisUrl.username) options.username = decodeURIComponent(redisUrl.username);
if (redisUrl.password) options.password = decodeURIComponent(redisUrl.password);
if (db !== undefined) options.db = db;
if (protocol === 'rediss:') options.tls = {};
return options;
} catch (error) {
logger.warn('Failed to parse REDIS_URL with WHATWG URL API, fallback to defaults', {
redisUrl: REDIS_URL,
error: String(error)
});
return {
...REDIS_BASE_OPTION
};
}
};
export const newQueueRedisConnection = () => {
const redis = new Redis(REDIS_URL, {
...REDIS_BASE_OPTION,
const redis = new Redis({
...getRedisConnectionOptions(),
// Limit retries for queue operations
maxRetriesPerRequest: 3
});
@@ -51,8 +103,8 @@ export const newQueueRedisConnection = () => {
};
export const newWorkerRedisConnection = () => {
const redis = new Redis(REDIS_URL, {
...REDIS_BASE_OPTION,
const redis = new Redis({
...getRedisConnectionOptions(),
// BullMQ requires maxRetriesPerRequest: null for blocking operations
maxRetriesPerRequest: null
});
@@ -63,8 +115,8 @@ export const FASTGPT_REDIS_PREFIX = 'fastgpt:';
export const getGlobalRedisConnection = () => {
if (global.redisClient) return global.redisClient;
global.redisClient = new Redis(REDIS_URL, {
...REDIS_BASE_OPTION,
global.redisClient = new Redis({
...getRedisConnectionOptions(),
keyPrefix: FASTGPT_REDIS_PREFIX,
maxRetriesPerRequest: 3
});
@@ -0,0 +1,14 @@
const emptyModule = new Proxy(function emptyModule() {}, {
get() {
return emptyModule;
},
apply() {
return undefined;
},
construct() {
return {};
}
});
module.exports = emptyModule;
module.exports.default = emptyModule;
+221
View File
@@ -0,0 +1,221 @@
import { inspect } from 'node:util';

// Signature of a project error-to-text helper (takes the raw error and an
// optional default string, returns a displayable message).
type ErrTextGetter = (error: any, def?: string) => string;

// JSON-safe snapshot of an initialization failure; `cause` recursively
// serializes the error's cause chain.
type SerializedInitializationError = {
  message: string;
  name?: string;
  code?: string;
  stage?: string;
  step?: string;
  stack?: string;
  cause?: SerializedInitializationError;
  // Full `util.inspect` dump of the original error value.
  details: string;
};

// Minimal logger surface the init helpers need; `info` is optional.
type InitializationLogger = {
  error: (message: string, payload?: Record<string, unknown>) => void;
  info?: (message: string, payload?: Record<string, unknown>) => void;
};
// Best-effort human-readable message from a plain error-like object:
// returns the first non-empty string found among `message`, `msg`,
// `error`, `code` (in that priority order), otherwise `fallback`.
const getObjectMessage = (error: Record<string, any>, fallback: string) => {
  for (const key of ['message', 'msg', 'error', 'code'] as const) {
    const candidate = error[key];
    if (typeof candidate === 'string' && candidate) {
      return candidate;
    }
  }
  return fallback;
};
/**
 * Convert an arbitrary thrown value into a structured, JSON-safe record.
 *
 * Handles Error instances (including custom `code`/`stage`/`step` fields
 * and a recursive `cause` chain), plain strings, and generic objects;
 * anything else is stringified. `depth` caps cause recursion at 5 levels
 * to guard against pathological or cyclic cause chains.
 */
export const serializeInitializationError = (
  error: unknown,
  depth = 0
): SerializedInitializationError => {
  const fallback = 'Unknown initialization error';
  // Stop recursing once the cause chain is suspiciously deep.
  if (depth > 5) {
    return {
      message: 'Max initialization error depth reached',
      details: 'Max initialization error depth reached'
    };
  }
  if (error instanceof Error) {
    // Widen to pick up the optional fields our init errors may carry.
    const err = error as Error & {
      code?: string;
      stage?: string;
      step?: string;
      cause?: unknown;
    };
    return {
      message: err.message || fallback,
      name: err.name,
      code: err.code,
      stage: err.stage,
      step: err.step,
      stack: err.stack,
      cause: err.cause ? serializeInitializationError(err.cause, depth + 1) : undefined,
      details: inspect(error, { depth: 6, breakLength: 120 })
    };
  }
  if (typeof error === 'string') {
    return {
      message: error || fallback,
      details: error || fallback
    };
  }
  if (error && typeof error === 'object') {
    const err = error as Record<string, any>;
    return {
      // Pull the first usable string field (message/msg/error/code).
      message: getObjectMessage(err, fallback),
      code: typeof err.code === 'string' ? err.code : undefined,
      stage: typeof err.stage === 'string' ? err.stage : undefined,
      step: typeof err.step === 'string' ? err.step : undefined,
      cause: err.cause ? serializeInitializationError(err.cause, depth + 1) : undefined,
      details: inspect(error, { depth: 6, breakLength: 120 })
    };
  }
  // Primitives (number, boolean, null, undefined, symbol, bigint).
  return {
    message: fallback,
    details: String(error)
  };
};
/**
 * Wrap an arbitrary thrown value as a `SystemInitializationError` whose
 * message is prefixed with the failing `[stage / step]` labels. The original
 * value is preserved on `cause`, and `stage`/`step` are attached to the
 * returned error object.
 */
export const createInitializationError = (
  error: unknown,
  {
    stage,
    step,
    getErrText
  }: {
    stage?: string;
    step?: string;
    getErrText?: ErrTextGetter;
  } = {}
) => {
  const fallback = 'Unknown initialization error';

  // Prefer the caller-supplied extractor; fall back to our own serializer.
  const text =
    getErrText?.(error, fallback) || serializeInitializationError(error).message || fallback;

  const prefix = [stage, step].filter(Boolean).join(' / ');
  const message = prefix ? `[${prefix}]: ${text}` : text;

  const wrapped = new Error(message, { cause: error });
  wrapped.name = 'SystemInitializationError';
  return Object.assign(wrapped, { stage, step });
};
/**
 * Run one (possibly async) initialization step.
 *
 * On success, resolves with the action's result. On failure, logs a
 * structured payload (console + optional logger) and rethrows the error
 * wrapped as a SystemInitializationError.
 */
export const runInitializationStep = async <T>({
  step,
  action,
  stage,
  logger,
  getErrText,
  meta
}: {
  step: string;
  action: () => Promise<T> | T;
  stage?: string;
  logger?: InitializationLogger;
  getErrText?: ErrTextGetter;
  meta?: Record<string, unknown>;
}) => {
  try {
    return await action();
  } catch (error) {
    const title = 'System initialization step failed';
    const payload = {
      step,
      stage,
      ...meta,
      ...getInitializationErrorLog(error)
    };
    console.error(title, payload);
    logger?.error(`${title}: ${step}`, payload);
    throw createInitializationError(error, {
      stage,
      step,
      getErrText
    });
  }
};
/**
 * Kick off a fire-and-forget initialization step.
 *
 * Synchronous failures are logged and rethrown as SystemInitializationError;
 * asynchronous failures are logged only (the returned promise is detached
 * and never rejects the caller).
 */
export const runBackgroundInitializationStep = ({
  step,
  action,
  stage,
  logger,
  getErrText,
  meta
}: {
  step: string;
  action: () => Promise<unknown> | unknown;
  stage?: string;
  logger?: InitializationLogger;
  getErrText?: ErrTextGetter;
  meta?: Record<string, unknown>;
}) => {
  // DRY fix: the sync catch and the async .catch previously duplicated this
  // logging block verbatim — share one reporter so they cannot drift apart.
  const reportFailure = (error: unknown) => {
    const logPayload = {
      step,
      stage,
      ...meta,
      ...getInitializationErrorLog(error)
    };
    console.error('System background initialization step failed', logPayload);
    logger?.error(`System background initialization step failed: ${step}`, logPayload);
  };

  try {
    const task = action();
    logger?.info?.('System background initialization step started', {
      step,
      stage,
      ...meta
    });
    // Detach: async failures are reported but never propagated.
    void Promise.resolve(task).catch(reportFailure);
  } catch (error) {
    // The action threw before a promise existed: report AND rethrow wrapped.
    reportFailure(error);
    throw createInitializationError(error, {
      stage,
      step,
      getErrText
    });
  }
};
/**
 * Flatten a thrown value into `error*`-prefixed fields suitable for
 * spreading into a structured log payload.
 */
export const getInitializationErrorLog = (error: unknown) => {
  const {
    message: errorMessage,
    name: errorName,
    code: errorCode,
    stage: errorStage,
    step: errorStep,
    stack: errorStack,
    cause: errorCause,
    details: errorDetails
  } = serializeInitializationError(error);
  return {
    errorMessage,
    errorName,
    errorCode,
    errorStage,
    errorStep,
    errorStack,
    errorCause,
    errorDetails
  };
};
@@ -55,6 +55,8 @@ SandboxInstanceSchema.index(
{
unique: true,
partialFilterExpression: {
// Keep the index compatible with Mongo-compatible backends that do not
// support `$ne: null` inside partial indexes.
appId: { $exists: true },
userId: { $exists: true },
chatId: { $exists: true }
+16 -10
View File
@@ -2,28 +2,34 @@
"name": "@fastgpt/service",
"version": "1.0.0",
"type": "module",
"scripts": {
"test": "vitest run -c vitest.config.ts",
"test:watch": "vitest -c vitest.config.ts",
"test:integration": "vitest run -c vitest.integration.config.ts",
"test:integration:watch": "vitest -c vitest.integration.config.ts"
},
"engines": {
"node": ">=20",
"pnpm": "10.x"
},
"dependencies": {
"@apidevtools/json-schema-ref-parser": "^11.7.2",
"@fastgpt-sdk/otel": "workspace:*",
"@fastgpt-sdk/sandbox-adapter": "^0.0.36",
"@fastgpt-sdk/storage": "workspace:*",
"@fastgpt/global": "workspace:*",
"@mariozechner/pi-agent-core": "^0.67.3",
"@mariozechner/pi-ai": "^0.67.3",
"@maxmind/geoip2-node": "^6.3.4",
"@modelcontextprotocol/sdk": "catalog:",
"@node-rs/jieba": "catalog:",
"@opentelemetry/api": "^1.9.0",
"@t3-oss/env-core": "0.13.10",
"@xmldom/xmldom": "^0.8.10",
"@zilliz/milvus2-sdk-node": "2.4.10",
"axios": "catalog:",
"bullmq": "^5.52.2",
"chalk": "catalog:",
"cheerio": "1.0.0-rc.12",
"cookie": "^0.7.1",
"date-fns": "catalog:",
@@ -42,14 +48,14 @@
"json5": "catalog:",
"jsonpath-plus": "^10.3.0",
"jsonrepair": "^3.0.0",
"jsonwebtoken": "catalog:",
"jszip": "^3.10.1",
"lodash": "catalog:",
"mammoth": "^1.11.0",
"mime": "catalog:",
"mime-types": "catalog:",
"minio": "catalog:",
"mongoose": "catalog:",
"multer": "2.1.0",
"mysql2": "^3.11.3",
"next": "catalog:",
@@ -64,7 +70,7 @@
"pino-opentelemetry-transport": "^1.0.1",
"proxy-agent": "catalog:",
"proxy-from-env": "^1.1.0",
"request-ip": "catalog:",
"tiktoken": "1.0.17",
"tunnel": "^0.0.6",
"turndown": "^7.1.2",
@@ -76,7 +82,7 @@
"@types/async-retry": "^1.4.9",
"@types/cookie": "^0.5.2",
"@types/decompress": "^4.2.7",
"@types/jsonwebtoken": "catalog:",
"@types/lodash": "catalog:",
"@types/mime-types": "catalog:",
"@types/multer": "^1.4.10",
@@ -84,7 +90,7 @@
"@types/papaparse": "5.3.7",
"@types/pg": "^8.6.6",
"@types/proxy-from-env": "^1.0.4",
"@types/request-ip": "catalog:",
"@types/tunnel": "^0.0.4",
"@types/turndown": "^5.0.4"
}

Some files were not shown because too many files have changed in this diff Show More