diff --git a/.github/workflows/docs-preview.yml b/.github/workflows/docs-preview.yml index 3a11fbda5..900082e4a 100644 --- a/.github/workflows/docs-preview.yml +++ b/.github/workflows/docs-preview.yml @@ -10,6 +10,12 @@ on: jobs: # This workflow contains jobs "deploy-production" deploy-preview: + permissions: + contents: read + packages: write + attestations: write + id-token: write + pull-requests: write # The environment this job references environment: name: Preview @@ -32,6 +38,7 @@ jobs: repository: ${{ github.event.pull_request.head.repo.full_name }} submodules: recursive # Fetch submodules fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod + token: ${{ secrets.GITHUB_TOKEN }} # Step 2 Detect changes to Docs Content - name: Detect changes in doc content @@ -58,36 +65,38 @@ jobs: - name: Build run: cd docSite && hugo mod get -u github.com/colinwilson/lotusdocs@6d0568e && hugo -v --minify - # Step 5 - Push our generated site to Vercel - - name: Deploy to Vercel - uses: amondnet/vercel-action@v25 - id: vercel-action + - name: Test + run: ls ./docSite/public + + # Step 5 - Push our generated site to Cloudflare + - name: Deploy to Cloudflare Pages + id: deploy + uses: cloudflare/wrangler-action@v3 with: - vercel-token: ${{ secrets.VERCEL_TOKEN }} # Required - vercel-org-id: ${{ secrets.VERCEL_ORG_ID }} #Required - vercel-project-id: ${{ secrets.VERCEL_PROJECT_ID }} #Required - github-comment: false - vercel-args: '--local-config ../vercel.json' # Optional - working-directory: docSite/public - alias-domains: | #Optional - fastgpt-staging.vercel.app - docsOutput: - permissions: - pull-requests: write - needs: [deploy-preview] - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + command: pages deploy ./docSite/public --project-name=fastgpt-doc + packageManager: npm + + - name: Create deployment status comment + if: always() + env: + JOB_STATUS: ${{ job.status }} + PREVIEW_URL: ${{ steps.deploy.outputs.deployment-url }} + uses: actions/github-script@v6 with: - ref: ${{ github.event.pull_request.head.ref }} - repository: ${{ github.event.pull_request.head.repo.full_name }} - - uses: actions/github-script@v7 - with: - github-token: ${{secrets.GITHUB_TOKEN}} + token: ${{ secrets.GITHUB_TOKEN }} script: | - github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: '[👀 Visit preview document](${{ needs.deploy-preview.outputs.url }})' - }) + const success = process.env.JOB_STATUS === 'success'; + const deploymentUrl = `${process.env.PREVIEW_URL}`; + const status = success ? '✅ Success' : '❌ Failed'; + console.log(process.env.JOB_STATUS); + + const commentBody = `**Deployment Status: ${status}** + ${success ? 
`🔗 Preview URL: ${deploymentUrl}` : ''}`; + + await github.rest.issues.createComment({ + ...context.repo, + issue_number: context.payload.pull_request.number, + body: commentBody + }); diff --git a/.github/workflows/fastgpt-preview-image.yml b/.github/workflows/fastgpt-preview-image.yml index e831fdedc..351a79ece 100644 --- a/.github/workflows/fastgpt-preview-image.yml +++ b/.github/workflows/fastgpt-preview-image.yml @@ -18,7 +18,9 @@ jobs: uses: actions/checkout@v3 with: ref: ${{ github.event.pull_request.head.ref }} + repository: ${{ github.event.pull_request.head.repo.full_name }} fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod + token: ${{ secrets.GITHUB_TOKEN }} - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 diff --git a/deploy/docker/docker-compose-milvus.yml b/deploy/docker/docker-compose-milvus.yml index f875d61ec..e54f43dc8 100644 --- a/deploy/docker/docker-compose-milvus.yml +++ b/deploy/docker/docker-compose-milvus.yml @@ -110,6 +110,18 @@ services: # 等待docker-entrypoint.sh脚本执行的MongoDB服务进程 wait $$! + redis: + image: redis:7.2-alpine + container_name: redis + # ports: + # - 6379:6379 + networks: + - fastgpt + restart: always + command: | + redis-server --requirepass mypassword --loglevel warning --maxclients 10000 --appendonly yes --save 60 10 --maxmemory 4gb --maxmemory-policy noeviction + volumes: + - ./redis/data:/data # fastgpt sandbox: @@ -157,6 +169,8 @@ services: # zilliz 连接参数 - MILVUS_ADDRESS=http://milvusStandalone:19530 - MILVUS_TOKEN=none + # Redis 地址 + - REDIS_URL=redis://default:mypassword@redis:6379 # sandbox 地址 - SANDBOX_URL=http://sandbox:3000 # 日志等级: debug, info, warn, error diff --git a/deploy/docker/docker-compose-pgvector.yml b/deploy/docker/docker-compose-pgvector.yml index 0a21d9c29..c3f2db44a 100644 --- a/deploy/docker/docker-compose-pgvector.yml +++ b/deploy/docker/docker-compose-pgvector.yml @@ -69,6 +69,19 @@ services: # 等待docker-entrypoint.sh脚本执行的MongoDB服务进程 wait $$! + redis: + image: redis:7.2-alpine + container_name: redis + # ports: + # - 6379:6379 + networks: + - fastgpt + restart: always + command: | + redis-server --requirepass mypassword --loglevel warning --maxclients 10000 --appendonly yes --save 60 10 --maxmemory 4gb --maxmemory-policy noeviction + volumes: + - ./redis/data:/data + # fastgpt sandbox: container_name: sandbox @@ -114,6 +127,8 @@ services: - MONGODB_URI=mongodb://myusername:mypassword@mongo:27017/fastgpt?authSource=admin # pg 连接参数 - PG_URL=postgresql://username:password@pg:5432/postgres + # Redis 连接参数 + - REDIS_URL=redis://default:mypassword@redis:6379 # sandbox 地址 - SANDBOX_URL=http://sandbox:3000 # 日志等级: debug, info, warn, error diff --git a/deploy/docker/docker-compose-zilliz.yml b/deploy/docker/docker-compose-zilliz.yml index 8e2c0bb7a..47e54a104 100644 --- a/deploy/docker/docker-compose-zilliz.yml +++ b/deploy/docker/docker-compose-zilliz.yml @@ -51,6 +51,19 @@ services: # 等待docker-entrypoint.sh脚本执行的MongoDB服务进程 wait $$! + redis: + image: redis:7.2-alpine + container_name: redis + # ports: + # - 6379:6379 + networks: + - fastgpt + restart: always + command: | + redis-server --requirepass mypassword --loglevel warning --maxclients 10000 --appendonly yes --save 60 10 --maxmemory 4gb --maxmemory-policy noeviction + volumes: + - ./redis/data:/data + sandbox: container_name: sandbox image: ghcr.io/labring/fastgpt-sandbox:v4.9.3 # git @@ -92,6 +105,8 @@ services: - FILE_TOKEN_KEY=filetoken # MongoDB 连接参数. 
用户名myusername,密码mypassword。 - MONGODB_URI=mongodb://myusername:mypassword@mongo:27017/fastgpt?authSource=admin + # Redis 连接参数 + - REDIS_URI=redis://default:mypassword@redis:6379 # zilliz 连接参数 - MILVUS_ADDRESS=zilliz_cloud_address - MILVUS_TOKEN=zilliz_cloud_token diff --git a/docSite/assets/imgs/sealos-redis1.png b/docSite/assets/imgs/sealos-redis1.png new file mode 100644 index 000000000..240f30786 Binary files /dev/null and b/docSite/assets/imgs/sealos-redis1.png differ diff --git a/docSite/assets/imgs/sealos-redis2.png b/docSite/assets/imgs/sealos-redis2.png new file mode 100644 index 000000000..990ef3243 Binary files /dev/null and b/docSite/assets/imgs/sealos-redis2.png differ diff --git a/docSite/assets/imgs/sealos-redis3.png b/docSite/assets/imgs/sealos-redis3.png new file mode 100644 index 000000000..c0ff867fc Binary files /dev/null and b/docSite/assets/imgs/sealos-redis3.png differ diff --git a/docSite/content/zh-cn/docs/development/openapi/chat.md b/docSite/content/zh-cn/docs/development/openapi/chat.md index 7a54c312d..731cb5978 100644 --- a/docSite/content/zh-cn/docs/development/openapi/chat.md +++ b/docSite/content/zh-cn/docs/development/openapi/chat.md @@ -18,12 +18,14 @@ weight: 852 {{% alert icon="🤖 " context="success" %}} * 该接口的 API Key 需使用`应用特定的 key`,否则会报错。 + + * 有些包调用时,`BaseUrl`需要添加`v1`路径,有些不需要,如果出现404情况,可补充`v1`重试。 {{% /alert %}} ## 请求简易应用和工作流 -对话接口兼容`GPT`的接口!如果你的项目使用的是标准的`GPT`官方接口,可以直接通过修改`BaseUrl`和 `Authorization`来访问 FastGpt 应用,不过需要注意下面几个规则: +`v1`对话接口兼容`GPT`的接口!如果你的项目使用的是标准的`GPT`官方接口,可以直接通过修改`BaseUrl`和 `Authorization`来访问 FastGpt 应用,不过需要注意下面几个规则: {{% alert icon="🤖 " context="success" %}} * 传入的`model`,`temperature`等参数字段均无效,这些字段由编排决定,不会根据 API 参数改变。 @@ -32,6 +34,100 @@ weight: 852 ### 请求 + + {{< tabs tabTotal="5" >}} {{< tab tabName="detail=false,stream=false 响应" >}} {{< markdownify >}} @@ -475,6 +745,8 @@ curl --location --request POST 'https://api.fastgpt.in/api/v1/chat/completions' ### 请求示例 +#### v1 + ```bash curl --location --request POST 'http://localhost:3000/api/v1/chat/completions' \ --header 'Authorization: Bearer test-xxxxx' \ @@ -488,8 +760,25 @@ curl --location --request POST 'http://localhost:3000/api/v1/chat/completions' \ }' ``` +#### v2 + +```bash +curl --location --request POST 'http://localhost:3000/api/v2/chat/completions' \ +--header 'Authorization: Bearer test-xxxxx' \ +--header 'Content-Type: application/json' \ +--data-raw '{ + "stream": false, + "chatId": "test", + "variables": { + "query":"你好" + } +}' +``` + ### 响应示例 +#### v1 + {{< tabs tabTotal="3" >}} {{< tab tabName="detail=true,stream=false 响应" >}} @@ -649,6 +938,149 @@ event取值: {{< /tabs >}} +#### v2 + +{{< tabs tabTotal="3" >}} + +{{< tab tabName="detail=true,stream=false 响应" >}} +{{< markdownify >}} + +* 插件的输出可以通过查找`responseData`中, `moduleType=pluginOutput`的元素,其`pluginOutput`是插件的输出。 +* 流输出,仍可以通过`choices`进行获取。 + +```json +{ + "responseData": [ + { + "id": "bsH1ZdbYkz9iJwYa", + "nodeId": "pluginInput", + "moduleName": "workflow:template.plugin_start", + "moduleType": "pluginInput", + "runningTime": 0 + }, + { + "id": "zDgfqSPhbYZFHVIn", + "nodeId": "h4Gr4lJtFVQ6qI4c", + "moduleName": "AI 对话", + "moduleType": "chatNode", + "runningTime": 1.44, + "totalPoints": 0, + "model": "GPT-4o-mini", + "tokens": 34, + "inputTokens": 8, + "outputTokens": 26, + "query": "你好", + "reasoningText": "", + "historyPreview": [ + { + "obj": "Human", + "value": "你好" + }, + { + "obj": "AI", + "value": "你好!有什么我可以帮助你的吗?" 
+ } + ], + "contextTotalLen": 2 + }, + { + "id": "uLLwKKRZvufXzgF4", + "nodeId": "pluginOutput", + "moduleName": "common:core.module.template.self_output", + "moduleType": "pluginOutput", + "runningTime": 0, + "totalPoints": 0, + "pluginOutput": { + "result": "你好!有什么我可以帮助你的吗?" + } + } + ], + "newVariables": { + + }, + "id": "test", + "model": "", + "usage": { + "prompt_tokens": 1, + "completion_tokens": 1, + "total_tokens": 1 + }, + "choices": [ + { + "message": { + "role": "assistant", + "content": "你好!有什么我可以帮助你的吗?" + }, + "finish_reason": "stop", + "index": 0 + } + ] +} +``` + +{{< /markdownify >}} +{{< /tab >}} + + +{{< tab tabName="detail=true,stream=true 响应" >}} +{{< markdownify >}} + +* 插件的输出可以通过获取`event=flowResponses`中的字符串,并将其反序列化后得到一个数组。同样的,查找 `moduleType=pluginOutput`的元素,其`pluginOutput`是插件的输出。 +* 流输出,仍和对话接口一样获取。 + +```bash +data: {"event":"flowNodeResponse","data":"{\"id\":\"q8ablUOqHGgqLIRM\",\"nodeId\":\"pluginInput\",\"moduleName\":\"workflow:template.plugin_start\",\"moduleType\":\"pluginInput\",\"runningTime\":0}"} + +data: {"event":"flowNodeStatus","data":"{\"status\":\"running\",\"name\":\"AI 对话\"}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"你好\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"!\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"有什么\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"我\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"可以\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"帮助\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"你\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"的吗\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":\"?\"},\"index\":0,\"finish_reason\":null}]}"} + +data: {"event":"flowNodeResponse","data":"{\"id\":\"rqlXLUap8QeiN7Kf\",\"nodeId\":\"h4Gr4lJtFVQ6qI4c\",\"moduleName\":\"AI 对话\",\"moduleType\":\"chatNode\",\"runningTime\":1.79,\"totalPoints\":0,\"model\":\"GPT-4o-mini\",\"tokens\":137,\"inputTokens\":111,\"outputTokens\":26,\"query\":\"你好\",\"reasoningText\":\"\",\"historyPreview\":[{\"obj\":\"Human\",\"value\":\"[{\\\"renderTypeList\\\":[\\\"reference\\\"],\\\"selectedTypeInde\\n\\n...[hide 174 
chars]...\\n\\ncanSelectImg\\\":true,\\\"required\\\":false,\\\"value\\\":\\\"你好\\\"}]\"},{\"obj\":\"AI\",\"value\":\"你好!有什么我可以帮助你的吗?\"},{\"obj\":\"Human\",\"value\":\"你好\"},{\"obj\":\"AI\",\"value\":\"你好!有什么我可以帮助你的吗?\"}],\"contextTotalLen\":4}"} + +data: {"event":"flowNodeResponse","data":"{\"id\":\"lHCpHI0MrM00HQlX\",\"nodeId\":\"pluginOutput\",\"moduleName\":\"common:core.module.template.self_output\",\"moduleType\":\"pluginOutput\",\"runningTime\":0,\"totalPoints\":0,\"pluginOutput\":{\"result\":\"你好!有什么我可以帮助你的吗?\"}}"} + +data: {"event":"answer","data":"{\"id\":\"\",\"object\":\"\",\"created\":0,\"model\":\"\",\"choices\":[{\"delta\":{\"role\":\"assistant\",\"content\":null},\"index\":0,\"finish_reason\":\"stop\"}]}"} + +data: {"event":"answer","data":"[DONE]"} +``` + +{{< /markdownify >}} +{{< /tab >}} + +{{< tab tabName="输出获取" >}} +{{< markdownify >}} + +event取值: + +- answer: 返回给客户端的文本(最终会算作回答) +- fastAnswer: 指定回复返回给客户端的文本(最终会算作回答) +- toolCall: 执行工具 +- toolParams: 工具参数 +- toolResponse: 工具返回 +- flowNodeStatus: 运行到的节点状态 +- flowNodeResponse: 单个节点详细响应 +- updateVariables: 更新变量 +- error: 报错 + +{{< /markdownify >}} +{{< /tab >}} +{{< /tabs >}} # 对话 CRUD diff --git a/docSite/content/zh-cn/docs/development/upgrading/494.md b/docSite/content/zh-cn/docs/development/upgrading/494.md index 18f6accac..bf996ad25 100644 --- a/docSite/content/zh-cn/docs/development/upgrading/494.md +++ b/docSite/content/zh-cn/docs/development/upgrading/494.md @@ -7,10 +7,53 @@ toc: true weight: 796 --- +## 升级指南 + +### 1. 做好数据备份 + +### 1. 安装 Redis + +* docker 部署的用户,参考最新的 `docker-compose.yml` 文件增加 Redis 配置。增加一个 redis 容器,并配置`fastgpt`,`fastgpt-pro`的环境变量,增加 `REDIS_URL` 环境变量。 +* Sealos 部署的用户,在数据库里新建一个`redis`数据库,并复制`内网地址的 connection` 作为 `redis` 的链接串。然后配置`fastgpt`,`fastgpt-pro`的环境变量,增加 `REDIS_URL` 环境变量。 + +| | | | +| --- | --- | --- | +| ![](/imgs/sealos-redis1.png) | ![](/imgs/sealos-redis2.png) | ![](/imgs/sealos-redis3.png) | + +### 2. 更新镜像 tag + + +### 3. 执行升级脚本 + +该脚本仅需商业版用户执行。 + +从任意终端,发起 1 个 HTTP 请求。其中 {{rootkey}} 替换成环境变量里的 `rootkey`;{{host}} 替换成**FastGPT 域名**。 + +```bash +curl --location --request POST 'https://{{host}}/api/admin/initv494' \ +--header 'rootkey: {{rootkey}}' \ +--header 'Content-Type: application/json' +``` + +**脚本功能** + +1. 更新站点同步定时器 ## 🚀 新增内容 -1. SMTP 发送邮件插件 +1. 集合数据训练状态展示 +2. SMTP 发送邮件插件 +3. BullMQ 消息队列。 +4. 利用 redis 进行部分数据缓存。 +5. 站点同步支持配置训练参数。 +6. AI 对话/工具调用,增加返回模型 finish_reason 字段。 + +## ⚙️ 优化 + +1. Admin 模板渲染调整。 +2. 支持环境变量配置对话文件过期时间。 +3. MongoDB log 库可独立部署。 ## 🐛 修复 +1. 搜索应用/知识库时,无法点击目录进入下一层。 \ No newline at end of file diff --git a/docSite/content/zh-cn/docs/intro.md b/docSite/content/zh-cn/docs/intro.md index 90a808125..48dec95b2 100644 --- a/docSite/content/zh-cn/docs/intro.md +++ b/docSite/content/zh-cn/docs/intro.md @@ -7,7 +7,7 @@ toc: true weight: -10 --- -FastGPT 是一个基于 LLM 大语言模型的知识库问答系统,提供开箱即用的数据处理、模型调用等能力。同时可以通过 Flow 可视化进行工作流编排,从而实现复杂的问答场景! 
+FastGPT 是一个AI Agent 构建平台,提供开箱即用的数据处理、模型调用等能力,同时可以通过 Flow 可视化进行工作流编排,从而实现复杂的应用场景!t {{% alert icon="🤖 " context="success" %}} FastGPT 在线使用:[https://tryfastgpt.ai](https://tryfastgpt.ai) diff --git a/env.d.ts b/env.d.ts new file mode 100644 index 000000000..6492948b4 --- /dev/null +++ b/env.d.ts @@ -0,0 +1,40 @@ +declare global { + namespace NodeJS { + interface ProcessEnv { + LOG_DEPTH: string; + DEFAULT_ROOT_PSW: string; + DB_MAX_LINK: string; + TOKEN_KEY: string; + FILE_TOKEN_KEY: string; + ROOT_KEY: string; + OPENAI_BASE_URL: string; + CHAT_API_KEY: string; + AIPROXY_API_ENDPOINT: string; + AIPROXY_API_TOKEN: string; + MULTIPLE_DATA_TO_BASE64: string; + MONGODB_URI: string; + MONGODB_LOG_URI?: string; + PG_URL: string; + OCEANBASE_URL: string; + MILVUS_ADDRESS: string; + MILVUS_TOKEN: string; + SANDBOX_URL: string; + PRO_URL: string; + FE_DOMAIN: string; + FILE_DOMAIN: string; + NEXT_PUBLIC_BASE_URL: string; + LOG_LEVEL: string; + STORE_LOG_LEVEL: string; + USE_IP_LIMIT: string; + WORKFLOW_MAX_RUN_TIMES: string; + WORKFLOW_MAX_LOOP_TIMES: string; + CHECK_INTERNAL_IP: string; + CHAT_LOG_URL: string; + CHAT_LOG_INTERVAL: string; + CHAT_LOG_SOURCE_ID_PREFIX: string; + ALLOWED_ORIGINS: string; + } + } +} + +export {}; diff --git a/packages/global/common/system/types/index.d.ts b/packages/global/common/system/types/index.d.ts index eb989da5d..6349226ea 100644 --- a/packages/global/common/system/types/index.d.ts +++ b/packages/global/common/system/types/index.d.ts @@ -118,11 +118,12 @@ export type SystemEnvType = { oneapiUrl?: string; chatApiKey?: string; - customPdfParse?: { - url?: string; - key?: string; - - doc2xKey?: string; - price?: number; // n points/1 page - }; + customPdfParse?: customPdfParseType; +}; + +export type customPdfParseType = { + url?: string; + key?: string; + doc2xKey?: string; + price?: number; }; diff --git a/packages/global/core/ai/constants.ts b/packages/global/core/ai/constants.ts index 45e7155f1..538fc1098 100644 --- a/packages/global/core/ai/constants.ts +++ b/packages/global/core/ai/constants.ts @@ -1,3 +1,5 @@ +import { i18nT } from '../../../web/i18n/utils'; + export enum ChatCompletionRequestMessageRoleEnum { 'System' = 'system', 'User' = 'user', @@ -28,3 +30,13 @@ export enum EmbeddingTypeEnm { query = 'query', db = 'db' } + +export const completionFinishReasonMap = { + close: i18nT('chat:completion_finish_close'), + stop: i18nT('chat:completion_finish_stop'), + length: i18nT('chat:completion_finish_length'), + tool_calls: i18nT('chat:completion_finish_tool_calls'), + content_filter: i18nT('chat:completion_finish_content_filter'), + function_call: i18nT('chat:completion_finish_function_call'), + null: i18nT('chat:completion_finish_null') +}; diff --git a/packages/global/core/ai/type.d.ts b/packages/global/core/ai/type.d.ts index c87cd931c..c7d1fa740 100644 --- a/packages/global/core/ai/type.d.ts +++ b/packages/global/core/ai/type.d.ts @@ -73,6 +73,15 @@ export type ChatCompletionMessageFunctionCall = export type StreamChatType = Stream; export type UnStreamChatType = openai.Chat.Completions.ChatCompletion; +export type CompletionFinishReason = + | 'close' + | 'stop' + | 'length' + | 'tool_calls' + | 'content_filter' + | 'function_call' + | null; + export default openai; export * from 'openai'; diff --git a/packages/global/core/dataset/api.d.ts b/packages/global/core/dataset/api.d.ts index 40e696b2d..abb5db927 100644 --- a/packages/global/core/dataset/api.d.ts +++ b/packages/global/core/dataset/api.d.ts @@ -15,7 +15,6 @@ export type DatasetUpdateBody = { 
name?: string; avatar?: string; intro?: string; - status?: DatasetSchemaType['status']; agentModel?: string; vlmModel?: string; @@ -26,6 +25,7 @@ export type DatasetUpdateBody = { apiServer?: DatasetSchemaType['apiServer']; yuqueServer?: DatasetSchemaType['yuqueServer']; feishuServer?: DatasetSchemaType['feishuServer']; + chunkSettings?: DatasetSchemaType['chunkSettings']; // sync schedule autoSync?: boolean; @@ -141,7 +141,6 @@ export type PushDatasetDataChunkProps = { export type PostWebsiteSyncParams = { datasetId: string; - billId: string; }; export type PushDatasetDataProps = { diff --git a/packages/global/core/dataset/constants.ts b/packages/global/core/dataset/constants.ts index 627129835..4ec67db14 100644 --- a/packages/global/core/dataset/constants.ts +++ b/packages/global/core/dataset/constants.ts @@ -50,7 +50,9 @@ export const DatasetTypeMap = { export enum DatasetStatusEnum { active = 'active', - syncing = 'syncing' + syncing = 'syncing', + waiting = 'waiting', + error = 'error' } export const DatasetStatusMap = { [DatasetStatusEnum.active]: { @@ -58,6 +60,12 @@ export const DatasetStatusMap = { }, [DatasetStatusEnum.syncing]: { label: i18nT('common:core.dataset.status.syncing') + }, + [DatasetStatusEnum.waiting]: { + label: i18nT('common:core.dataset.status.waiting') + }, + [DatasetStatusEnum.error]: { + label: i18nT('dataset:status_error') } }; diff --git a/packages/global/core/dataset/type.d.ts b/packages/global/core/dataset/type.d.ts index a92785b94..0f3e01b8b 100644 --- a/packages/global/core/dataset/type.d.ts +++ b/packages/global/core/dataset/type.d.ts @@ -17,6 +17,20 @@ import { SourceMemberType } from 'support/user/type'; import { DatasetDataIndexTypeEnum } from './data/constants'; import { ChunkSettingModeEnum } from './constants'; +export type ChunkSettingsType = { + trainingType: DatasetCollectionDataProcessModeEnum; + autoIndexes?: boolean; + imageIndex?: boolean; + + chunkSettingMode?: ChunkSettingModeEnum; + chunkSplitMode?: DataChunkSplitModeEnum; + + chunkSize?: number; + indexSize?: number; + chunkSplitter?: string; + qaPrompt?: string; +}; + export type DatasetSchemaType = { _id: string; parentId?: string; @@ -29,7 +43,6 @@ export type DatasetSchemaType = { name: string; intro: string; type: `${DatasetTypeEnum}`; - status: `${DatasetStatusEnum}`; vectorModel: string; agentModel: string; @@ -39,14 +52,16 @@ export type DatasetSchemaType = { url: string; selector: string; }; + + chunkSettings?: ChunkSettingsType; + inheritPermission: boolean; apiServer?: APIFileServer; feishuServer?: FeishuServer; yuqueServer?: YuqueServer; - autoSync?: boolean; - // abandon + autoSync?: boolean; externalReadUrl?: string; defaultPermission?: number; }; @@ -163,6 +178,7 @@ export type DatasetTrainingSchemaType = { weight: number; indexes: Omit[]; retryCount: number; + errorMsg?: string; }; export type CollectionWithDatasetType = DatasetCollectionSchemaType & { @@ -192,6 +208,8 @@ export type DatasetListItemType = { }; export type DatasetItemType = Omit & { + status: `${DatasetStatusEnum}`; + errorMsg?: string; vectorModel: EmbeddingModelItemType; agentModel: LLMModelItemType; vlmModel?: LLMModelItemType; @@ -216,6 +234,7 @@ export type DatasetCollectionItemType = CollectionWithDatasetType & { file?: DatasetFileSchema; permission: DatasetPermission; indexAmount: number; + errorCount?: number; }; /* ================= data ===================== */ diff --git a/packages/global/core/workflow/runtime/constants.ts b/packages/global/core/workflow/runtime/constants.ts index 
10737d282..0293d0b0c 100644 --- a/packages/global/core/workflow/runtime/constants.ts +++ b/packages/global/core/workflow/runtime/constants.ts @@ -5,6 +5,7 @@ export enum SseResponseEventEnum { answer = 'answer', // animation stream fastAnswer = 'fastAnswer', // direct answer text, not animation flowNodeStatus = 'flowNodeStatus', // update node status + flowNodeResponse = 'flowNodeResponse', // node response toolCall = 'toolCall', // tool start toolParams = 'toolParams', // tool params return diff --git a/packages/global/core/workflow/runtime/type.d.ts b/packages/global/core/workflow/runtime/type.d.ts index 2ab34bfbe..80a4c0897 100644 --- a/packages/global/core/workflow/runtime/type.d.ts +++ b/packages/global/core/workflow/runtime/type.d.ts @@ -22,6 +22,7 @@ import { UserSelectOptionType } from '../template/system/userSelect/type'; import { WorkflowResponseType } from '../../../../service/core/workflow/dispatch/type'; import { AiChatQuoteRoleType } from '../template/system/aiChat/type'; import { LafAccountType, OpenaiAccountType } from '../../../support/user/team/type'; +import { CompletionFinishReason } from '../../ai/type'; export type ExternalProviderType = { openaiAccount?: OpenaiAccountType; @@ -59,6 +60,7 @@ export type ChatDispatchProps = { isToolCall?: boolean; workflowStreamResponse?: WorkflowResponseType; workflowDispatchDeep?: number; + version?: 'v1' | 'v2'; }; export type ModuleDispatchProps = ChatDispatchProps & { @@ -129,6 +131,7 @@ export type DispatchNodeResponseType = { obj: `${ChatRoleEnum}`; value: string; }[]; // completion context array. history will slice + finishReason?: CompletionFinishReason; // dataset search similarity?: number; diff --git a/packages/global/package.json b/packages/global/package.json index c71c9d445..69c51688e 100644 --- a/packages/global/package.json +++ b/packages/global/package.json @@ -10,7 +10,7 @@ "js-yaml": "^4.1.0", "jschardet": "3.1.1", "nanoid": "^5.1.3", - "next": "14.2.25", + "next": "14.2.26", "openai": "4.61.0", "openapi-types": "^12.1.3", "json5": "^2.2.3", diff --git a/packages/service/common/buffer/rawText/schema.ts b/packages/service/common/buffer/rawText/schema.ts index 57b9a8309..98c9fa745 100644 --- a/packages/service/common/buffer/rawText/schema.ts +++ b/packages/service/common/buffer/rawText/schema.ts @@ -1,5 +1,4 @@ -import { connectionMongo, getMongoModel } from '../../mongo'; -const { Schema } = connectionMongo; +import { getMongoModel, Schema } from '../../mongo'; import { RawTextBufferSchemaType } from './type'; export const collectionName = 'buffer_rawtexts'; diff --git a/packages/service/common/buffer/tts/schema.ts b/packages/service/common/buffer/tts/schema.ts index ab22f05eb..fd21f52c2 100644 --- a/packages/service/common/buffer/tts/schema.ts +++ b/packages/service/common/buffer/tts/schema.ts @@ -1,5 +1,4 @@ -import { connectionMongo, getMongoModel, type Model } from '../../../common/mongo'; -const { Schema, model, models } = connectionMongo; +import { Schema, getMongoModel } from '../../../common/mongo'; import { TTSBufferSchemaType } from './type.d'; export const collectionName = 'buffer_tts'; diff --git a/packages/service/common/bullmq/index.ts b/packages/service/common/bullmq/index.ts new file mode 100644 index 000000000..e64bd2020 --- /dev/null +++ b/packages/service/common/bullmq/index.ts @@ -0,0 +1,79 @@ +import { ConnectionOptions, Processor, Queue, QueueOptions, Worker, WorkerOptions } from 'bullmq'; +import { addLog } from '../system/log'; +import { newQueueRedisConnection, newWorkerRedisConnection } from 
'../redis'; + +const defaultWorkerOpts: Omit = { + removeOnComplete: { + count: 0 // Delete jobs immediately on completion + }, + removeOnFail: { + count: 0 // Delete jobs immediately on failure + } +}; + +export enum QueueNames { + websiteSync = 'websiteSync' +} + +export const queues = (() => { + if (!global.queues) { + global.queues = new Map(); + } + return global.queues; +})(); +export const workers = (() => { + if (!global.workers) { + global.workers = new Map(); + } + return global.workers; +})(); + +export function getQueue( + name: QueueNames, + opts?: Omit +): Queue { + // check if global.queues has the queue + const queue = queues.get(name); + if (queue) { + return queue as Queue; + } + const newQueue = new Queue(name.toString(), { + connection: newQueueRedisConnection(), + ...opts + }); + + // default error handler, to avoid unhandled exceptions + newQueue.on('error', (error) => { + addLog.error(`MQ Queue [${name}]: ${error.message}`, error); + }); + queues.set(name, newQueue); + return newQueue; +} + +export function getWorker( + name: QueueNames, + processor: Processor, + opts?: Omit +): Worker { + const worker = workers.get(name); + if (worker) { + return worker as Worker; + } + + const newWorker = new Worker(name.toString(), processor, { + connection: newWorkerRedisConnection(), + ...defaultWorkerOpts, + ...opts + }); + // default error handler, to avoid unhandled exceptions + newWorker.on('error', (error) => { + addLog.error(`MQ Worker [${name}]: ${error.message}`, error); + }); + newWorker.on('failed', (jobId, error) => { + addLog.error(`MQ Worker [${name}]: ${error.message}`, error); + }); + workers.set(name, newWorker); + return newWorker; +} + +export * from 'bullmq'; diff --git a/packages/service/common/bullmq/type.d.ts b/packages/service/common/bullmq/type.d.ts new file mode 100644 index 000000000..723675b27 --- /dev/null +++ b/packages/service/common/bullmq/type.d.ts @@ -0,0 +1,7 @@ +import { Queue, Worker } from 'bullmq'; +import { QueueNames } from './index'; + +declare global { + var queues: Map | undefined; + var workers: Map | undefined; +} diff --git a/packages/service/common/file/gridfs/schema.ts b/packages/service/common/file/gridfs/schema.ts index 8d054f49d..d6670a8a9 100644 --- a/packages/service/common/file/gridfs/schema.ts +++ b/packages/service/common/file/gridfs/schema.ts @@ -1,5 +1,4 @@ -import { connectionMongo, getMongoModel, type Model } from '../../mongo'; -const { Schema } = connectionMongo; +import { Schema, getMongoModel } from '../../mongo'; const DatasetFileSchema = new Schema({}); const ChatFileSchema = new Schema({}); diff --git a/packages/service/common/file/image/schema.ts b/packages/service/common/file/image/schema.ts index 1418fa479..e36be3cbe 100644 --- a/packages/service/common/file/image/schema.ts +++ b/packages/service/common/file/image/schema.ts @@ -1,7 +1,6 @@ import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant'; -import { connectionMongo, getMongoModel } from '../../mongo'; +import { Schema, getMongoModel } from '../../mongo'; import { MongoImageSchemaType } from '@fastgpt/global/common/file/image/type.d'; -const { Schema } = connectionMongo; const ImageSchema = new Schema({ teamId: { diff --git a/packages/service/common/mongo/index.ts b/packages/service/common/mongo/index.ts index 02b4213e6..af431ad96 100644 --- a/packages/service/common/mongo/index.ts +++ b/packages/service/common/mongo/index.ts @@ -1,17 +1,26 @@ import { addLog } from '../../common/system/log'; -import mongoose, { Model } from 
'mongoose'; +import mongoose, { Model, Mongoose } from 'mongoose'; export default mongoose; export * from 'mongoose'; +export const MONGO_URL = process.env.MONGODB_URI as string; +export const MONGO_LOG_URL = (process.env.MONGODB_LOG_URI ?? process.env.MONGODB_URI) as string; + export const connectionMongo = (() => { if (!global.mongodb) { - global.mongodb = mongoose; + global.mongodb = new Mongoose(); } - return global.mongodb; })(); +export const connectionLogMongo = (() => { + if (!global.mongodbLog) { + global.mongodbLog = new Mongoose(); + } + return global.mongodbLog; +})(); + const addCommonMiddleware = (schema: mongoose.Schema) => { const operations = [ /^find/, @@ -71,6 +80,19 @@ export const getMongoModel = (name: string, schema: mongoose.Schema) => { return model; }; +export const getMongoLogModel = (name: string, schema: mongoose.Schema) => { + if (connectionLogMongo.models[name]) return connectionLogMongo.models[name] as Model; + console.log('Load model======', name); + addCommonMiddleware(schema); + + const model = connectionLogMongo.model(name, schema); + + // Sync index + syncMongoIndex(model); + + return model; +}; + const syncMongoIndex = async (model: Model) => { if (process.env.SYNC_INDEX !== '0' && process.env.NODE_ENV !== 'test') { try { diff --git a/packages/service/common/mongo/init.ts b/packages/service/common/mongo/init.ts index 50cb8f463..1202d42a9 100644 --- a/packages/service/common/mongo/init.ts +++ b/packages/service/common/mongo/init.ts @@ -1,6 +1,5 @@ import { delay } from '@fastgpt/global/common/system/utils'; import { addLog } from '../system/log'; -import { connectionMongo } from './index'; import type { Mongoose } from 'mongoose'; const maxConnecting = Math.max(30, Number(process.env.DB_MAX_LINK || 20)); @@ -8,41 +7,41 @@ const maxConnecting = Math.max(30, Number(process.env.DB_MAX_LINK || 20)); /** * connect MongoDB and init data */ -export async function connectMongo(): Promise { +export async function connectMongo(db: Mongoose, url: string): Promise { /* Connecting, connected will return */ - if (connectionMongo.connection.readyState !== 0) { - return connectionMongo; + if (db.connection.readyState !== 0) { + return db; } - console.log('mongo start connect'); + console.log('MongoDB start connect'); try { // Remove existing listeners to prevent duplicates - connectionMongo.connection.removeAllListeners('error'); - connectionMongo.connection.removeAllListeners('disconnected'); - connectionMongo.set('strictQuery', 'throw'); + db.connection.removeAllListeners('error'); + db.connection.removeAllListeners('disconnected'); + db.set('strictQuery', 'throw'); - connectionMongo.connection.on('error', async (error) => { + db.connection.on('error', async (error) => { console.log('mongo error', error); try { - if (connectionMongo.connection.readyState !== 0) { - await connectionMongo.disconnect(); + if (db.connection.readyState !== 0) { + await db.disconnect(); await delay(1000); - await connectMongo(); + await connectMongo(db, url); } } catch (error) {} }); - connectionMongo.connection.on('disconnected', async () => { + db.connection.on('disconnected', async () => { console.log('mongo disconnected'); try { - if (connectionMongo.connection.readyState !== 0) { - await connectionMongo.disconnect(); + if (db.connection.readyState !== 0) { + await db.disconnect(); await delay(1000); - await connectMongo(); + await connectMongo(db, url); } } catch (error) {} }); - await connectionMongo.connect(process.env.MONGODB_URI as string, { + const options = { bufferCommands: 
true, maxConnecting: maxConnecting, maxPoolSize: maxConnecting, @@ -53,18 +52,18 @@ export async function connectMongo(): Promise { maxIdleTimeMS: 300000, retryWrites: true, retryReads: true + }; - // readPreference: 'secondaryPreferred', - // readConcern: { level: 'local' }, - // writeConcern: { w: 'majority', j: true } - }); + db.connect(url, options); console.log('mongo connected'); - return connectionMongo; + return db; } catch (error) { - addLog.error('mongo connect error', error); - await connectionMongo.disconnect(); + addLog.error('Mongo connect error', error); + + await db.disconnect(); + await delay(1000); - return connectMongo(); + return connectMongo(db, url); } } diff --git a/packages/service/common/mongo/type.d.ts b/packages/service/common/mongo/type.d.ts index e75ccfdbf..31704203d 100644 --- a/packages/service/common/mongo/type.d.ts +++ b/packages/service/common/mongo/type.d.ts @@ -3,4 +3,5 @@ import type { Logger } from 'winston'; declare global { var mongodb: Mongoose | undefined; + var mongodbLog: Mongoose | undefined; } diff --git a/packages/service/common/redis/cache.ts b/packages/service/common/redis/cache.ts new file mode 100644 index 000000000..d27f5b37c --- /dev/null +++ b/packages/service/common/redis/cache.ts @@ -0,0 +1,38 @@ +import { getGlobalRedisCacheConnection } from './index'; +import { addLog } from '../system/log'; +import { retryFn } from '@fastgpt/global/common/system/utils'; + +export enum CacheKeyEnum { + team_vector_count = 'team_vector_count' +} + +export const setRedisCache = async ( + key: string, + data: string | Buffer | number, + expireSeconds?: number +) => { + return await retryFn(async () => { + try { + const redis = getGlobalRedisCacheConnection(); + + if (expireSeconds) { + await redis.set(key, data, 'EX', expireSeconds); + } else { + await redis.set(key, data); + } + } catch (error) { + addLog.error('Set cache error:', error); + return Promise.reject(error); + } + }); +}; + +export const getRedisCache = async (key: string) => { + const redis = getGlobalRedisCacheConnection(); + return await retryFn(() => redis.get(key)); +}; + +export const delRedisCache = async (key: string) => { + const redis = getGlobalRedisCacheConnection(); + await retryFn(() => redis.del(key)); +}; diff --git a/packages/service/common/redis/index.ts b/packages/service/common/redis/index.ts new file mode 100644 index 000000000..1ec8a159c --- /dev/null +++ b/packages/service/common/redis/index.ts @@ -0,0 +1,43 @@ +import { addLog } from '../system/log'; +import Redis from 'ioredis'; + +const REDIS_URL = process.env.REDIS_URL ?? 
'redis://localhost:6379'; + +export const newQueueRedisConnection = () => { + const redis = new Redis(REDIS_URL); + redis.on('connect', () => { + console.log('Redis connected'); + }); + redis.on('error', (error) => { + console.error('Redis connection error', error); + }); + return redis; +}; + +export const newWorkerRedisConnection = () => { + const redis = new Redis(REDIS_URL, { + maxRetriesPerRequest: null + }); + redis.on('connect', () => { + console.log('Redis connected'); + }); + redis.on('error', (error) => { + console.error('Redis connection error', error); + }); + return redis; +}; + +export const getGlobalRedisCacheConnection = () => { + if (global.redisCache) return global.redisCache; + + global.redisCache = new Redis(REDIS_URL, { keyPrefix: 'fastgpt:cache:' }); + + global.redisCache.on('connect', () => { + addLog.info('Redis connected'); + }); + global.redisCache.on('error', (error) => { + addLog.error('Redis connection error', error); + }); + + return global.redisCache; +}; diff --git a/packages/service/common/redis/type.d.ts b/packages/service/common/redis/type.d.ts new file mode 100644 index 000000000..caddd8c52 --- /dev/null +++ b/packages/service/common/redis/type.d.ts @@ -0,0 +1,5 @@ +import Redis from 'ioredis'; + +declare global { + var redisCache: Redis | null; +} diff --git a/packages/service/common/system/log/schema.ts b/packages/service/common/system/log/schema.ts index c14fabba1..e96d493e6 100644 --- a/packages/service/common/system/log/schema.ts +++ b/packages/service/common/system/log/schema.ts @@ -1,4 +1,4 @@ -import { getMongoModel, Schema } from '../../../common/mongo'; +import { getMongoLogModel as getMongoModel, Schema } from '../../../common/mongo'; import { SystemLogType } from './type'; import { LogLevelEnum } from './constant'; diff --git a/packages/service/common/system/timerLock/constants.ts b/packages/service/common/system/timerLock/constants.ts index 1e79cb446..538ac4a21 100644 --- a/packages/service/common/system/timerLock/constants.ts +++ b/packages/service/common/system/timerLock/constants.ts @@ -1,5 +1,5 @@ export enum TimerIdEnum { - checkInValidDatasetFiles = 'checkInValidDatasetFiles', + checkExpiredFiles = 'checkExpiredFiles', checkInvalidDatasetData = 'checkInvalidDatasetData', checkInvalidVector = 'checkInvalidVector', clearExpiredSubPlan = 'clearExpiredSubPlan', diff --git a/packages/service/common/vectorStore/controller.ts b/packages/service/common/vectorStore/controller.ts index fd3b1521c..4f488a721 100644 --- a/packages/service/common/vectorStore/controller.ts +++ b/packages/service/common/vectorStore/controller.ts @@ -2,10 +2,12 @@ import { PgVectorCtrl } from './pg/class'; import { ObVectorCtrl } from './oceanbase/class'; import { getVectorsByText } from '../../core/ai/embedding'; -import { InsertVectorProps } from './controller.d'; +import { DelDatasetVectorCtrlProps, InsertVectorProps } from './controller.d'; import { EmbeddingModelItemType } from '@fastgpt/global/core/ai/model.d'; import { MILVUS_ADDRESS, PG_ADDRESS, OCEANBASE_ADDRESS } from './constants'; import { MilvusCtrl } from './milvus/class'; +import { setRedisCache, getRedisCache, delRedisCache, CacheKeyEnum } from '../redis/cache'; +import { throttle } from 'lodash'; const getVectorObj = () => { if (PG_ADDRESS) return new PgVectorCtrl(); @@ -15,13 +17,33 @@ const getVectorObj = () => { return new PgVectorCtrl(); }; +const getChcheKey = (teamId: string) => `${CacheKeyEnum.team_vector_count}:${teamId}`; +const onDelCache = throttle((teamId: string) => 
delRedisCache(getChcheKey(teamId)), 30000, { + leading: true, + trailing: true +}); + const Vector = getVectorObj(); export const initVectorStore = Vector.init; -export const deleteDatasetDataVector = Vector.delete; export const recallFromVectorStore = Vector.embRecall; export const getVectorDataByTime = Vector.getVectorDataByTime; -export const getVectorCountByTeamId = Vector.getVectorCountByTeamId; + +export const getVectorCountByTeamId = async (teamId: string) => { + const key = getChcheKey(teamId); + + const countStr = await getRedisCache(key); + if (countStr) { + return Number(countStr); + } + + const count = await Vector.getVectorCountByTeamId(teamId); + + await setRedisCache(key, count, 30 * 60); + + return count; +}; + export const getVectorCountByDatasetId = Vector.getVectorCountByDatasetId; export const getVectorCountByCollectionId = Vector.getVectorCountByCollectionId; @@ -43,8 +65,16 @@ export const insertDatasetDataVector = async ({ vector: vectors[0] }); + onDelCache(props.teamId); + return { tokens, insertId }; }; + +export const deleteDatasetDataVector = async (props: DelDatasetVectorCtrlProps) => { + const result = await Vector.delete(props); + onDelCache(props.teamId); + return result; +}; diff --git a/packages/service/core/ai/utils.ts b/packages/service/core/ai/utils.ts index 6b950b2e3..3cae8b086 100644 --- a/packages/service/core/ai/utils.ts +++ b/packages/service/core/ai/utils.ts @@ -2,6 +2,7 @@ import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d'; import { ChatCompletionCreateParamsNonStreaming, ChatCompletionCreateParamsStreaming, + CompletionFinishReason, StreamChatType } from '@fastgpt/global/core/ai/type'; import { getLLMModel } from './model'; @@ -142,26 +143,40 @@ export const parseReasoningStreamContent = () => { content?: string; reasoning_content?: string; }; + finish_reason?: CompletionFinishReason; }[]; }, parseThinkTag = false - ): [string, string] => { + ): { + reasoningContent: string; + content: string; + finishReason: CompletionFinishReason; + } => { const content = part.choices?.[0]?.delta?.content || ''; + const finishReason = part.choices?.[0]?.finish_reason || null; // @ts-ignore const reasoningContent = part.choices?.[0]?.delta?.reasoning_content || ''; if (reasoningContent || !parseThinkTag) { isInThinkTag = false; - return [reasoningContent, content]; + return { reasoningContent, content, finishReason }; } if (!content) { - return ['', '']; + return { + reasoningContent: '', + content: '', + finishReason + }; } // 如果不在 think 标签中,或者有 reasoningContent(接口已解析),则返回 reasoningContent 和 content if (isInThinkTag === false) { - return ['', content]; + return { + reasoningContent: '', + content, + finishReason + }; } // 检测是否为 think 标签开头的数据 @@ -170,17 +185,29 @@ export const parseReasoningStreamContent = () => { startTagBuffer += content; // 太少内容时候,暂时不解析 if (startTagBuffer.length < startTag.length) { - return ['', '']; + return { + reasoningContent: '', + content: '', + finishReason + }; } if (startTagBuffer.startsWith(startTag)) { isInThinkTag = true; - return [startTagBuffer.slice(startTag.length), '']; + return { + reasoningContent: startTagBuffer.slice(startTag.length), + content: '', + finishReason + }; } // 如果未命中 think 标签,则认为不在 think 标签中,返回 buffer 内容作为 content isInThinkTag = false; - return ['', startTagBuffer]; + return { + reasoningContent: '', + content: startTagBuffer, + finishReason + }; } // 确认是 think 标签内容,开始返回 think 内容,并实时检测 @@ -201,19 +228,35 @@ export const parseReasoningStreamContent = () => { if (endTagBuffer.includes(endTag)) 
{ isInThinkTag = false; const answer = endTagBuffer.slice(endTag.length); - return ['', answer]; + return { + reasoningContent: '', + content: answer, + finishReason + }; } else if (endTagBuffer.length >= endTag.length) { // 缓存内容超出尾标签长度,且仍未命中 ,则认为本次猜测 失败,仍处于 think 阶段。 const tmp = endTagBuffer; endTagBuffer = ''; - return [tmp, '']; + return { + reasoningContent: tmp, + content: '', + finishReason + }; } - return ['', '']; + return { + reasoningContent: '', + content: '', + finishReason + }; } else if (content.includes(endTag)) { // 返回内容,完整命中,直接结束 isInThinkTag = false; const [think, answer] = content.split(endTag); - return [think, answer]; + return { + reasoningContent: think, + content: answer, + finishReason + }; } else { // 无 buffer,且未命中 ,开始疑似 检测。 for (let i = 1; i < endTag.length; i++) { @@ -222,13 +265,21 @@ export const parseReasoningStreamContent = () => { if (content.endsWith(partialEndTag)) { const think = content.slice(0, -partialEndTag.length); endTagBuffer += partialEndTag; - return [think, '']; + return { + reasoningContent: think, + content: '', + finishReason + }; } } } // 完全未命中尾标签,还是 think 阶段。 - return [content, '']; + return { + reasoningContent: content, + content: '', + finishReason + }; }; const getStartTagBuffer = () => startTagBuffer; diff --git a/packages/service/core/dataset/collection/controller.ts b/packages/service/core/dataset/collection/controller.ts index 44e5d07da..14c1c0bcd 100644 --- a/packages/service/core/dataset/collection/controller.ts +++ b/packages/service/core/dataset/collection/controller.ts @@ -1,6 +1,7 @@ import { DatasetCollectionTypeEnum, - DatasetCollectionDataProcessModeEnum + DatasetCollectionDataProcessModeEnum, + DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants'; import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d'; import { MongoDatasetCollection } from './schema'; @@ -104,7 +105,8 @@ export const createCollectionAndInsertData = async ({ hashRawText: hashStr(rawText), rawTextLength: rawText.length, nextSyncTime: (() => { - if (!dataset.autoSync) return undefined; + // ignore auto collections sync for website datasets + if (!dataset.autoSync && dataset.type === DatasetTypeEnum.websiteDataset) return undefined; if ( [DatasetCollectionTypeEnum.link, DatasetCollectionTypeEnum.apiFile].includes( createCollectionParams.type diff --git a/packages/service/core/dataset/collection/schema.ts b/packages/service/core/dataset/collection/schema.ts index 9522c69f2..1b1ceb913 100644 --- a/packages/service/core/dataset/collection/schema.ts +++ b/packages/service/core/dataset/collection/schema.ts @@ -1,13 +1,8 @@ import { connectionMongo, getMongoModel } from '../../../common/mongo'; -const { Schema, model, models } = connectionMongo; +const { Schema } = connectionMongo; import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type.d'; -import { - DatasetCollectionTypeMap, - DatasetCollectionDataProcessModeEnum, - ChunkSettingModeEnum, - DataChunkSplitModeEnum -} from '@fastgpt/global/core/dataset/constants'; -import { DatasetCollectionName } from '../schema'; +import { DatasetCollectionTypeMap } from '@fastgpt/global/core/dataset/constants'; +import { ChunkSettings, DatasetCollectionName } from '../schema'; import { TeamCollectionName, TeamMemberCollectionName @@ -90,25 +85,7 @@ const DatasetCollectionSchema = new Schema({ customPdfParse: Boolean, // Chunk settings - imageIndex: Boolean, - autoIndexes: Boolean, - trainingType: { - type: String, - enum: 
Object.values(DatasetCollectionDataProcessModeEnum) - }, - chunkSettingMode: { - type: String, - enum: Object.values(ChunkSettingModeEnum) - }, - chunkSplitMode: { - type: String, - enum: Object.values(DataChunkSplitModeEnum) - }, - chunkSize: Number, - chunkSplitter: String, - - indexSize: Number, - qaPrompt: String + ...ChunkSettings }); DatasetCollectionSchema.virtual('dataset', { diff --git a/packages/service/core/dataset/controller.ts b/packages/service/core/dataset/controller.ts index 06be050a9..6cda361ac 100644 --- a/packages/service/core/dataset/controller.ts +++ b/packages/service/core/dataset/controller.ts @@ -9,6 +9,8 @@ import { deleteDatasetDataVector } from '../../common/vectorStore/controller'; import { MongoDatasetDataText } from './data/dataTextSchema'; import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset'; import { retryFn } from '@fastgpt/global/common/system/utils'; +import { removeWebsiteSyncJobScheduler } from './websiteSync'; +import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants'; /* ============= dataset ========== */ /* find all datasetId by top datasetId */ diff --git a/packages/service/core/dataset/schema.ts b/packages/service/core/dataset/schema.ts index 22f79fd25..43bbe3a89 100644 --- a/packages/service/core/dataset/schema.ts +++ b/packages/service/core/dataset/schema.ts @@ -1,7 +1,8 @@ import { getMongoModel, Schema } from '../../common/mongo'; import { - DatasetStatusEnum, - DatasetStatusMap, + ChunkSettingModeEnum, + DataChunkSplitModeEnum, + DatasetCollectionDataProcessModeEnum, DatasetTypeEnum, DatasetTypeMap } from '@fastgpt/global/core/dataset/constants'; @@ -13,6 +14,28 @@ import type { DatasetSchemaType } from '@fastgpt/global/core/dataset/type.d'; export const DatasetCollectionName = 'datasets'; +export const ChunkSettings = { + imageIndex: Boolean, + autoIndexes: Boolean, + trainingType: { + type: String, + enum: Object.values(DatasetCollectionDataProcessModeEnum) + }, + chunkSettingMode: { + type: String, + enum: Object.values(ChunkSettingModeEnum) + }, + chunkSplitMode: { + type: String, + enum: Object.values(DataChunkSplitModeEnum) + }, + chunkSize: Number, + chunkSplitter: String, + + indexSize: Number, + qaPrompt: String +}; + const DatasetSchema = new Schema({ parentId: { type: Schema.Types.ObjectId, @@ -40,11 +63,6 @@ const DatasetSchema = new Schema({ required: true, default: DatasetTypeEnum.dataset }, - status: { - type: String, - enum: Object.keys(DatasetStatusMap), - default: DatasetStatusEnum.active - }, avatar: { type: String, default: '/icon/logo.svg' @@ -84,6 +102,9 @@ const DatasetSchema = new Schema({ } } }, + chunkSettings: { + type: ChunkSettings + }, inheritPermission: { type: Boolean, default: true @@ -98,9 +119,8 @@ const DatasetSchema = new Schema({ type: Object }, - autoSync: Boolean, - // abandoned + autoSync: Boolean, externalReadUrl: { type: String }, diff --git a/packages/service/core/dataset/training/schema.ts b/packages/service/core/dataset/training/schema.ts index d11d2e109..ad4a26fa0 100644 --- a/packages/service/core/dataset/training/schema.ts +++ b/packages/service/core/dataset/training/schema.ts @@ -98,7 +98,9 @@ const TrainingDataSchema = new Schema({ } ], default: [] - } + }, + + errorMsg: String }); TrainingDataSchema.virtual('dataset', { diff --git a/packages/service/core/dataset/websiteSync/index.ts b/packages/service/core/dataset/websiteSync/index.ts new file mode 100644 index 000000000..df1364491 --- /dev/null +++ b/packages/service/core/dataset/websiteSync/index.ts @@ 
-0,0 +1,101 @@ +import { Processor } from 'bullmq'; +import { getQueue, getWorker, QueueNames } from '../../../common/bullmq'; +import { DatasetStatusEnum } from '@fastgpt/global/core/dataset/constants'; + +export type WebsiteSyncJobData = { + datasetId: string; +}; + +export const websiteSyncQueue = getQueue(QueueNames.websiteSync, { + defaultJobOptions: { + attempts: 3, // retry 3 times + backoff: { + type: 'exponential', + delay: 1000 // delay 1 second between retries + } + } +}); +export const getWebsiteSyncWorker = (processor: Processor) => { + return getWorker(QueueNames.websiteSync, processor, { + removeOnFail: { + age: 15 * 24 * 60 * 60, // Keep up to 15 days + count: 1000 // Keep up to 1000 jobs + }, + concurrency: 1 // Set worker to process only 1 job at a time + }); +}; + +export const addWebsiteSyncJob = (data: WebsiteSyncJobData) => { + const datasetId = String(data.datasetId); + // deduplication: make sure only 1 job + return websiteSyncQueue.add(datasetId, data, { deduplication: { id: datasetId } }); +}; + +export const getWebsiteSyncDatasetStatus = async (datasetId: string) => { + const jobId = await websiteSyncQueue.getDeduplicationJobId(datasetId); + if (!jobId) { + return { + status: DatasetStatusEnum.active, + errorMsg: undefined + }; + } + const job = await websiteSyncQueue.getJob(jobId); + if (!job) { + return { + status: DatasetStatusEnum.active, + errorMsg: undefined + }; + } + + const jobState = await job.getState(); + + if (jobState === 'failed' || jobState === 'unknown') { + return { + status: DatasetStatusEnum.error, + errorMsg: job.failedReason + }; + } + if (['waiting-children', 'waiting'].includes(jobState)) { + return { + status: DatasetStatusEnum.waiting, + errorMsg: undefined + }; + } + if (jobState === 'active') { + return { + status: DatasetStatusEnum.syncing, + errorMsg: undefined + }; + } + + return { + status: DatasetStatusEnum.active, + errorMsg: undefined + }; +}; + +// Scheduler setting +const repeatDuration = 24 * 60 * 60 * 1000; // every day +export const upsertWebsiteSyncJobScheduler = (data: WebsiteSyncJobData, startDate?: number) => { + const datasetId = String(data.datasetId); + + return websiteSyncQueue.upsertJobScheduler( + datasetId, + { + every: repeatDuration, + startDate: startDate || new Date().getTime() + repeatDuration // First run tomorrow + }, + { + name: datasetId, + data + } + ); +}; + +export const getWebsiteSyncJobScheduler = (datasetId: string) => { + return websiteSyncQueue.getJobScheduler(String(datasetId)); +}; + +export const removeWebsiteSyncJobScheduler = (datasetId: string) => { + return websiteSyncQueue.removeJobScheduler(String(datasetId)); +}; diff --git a/packages/service/core/workflow/dispatch/agent/runTool/index.ts b/packages/service/core/workflow/dispatch/agent/runTool/index.ts index ccf03ba6b..08c9ff85b 100644 --- a/packages/service/core/workflow/dispatch/agent/runTool/index.ts +++ b/packages/service/core/workflow/dispatch/agent/runTool/index.ts @@ -176,7 +176,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise< toolNodeOutputTokens, completeMessages = [], // The actual message sent to AI(just save text) assistantResponses = [], // FastGPT system store assistant.value response - runTimes + runTimes, + finish_reason } = await (async () => { const adaptMessages = chats2GPTMessages({ messages, @@ -276,7 +277,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise< useVision ), toolDetail: childToolResponse, - mergeSignId: nodeId + mergeSignId: nodeId, + 
finishReason: finish_reason }, [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [ // 工具调用本身的积分消耗 diff --git a/packages/service/core/workflow/dispatch/agent/runTool/promptCall.ts b/packages/service/core/workflow/dispatch/agent/runTool/promptCall.ts index b14ecf1f5..808006a7f 100644 --- a/packages/service/core/workflow/dispatch/agent/runTool/promptCall.ts +++ b/packages/service/core/workflow/dispatch/agent/runTool/promptCall.ts @@ -1,6 +1,10 @@ import { createChatCompletion } from '../../../../ai/config'; import { filterGPTMessageByMaxContext, loadRequestMessages } from '../../../../chat/utils'; -import { StreamChatType, ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type'; +import { + StreamChatType, + ChatCompletionMessageParam, + CompletionFinishReason +} from '@fastgpt/global/core/ai/type'; import { NextApiResponse } from 'next'; import { responseWriteController } from '../../../../../common/response'; import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants'; @@ -252,9 +256,9 @@ export const runToolWithPromptCall = async ( } }); - const { answer, reasoning } = await (async () => { + const { answer, reasoning, finish_reason } = await (async () => { if (res && isStreamResponse) { - const { answer, reasoning } = await streamResponse({ + const { answer, reasoning, finish_reason } = await streamResponse({ res, toolNodes, stream: aiResponse, @@ -262,8 +266,9 @@ export const runToolWithPromptCall = async ( aiChatReasoning }); - return { answer, reasoning }; + return { answer, reasoning, finish_reason }; } else { + const finish_reason = aiResponse.choices?.[0]?.finish_reason as CompletionFinishReason; const content = aiResponse.choices?.[0]?.message?.content || ''; const reasoningContent: string = aiResponse.choices?.[0]?.message?.reasoning_content || ''; @@ -271,14 +276,16 @@ export const runToolWithPromptCall = async ( if (reasoningContent || !aiChatReasoning) { return { answer: content, - reasoning: reasoningContent + reasoning: reasoningContent, + finish_reason }; } const [think, answer] = parseReasoningContent(content); return { answer, - reasoning: think + reasoning: think, + finish_reason }; } })(); @@ -525,7 +532,8 @@ ANSWER: `; toolNodeInputTokens, toolNodeOutputTokens, assistantResponses: toolNodeAssistants, - runTimes + runTimes, + finish_reason } ); }; @@ -550,15 +558,18 @@ async function streamResponse({ let startResponseWrite = false; let answer = ''; let reasoning = ''; + let finish_reason: CompletionFinishReason = null; const { parsePart, getStartTagBuffer } = parseReasoningStreamContent(); for await (const part of stream) { if (res.closed) { stream.controller?.abort(); + finish_reason = 'close'; break; } - const [reasoningContent, content] = parsePart(part, aiChatReasoning); + const { reasoningContent, content, finishReason } = parsePart(part, aiChatReasoning); + finish_reason = finish_reason || finishReason; answer += content; reasoning += reasoningContent; @@ -618,7 +629,7 @@ async function streamResponse({ } } - return { answer, reasoning }; + return { answer, reasoning, finish_reason }; } const parseAnswer = ( diff --git a/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts b/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts index 5bbfc838f..429329ccf 100644 --- a/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts +++ b/packages/service/core/workflow/dispatch/agent/runTool/toolChoice.ts @@ -7,7 +7,8 @@ import { ChatCompletionToolMessageParam, ChatCompletionMessageParam, 
ChatCompletionTool, - ChatCompletionAssistantMessageParam + ChatCompletionAssistantMessageParam, + CompletionFinishReason } from '@fastgpt/global/core/ai/type'; import { NextApiResponse } from 'next'; import { responseWriteController } from '../../../../../common/response'; @@ -300,7 +301,7 @@ export const runToolWithToolChoice = async ( } }); - const { answer, toolCalls } = await (async () => { + const { answer, toolCalls, finish_reason } = await (async () => { if (res && isStreamResponse) { return streamResponse({ res, @@ -310,6 +311,7 @@ export const runToolWithToolChoice = async ( }); } else { const result = aiResponse as ChatCompletion; + const finish_reason = result.choices?.[0]?.finish_reason as CompletionFinishReason; const calls = result.choices?.[0]?.message?.tool_calls || []; const answer = result.choices?.[0]?.message?.content || ''; @@ -350,7 +352,8 @@ export const runToolWithToolChoice = async ( return { answer, - toolCalls: toolCalls + toolCalls: toolCalls, + finish_reason }; } })(); @@ -549,8 +552,9 @@ export const runToolWithToolChoice = async ( toolNodeOutputTokens, completeMessages, assistantResponses: toolNodeAssistants, + toolWorkflowInteractiveResponse, runTimes, - toolWorkflowInteractiveResponse + finish_reason }; } @@ -565,7 +569,8 @@ export const runToolWithToolChoice = async ( toolNodeInputTokens, toolNodeOutputTokens, assistantResponses: toolNodeAssistants, - runTimes + runTimes, + finish_reason } ); } else { @@ -588,7 +593,8 @@ export const runToolWithToolChoice = async ( completeMessages, assistantResponses: [...assistantResponses, ...toolNodeAssistant.value], - runTimes: (response?.runTimes || 0) + 1 + runTimes: (response?.runTimes || 0) + 1, + finish_reason }; } }; @@ -612,14 +618,18 @@ async function streamResponse({ let textAnswer = ''; let callingTool: { name: string; arguments: string } | null = null; let toolCalls: ChatCompletionMessageToolCall[] = []; + let finishReason: CompletionFinishReason = null; for await (const part of stream) { if (res.closed) { stream.controller?.abort(); + finishReason = 'close'; break; } const responseChoice = part.choices?.[0]?.delta; + const finish_reason = part.choices?.[0]?.finish_reason as CompletionFinishReason; + finishReason = finishReason || finish_reason; if (responseChoice?.content) { const content = responseChoice.content || ''; @@ -705,5 +715,5 @@ async function streamResponse({ } } - return { answer: textAnswer, toolCalls }; + return { answer: textAnswer, toolCalls, finish_reason: finishReason }; } diff --git a/packages/service/core/workflow/dispatch/agent/runTool/type.d.ts b/packages/service/core/workflow/dispatch/agent/runTool/type.d.ts index 61cb6b217..52ec7c4bc 100644 --- a/packages/service/core/workflow/dispatch/agent/runTool/type.d.ts +++ b/packages/service/core/workflow/dispatch/agent/runTool/type.d.ts @@ -1,4 +1,4 @@ -import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type'; +import { ChatCompletionMessageParam, CompletionFinishReason } from '@fastgpt/global/core/ai/type'; import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants'; import type { ModuleDispatchProps, @@ -43,6 +43,7 @@ export type RunToolResponse = { assistantResponses?: AIChatItemValueItemType[]; toolWorkflowInteractiveResponse?: WorkflowInteractiveResponseType; [DispatchNodeResponseKeyEnum.runTimes]: number; + finish_reason?: CompletionFinishReason; }; export type ToolNodeItemType = RuntimeNodeItemType & { toolParams: RuntimeNodeItemType['inputs']; diff --git 
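The runTool changes above all follow one pattern for surfacing a finish reason. Below is a condensed sketch of that streaming pattern, plus one assumed way a consumer could map the value onto the completion_finish_* locale keys added later in this patch; the helper names and the real consumer component are not part of the diff.

import type { StreamChatType, CompletionFinishReason } from '@fastgpt/global/core/ai/type';

// Same pattern as streamResponse() in promptCall.ts / toolChoice.ts above:
// keep the first non-empty finish_reason seen on the stream, and fall back to
// 'close' when the client connection drops mid-stream.
export const collectFinishReason = async (
  stream: StreamChatType,
  isClosed: () => boolean
): Promise<CompletionFinishReason> => {
  let finishReason: CompletionFinishReason = null;

  for await (const part of stream) {
    if (isClosed()) {
      stream.controller?.abort();
      finishReason = 'close';
      break;
    }
    finishReason =
      finishReason || ((part.choices?.[0]?.finish_reason as CompletionFinishReason) ?? null);
  }

  return finishReason;
};

// Assumed i18n mapping: 'stop' -> chat:completion_finish_stop, null -> chat:completion_finish_null, etc.
export const finishReasonLabelKey = (reason: CompletionFinishReason) =>
  `chat:completion_finish_${reason ?? 'null'}`;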
a/packages/service/core/workflow/dispatch/chat/oneapi.ts b/packages/service/core/workflow/dispatch/chat/oneapi.ts index 743c0f30f..1e60ec388 100644 --- a/packages/service/core/workflow/dispatch/chat/oneapi.ts +++ b/packages/service/core/workflow/dispatch/chat/oneapi.ts @@ -6,7 +6,11 @@ import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/cons import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils'; import { parseReasoningContent, parseReasoningStreamContent } from '../../../ai/utils'; import { createChatCompletion } from '../../../ai/config'; -import type { ChatCompletionMessageParam, StreamChatType } from '@fastgpt/global/core/ai/type.d'; +import type { + ChatCompletionMessageParam, + CompletionFinishReason, + StreamChatType +} from '@fastgpt/global/core/ai/type.d'; import { formatModelChars2Points } from '../../../../support/wallet/usage/utils'; import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d'; import { postTextCensor } from '../../../../common/api/requestPlusApi'; @@ -101,7 +105,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise { + const { answerText, reasoningText, finish_reason } = await (async () => { if (isStreamResponse) { if (!res) { return { answerText: '', - reasoningText: '' + reasoningText: '', + finish_reason: 'close' as const }; } // sse response - const { answer, reasoning } = await streamResponse({ + const { answer, reasoning, finish_reason } = await streamResponse({ res, stream: response, aiChatReasoning, @@ -215,9 +220,12 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise { const content = response.choices?.[0]?.message?.content || ''; // @ts-ignore @@ -260,7 +268,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise { if (!item.required) return; diff --git a/packages/service/core/workflow/dispatch/utils.ts b/packages/service/core/workflow/dispatch/utils.ts index 002272a57..10f5cabfe 100644 --- a/packages/service/core/workflow/dispatch/utils.ts +++ b/packages/service/core/workflow/dispatch/utils.ts @@ -53,7 +53,8 @@ export const getWorkflowResponseWrite = ({ [SseResponseEventEnum.toolCall]: 1, [SseResponseEventEnum.toolParams]: 1, [SseResponseEventEnum.toolResponse]: 1, - [SseResponseEventEnum.updateVariables]: 1 + [SseResponseEventEnum.updateVariables]: 1, + [SseResponseEventEnum.flowNodeResponse]: 1 }; if (!detail && detailEvent[event]) return; diff --git a/packages/service/package.json b/packages/service/package.json index 8bf1784ed..fc9d35e4d 100644 --- a/packages/service/package.json +++ b/packages/service/package.json @@ -7,6 +7,7 @@ "@xmldom/xmldom": "^0.8.10", "@zilliz/milvus2-sdk-node": "2.4.2", "axios": "^1.8.2", + "bullmq": "^5.44.0", "chalk": "^5.3.0", "cheerio": "1.0.0-rc.12", "cookie": "^0.7.1", @@ -18,6 +19,7 @@ "file-type": "^19.0.0", "form-data": "^4.0.0", "iconv-lite": "^0.6.3", + "ioredis": "^5.6.0", "joplin-turndown-plugin-gfm": "^1.0.12", "json5": "^2.2.3", "jsonpath-plus": "^10.3.0", @@ -27,7 +29,7 @@ "mongoose": "^8.10.1", "multer": "1.4.5-lts.1", "mysql2": "^3.11.3", - "next": "14.2.25", + "next": "14.2.26", "nextjs-cors": "^2.2.0", "node-cron": "^3.0.3", "node-xlsx": "^0.24.0", diff --git a/packages/service/tsconfig.json b/packages/service/tsconfig.json index 272c5509d..3e496cf7c 100644 --- a/packages/service/tsconfig.json +++ b/packages/service/tsconfig.json @@ -1,7 +1,7 @@ { - "extends":"../../tsconfig.json", + "extends": "../../tsconfig.json", "compilerOptions": { "baseUrl": "." 
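The service package gains bullmq and ioredis here, but the getQueue / getWorker / QueueNames helpers that the new queue module imports from ../../../common/bullmq are not part of this diff. A rough sketch of what such a helper could look like follows; apart from the three exported names, everything below (the shared connection, the REDIS_URL fallback, the option shapes) is an assumption.

import { Queue, Worker, type Processor, type QueueOptions, type WorkerOptions } from 'bullmq';
import Redis from 'ioredis';

// One shared connection; BullMQ workers require maxRetriesPerRequest: null on ioredis.
const connection = new Redis(process.env.REDIS_URL || 'redis://localhost:6379', {
  maxRetriesPerRequest: null
});

export enum QueueNames {
  websiteSync = 'websiteSync'
}

export const getQueue = (name: QueueNames, opts?: Omit<QueueOptions, 'connection'>) =>
  new Queue(name, { connection, ...opts });

export const getWorker = (
  name: QueueNames,
  processor: Processor,
  opts?: Omit<WorkerOptions, 'connection'>
) => new Worker(name, processor, { connection, ...opts });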
}, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../**/*.d.ts"] + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../../**/*.d.ts"] } diff --git a/packages/web/components/common/Icon/constants.ts b/packages/web/components/common/Icon/constants.ts index 65817717f..50734b3e6 100644 --- a/packages/web/components/common/Icon/constants.ts +++ b/packages/web/components/common/Icon/constants.ts @@ -67,6 +67,7 @@ export const iconPaths = { 'common/list': () => import('./icons/common/list.svg'), 'common/loading': () => import('./icons/common/loading.svg'), 'common/logLight': () => import('./icons/common/logLight.svg'), + 'common/maximize': () => import('./icons/common/maximize.svg'), 'common/microsoft': () => import('./icons/common/microsoft.svg'), 'common/model': () => import('./icons/common/model.svg'), 'common/monitor': () => import('./icons/common/monitor.svg'), @@ -85,6 +86,7 @@ export const iconPaths = { 'common/rightArrowFill': () => import('./icons/common/rightArrowFill.svg'), 'common/rightArrowLight': () => import('./icons/common/rightArrowLight.svg'), 'common/routePushLight': () => import('./icons/common/routePushLight.svg'), + 'common/running': () => import('./icons/common/running.svg'), 'common/saveFill': () => import('./icons/common/saveFill.svg'), 'common/searchLight': () => import('./icons/common/searchLight.svg'), 'common/select': () => import('./icons/common/select.svg'), @@ -181,6 +183,7 @@ export const iconPaths = { 'core/chat/feedback/goodLight': () => import('./icons/core/chat/feedback/goodLight.svg'), 'core/chat/fileSelect': () => import('./icons/core/chat/fileSelect.svg'), 'core/chat/finishSpeak': () => import('./icons/core/chat/finishSpeak.svg'), + 'core/chat/backText':() => import('./icons/core/chat/backText.svg'), 'core/chat/imgSelect': () => import('./icons/core/chat/imgSelect.svg'), 'core/chat/quoteFill': () => import('./icons/core/chat/quoteFill.svg'), 'core/chat/quoteSign': () => import('./icons/core/chat/quoteSign.svg'), diff --git a/packages/web/components/common/Icon/icons/change.svg b/packages/web/components/common/Icon/icons/change.svg index 8ab5546eb..546b49d89 100644 --- a/packages/web/components/common/Icon/icons/change.svg +++ b/packages/web/components/common/Icon/icons/change.svg @@ -1,9 +1,9 @@ - - - + + + diff --git a/packages/web/components/common/Icon/icons/common/check.svg b/packages/web/components/common/Icon/icons/common/check.svg index f9d5d37f1..1662108f8 100644 --- a/packages/web/components/common/Icon/icons/common/check.svg +++ b/packages/web/components/common/Icon/icons/common/check.svg @@ -1,3 +1,3 @@ - - + + \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/common/maximize.svg b/packages/web/components/common/Icon/icons/common/maximize.svg new file mode 100644 index 000000000..d5545f478 --- /dev/null +++ b/packages/web/components/common/Icon/icons/common/maximize.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/common/running.svg b/packages/web/components/common/Icon/icons/common/running.svg new file mode 100644 index 000000000..2ad538b38 --- /dev/null +++ b/packages/web/components/common/Icon/icons/common/running.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/core/chat/backText.svg b/packages/web/components/common/Icon/icons/core/chat/backText.svg new file mode 100644 index 000000000..0dabfef58 --- /dev/null +++ 
b/packages/web/components/common/Icon/icons/core/chat/backText.svg @@ -0,0 +1,4 @@ + + + \ No newline at end of file diff --git a/packages/web/components/common/Input/NumberInput/index.tsx b/packages/web/components/common/Input/NumberInput/index.tsx index fb817d8e5..cde19e69c 100644 --- a/packages/web/components/common/Input/NumberInput/index.tsx +++ b/packages/web/components/common/Input/NumberInput/index.tsx @@ -26,23 +26,43 @@ const MyNumberInput = (props: Props) => { { - if (!onBlur) return; const numE = Number(e.target.value); - if (isNaN(numE)) { - // @ts-ignore - onBlur(''); - } else { - onBlur(numE); + if (onBlur) { + if (isNaN(numE)) { + // @ts-ignore + onBlur(''); + } else { + onBlur(numE); + } + } + if (register && name) { + const event = { + target: { + name, + value: numE + } + }; + register(name).onBlur(event); } }} onChange={(e) => { - if (!onChange) return; const numE = Number(e); - if (isNaN(numE)) { - // @ts-ignore - onChange(''); - } else { - onChange(numE); + if (onChange) { + if (isNaN(numE)) { + // @ts-ignore + onChange(''); + } else { + onChange(numE); + } + } + if (register && name) { + const event = { + target: { + name, + value: numE + } + }; + register(name).onChange(event); } }} > diff --git a/packages/web/components/common/Tag/index.tsx b/packages/web/components/common/Tag/index.tsx index 7236f424b..b62974555 100644 --- a/packages/web/components/common/Tag/index.tsx +++ b/packages/web/components/common/Tag/index.tsx @@ -1,5 +1,5 @@ import React, { useMemo } from 'react'; -import { Box, Flex, type FlexProps } from '@chakra-ui/react'; +import { Box, BoxProps, Flex, type FlexProps } from '@chakra-ui/react'; type ColorSchemaType = 'white' | 'blue' | 'green' | 'red' | 'yellow' | 'gray' | 'purple' | 'adora'; @@ -8,6 +8,7 @@ export type TagProps = FlexProps & { colorSchema?: ColorSchemaType; type?: 'fill' | 'borderFill' | 'borderSolid'; showDot?: boolean; + DotStyles?: BoxProps; }; const colorMap: Record< @@ -60,7 +61,14 @@ const colorMap: Record< } }; -const MyTag = ({ children, colorSchema = 'blue', type = 'fill', showDot, ...props }: TagProps) => { +const MyTag = ({ + children, + colorSchema = 'blue', + type = 'fill', + showDot, + DotStyles, + ...props +}: TagProps) => { const theme = useMemo(() => { return colorMap[colorSchema]; }, [colorSchema]); @@ -81,7 +89,9 @@ const MyTag = ({ children, colorSchema = 'blue', type = 'fill', showDot, ...prop bg={type !== 'borderSolid' ? 
theme.bg : 'transparent'} {...props} > - {showDot && } + {showDot && ( + + )} {children} ); diff --git a/packages/web/hooks/useScrollPagination.tsx b/packages/web/hooks/useScrollPagination.tsx index 8188f4a4d..9fae0ce95 100644 --- a/packages/web/hooks/useScrollPagination.tsx +++ b/packages/web/hooks/useScrollPagination.tsx @@ -308,7 +308,13 @@ export function useScrollPagination< ); return ( - + {scrollLoadType === 'top' && total > 0 && isLoading && ( {t('common:common.is_requesting')} diff --git a/packages/web/i18n/en/chat.json b/packages/web/i18n/en/chat.json index 280b57410..4b8803b7f 100644 --- a/packages/web/i18n/en/chat.json +++ b/packages/web/i18n/en/chat.json @@ -3,6 +3,7 @@ "Delete_all": "Clear All Lexicon", "LLM_model_response_empty": "The model flow response is empty, please check whether the model flow output is normal.", "ai_reasoning": "Thinking process", + "back_to_text": "Text input", "chat.quote.No Data": "The file cannot be found", "chat.quote.deleted": "This data has been deleted ~", "chat_history": "Conversation History", @@ -10,12 +11,22 @@ "chat_test_app": "Debug-{{name}}", "citations": "{{num}} References", "click_contextual_preview": "Click to see contextual preview", + "completion_finish_close": "Disconnection", + "completion_finish_content_filter": "Trigger safe wind control", + "completion_finish_function_call": "Function Calls", + "completion_finish_length": "Reply limit exceeded", + "completion_finish_null": "unknown", + "completion_finish_reason": "Reason for completion", + "completion_finish_stop": "Completed normally", + "completion_finish_tool_calls": "Tool calls", "config_input_guide": "Set Up Input Guide", "config_input_guide_lexicon": "Set Up Lexicon", "config_input_guide_lexicon_title": "Set Up Lexicon", "content_empty": "No Content", "contextual": "{{num}} Contexts", "contextual_preview": "Contextual Preview {{num}} Items", + "core.chat.moveCancel": "Swipe to Cancel", + "core.chat.shortSpeak": "Speaking Time is Too Short", "csv_input_lexicon_tip": "Only CSV batch import is supported, click to download the template", "custom_input_guide_url": "Custom Lexicon URL", "data_source": "Source Dataset: {{name}}", @@ -41,11 +52,14 @@ "not_query": "Missing query content", "not_select_file": "No file selected", "plugins_output": "Plugin Output", + "press_to_speak": "Hold down to speak", "query_extension_IO_tokens": "Problem Optimization Input/Output Tokens", "query_extension_result": "Problem optimization results", "question_tip": "From top to bottom, the response order of each module", "read_raw_source": "Open the original text", "reasoning_text": "Thinking process", + "release_cancel": "Release Cancel", + "release_send": "Release send, slide up to cancel", "response.child total points": "Sub-workflow point consumption", "response.dataset_concat_length": "Combined total", "response.node_inputs": "Node Inputs", diff --git a/packages/web/i18n/en/common.json b/packages/web/i18n/en/common.json index 5581d22e2..3498a8549 100644 --- a/packages/web/i18n/en/common.json +++ b/packages/web/i18n/en/common.json @@ -1,5 +1,6 @@ { "App": "Application", + "Click_to_expand": "Click to expand", "Download": "Download", "Export": "Export", "FAQ.ai_point_a": "Each time you use the AI model, a certain amount of AI points will be deducted. For detailed calculation standards, please refer to the 'AI Points Calculation Standards' above.\nToken calculation uses the same formula as GPT-3.5, where 1 Token ≈ 0.7 Chinese characters ≈ 0.9 English words. 
Consecutive characters may be considered as 1 Token.", @@ -511,7 +512,7 @@ "core.dataset.Query extension intro": "Enabling the question optimization function can improve the accuracy of Dataset searches during continuous conversations. After enabling this function, when performing Dataset searches, the AI will complete the missing information of the question based on the conversation history.", "core.dataset.Quote Length": "Quote Content Length", "core.dataset.Read Dataset": "View Dataset Details", - "core.dataset.Set Website Config": "Start Configuring Website Information", + "core.dataset.Set Website Config": "Start Configuring", "core.dataset.Start export": "Export Started", "core.dataset.Table collection": "Table Dataset", "core.dataset.Text collection": "Text Dataset", @@ -527,7 +528,6 @@ "core.dataset.collection.Website Empty Tip": "No Website Associated Yet", "core.dataset.collection.Website Link": "Website Address", "core.dataset.collection.id": "Collection ID", - "core.dataset.collection.metadata.Chunk Size": "Chunk Size", "core.dataset.collection.metadata.Createtime": "Creation Time", "core.dataset.collection.metadata.Raw text length": "Raw Text Length", "core.dataset.collection.metadata.Updatetime": "Update Time", @@ -538,6 +538,7 @@ "core.dataset.collection.metadata.source name": "Source Name", "core.dataset.collection.metadata.source size": "Source Size", "core.dataset.collection.status.active": "Ready", + "core.dataset.collection.status.error": "Error", "core.dataset.collection.sync.result.sameRaw": "Content Unchanged, No Update Needed", "core.dataset.collection.sync.result.success": "Sync Started", "core.dataset.data.Data Content": "Related Data Content", @@ -628,6 +629,7 @@ "core.dataset.search.search mode": "Search Method", "core.dataset.status.active": "Ready", "core.dataset.status.syncing": "Syncing", + "core.dataset.status.waiting": "Waiting", "core.dataset.test.Batch test": "Batch Test", "core.dataset.test.Batch test Placeholder": "Select a CSV File", "core.dataset.test.Search Test": "Search Test", diff --git a/packages/web/i18n/en/dataset.json b/packages/web/i18n/en/dataset.json index 529c08b86..3f5aa5c42 100644 --- a/packages/web/i18n/en/dataset.json +++ b/packages/web/i18n/en/dataset.json @@ -7,6 +7,7 @@ "auto_indexes_tips": "Additional index generation is performed through large models to improve semantic richness and improve retrieval accuracy.", "auto_training_queue": "Enhanced index queueing", "chunk_max_tokens": "max_tokens", + "chunk_size": "Block size", "close_auto_sync": "Are you sure you want to turn off automatic sync?", "collection.Create update time": "Creation/Update Time", "collection.Training type": "Training", @@ -28,9 +29,24 @@ "custom_data_process_params_desc": "Customize data processing rules", "custom_split_sign_tip": "Allows you to chunk according to custom delimiters. \nUsually used for processed data, using specific separators for precise chunking. 
\nYou can use the | symbol to represent multiple splitters, such as: \".|.\" to represent a period in Chinese and English.\n\nTry to avoid using special symbols related to regular, such as: * () [] {}, etc.", "data_amount": "{{dataAmount}} Datas, {{indexAmount}} Indexes", + "data_error_amount": "{{errorAmount}} Group training exception", "data_index_num": "Index {{index}}", "data_process_params": "Params", "data_process_setting": "Processing config", + "dataset.Chunk_Number": "Block number", + "dataset.Completed": "Finish", + "dataset.Delete_Chunk": "delete", + "dataset.Edit_Chunk": "edit", + "dataset.Error_Message": "Report an error message", + "dataset.No_Error": "No exception information yet", + "dataset.Operation": "operate", + "dataset.ReTrain": "Retrain", + "dataset.Training Process": "Training status", + "dataset.Training_Count": "{{count}} Group training", + "dataset.Training_Errors": "Errors", + "dataset.Training_QA": "{{count}} Group Q&A pair training", + "dataset.Training_Status": "Training status", + "dataset.Training_Waiting": "Need to wait for {{count}} group data", "dataset.Unsupported operation": "dataset.Unsupported operation", "dataset.no_collections": "No datasets available", "dataset.no_tags": "No tags available", @@ -55,6 +71,7 @@ "image_auto_parse": "Automatic image indexing", "image_auto_parse_tips": "Call VLM to automatically label the pictures in the document and generate additional search indexes", "image_training_queue": "Queue of image processing", + "immediate_sync": "Immediate Synchronization", "import.Auto mode Estimated Price Tips": "The text understanding model needs to be called, which requires more points: {{price}} points/1K tokens", "import.Embedding Estimated Price Tips": "Only use the index model and consume a small amount of AI points: {{price}} points/1K tokens", "import_confirm": "Confirm upload", @@ -71,6 +88,7 @@ "keep_image": "Keep the picture", "move.hint": "After moving, the selected knowledge base/folder will inherit the permission settings of the new folder, and the original permission settings will become invalid.", "open_auto_sync": "After scheduled synchronization is turned on, the system will try to synchronize the collection from time to time every day. During the collection synchronization period, the collection data will not be searched.", + "params_config": "Config", "params_setting": "Parameter settings", "pdf_enhance_parse": "PDF enhancement analysis", "pdf_enhance_parse_price": "{{price}} points/page", @@ -82,6 +100,13 @@ "preview_chunk_empty": "Unable to read the contents of the file", "preview_chunk_intro": "A total of {{total}} blocks, up to 10", "preview_chunk_not_selected": "Click on the file on the left to preview", + "process.Auto_Index": "Automatic index generation", + "process.Get QA": "Q&A extraction", + "process.Image_Index": "Image index generation", + "process.Is_Ready": "Ready", + "process.Parsing": "Parsing", + "process.Vectorizing": "Index vectorization", + "process.Waiting": "Queue", "rebuild_embedding_start_tip": "Index model switching task has started", "rebuilding_index_count": "Number of indexes being rebuilt: {{count}}", "request_headers": "Request headers, will automatically append 'Bearer '", @@ -99,6 +124,7 @@ "split_sign_question": "question mark", "split_sign_semicolon": "semicolon", "start_sync_website_tip": "Confirm to start synchronizing data? 
\nThe old data will be deleted and retrieved again, please confirm!", + "status_error": "Running exception", "sync_collection_failed": "Synchronization collection error, please check whether the source file can be accessed normally", "sync_schedule": "Timing synchronization", "sync_schedule_tip": "Only existing collections will be synchronized. \nIncludes linked collections and all collections in the API knowledge base. \nThe system will poll for updates every day, and the specific update time cannot be determined.", @@ -114,11 +140,15 @@ "tag.total_tags": "Total {{total}} tags", "the_knowledge_base_has_indexes_that_are_being_trained_or_being_rebuilt": "The Dataset has indexes that are being trained or rebuilt", "total_num_files": "Total {{total}} files", + "training.Error": "{{count}} Group exception", + "training.Normal": "Normal", "training_mode": "Chunk mode", + "training_ready": "{{count}} Group", "vector_model_max_tokens_tip": "Each chunk of data has a maximum length of 3000 tokens", "vllm_model": "Image understanding model", "website_dataset": "Website Sync", "website_dataset_desc": "Website sync allows you to build a Dataset directly using a web link.", + "website_info": "Website Information", "yuque_dataset": "Yuque Dataset", "yuque_dataset_config": "Yuque Dataset Config", "yuque_dataset_desc": "Can build a dataset using Yuque documents by configuring permissions, without secondary storage" diff --git a/packages/web/i18n/zh-CN/chat.json b/packages/web/i18n/zh-CN/chat.json index 5250c44d6..421b77861 100644 --- a/packages/web/i18n/zh-CN/chat.json +++ b/packages/web/i18n/zh-CN/chat.json @@ -3,6 +3,7 @@ "Delete_all": "清空词库", "LLM_model_response_empty": "模型流响应为空,请检查模型流输出是否正常", "ai_reasoning": "思考过程", + "back_to_text": "返回输入", "chat.quote.No Data": "找不到该文件", "chat.quote.deleted": "该数据已被删除~", "chat_history": "聊天记录", @@ -10,12 +11,22 @@ "chat_test_app": "调试-{{name}}", "citations": "{{num}}条引用", "click_contextual_preview": "点击查看上下文预览", + "completion_finish_close": "连接断开", + "completion_finish_content_filter": "触发安全风控", + "completion_finish_function_call": "函数调用", + "completion_finish_length": "超出回复限制", + "completion_finish_null": "未知", + "completion_finish_reason": "完成原因", + "completion_finish_stop": "正常完成", + "completion_finish_tool_calls": "工具调用", "config_input_guide": "配置输入引导", "config_input_guide_lexicon": "配置词库", "config_input_guide_lexicon_title": "配置词库", "content_empty": "内容为空", "contextual": "{{num}}条上下文", "contextual_preview": "上下文预览 {{num}} 条", + "core.chat.moveCancel": "上滑取消", + "core.chat.shortSpeak": "说话时间太短", "csv_input_lexicon_tip": "仅支持 CSV 批量导入,点击下载模板", "custom_input_guide_url": "自定义词库地址", "data_source": "来源知识库: {{name}}", @@ -41,11 +52,14 @@ "not_query": "缺少查询内容", "not_select_file": "未选择文件", "plugins_output": "插件输出", + "press_to_speak": "按住说话", "query_extension_IO_tokens": "问题优化输入/输出 Tokens", "query_extension_result": "问题优化结果", "question_tip": "从上到下,为各个模块的响应顺序", "read_raw_source": "打开原文", "reasoning_text": "思考过程", + "release_cancel": "松开取消", + "release_send": "松开发送,上滑取消", "response.child total points": "子工作流积分消耗", "response.dataset_concat_length": "合并后总数", "response.node_inputs": "节点输入", diff --git a/packages/web/i18n/zh-CN/common.json b/packages/web/i18n/zh-CN/common.json index c7068843b..1f68a9b57 100644 --- a/packages/web/i18n/zh-CN/common.json +++ b/packages/web/i18n/zh-CN/common.json @@ -1,5 +1,6 @@ { "App": "应用", + "Click_to_expand": "点击查看详情", "Download": "下载", "Export": "导出", "FAQ.ai_point_a": "每次调用AI模型时,都会消耗一定的AI积分。具体的计算标准可参考上方的“AI 
积分计算标准”。\nToken计算采用GPT3.5相同公式,1Token≈0.7中文字符≈0.9英文单词,连续出现的字符可能被认为是1个Tokens。", @@ -514,7 +515,7 @@ "core.dataset.Query extension intro": "开启问题优化功能,可以提高提高连续对话时,知识库搜索的精度。开启该功能后,在进行知识库搜索时,会根据对话记录,利用 AI 补全问题缺失的信息。", "core.dataset.Quote Length": "引用内容长度", "core.dataset.Read Dataset": "查看知识库详情", - "core.dataset.Set Website Config": "开始配置网站信息", + "core.dataset.Set Website Config": "开始配置", "core.dataset.Start export": "已开始导出", "core.dataset.Table collection": "表格数据集", "core.dataset.Text collection": "文本数据集", @@ -530,7 +531,6 @@ "core.dataset.collection.Website Empty Tip": "还没有关联网站", "core.dataset.collection.Website Link": "Web 站点地址", "core.dataset.collection.id": "集合 ID", - "core.dataset.collection.metadata.Chunk Size": "分割大小", "core.dataset.collection.metadata.Createtime": "创建时间", "core.dataset.collection.metadata.Raw text length": "原文长度", "core.dataset.collection.metadata.Updatetime": "更新时间", @@ -541,6 +541,7 @@ "core.dataset.collection.metadata.source name": "来源名", "core.dataset.collection.metadata.source size": "来源大小", "core.dataset.collection.status.active": "已就绪", + "core.dataset.collection.status.error": "训练异常", "core.dataset.collection.sync.result.sameRaw": "内容未变动,无需更新", "core.dataset.collection.sync.result.success": "开始同步", "core.dataset.data.Data Content": "相关数据内容", @@ -631,6 +632,7 @@ "core.dataset.search.search mode": "搜索方式", "core.dataset.status.active": "已就绪", "core.dataset.status.syncing": "同步中", + "core.dataset.status.waiting": "排队中", "core.dataset.test.Batch test": "批量测试", "core.dataset.test.Batch test Placeholder": "选择一个 CSV 文件", "core.dataset.test.Search Test": "搜索测试", @@ -1289,4 +1291,4 @@ "yes": "是", "yesterday": "昨天", "yesterday_detail_time": "昨天 {{time}}" -} \ No newline at end of file +} diff --git a/packages/web/i18n/zh-CN/dataset.json b/packages/web/i18n/zh-CN/dataset.json index 7e11fa021..076c2d211 100644 --- a/packages/web/i18n/zh-CN/dataset.json +++ b/packages/web/i18n/zh-CN/dataset.json @@ -7,6 +7,7 @@ "auto_indexes_tips": "通过大模型进行额外索引生成,提高语义丰富度,提高检索的精度。", "auto_training_queue": "增强索引排队", "chunk_max_tokens": "分块上限", + "chunk_size": "分块大小", "close_auto_sync": "确认关闭自动同步功能?", "collection.Create update time": "创建/更新时间", "collection.Training type": "训练模式", @@ -28,9 +29,24 @@ "custom_data_process_params_desc": "自定义设置数据处理规则", "custom_split_sign_tip": "允许你根据自定义的分隔符进行分块。通常用于已处理好的数据,使用特定的分隔符来精确分块。可以使用 | 符号表示多个分割符,例如:“。|.” 表示中英文句号。\n尽量避免使用正则相关特殊符号,例如: * () [] {} 等。", "data_amount": "{{dataAmount}} 组数据, {{indexAmount}} 组索引", + "data_error_amount": "{{errorAmount}} 组训练异常", "data_index_num": "索引 {{index}}", "data_process_params": "处理参数", "data_process_setting": "数据处理配置", + "dataset.Chunk_Number": "分块号", + "dataset.Completed": "完成", + "dataset.Delete_Chunk": "删除", + "dataset.Edit_Chunk": "编辑", + "dataset.Error_Message": "报错信息", + "dataset.No_Error": "暂无异常信息", + "dataset.Operation": "操作", + "dataset.ReTrain": "重试", + "dataset.Training Process": "训练状态", + "dataset.Training_Count": "{{count}} 组训练中", + "dataset.Training_Errors": "异常 ({{count}})", + "dataset.Training_QA": "{{count}} 组问答对训练中", + "dataset.Training_Status": "训练状态", + "dataset.Training_Waiting": "需等待 {{count}} 组数据", "dataset.Unsupported operation": "操作不支持", "dataset.no_collections": "暂无数据集", "dataset.no_tags": "暂无标签", @@ -55,6 +71,7 @@ "image_auto_parse": "图片自动索引", "image_auto_parse_tips": "调用 VLM 自动标注文档里的图片,并生成额外的检索索引", "image_training_queue": "图片处理排队", + "immediate_sync": "立即同步", "import.Auto mode Estimated Price Tips": "需调用文本理解模型,需要消耗较多AI 积分:{{price}} 积分/1K tokens", "import.Embedding Estimated Price Tips": "仅使用索引模型,消耗少量 AI 
积分:{{price}} 积分/1K tokens", "import_confirm": "确认上传", @@ -71,6 +88,7 @@ "keep_image": "保留图片", "move.hint": "移动后,所选知识库/文件夹将继承新文件夹的权限设置,原先的权限设置失效。", "open_auto_sync": "开启定时同步后,系统将会每天不定时尝试同步集合,集合同步期间,会出现无法搜索到该集合数据现象。", + "params_config": "配置", "params_setting": "参数设置", "pdf_enhance_parse": "PDF增强解析", "pdf_enhance_parse_price": "{{price}}积分/页", @@ -82,6 +100,14 @@ "preview_chunk_empty": "无法读取该文件内容", "preview_chunk_intro": "共 {{total}} 个分块,最多展示 10 个", "preview_chunk_not_selected": "点击左侧文件后进行预览", + "process.Auto_Index": "自动索引生成", + "process.Get QA": "问答对提取", + "process.Image_Index": "图片索引生成", + "process.Is_Ready": "已就绪", + "process.Is_Ready_Count": "{{count}} 组已就绪", + "process.Parsing": "内容解析中", + "process.Vectorizing": "索引向量化", + "process.Waiting": "排队中", "rebuild_embedding_start_tip": "切换索引模型任务已开始", "rebuilding_index_count": "重建中索引数量:{{count}}", "request_headers": "请求头参数,会自动补充 Bearer", @@ -99,6 +125,7 @@ "split_sign_question": "问号", "split_sign_semicolon": "分号", "start_sync_website_tip": "确认开始同步数据?将会删除旧数据后重新获取,请确认!", + "status_error": "运行异常", "sync_collection_failed": "同步集合错误,请检查是否能正常访问源文件", "sync_schedule": "定时同步", "sync_schedule_tip": "仅会同步已存在的集合。包括链接集合以及 API 知识库里所有集合。系统会每天进行轮询更新,无法确定具体的更新时间。", @@ -114,11 +141,15 @@ "tag.total_tags": "共{{total}}个标签", "the_knowledge_base_has_indexes_that_are_being_trained_or_being_rebuilt": "知识库有训练中或正在重建的索引", "total_num_files": "共 {{total}} 个文件", + "training.Error": "{{count}} 组异常", + "training.Normal": "正常", "training_mode": "处理方式", + "training_ready": "{{count}} 组", "vector_model_max_tokens_tip": "每个分块数据,最大长度为 3000 tokens", "vllm_model": "图片理解模型", "website_dataset": "Web 站点同步", "website_dataset_desc": "Web 站点同步允许你直接使用一个网页链接构建知识库", + "website_info": "网站信息", "yuque_dataset": "语雀知识库", "yuque_dataset_config": "配置语雀知识库", "yuque_dataset_desc": "可通过配置语雀文档权限,使用语雀文档构建知识库,文档不会进行二次存储" diff --git a/packages/web/i18n/zh-Hant/chat.json b/packages/web/i18n/zh-Hant/chat.json index 8026afe8d..d7a9c5ab6 100644 --- a/packages/web/i18n/zh-Hant/chat.json +++ b/packages/web/i18n/zh-Hant/chat.json @@ -3,6 +3,7 @@ "Delete_all": "清除所有詞彙", "LLM_model_response_empty": "模型流程回應為空,請檢查模型流程輸出是否正常", "ai_reasoning": "思考過程", + "back_to_text": "返回輸入", "chat.quote.No Data": "找不到該文件", "chat.quote.deleted": "該數據已被刪除~", "chat_history": "對話紀錄", @@ -10,6 +11,14 @@ "chat_test_app": "調試-{{name}}", "citations": "{{num}} 筆引用", "click_contextual_preview": "點選檢視上下文預覽", + "completion_finish_close": "連接斷開", + "completion_finish_content_filter": "觸發安全風控", + "completion_finish_function_call": "函數調用", + "completion_finish_length": "超出回复限制", + "completion_finish_null": "未知", + "completion_finish_reason": "完成原因", + "completion_finish_stop": "正常完成", + "completion_finish_tool_calls": "工具調用", "config_input_guide": "設定輸入導引", "config_input_guide_lexicon": "設定詞彙庫", "config_input_guide_lexicon_title": "設定詞彙庫", @@ -35,16 +44,20 @@ "is_chatting": "對話進行中...請稍候", "items": "筆", "module_runtime_and": "模組執行總時間", + "moveCancel": "上滑取消", "multiple_AI_conversations": "多組 AI 對話", "new_input_guide_lexicon": "新增詞彙庫", "no_workflow_response": "無工作流程資料", "not_query": "缺少查詢內容", "not_select_file": "尚未選取檔案", "plugins_output": "外掛程式輸出", + "press_to_speak": "按住說話", "query_extension_IO_tokens": "問題優化輸入/輸出 Tokens", "question_tip": "由上至下,各個模組的回應順序", "read_raw_source": "打開原文", "reasoning_text": "思考過程", + "release_cancel": "鬆開取消", + "release_send": "鬆開發送,上滑取消", "response.child total points": "子工作流程點數消耗", "response.dataset_concat_length": "合併總數", "response.node_inputs": "節點輸入", @@ -53,6 +66,7 @@ "select_file": "上傳檔案", "select_file_img": "上傳檔案 / 
圖片", "select_img": "上傳圖片", + "shortSpeak ": "說話時間太短", "source_cronJob": "定時執行", "stream_output": "串流輸出", "to_dataset": "前往知識庫", diff --git a/packages/web/i18n/zh-Hant/common.json b/packages/web/i18n/zh-Hant/common.json index 2d3336cc7..bd820fb05 100644 --- a/packages/web/i18n/zh-Hant/common.json +++ b/packages/web/i18n/zh-Hant/common.json @@ -1,5 +1,6 @@ { "App": "應用程式", + "Click_to_expand": "點擊查看詳情", "Download": "下載", "Export": "匯出", "FAQ.ai_point_a": "每次呼叫 AI 模型時,都會消耗一定數量的 AI 點數。詳細的計算標準請參考上方的「AI 點數計算標準」。\nToken 計算採用與 GPT3.5 相同的公式,1 Token ≈ 0.7 個中文字 ≈ 0.9 個英文單字,連續出現的字元可能會被視為 1 個 Token。", @@ -510,7 +511,7 @@ "core.dataset.Query extension intro": "開啟問題最佳化功能,可以提高連續對話時知識庫搜尋的準確度。開啟此功能後,在進行知識庫搜尋時,系統會根據對話記錄,利用 AI 補充問題中缺少的資訊。", "core.dataset.Quote Length": "引用內容長度", "core.dataset.Read Dataset": "檢視知識庫詳細資料", - "core.dataset.Set Website Config": "開始設定網站資訊", + "core.dataset.Set Website Config": "開始設定", "core.dataset.Start export": "已開始匯出", "core.dataset.Table collection": "表格資料集", "core.dataset.Text collection": "文字資料集", @@ -526,7 +527,6 @@ "core.dataset.collection.Website Empty Tip": "還沒有關聯網站", "core.dataset.collection.Website Link": "網站網址", "core.dataset.collection.id": "集合 ID", - "core.dataset.collection.metadata.Chunk Size": "分割大小", "core.dataset.collection.metadata.Createtime": "建立時間", "core.dataset.collection.metadata.Raw text length": "原始文字長度", "core.dataset.collection.metadata.Updatetime": "更新時間", @@ -537,6 +537,7 @@ "core.dataset.collection.metadata.source name": "來源名稱", "core.dataset.collection.metadata.source size": "來源大小", "core.dataset.collection.status.active": "已就緒", + "core.dataset.collection.status.error": "訓練異常", "core.dataset.collection.sync.result.sameRaw": "內容未變更,無需更新", "core.dataset.collection.sync.result.success": "開始同步", "core.dataset.data.Data Content": "相關資料內容", @@ -627,6 +628,7 @@ "core.dataset.search.search mode": "搜索方式", "core.dataset.status.active": "已就緒", "core.dataset.status.syncing": "同步中", + "core.dataset.status.waiting": "排队中", "core.dataset.test.Batch test": "批次測試", "core.dataset.test.Batch test Placeholder": "選擇一個 CSV 檔案", "core.dataset.test.Search Test": "搜尋測試", diff --git a/packages/web/i18n/zh-Hant/dataset.json b/packages/web/i18n/zh-Hant/dataset.json index 76e510142..a90e14cae 100644 --- a/packages/web/i18n/zh-Hant/dataset.json +++ b/packages/web/i18n/zh-Hant/dataset.json @@ -7,6 +7,7 @@ "auto_indexes_tips": "通過大模型進行額外索引生成,提高語義豐富度,提高檢索的精度。", "auto_training_queue": "增強索引排隊", "chunk_max_tokens": "分塊上限", + "chunk_size": "分塊大小", "close_auto_sync": "確認關閉自動同步功能?", "collection.Create update time": "建立/更新時間", "collection.Training type": "分段模式", @@ -28,9 +29,24 @@ "custom_data_process_params_desc": "自訂資料處理規則", "custom_split_sign_tip": "允許你根據自定義的分隔符進行分塊。\n通常用於已處理好的數據,使用特定的分隔符來精確分塊。\n可以使用 | 符號表示多個分割符,例如:“。|.” 表示中英文句號。\n\n盡量避免使用正則相關特殊符號,例如: * () [] {} 等。", "data_amount": "{{dataAmount}} 組數據, {{indexAmount}} 組索引", + "data_error_amount": "{{errorAmount}} 組訓練異常", "data_index_num": "索引 {{index}}", "data_process_params": "處理參數", "data_process_setting": "資料處理設定", + "dataset.Chunk_Number": "分塊號", + "dataset.Completed": "完成", + "dataset.Delete_Chunk": "刪除", + "dataset.Edit_Chunk": "編輯", + "dataset.Error_Message": "報錯信息", + "dataset.No_Error": "暫無異常信息", + "dataset.Operation": "操作", + "dataset.ReTrain": "重試", + "dataset.Training Process": "訓練狀態", + "dataset.Training_Count": "{{count}} 組訓練中", + "dataset.Training_Errors": "異常", + "dataset.Training_QA": "{{count}} 組問答對訓練中", + "dataset.Training_Status": "訓練狀態", + "dataset.Training_Waiting": "需等待 {{count}} 組數據", "dataset.Unsupported 
operation": "操作不支持", "dataset.no_collections": "尚無資料集", "dataset.no_tags": "尚無標籤", @@ -55,6 +71,7 @@ "image_auto_parse": "圖片自動索引", "image_auto_parse_tips": "調用 VLM 自動標註文檔裡的圖片,並生成額外的檢索索引", "image_training_queue": "圖片處理排隊", + "immediate_sync": "立即同步", "import.Auto mode Estimated Price Tips": "需呼叫文字理解模型,將消耗較多 AI 點數:{{price}} 點數 / 1K tokens", "import.Embedding Estimated Price Tips": "僅使用索引模型,消耗少量 AI 點數:{{price}} 點數 / 1K tokens", "import_confirm": "確認上傳", @@ -71,6 +88,7 @@ "keep_image": "保留圖片", "move.hint": "移動後,所選資料集/資料夾將繼承新資料夾的權限設定,原先的權限設定將失效。", "open_auto_sync": "開啟定時同步後,系統將每天不定時嘗試同步集合,集合同步期間,會出現無法搜尋到該集合資料現象。", + "params_config": "配置", "params_setting": "參數設置", "pdf_enhance_parse": "PDF增強解析", "pdf_enhance_parse_price": "{{price}}積分/頁", @@ -82,6 +100,13 @@ "preview_chunk_empty": "無法讀取該文件內容", "preview_chunk_intro": "共 {{total}} 個分塊,最多展示 10 個", "preview_chunk_not_selected": "點擊左側文件後進行預覽", + "process.Auto_Index": "自動索引生成", + "process.Get QA": "問答對提取", + "process.Image_Index": "圖片索引生成", + "process.Is_Ready": "已就緒", + "process.Parsing": "內容解析中", + "process.Vectorizing": "索引向量化", + "process.Waiting": "排隊中", "rebuild_embedding_start_tip": "切換索引模型任務已開始", "rebuilding_index_count": "重建中索引數量:{{count}}", "request_headers": "請求頭", @@ -99,6 +124,7 @@ "split_sign_question": "問號", "split_sign_semicolon": "分號", "start_sync_website_tip": "確認開始同步資料?\n將會刪除舊資料後重新獲取,請確認!", + "status_error": "運行異常", "sync_collection_failed": "同步集合錯誤,請檢查是否能正常存取來源文件", "sync_schedule": "定時同步", "sync_schedule_tip": "只會同步已存在的集合。\n包括連結集合以及 API 知識庫裡所有集合。\n系統會每天進行輪詢更新,無法確定特定的更新時間。", @@ -114,11 +140,15 @@ "tag.total_tags": "共 {{total}} 個標籤", "the_knowledge_base_has_indexes_that_are_being_trained_or_being_rebuilt": "資料集有索引正在訓練或重建中", "total_num_files": "共 {{total}} 個文件", + "training.Error": "{{count}} 組異常", + "training.Normal": "正常", "training_mode": "分段模式", + "training_ready": "{{count}} 組", "vector_model_max_tokens_tip": "每個分塊數據,最大長度為 3000 tokens", "vllm_model": "圖片理解模型", "website_dataset": "網站同步", "website_dataset_desc": "網站同步功能讓您可以直接使用網頁連結建立資料集", + "website_info": "網站資訊", "yuque_dataset": "語雀知識庫", "yuque_dataset_config": "配置語雀知識庫", "yuque_dataset_desc": "可通過配置語雀文檔權限,使用語雀文檔構建知識庫,文檔不會進行二次存儲" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9ba6886bb..7d5899544 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -25,7 +25,7 @@ importers: version: 13.3.0 next-i18next: specifier: 15.4.2 - version: 15.4.2(i18next@23.16.8)(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + version: 15.4.2(i18next@23.16.8)(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) prettier: specifier: 3.2.4 version: 3.2.4 @@ -75,8 +75,8 @@ importers: specifier: ^5.1.3 version: 5.1.3 next: - specifier: 14.2.25 - version: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) + specifier: 14.2.26 + version: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) openai: specifier: 4.61.0 version: 4.61.0(encoding@0.1.13)(zod@3.24.2) @@ -169,6 +169,9 @@ importers: axios: specifier: ^1.8.2 version: 1.8.3 + bullmq: + specifier: ^5.44.0 + version: 5.44.0 chalk: specifier: ^5.3.0 version: 5.4.1 @@ -202,6 +205,9 @@ importers: iconv-lite: specifier: ^0.6.3 version: 0.6.3 + ioredis: + specifier: ^5.6.0 + version: 5.6.0 
joplin-turndown-plugin-gfm: specifier: ^1.0.12 version: 1.0.12 @@ -230,11 +236,11 @@ importers: specifier: ^3.11.3 version: 3.13.0 next: - specifier: 14.2.25 - version: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) + specifier: 14.2.26 + version: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) nextjs-cors: specifier: ^2.2.0 - version: 2.2.0(next@14.2.25(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)) + version: 2.2.0(next@14.2.26(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)) node-cron: specifier: ^3.0.3 version: 3.0.3 @@ -316,7 +322,7 @@ importers: version: 2.1.1(@chakra-ui/system@2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1))(react@18.3.1) '@chakra-ui/next-js': specifier: 2.4.2 - version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) + version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) '@chakra-ui/react': specifier: 2.10.7 version: 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -379,7 +385,7 @@ importers: version: 4.17.21 next-i18next: specifier: 15.4.2 - version: 15.4.2(i18next@23.16.8)(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + version: 15.4.2(i18next@23.16.8)(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) papaparse: specifier: ^5.4.1 version: 5.4.1 @@ -440,7 +446,7 @@ importers: version: 2.1.1(@chakra-ui/system@2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1))(react@18.3.1) '@chakra-ui/next-js': specifier: 2.4.2 - version: 
2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) + version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) '@chakra-ui/react': specifier: 2.10.7 version: 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -535,11 +541,11 @@ importers: specifier: ^5.1.3 version: 5.1.3 next: - specifier: 14.2.25 - version: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) + specifier: 14.2.26 + version: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) next-i18next: specifier: 15.4.2 - version: 15.4.2(i18next@23.16.8)(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + version: 15.4.2(i18next@23.16.8)(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) nprogress: specifier: ^0.2.0 version: 0.2.0 @@ -2044,6 +2050,9 @@ packages: resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} deprecated: Use @eslint/object-schema instead + '@ioredis/commands@1.2.0': + resolution: {integrity: sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==} + '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -2314,6 +2323,36 @@ packages: '@mongodb-js/saslprep@1.2.0': resolution: {integrity: sha512-+ywrb0AqkfaYuhHs6LxKWgqbh3I72EpEgESCw37o+9qPx9WTCkgDm2B+eMrwehGtHBWHFU4GXvnSCNiFhhausg==} + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + resolution: {integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==} + cpu: [arm64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + resolution: {integrity: sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==} + cpu: [x64] + os: [darwin] + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + resolution: {integrity: sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==} + 
cpu: [arm64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + resolution: {integrity: sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==} + cpu: [arm] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + resolution: {integrity: sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==} + cpu: [x64] + os: [linux] + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + resolution: {integrity: sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==} + cpu: [x64] + os: [win32] + '@napi-rs/wasm-runtime@0.2.7': resolution: {integrity: sha512-5yximcFK5FNompXfJFoWanu5l8v1hNGqNHh9du1xETp9HWk/B/PzvchX55WYOPaIeNglG8++68AAiauBAtbnzw==} @@ -2421,62 +2460,62 @@ packages: '@nestjs/platform-express': optional: true - '@next/env@14.2.25': - resolution: {integrity: sha512-JnzQ2cExDeG7FxJwqAksZ3aqVJrHjFwZQAEJ9gQZSoEhIow7SNoKZzju/AwQ+PLIR4NY8V0rhcVozx/2izDO0w==} + '@next/env@14.2.26': + resolution: {integrity: sha512-vO//GJ/YBco+H7xdQhzJxF7ub3SUwft76jwaeOyVVQFHCi5DCnkP16WHB+JBylo4vOKPoZBlR94Z8xBxNBdNJA==} '@next/eslint-plugin-next@14.2.24': resolution: {integrity: sha512-FDL3qs+5DML0AJz56DCVr+KnFYivxeAX73En8QbPw9GjJZ6zbfvqDy+HrarHFzbsIASn7y8y5ySJ/lllSruNVQ==} - '@next/swc-darwin-arm64@14.2.25': - resolution: {integrity: sha512-09clWInF1YRd6le00vt750s3m7SEYNehz9C4PUcSu3bAdCTpjIV4aTYQZ25Ehrr83VR1rZeqtKUPWSI7GfuKZQ==} + '@next/swc-darwin-arm64@14.2.26': + resolution: {integrity: sha512-zDJY8gsKEseGAxG+C2hTMT0w9Nk9N1Sk1qV7vXYz9MEiyRoF5ogQX2+vplyUMIfygnjn9/A04I6yrUTRTuRiyQ==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] - '@next/swc-darwin-x64@14.2.25': - resolution: {integrity: sha512-V+iYM/QR+aYeJl3/FWWU/7Ix4b07ovsQ5IbkwgUK29pTHmq+5UxeDr7/dphvtXEq5pLB/PucfcBNh9KZ8vWbug==} + '@next/swc-darwin-x64@14.2.26': + resolution: {integrity: sha512-U0adH5ryLfmTDkahLwG9sUQG2L0a9rYux8crQeC92rPhi3jGQEY47nByQHrVrt3prZigadwj/2HZ1LUUimuSbg==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] - '@next/swc-linux-arm64-gnu@14.2.25': - resolution: {integrity: sha512-LFnV2899PJZAIEHQ4IMmZIgL0FBieh5keMnriMY1cK7ompR+JUd24xeTtKkcaw8QmxmEdhoE5Mu9dPSuDBgtTg==} + '@next/swc-linux-arm64-gnu@14.2.26': + resolution: {integrity: sha512-SINMl1I7UhfHGM7SoRiw0AbwnLEMUnJ/3XXVmhyptzriHbWvPPbbm0OEVG24uUKhuS1t0nvN/DBvm5kz6ZIqpg==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-arm64-musl@14.2.25': - resolution: {integrity: sha512-QC5y5PPTmtqFExcKWKYgUNkHeHE/z3lUsu83di488nyP0ZzQ3Yse2G6TCxz6nNsQwgAx1BehAJTZez+UQxzLfw==} + '@next/swc-linux-arm64-musl@14.2.26': + resolution: {integrity: sha512-s6JaezoyJK2DxrwHWxLWtJKlqKqTdi/zaYigDXUJ/gmx/72CrzdVZfMvUc6VqnZ7YEvRijvYo+0o4Z9DencduA==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-x64-gnu@14.2.25': - resolution: {integrity: sha512-y6/ML4b9eQ2D/56wqatTJN5/JR8/xdObU2Fb1RBidnrr450HLCKr6IJZbPqbv7NXmje61UyxjF5kvSajvjye5w==} + '@next/swc-linux-x64-gnu@14.2.26': + resolution: {integrity: sha512-FEXeUQi8/pLr/XI0hKbe0tgbLmHFRhgXOUiPScz2hk0hSmbGiU8aUqVslj/6C6KA38RzXnWoJXo4FMo6aBxjzg==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-linux-x64-musl@14.2.25': - resolution: {integrity: sha512-sPX0TSXHGUOZFvv96GoBXpB3w4emMqKeMgemrSxI7A6l55VBJp/RKYLwZIB9JxSqYPApqiREaIIap+wWq0RU8w==} + '@next/swc-linux-x64-musl@14.2.26': + resolution: {integrity: sha512-BUsomaO4d2DuXhXhgQCVt2jjX4B4/Thts8nDoIruEJkhE5ifeQFtvW5c9JkdOtYvE5p2G0hcwQ0UbRaQmQwaVg==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - 
'@next/swc-win32-arm64-msvc@14.2.25': - resolution: {integrity: sha512-ReO9S5hkA1DU2cFCsGoOEp7WJkhFzNbU/3VUF6XxNGUCQChyug6hZdYL/istQgfT/GWE6PNIg9cm784OI4ddxQ==} + '@next/swc-win32-arm64-msvc@14.2.26': + resolution: {integrity: sha512-5auwsMVzT7wbB2CZXQxDctpWbdEnEW/e66DyXO1DcgHxIyhP06awu+rHKshZE+lPLIGiwtjo7bsyeuubewwxMw==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] - '@next/swc-win32-ia32-msvc@14.2.25': - resolution: {integrity: sha512-DZ/gc0o9neuCDyD5IumyTGHVun2dCox5TfPQI/BJTYwpSNYM3CZDI4i6TOdjeq1JMo+Ug4kPSMuZdwsycwFbAw==} + '@next/swc-win32-ia32-msvc@14.2.26': + resolution: {integrity: sha512-GQWg/Vbz9zUGi9X80lOeGsz1rMH/MtFO/XqigDznhhhTfDlDoynCM6982mPCbSlxJ/aveZcKtTlwfAjwhyxDpg==} engines: {node: '>= 10'} cpu: [ia32] os: [win32] - '@next/swc-win32-x64-msvc@14.2.25': - resolution: {integrity: sha512-KSznmS6eFjQ9RJ1nEc66kJvtGIL1iZMYmGEXsZPh2YtnLtqrgdVvKXJY2ScjjoFnG6nGLyPFR0UiEvDwVah4Tw==} + '@next/swc-win32-x64-msvc@14.2.26': + resolution: {integrity: sha512-2rdB3T1/Gp7bv1eQTTm9d1Y1sv9UuJ2LAwOE0Pe2prHKe32UNscj7YS13fRB37d0GAiGNR+Y7ZcW8YjDI8Ns0w==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -4014,6 +4053,9 @@ packages: buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + bullmq@5.44.0: + resolution: {integrity: sha512-OnEtkuXyrUx2Jm5BpH92+ttrobblBdCbkhOe3OoR0hxZuAilI3mPWlwELslhfImRpDv8rK+C/0/VK7I8f3xIig==} + bundle-n-require@1.1.2: resolution: {integrity: sha512-bEk2jakVK1ytnZ9R2AAiZEeK/GxPUM8jvcRxHZXifZDMcjkI4EG/GlsJ2YGSVYT9y/p/gA9/0yDY8rCGsSU6Tg==} @@ -4248,6 +4290,10 @@ packages: resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} engines: {node: '>=6'} + cluster-key-slot@1.1.2: + resolution: {integrity: sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==} + engines: {node: '>=0.10.0'} + co@4.6.0: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} @@ -5860,6 +5906,10 @@ packages: intersection-observer@0.12.2: resolution: {integrity: sha512-7m1vEcPCxXYI8HqnL8CKI6siDyD+eIWSwgB3DZA+ZTogxk9I4CDnj4wilt9x/+/QbHI4YG5YZNmC6458/e9Ktg==} + ioredis@5.6.0: + resolution: {integrity: sha512-tBZlIIWbndeWBWCXWZiqtOF/yxf6yZX3tAlTJ7nfo5jhd6dctNxF7QnYlZLZ1a0o0pDoen7CgZqO+zjNaFbJAg==} + engines: {node: '>=12.22.0'} + ip-address@9.0.5: resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==} engines: {node: '>= 12'} @@ -6554,9 +6604,15 @@ packages: lodash.debounce@4.0.8: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} + lodash.defaults@4.2.0: + resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==} + lodash.includes@4.3.0: resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} + lodash.isarguments@3.1.0: + resolution: {integrity: sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==} + lodash.isboolean@3.0.3: resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} @@ -7128,6 +7184,13 @@ packages: ms@2.1.3: resolution: {integrity: 
sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + msgpackr-extract@3.0.3: + resolution: {integrity: sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==} + hasBin: true + + msgpackr@1.11.2: + resolution: {integrity: sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g==} + mssql@11.0.1: resolution: {integrity: sha512-KlGNsugoT90enKlR8/G36H0kTxPthDhmtNUCwEHvgRza5Cjpjoj+P2X6eMpFUDN7pFrJZsKadL4x990G8RBE1w==} engines: {node: '>=18'} @@ -7203,8 +7266,8 @@ packages: react: '>= 17.0.2' react-i18next: '>= 13.5.0' - next@14.2.25: - resolution: {integrity: sha512-N5M7xMc4wSb4IkPvEV5X2BRRXUmhVHNyaXwEM86+voXthSZz8ZiRyQW4p9mwAoAPIm6OzuVZtn7idgEJeAJN3Q==} + next@14.2.26: + resolution: {integrity: sha512-b81XSLihMwCfwiUVRRja3LphLo4uBBMZEzBBWMaISbKTwOmq3wPknIETy/8000tr7Gq4WmbuFYPS7jOYIf+ZJw==} engines: {node: '>=18.17.0'} hasBin: true peerDependencies: @@ -7260,6 +7323,10 @@ packages: encoding: optional: true + node-gyp-build-optional-packages@5.2.2: + resolution: {integrity: sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==} + hasBin: true + node-gyp@10.3.1: resolution: {integrity: sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==} engines: {node: ^16.14.0 || >=18.0.0} @@ -8041,6 +8108,14 @@ packages: react: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + redis-errors@1.2.0: + resolution: {integrity: sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==} + engines: {node: '>=4'} + + redis-parser@3.0.0: + resolution: {integrity: sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==} + engines: {node: '>=4'} + redux@4.2.1: resolution: {integrity: sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w==} @@ -8490,6 +8565,9 @@ packages: stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + standard-as-callback@2.1.0: + resolution: {integrity: sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==} + state-local@1.0.7: resolution: {integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==} @@ -10668,12 +10746,12 @@ snapshots: '@chakra-ui/system': 2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1) react: 18.3.1 - '@chakra-ui/next-js@2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)': + 
'@chakra-ui/next-js@2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)': dependencies: '@chakra-ui/react': 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@emotion/cache': 11.14.0 '@emotion/react': 11.11.1(@types/react@18.3.1)(react@18.3.1) - next: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) + next: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) react: 18.3.1 '@chakra-ui/object-utils@2.1.0': {} @@ -11160,6 +11238,8 @@ snapshots: '@humanwhocodes/object-schema@2.0.3': {} + '@ioredis/commands@1.2.0': {} + '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -11565,6 +11645,24 @@ snapshots: dependencies: sparse-bitfield: 3.0.3 + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3': + optional: true + + '@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3': + optional: true + '@napi-rs/wasm-runtime@0.2.7': dependencies: '@emnapi/core': 1.3.1 @@ -11681,37 +11779,37 @@ snapshots: '@nestjs/core': 10.4.15(@nestjs/common@10.4.15(reflect-metadata@0.2.2)(rxjs@7.8.2))(encoding@0.1.13)(reflect-metadata@0.2.2)(rxjs@7.8.2) tslib: 2.8.1 - '@next/env@14.2.25': {} + '@next/env@14.2.26': {} '@next/eslint-plugin-next@14.2.24': dependencies: glob: 10.3.10 - '@next/swc-darwin-arm64@14.2.25': + '@next/swc-darwin-arm64@14.2.26': optional: true - '@next/swc-darwin-x64@14.2.25': + '@next/swc-darwin-x64@14.2.26': optional: true - '@next/swc-linux-arm64-gnu@14.2.25': + '@next/swc-linux-arm64-gnu@14.2.26': optional: true - '@next/swc-linux-arm64-musl@14.2.25': + '@next/swc-linux-arm64-musl@14.2.26': optional: true - '@next/swc-linux-x64-gnu@14.2.25': + '@next/swc-linux-x64-gnu@14.2.26': optional: true - '@next/swc-linux-x64-musl@14.2.25': + '@next/swc-linux-x64-musl@14.2.26': optional: true - '@next/swc-win32-arm64-msvc@14.2.25': + '@next/swc-win32-arm64-msvc@14.2.26': optional: true - '@next/swc-win32-ia32-msvc@14.2.25': + '@next/swc-win32-ia32-msvc@14.2.26': optional: true - '@next/swc-win32-x64-msvc@14.2.25': + '@next/swc-win32-x64-msvc@14.2.26': optional: true '@node-rs/jieba-android-arm-eabi@2.0.1': @@ -13456,6 +13554,18 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 + bullmq@5.44.0: + dependencies: + cron-parser: 4.9.0 + ioredis: 5.6.0 + msgpackr: 1.11.2 + node-abort-controller: 3.1.1 + semver: 7.7.1 + tslib: 2.8.1 + uuid: 9.0.1 + transitivePeerDependencies: + - supports-color + bundle-n-require@1.1.2: dependencies: esbuild: 0.25.1 @@ -13713,6 +13823,8 @@ snapshots: clsx@2.1.1: {} + cluster-key-slot@1.1.2: 
{} + co@4.6.0: {} collapse-white-space@1.0.6: {} @@ -14627,7 +14739,7 @@ snapshots: eslint: 8.56.0 eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.9.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint@8.56.0))(eslint@8.56.0) - eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.56.0) + eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0) eslint-plugin-jsx-a11y: 6.10.2(eslint@8.56.0) eslint-plugin-react: 7.37.4(eslint@8.56.0) eslint-plugin-react-hooks: 5.0.0-canary-7118f5dd7-20230705(eslint@8.56.0) @@ -14657,7 +14769,7 @@ snapshots: stable-hash: 0.0.5 tinyglobby: 0.2.12 optionalDependencies: - eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.56.0) + eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0) transitivePeerDependencies: - supports-color @@ -14672,7 +14784,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.56.0): + eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.8.2))(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.8 @@ -15692,6 +15804,20 @@ snapshots: intersection-observer@0.12.2: {} + ioredis@5.6.0: + dependencies: + '@ioredis/commands': 1.2.0 + cluster-key-slot: 1.1.2 + debug: 4.4.0 + denque: 2.1.0 + lodash.defaults: 4.2.0 + lodash.isarguments: 3.1.0 + redis-errors: 1.2.0 + redis-parser: 3.0.0 + standard-as-callback: 2.1.0 + transitivePeerDependencies: + - supports-color + ip-address@9.0.5: dependencies: jsbn: 1.1.0 @@ -16558,8 +16684,12 @@ snapshots: lodash.debounce@4.0.8: {} + lodash.defaults@4.2.0: {} + lodash.includes@4.3.0: {} + lodash.isarguments@3.1.0: {} + lodash.isboolean@3.0.3: {} lodash.isinteger@4.0.4: {} @@ -17481,6 +17611,22 @@ snapshots: ms@2.1.3: {} + msgpackr-extract@3.0.3: + dependencies: + node-gyp-build-optional-packages: 5.2.2 + optionalDependencies: + '@msgpackr-extract/msgpackr-extract-darwin-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-darwin-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-arm64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-linux-x64': 3.0.3 + '@msgpackr-extract/msgpackr-extract-win32-x64': 3.0.3 + optional: true + + msgpackr@1.11.2: + optionalDependencies: + msgpackr-extract: 3.0.3 + mssql@11.0.1: dependencies: '@tediousjs/connection-string': 0.5.0 @@ -17552,7 +17698,7 @@ snapshots: transitivePeerDependencies: - supports-color - 
next-i18next@15.4.2(i18next@23.16.8)(next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): + next-i18next@15.4.2(i18next@23.16.8)(next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.26.10 '@types/hoist-non-react-statics': 3.3.6 @@ -17560,13 +17706,13 @@ snapshots: hoist-non-react-statics: 3.3.2 i18next: 23.16.8 i18next-fs-backend: 2.6.0 - next: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) + next: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) react: 18.3.1 react-i18next: 14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - next@14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1): + next@14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1): dependencies: - '@next/env': 14.2.25 + '@next/env': 14.2.26 '@swc/helpers': 0.5.5 busboy: 1.6.0 caniuse-lite: 1.0.30001704 @@ -17576,24 +17722,24 @@ snapshots: react-dom: 18.3.1(react@18.3.1) styled-jsx: 5.1.1(@babel/core@7.26.10)(react@18.3.1) optionalDependencies: - '@next/swc-darwin-arm64': 14.2.25 - '@next/swc-darwin-x64': 14.2.25 - '@next/swc-linux-arm64-gnu': 14.2.25 - '@next/swc-linux-arm64-musl': 14.2.25 - '@next/swc-linux-x64-gnu': 14.2.25 - '@next/swc-linux-x64-musl': 14.2.25 - '@next/swc-win32-arm64-msvc': 14.2.25 - '@next/swc-win32-ia32-msvc': 14.2.25 - '@next/swc-win32-x64-msvc': 14.2.25 + '@next/swc-darwin-arm64': 14.2.26 + '@next/swc-darwin-x64': 14.2.26 + '@next/swc-linux-arm64-gnu': 14.2.26 + '@next/swc-linux-arm64-musl': 14.2.26 + '@next/swc-linux-x64-gnu': 14.2.26 + '@next/swc-linux-x64-musl': 14.2.26 + '@next/swc-win32-arm64-msvc': 14.2.26 + '@next/swc-win32-ia32-msvc': 14.2.26 + '@next/swc-win32-x64-msvc': 14.2.26 sass: 1.85.1 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros - nextjs-cors@2.2.0(next@14.2.25(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)): + nextjs-cors@2.2.0(next@14.2.26(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1)): dependencies: cors: 2.8.5 - next: 14.2.25(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) + next: 14.2.26(@babel/core@7.26.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) node-abi@3.74.0: dependencies: @@ -17624,6 +17770,11 @@ snapshots: optionalDependencies: encoding: 0.1.13 + node-gyp-build-optional-packages@5.2.2: + dependencies: + detect-libc: 2.0.3 + optional: true + node-gyp@10.3.1: dependencies: env-paths: 2.2.1 @@ -18499,6 +18650,12 @@ snapshots: tiny-invariant: 1.3.3 victory-vendor: 36.9.2 + redis-errors@1.2.0: {} + + redis-parser@3.0.0: + dependencies: + redis-errors: 1.2.0 + redux@4.2.1: dependencies: '@babel/runtime': 7.26.10 @@ -19048,6 +19205,8 @@ snapshots: stackback@0.0.2: {} + standard-as-callback@2.1.0: {} + state-local@1.0.7: {} state-toggle@1.0.3: {} diff --git a/projects/app/.env.template b/projects/app/.env.template index 297b19b31..2b8cf8807 100644 --- a/projects/app/.env.template +++ b/projects/app/.env.template @@ -20,9 +20,12 @@ AIPROXY_API_TOKEN=xxxxx # 强制将图片转成 base64 传递给模型 MULTIPLE_DATA_TO_BASE64=true +# Redis URL +REDIS_URL=redis://default:password@127.0.0.1:6379 # mongo 数据库连接参数,本地开发连接远程数据库时,可能需要增加 
directConnection=true 参数,才能连接上。 MONGODB_URI=mongodb://username:password@0.0.0.0:27017/fastgpt?authSource=admin - +# 日志库 +MONGODB_LOG_URI=mongodb://username:password@0.0.0.0:27017/fastgpt?authSource=admin # 向量库优先级: pg > oceanbase > milvus # PG 向量库连接参数 PG_URL=postgresql://username:password@host:port/postgres @@ -48,6 +51,8 @@ LOG_LEVEL=debug STORE_LOG_LEVEL=warn # 安全配置 +# 对话文件 n 天过期 +CHAT_FILE_EXPIRE_TIME=7 # 启动 IP 限流(true),部分接口增加了 ip 限流策略,防止非正常请求操作。 USE_IP_LIMIT=false # 工作流最大运行次数,避免极端的死循环情况 @@ -65,4 +70,4 @@ CHECK_INTERNAL_IP=false # # 日志来源ID前缀 # CHAT_LOG_SOURCE_ID_PREFIX=fastgpt- # 自定义跨域,不配置时,默认都允许跨域(逗号分割) -ALLOWED_ORIGINS= \ No newline at end of file +ALLOWED_ORIGINS= diff --git a/projects/app/next.config.js b/projects/app/next.config.js index bc191f87e..4df4cf741 100644 --- a/projects/app/next.config.js +++ b/projects/app/next.config.js @@ -83,6 +83,7 @@ const nextConfig = { serverComponentsExternalPackages: [ 'mongoose', 'pg', + 'bullmq', '@zilliz/milvus2-sdk-node', "tiktoken", ], diff --git a/projects/app/package.json b/projects/app/package.json index b63af67ed..9ec59a459 100644 --- a/projects/app/package.json +++ b/projects/app/package.json @@ -1,6 +1,6 @@ { "name": "app", - "version": "4.9.3", + "version": "4.9.4", "private": false, "scripts": { "dev": "next dev", @@ -42,7 +42,7 @@ "lodash": "^4.17.21", "mermaid": "^10.2.3", "nanoid": "^5.1.3", - "next": "14.2.25", + "next": "14.2.26", "next-i18next": "15.4.2", "nprogress": "^0.2.0", "qrcode": "^1.5.4", diff --git a/projects/app/src/components/core/ai/SettingLLMModel/index.tsx b/projects/app/src/components/core/ai/SettingLLMModel/index.tsx index 1ebf9e67f..5002d522f 100644 --- a/projects/app/src/components/core/ai/SettingLLMModel/index.tsx +++ b/projects/app/src/components/core/ai/SettingLLMModel/index.tsx @@ -99,7 +99,6 @@ const SettingLLMModel = ({ { - console.log(e); onChange(e); onCloseAIChatSetting(); }} diff --git a/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/ChatInput.tsx b/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/ChatInput.tsx index 58cddd016..7b9cc9b5b 100644 --- a/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/ChatInput.tsx +++ b/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/ChatInput.tsx @@ -1,7 +1,6 @@ -import { useSpeech } from '@/web/common/hooks/useSpeech'; import { useSystemStore } from '@/web/common/system/useSystemStore'; import { Box, Flex, Spinner, Textarea } from '@chakra-ui/react'; -import React, { useRef, useEffect, useCallback, useMemo } from 'react'; +import React, { useRef, useEffect, useCallback, useMemo, useState } from 'react'; import { useTranslation } from 'next-i18next'; import MyTooltip from '@fastgpt/web/components/common/MyTooltip'; import MyIcon from '@fastgpt/web/components/common/Icon'; @@ -18,6 +17,7 @@ import FilePreview from '../../components/FilePreview'; import { useFileUpload } from '../hooks/useFileUpload'; import ComplianceTip from '@/components/common/ComplianceTip/index'; import { useToast } from '@fastgpt/web/hooks/useToast'; +import VoiceInput, { type VoiceInputComponentRef } from './VoiceInput'; const InputGuideBox = dynamic(() => import('./InputGuideBox')); @@ -44,6 +44,7 @@ const ChatInput = ({ const { t } = useTranslation(); const { toast } = useToast(); const { isPc } = useSystem(); + const VoiceInputRef = useRef(null); const { setValue, watch, control } = chatForm; const inputValue = watch('input'); @@ -53,7 +54,6 @@ const ChatInput = ({ const chatId = useContextSelector(ChatBoxContext, (v) => 
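// Illustrative sketch (TypeScript), not part of this patch: how the REDIS_URL variable added
// to .env.template and the new bullmq/ioredis lockfile entries above could be wired together,
// given that next.config.js now externalizes 'bullmq' for the server runtime. The queue name,
// job payload and file layout are assumptions for illustration only.
import { Queue, Worker } from 'bullmq';
import IORedis from 'ioredis';

const connection = new IORedis(
  process.env.REDIS_URL ?? 'redis://default:password@127.0.0.1:6379',
  { maxRetriesPerRequest: null } // BullMQ workers require this ioredis option
);

// Producer side: enqueue background jobs against the shared Redis connection.
export const exampleQueue = new Queue('example-jobs', { connection });

// Consumer side: a worker processing jobs from the same queue.
new Worker(
  'example-jobs',
  async (job) => {
    // Real job logic would live in the service layer; this just traces the payload.
    console.log('processing job', job.id, job.data);
  },
  { connection }
);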
v.chatId); const isChatting = useContextSelector(ChatBoxContext, (v) => v.isChatting); const whisperConfig = useContextSelector(ChatBoxContext, (v) => v.whisperConfig); - const autoTTSResponse = useContextSelector(ChatBoxContext, (v) => v.autoTTSResponse); const chatInputGuide = useContextSelector(ChatBoxContext, (v) => v.chatInputGuide); const fileSelectConfig = useContextSelector(ChatBoxContext, (v) => v.fileSelectConfig); @@ -106,86 +106,6 @@ const ChatInput = ({ [TextareaDom, canSendMessage, fileList, onSendMessage, replaceFiles] ); - /* whisper init */ - const canvasRef = useRef(null); - const { - isSpeaking, - isTransCription, - stopSpeak, - startSpeak, - speakingTimeString, - renderAudioGraph, - stream - } = useSpeech({ appId, ...outLinkAuthData }); - const onWhisperRecord = useCallback(() => { - const finishWhisperTranscription = (text: string) => { - if (!text) return; - if (whisperConfig?.autoSend) { - onSendMessage({ - text, - files: fileList, - autoTTSResponse - }); - replaceFiles([]); - } else { - resetInputVal({ text }); - } - }; - if (isSpeaking) { - return stopSpeak(); - } - startSpeak(finishWhisperTranscription); - }, [ - autoTTSResponse, - fileList, - isSpeaking, - onSendMessage, - replaceFiles, - resetInputVal, - startSpeak, - stopSpeak, - whisperConfig?.autoSend - ]); - useEffect(() => { - if (!stream) { - return; - } - const audioContext = new AudioContext(); - const analyser = audioContext.createAnalyser(); - analyser.fftSize = 4096; - analyser.smoothingTimeConstant = 1; - const source = audioContext.createMediaStreamSource(stream); - source.connect(analyser); - const renderCurve = () => { - if (!canvasRef.current) return; - renderAudioGraph(analyser, canvasRef.current); - window.requestAnimationFrame(renderCurve); - }; - renderCurve(); - }, [renderAudioGraph, stream]); - - const RenderTranslateLoading = useMemo( - () => ( - - - {t('common:core.chat.Converting to text')} - - ), - [isSpeaking, isTransCription, t] - ); - const RenderTextarea = useMemo( () => ( 0 ? 
1 : 0} pl={[2, 4]}> @@ -198,7 +118,6 @@ const ChatInput = ({ cursor={'pointer'} transform={'translateY(1px)'} onClick={() => { - if (isSpeaking) return; onOpenSelectFile(); }} > @@ -208,7 +127,6 @@ const ChatInput = ({ onSelectFile({ files })} /> )} - {/* input area */} diff --git a/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/VoiceInput.tsx b/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/VoiceInput.tsx new file mode 100644 index 000000000..1d851b21f --- /dev/null +++ b/projects/app/src/components/core/chat/ChatContainer/ChatBox/Input/VoiceInput.tsx @@ -0,0 +1,369 @@ +import { useSpeech } from '@/web/common/hooks/useSpeech'; +import { Box, Flex, HStack, Spinner } from '@chakra-ui/react'; +import React, { + useRef, + useEffect, + useCallback, + useState, + forwardRef, + useImperativeHandle, + useMemo +} from 'react'; +import { useTranslation } from 'next-i18next'; +import MyTooltip from '@fastgpt/web/components/common/MyTooltip'; +import MyIcon from '@fastgpt/web/components/common/Icon'; +import { useSystem } from '@fastgpt/web/hooks/useSystem'; +import { useContextSelector } from 'use-context-selector'; +import { ChatBoxContext } from '../Provider'; +import MyIconButton from '@/pageComponents/account/team/OrgManage/IconButton'; + +export interface VoiceInputComponentRef { + onSpeak: () => void; +} + +type VoiceInputProps = { + onSendMessage: (params: { text: string; files?: any[]; autoTTSResponse?: boolean }) => void; + resetInputVal: (val: { text: string }) => void; +}; + +// PC voice input +const PCVoiceInput = ({ + speakingTimeString, + stopSpeak, + canvasRef +}: { + speakingTimeString: string; + stopSpeak: (param: boolean) => void; + canvasRef: React.RefObject; +}) => { + const { t } = useTranslation(); + + return ( + + + {t('common:core.chat.Speaking')} + + + + {speakingTimeString} + + + stopSpeak(true)} + /> + + + stopSpeak(false)} + /> + + + ); +}; + +// mobile voice input +const MobileVoiceInput = ({ + isSpeaking, + onStartSpeak, + onCloseSpeak, + stopSpeak, + canvasRef +}: { + isSpeaking: boolean; + onStartSpeak: () => void; + onCloseSpeak: () => any; + stopSpeak: (param: boolean) => void; + canvasRef: React.RefObject; +}) => { + const { t } = useTranslation(); + + const isPressing = useRef(false); + const startTimeRef = useRef(0); // 防抖 + + const startYRef = useRef(0); + + const [isCancel, setIsCancel] = useState(false); + const canvasPosition = canvasRef.current?.getBoundingClientRect(); + const maskBottom = canvasPosition ? 
`${window.innerHeight - canvasPosition.top}px` : '50px'; + + const handleTouchStart = useCallback( + (e: React.TouchEvent) => { + isPressing.current = true; + setIsCancel(false); + + startTimeRef.current = Date.now(); + const touch = e.touches[0]; + startYRef.current = touch.pageY; + + onStartSpeak(); + }, + [onStartSpeak] + ); + + const handleTouchMove = useCallback( + (e: React.TouchEvent) => { + const touch = e.touches[0] as Touch; + const currentY = touch.pageY; + const deltaY = startYRef.current - currentY; + + if (deltaY > 90) { + setIsCancel(true); + } else if (deltaY <= 90) { + setIsCancel(false); + } + }, + [startYRef] + ); + + const handleTouchEnd = useCallback( + (e: React.TouchEvent) => { + if (!isPressing.current) return; + + const endTime = Date.now(); + const timeDifference = endTime - startTimeRef.current; + + if (isCancel || timeDifference < 200) { + stopSpeak(true); + } else { + stopSpeak(false); + } + }, + [isCancel, stopSpeak] + ); + + return ( + + {/* Back Icon */} + {!isSpeaking && ( + + + + )} + { + stopSpeak(true); + }} + zIndex={4} + > + {t('chat:press_to_speak')} + + + + {/* Mask */} + {isSpeaking && ( + + + {isCancel ? t('chat:release_cancel') : t('chat:release_send')} + + + )} + + ); +}; + +const VoiceInput = forwardRef( + ({ onSendMessage, resetInputVal }, ref) => { + const { t } = useTranslation(); + const { isPc } = useSystem(); + + const outLinkAuthData = useContextSelector(ChatBoxContext, (v) => v.outLinkAuthData); + const appId = useContextSelector(ChatBoxContext, (v) => v.appId); + const whisperConfig = useContextSelector(ChatBoxContext, (v) => v.whisperConfig); + const autoTTSResponse = useContextSelector(ChatBoxContext, (v) => v.autoTTSResponse); + const canvasRef = useRef(null); + + const { + isSpeaking, + isTransCription, + stopSpeak, + startSpeak, + speakingTimeString, + renderAudioGraphPc, + renderAudioGraphMobile, + stream + } = useSpeech({ appId, ...outLinkAuthData }); + + const [mobilePreSpeak, setMobilePreSpeak] = useState(false); + + // Canvas render + useEffect(() => { + if (!stream) { + return; + } + + const audioContext = new AudioContext(); + const analyser = audioContext.createAnalyser(); + analyser.fftSize = 4096; + analyser.smoothingTimeConstant = 1; + const source = audioContext.createMediaStreamSource(stream); + source.connect(analyser); + + let animationFrameId: number | null = null; + const renderCurve = () => { + const canvas = canvasRef.current; + if (!canvas) return; + + const ctx = canvas.getContext('2d'); + if (!ctx) return; + + if (!stream.active) { + ctx.clearRect(0, 0, canvas.width, canvas.height); + if (animationFrameId) { + window.cancelAnimationFrame(animationFrameId); + animationFrameId = null; + } + return; + } + + if (isPc) { + renderAudioGraphPc(analyser, canvas); + } else { + renderAudioGraphMobile(analyser, canvas); + } + animationFrameId = window.requestAnimationFrame(renderCurve); + }; + + renderCurve(); + + return () => { + if (animationFrameId) { + window.cancelAnimationFrame(animationFrameId); + } + audioContext.close(); + source.disconnect(); + analyser.disconnect(); + }; + }, [stream, canvasRef, renderAudioGraphPc, renderAudioGraphMobile, isPc]); + + const onStartSpeak = useCallback(() => { + const finishWhisperTranscription = (text: string) => { + if (!text) return; + if (whisperConfig?.autoSend) { + onSendMessage({ + text, + autoTTSResponse + }); + } else { + resetInputVal({ text }); + } + }; + startSpeak(finishWhisperTranscription); + }, [autoTTSResponse, onSendMessage, resetInputVal, startSpeak, 
whisperConfig?.autoSend]); + + const onSpeach = useCallback(() => { + if (isPc) { + onStartSpeak(); + } else { + setMobilePreSpeak(true); + } + }, [isPc, onStartSpeak]); + useImperativeHandle(ref, () => ({ + onSpeak: onSpeach + })); + + if (!whisperConfig?.open) return null; + if (!mobilePreSpeak && !isSpeaking && !isTransCription) return null; + + return ( + e.preventDefault()} + > + {isPc ? ( + + ) : ( + setMobilePreSpeak(false)} + stopSpeak={stopSpeak} + canvasRef={canvasRef} + /> + )} + + {isTransCription && ( + + + {t('common:core.chat.Converting to text')} + + )} + + ); + } +); +VoiceInput.displayName = 'VoiceInput'; + +export default VoiceInput; diff --git a/projects/app/src/components/core/chat/ChatContainer/ChatBox/index.tsx b/projects/app/src/components/core/chat/ChatContainer/ChatBox/index.tsx index 177011aec..22642d610 100644 --- a/projects/app/src/components/core/chat/ChatContainer/ChatBox/index.tsx +++ b/projects/app/src/components/core/chat/ChatContainer/ChatBox/index.tsx @@ -219,7 +219,8 @@ const ChatBox = ({ tool, interactive, autoTTSResponse, - variables + variables, + nodeResponse }: generatingMessageProps & { autoTTSResponse?: boolean }) => { setChatRecords((state) => state.map((item, index) => { @@ -232,7 +233,14 @@ const ChatBox = ({ JSON.stringify(item.value[item.value.length - 1]) ); - if (event === SseResponseEventEnum.flowNodeStatus && status) { + if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) { + return { + ...item, + responseData: item.responseData + ? [...item.responseData, nodeResponse] + : [nodeResponse] + }; + } else if (event === SseResponseEventEnum.flowNodeStatus && status) { return { ...item, status, @@ -518,36 +526,34 @@ const ChatBox = ({ reserveTool: true }); - const { - responseData, - responseText, - isNewChat = false - } = await onStartChat({ + const { responseText } = await onStartChat({ messages, // 保证最后一条是 Human 的消息 responseChatItemId: responseChatId, controller: abortSignal, generatingMessage: (e) => generatingMessage({ ...e, autoTTSResponse }), variables: requestVariables }); - if (responseData?.[responseData.length - 1]?.error) { - toast({ - title: t(responseData[responseData.length - 1].error?.message), - status: 'error' - }); - } // Set last chat finish status let newChatHistories: ChatSiteItemType[] = []; setChatRecords((state) => { newChatHistories = state.map((item, index) => { if (index !== state.length - 1) return item; + + // Check node response error + const responseData = mergeChatResponseData(item.responseData || []); + if (responseData[responseData.length - 1]?.error) { + toast({ + title: t(responseData[responseData.length - 1].error?.message), + status: 'error' + }); + } + return { ...item, status: ChatStatusEnum.finish, time: new Date(), - responseData: item.responseData - ? 
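// Illustrative sketch (TypeScript), not from this diff: how a parent such as ChatInput might
// drive the VoiceInput component defined above through its imperative ref. Only the ref shape
// (VoiceInputComponentRef with onSpeak) comes from the patch; the hook name and call site are
// assumptions for illustration.
import { useRef } from 'react';
import type { VoiceInputComponentRef } from './VoiceInput';

function useVoiceInputTrigger() {
  // Attach this ref to <VoiceInput ref={voiceInputRef} ... /> in the parent component.
  const voiceInputRef = useRef<VoiceInputComponentRef>(null);

  // Called from e.g. a microphone icon's onClick handler.
  const startVoiceInput = () => {
    voiceInputRef.current?.onSpeak();
  };

  return { voiceInputRef, startVoiceInput };
}

export default useVoiceInputTrigger;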
mergeChatResponseData([...item.responseData, ...responseData]) - : responseData + responseData }; }); return newChatHistories; @@ -567,7 +573,7 @@ const ChatBox = ({ } catch (err: any) { console.log(err); toast({ - title: t(getErrText(err, 'core.chat.error.Chat error') as any), + title: t(getErrText(err, t('common:core.chat.error.Chat error') as any)), status: 'error', duration: 5000, isClosable: true @@ -807,12 +813,14 @@ const ChatBox = ({ showEmptyIntro && chatRecords.length === 0 && !variableList?.length && + !externalVariableList?.length && !welcomeText, [ chatRecords.length, feConfigs?.show_emptyChat, showEmptyIntro, variableList?.length, + externalVariableList?.length, welcomeText ] ); diff --git a/projects/app/src/components/core/chat/ChatContainer/PluginRunBox/context.tsx b/projects/app/src/components/core/chat/ChatContainer/PluginRunBox/context.tsx index 23b3364a8..08eff427b 100644 --- a/projects/app/src/components/core/chat/ChatContainer/PluginRunBox/context.tsx +++ b/projects/app/src/components/core/chat/ChatContainer/PluginRunBox/context.tsx @@ -18,6 +18,7 @@ import { ChatItemContext } from '@/web/core/chat/context/chatItemContext'; import { ChatRecordContext } from '@/web/core/chat/context/chatRecordContext'; import { AppFileSelectConfigType } from '@fastgpt/global/core/app/type'; import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants'; +import { mergeChatResponseData } from '@fastgpt/global/core/chat/utils'; type PluginRunContextType = PluginRunBoxProps & { isChatting: boolean; @@ -46,11 +47,12 @@ const PluginRunContextProvider = ({ const pluginInputs = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.pluginInputs); const setTab = useContextSelector(ChatItemContext, (v) => v.setPluginRunTab); + const variablesForm = useContextSelector(ChatItemContext, (v) => v.variablesForm); + const chatConfig = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.chatConfig); + const setChatRecords = useContextSelector(ChatRecordContext, (v) => v.setChatRecords); const chatRecords = useContextSelector(ChatRecordContext, (v) => v.chatRecords); - const chatConfig = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.chatConfig); - const { instruction = '', fileSelectConfig = defaultAppSelectFileConfig } = useMemo( () => chatConfig || {}, [chatConfig] @@ -65,7 +67,7 @@ const PluginRunContextProvider = ({ }, []); const generatingMessage = useCallback( - ({ event, text = '', status, name, tool }: generatingMessageProps) => { + ({ event, text = '', status, name, tool, nodeResponse, variables }: generatingMessageProps) => { setChatRecords((state) => state.map((item, index) => { if (index !== state.length - 1 || item.obj !== ChatRoleEnum.AI) return item; @@ -74,7 +76,14 @@ const PluginRunContextProvider = ({ JSON.stringify(item.value[item.value.length - 1]) ); - if (event === SseResponseEventEnum.flowNodeStatus && status) { + if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) { + return { + ...item, + responseData: item.responseData + ? 
[...item.responseData, nodeResponse] + : [nodeResponse] + }; + } else if (event === SseResponseEventEnum.flowNodeStatus && status) { return { ...item, status, @@ -144,13 +153,15 @@ const PluginRunContextProvider = ({ return val; }) }; + } else if (event === SseResponseEventEnum.updateVariables && variables) { + variablesForm.setValue('variables', variables); } return item; }) ); }, - [setChatRecords] + [setChatRecords, variablesForm] ); const isChatting = useMemo( @@ -226,7 +237,7 @@ const PluginRunContextProvider = ({ } } - const { responseData } = await onStartChat({ + await onStartChat({ messages, controller: chatController.current, generatingMessage, @@ -235,16 +246,20 @@ const PluginRunContextProvider = ({ ...formatVariables } }); - if (responseData?.[responseData.length - 1]?.error) { - toast({ - title: responseData[responseData.length - 1].error?.message, - status: 'error' - }); - } setChatRecords((state) => state.map((item, index) => { if (index !== state.length - 1) return item; + + // Check node response error + const responseData = mergeChatResponseData(item.responseData || []); + if (responseData[responseData.length - 1]?.error) { + toast({ + title: t(responseData[responseData.length - 1].error?.message), + status: 'error' + }); + } + return { ...item, status: 'finish', diff --git a/projects/app/src/components/core/chat/ChatContainer/type.d.ts b/projects/app/src/components/core/chat/ChatContainer/type.d.ts index a8b8494a7..d7d0e5340 100644 --- a/projects/app/src/components/core/chat/ChatContainer/type.d.ts +++ b/projects/app/src/components/core/chat/ChatContainer/type.d.ts @@ -1,6 +1,10 @@ import { StreamResponseType } from '@/web/common/api/fetch'; import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type'; -import { ChatSiteItemType, ToolModuleResponseItemType } from '@fastgpt/global/core/chat/type'; +import { + ChatHistoryItemResType, + ChatSiteItemType, + ToolModuleResponseItemType +} from '@fastgpt/global/core/chat/type'; import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type'; export type generatingMessageProps = { @@ -12,6 +16,7 @@ export type generatingMessageProps = { tool?: ToolModuleResponseItemType; interactive?: WorkflowInteractiveResponseType; variables?: Record; + nodeResponse?: ChatHistoryItemResType; }; export type StartChatFnProps = { diff --git a/projects/app/src/components/core/chat/components/WholeResponseModal.tsx b/projects/app/src/components/core/chat/components/WholeResponseModal.tsx index 60a76d998..babef946c 100644 --- a/projects/app/src/components/core/chat/components/WholeResponseModal.tsx +++ b/projects/app/src/components/core/chat/components/WholeResponseModal.tsx @@ -17,6 +17,7 @@ import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider'; import { useRequest2 } from '@fastgpt/web/hooks/useRequest'; import { getFileIcon } from '@fastgpt/global/common/file/icon'; import EmptyTip from '@fastgpt/web/components/common/EmptyTip'; +import { completionFinishReasonMap } from '@fastgpt/global/core/ai/constants'; type sideTabItemType = { moduleLogo?: string; @@ -196,6 +197,13 @@ export const WholeResponseContent = ({ label={t('common:core.chat.response.module maxToken')} value={activeModule?.maxToken} /> + {activeModule?.finishReason && ( + + )} + {t('common:core.module.Default Value')} - + {(inputType === FlowNodeInputTypeEnum.numberInput || (inputType === VariableInputEnum.custom && valueType === WorkflowIOValueTypeEnum.number)) && ( diff --git 
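// Simplified sketch (TypeScript), not from this diff: the shape of the new streaming flow,
// where each flowNodeResponse SSE event appends one node's result to the last chat record and
// the error check runs on the merged data only after the chat finishes. Types are trimmed for
// illustration; the real ChatHistoryItemResType carries many more fields.
type NodeResponseLike = { nodeId: string; error?: { message: string } };
type ChatRecordLike = { responseData?: NodeResponseLike[] };

// Per-event accumulation, mirroring the flowNodeResponse branch above.
function appendNodeResponse(
  record: ChatRecordLike,
  nodeResponse: NodeResponseLike
): ChatRecordLike {
  return {
    ...record,
    responseData: record.responseData ? [...record.responseData, nodeResponse] : [nodeResponse]
  };
}

// Finish-time check, mirroring the mergeChatResponseData + last-item error test.
function lastNodeErrorMessage(responseData: NodeResponseLike[]): string | undefined {
  return responseData[responseData.length - 1]?.error?.message;
}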
a/projects/app/src/pageComponents/app/detail/useChatTest.tsx b/projects/app/src/pageComponents/app/detail/useChatTest.tsx index 222ae0c48..7dc1be34d 100644 --- a/projects/app/src/pageComponents/app/detail/useChatTest.tsx +++ b/projects/app/src/pageComponents/app/detail/useChatTest.tsx @@ -48,7 +48,7 @@ export const useChatTest = ({ const histories = messages.slice(-1); // 流请求,获取数据 - const { responseText, responseData } = await streamFetch({ + const { responseText } = await streamFetch({ url: '/api/core/chat/chatTest', data: { // Send histories and user messages @@ -66,7 +66,7 @@ export const useChatTest = ({ abortCtrl: controller }); - return { responseText, responseData }; + return { responseText }; } ); diff --git a/projects/app/src/pageComponents/chat/ChatHistorySlider.tsx b/projects/app/src/pageComponents/chat/ChatHistorySlider.tsx index b8496ede2..b0ba3aba8 100644 --- a/projects/app/src/pageComponents/chat/ChatHistorySlider.tsx +++ b/projects/app/src/pageComponents/chat/ChatHistorySlider.tsx @@ -84,7 +84,6 @@ const ChatHistorySlider = ({ confirmClearText }: { confirmClearText: string }) = return ( import('./WebsiteConfig')); @@ -66,7 +65,7 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) => const router = useRouter(); const { parentId = '' } = router.query as { parentId: string }; - const { datasetDetail, datasetId, updateDataset } = useContextSelector( + const { datasetDetail, datasetId, updateDataset, loadDatasetDetail } = useContextSelector( DatasetPageContext, (v) => v ); @@ -75,30 +74,32 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) => const { openConfirm: openWebSyncConfirm, ConfirmModal: ConfirmWebSyncModal } = useConfirm({ content: t('dataset:start_sync_website_tip') }); + const syncWebsite = async () => { + await checkTeamWebSyncLimit(); + postWebsiteSync({ datasetId: datasetId }).then(() => { + loadDatasetDetail(datasetId); + }); + }; const { isOpen: isOpenWebsiteModal, onOpen: onOpenWebsiteModal, onClose: onCloseWebsiteModal } = useDisclosure(); - const { mutate: onUpdateDatasetWebsiteConfig } = useRequest({ - mutationFn: async (websiteConfig: DatasetSchemaType['websiteConfig']) => { - onCloseWebsiteModal(); - await checkTeamWebSyncLimit(); + const { runAsync: onUpdateDatasetWebsiteConfig } = useRequest2( + async (websiteConfig: WebsiteConfigFormType) => { await updateDataset({ id: datasetId, - websiteConfig, - status: DatasetStatusEnum.syncing + websiteConfig: websiteConfig.websiteConfig, + chunkSettings: websiteConfig.chunkSettings }); - const billId = await postCreateTrainingUsage({ - name: t('common:core.dataset.training.Website Sync'), - datasetId: datasetId - }); - await postWebsiteSync({ datasetId: datasetId, billId }); - - return; + await syncWebsite(); }, - errorToast: t('common:common.Update Failed') - }); + { + onSuccess() { + onCloseWebsiteModal(); + } + } + ); // collection list const [searchText, setSearchText] = useState(''); @@ -124,7 +125,7 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) => }); const contextValue: CollectionPageContextType = { - openWebSyncConfirm: openWebSyncConfirm(onUpdateDatasetWebsiteConfig), + openWebSyncConfirm: openWebSyncConfirm(syncWebsite), onOpenWebsiteModal, searchText, @@ -149,10 +150,6 @@ const CollectionPageContextProvider = ({ children }: { children: ReactNode }) => )} diff --git a/projects/app/src/pageComponents/dataset/detail/CollectionCard/EmptyCollectionTip.tsx 
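// Illustrative sketch (TypeScript), not from this diff: the order of operations behind the
// reworked website sync in CollectionPageContext above. Signatures are simplified;
// checkTeamWebSyncLimit, postWebsiteSync and loadDatasetDetail are the helpers referenced in
// the patch, with their argument shapes reduced for illustration.
async function syncWebsiteSketch(opts: {
  datasetId: string;
  checkTeamWebSyncLimit: () => Promise<void>;
  postWebsiteSync: (data: { datasetId: string }) => Promise<void>;
  loadDatasetDetail: (id: string) => Promise<void>;
}) {
  const { datasetId, checkTeamWebSyncLimit, postWebsiteSync, loadDatasetDetail } = opts;

  // 1. Quota/permission gate before any write.
  await checkTeamWebSyncLimit();
  // 2. Kick off the sync job, then refresh the dataset so its status flips to syncing/waiting.
  await postWebsiteSync({ datasetId });
  await loadDatasetDetail(datasetId);
}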
b/projects/app/src/pageComponents/dataset/detail/CollectionCard/EmptyCollectionTip.tsx index d73c3a6a1..30c098e71 100644 --- a/projects/app/src/pageComponents/dataset/detail/CollectionCard/EmptyCollectionTip.tsx +++ b/projects/app/src/pageComponents/dataset/detail/CollectionCard/EmptyCollectionTip.tsx @@ -25,6 +25,9 @@ const EmptyCollectionTip = () => { {datasetDetail.status === DatasetStatusEnum.syncing && ( <>{t('common:core.dataset.status.syncing')} )} + {datasetDetail.status === DatasetStatusEnum.waiting && ( + <>{t('common:core.dataset.status.waiting')} + )} {datasetDetail.status === DatasetStatusEnum.active && ( <> {!datasetDetail?.websiteConfig?.url ? ( diff --git a/projects/app/src/pageComponents/dataset/detail/CollectionCard/Header.tsx b/projects/app/src/pageComponents/dataset/detail/CollectionCard/Header.tsx index 7fc193f40..f2cf88bdb 100644 --- a/projects/app/src/pageComponents/dataset/detail/CollectionCard/Header.tsx +++ b/projects/app/src/pageComponents/dataset/detail/CollectionCard/Header.tsx @@ -1,35 +1,23 @@ import React from 'react'; -import { - Box, - Flex, - MenuButton, - Button, - Link, - useTheme, - useDisclosure, - HStack -} from '@chakra-ui/react'; +import { Box, Flex, MenuButton, Button, Link, useDisclosure, HStack } from '@chakra-ui/react'; import { getDatasetCollectionPathById, postDatasetCollection, putDatasetCollectionById } from '@/web/core/dataset/api'; -import { useQuery } from '@tanstack/react-query'; import { useTranslation } from 'next-i18next'; import MyIcon from '@fastgpt/web/components/common/Icon'; import MyInput from '@/components/MyInput'; -import { useRequest, useRequest2 } from '@fastgpt/web/hooks/useRequest'; +import { useRequest2 } from '@fastgpt/web/hooks/useRequest'; import { useRouter } from 'next/router'; import { useSystemStore } from '@/web/common/system/useSystemStore'; import MyMenu from '@fastgpt/web/components/common/MyMenu'; import { useEditTitle } from '@/web/common/hooks/useEditTitle'; import { DatasetCollectionTypeEnum, - TrainingModeEnum, DatasetTypeEnum, DatasetTypeMap, - DatasetStatusEnum, - DatasetCollectionDataProcessModeEnum + DatasetStatusEnum } from '@fastgpt/global/core/dataset/constants'; import EditFolderModal, { useEditFolder } from '../../EditFolderModal'; import { TabEnum } from '../../../../pages/dataset/detail/index'; @@ -43,26 +31,36 @@ import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContex import { useSystem } from '@fastgpt/web/hooks/useSystem'; import HeaderTagPopOver from './HeaderTagPopOver'; import MyBox from '@fastgpt/web/components/common/MyBox'; +import Icon from '@fastgpt/web/components/common/Icon'; +import MyTag from '@fastgpt/web/components/common/Tag/index'; +import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip'; const FileSourceSelector = dynamic(() => import('../Import/components/FileSourceSelector')); -const Header = ({}: {}) => { +const Header = ({ hasTrainingData }: { hasTrainingData: boolean }) => { const { t } = useTranslation(); - const theme = useTheme(); - const { feConfigs } = useSystemStore(); + const { isPc } = useSystem(); + const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail); const router = useRouter(); const { parentId = '' } = router.query as { parentId: string }; - const { isPc } = useSystem(); - const { searchText, setSearchText, total, getData, pageNum, onOpenWebsiteModal } = - useContextSelector(CollectionPageContext, (v) => v); + const { + searchText, + setSearchText, + total, + getData, + pageNum, 
+ onOpenWebsiteModal, + openWebSyncConfirm + } = useContextSelector(CollectionPageContext, (v) => v); - const { data: paths = [] } = useQuery(['getDatasetCollectionPathById', parentId], () => - getDatasetCollectionPathById(parentId) - ); + const { data: paths = [] } = useRequest2(() => getDatasetCollectionPathById(parentId), { + refreshDeps: [parentId], + manual: false + }); const { editFolderData, setEditFolderData } = useEditFolder(); const { onOpenModal: onOpenCreateVirtualFileModal, EditModal: EditCreateVirtualFileModal } = @@ -72,13 +70,14 @@ const Header = ({}: {}) => { canEmpty: false }); + // Import collection const { isOpen: isOpenFileSourceSelector, onOpen: onOpenFileSourceSelector, onClose: onCloseFileSourceSelector } = useDisclosure(); - const { runAsync: onCreateCollection, loading: onCreating } = useRequest2( + const { runAsync: onCreateCollection } = useRequest2( async ({ name, type }: { name: string; type: DatasetCollectionTypeEnum }) => { const id = await postDatasetCollection({ parentId, @@ -100,7 +99,7 @@ const Header = ({}: {}) => { const isWebSite = datasetDetail?.type === DatasetTypeEnum.websiteDataset; return ( - + { {!isWebSite && } {t(DatasetTypeMap[datasetDetail?.type]?.collectionLabel as any)}({total}) + {/* Website sync */} {datasetDetail?.websiteConfig?.url && ( - {t('common:core.dataset.website.Base Url')}: + {t('common:core.dataset.website.Base Url')}: {datasetDetail.websiteConfig.url} @@ -171,12 +172,14 @@ const Header = ({}: {}) => { )} {/* Tag */} - {datasetDetail.permission.hasWritePer && feConfigs?.isPlus && } + {datasetDetail.type !== DatasetTypeEnum.websiteDataset && + datasetDetail.permission.hasWritePer && + feConfigs?.isPlus && } {/* diff collection button */} {datasetDetail.permission.hasWritePer && ( - + {datasetDetail?.type === DatasetTypeEnum.dataset && ( { onClick: () => { onOpenCreateVirtualFileModal({ defaultVal: '', - onSuccess: (name) => { - onCreateCollection({ name, type: DatasetCollectionTypeEnum.virtual }); - } + onSuccess: (name) => + onCreateCollection({ name, type: DatasetCollectionTypeEnum.virtual }) }); } }, @@ -272,35 +274,70 @@ const Header = ({}: {}) => { {datasetDetail?.type === DatasetTypeEnum.websiteDataset && ( <> {datasetDetail?.websiteConfig?.url ? 
( - + <> {datasetDetail.status === DatasetStatusEnum.active && ( - + + } + > + {t('dataset:params_config')} + + {!hasTrainingData && ( + } + > + {t('dataset:immediate_sync')} + + )} + )} {datasetDetail.status === DatasetStatusEnum.syncing && ( - - - - {t('common:core.dataset.status.syncing')} - - + {t('common:core.dataset.status.syncing')} + )} - + {datasetDetail.status === DatasetStatusEnum.waiting && ( + + {t('common:core.dataset.status.waiting')} + + )} + {datasetDetail.status === DatasetStatusEnum.error && ( + + + {t('dataset:status_error')} + + + + )} + ) : ( - )} diff --git a/projects/app/src/pageComponents/dataset/detail/CollectionCard/TrainingStates.tsx b/projects/app/src/pageComponents/dataset/detail/CollectionCard/TrainingStates.tsx new file mode 100644 index 000000000..c7636d71b --- /dev/null +++ b/projects/app/src/pageComponents/dataset/detail/CollectionCard/TrainingStates.tsx @@ -0,0 +1,502 @@ +import { + Box, + Button, + Flex, + ModalBody, + Table, + TableContainer, + Tbody, + Td, + Th, + Thead, + Tr +} from '@chakra-ui/react'; +import MyModal from '@fastgpt/web/components/common/MyModal'; +import { useTranslation } from 'next-i18next'; +import MyTag from '@fastgpt/web/components/common/Tag/index'; +import FillRowTabs from '@fastgpt/web/components/common/Tabs/FillRowTabs'; +import { useMemo, useState } from 'react'; +import { useRequest2 } from '@fastgpt/web/hooks/useRequest'; +import { + deleteTrainingData, + getDatasetCollectionTrainingDetail, + getTrainingDataDetail, + getTrainingError, + updateTrainingData +} from '@/web/core/dataset/api'; +import { DatasetCollectionDataProcessModeEnum } from '@fastgpt/global/core/dataset/constants'; +import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants'; +import MyIcon from '@fastgpt/web/components/common/Icon'; +import MyTooltip from '@fastgpt/web/components/common/MyTooltip'; +import { getTrainingDataDetailResponse } from '@/pages/api/core/dataset/training/getTrainingDataDetail'; +import MyTextarea from '@/components/common/Textarea/MyTextarea'; +import { TrainingProcess } from '@/web/core/dataset/constants'; +import { useForm } from 'react-hook-form'; +import type { getTrainingDetailResponse } from '@/pages/api/core/dataset/collection/trainingDetail'; +import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination'; +import EmptyTip from '@fastgpt/web/components/common/EmptyTip'; + +enum TrainingStatus { + NotStart = 'NotStart', + Queued = 'Queued', // wait count>0 + Running = 'Running', // wait count=0; training count>0. + Ready = 'Ready', + Error = 'Error' +} + +const ProgressView = ({ trainingDetail }: { trainingDetail: getTrainingDetailResponse }) => { + const { t } = useTranslation(); + + const isQA = trainingDetail?.trainingType === DatasetCollectionDataProcessModeEnum.qa; + + /* + 状态计算 + 1. 暂时没有内容解析的状态 + 2. 完全没有训练数据时候,已就绪 + 3. 
有训练数据,中间过程全部是进行中 + */ + const statesArray = useMemo(() => { + const isReady = + Object.values(trainingDetail.queuedCounts).every((count) => count === 0) && + Object.values(trainingDetail.trainingCounts).every((count) => count === 0) && + Object.values(trainingDetail.errorCounts).every((count) => count === 0); + + const getTrainingStatus = ({ errorCount }: { errorCount: number }) => { + if (isReady) return TrainingStatus.Ready; + if (errorCount > 0) { + return TrainingStatus.Error; + } + return TrainingStatus.Running; + }; + + // 只显示排队和处理中的数量 + const getStatusText = (mode: TrainingModeEnum) => { + if (isReady) return; + + if (trainingDetail.queuedCounts[mode] > 0) { + return t('dataset:dataset.Training_Waiting', { + count: trainingDetail.queuedCounts[mode] + }); + } + if (trainingDetail.trainingCounts[mode] > 0) { + return t('dataset:dataset.Training_Count', { + count: trainingDetail.trainingCounts[mode] + }); + } + return; + }; + + const states: { + label: string; + statusText?: string; + status: TrainingStatus; + errorCount: number; + }[] = [ + // { + // label: TrainingProcess.waiting.label, + // status: TrainingStatus.Queued, + // statusText: t('dataset:dataset.Completed') + // }, + { + label: t(TrainingProcess.parsing.label), + status: TrainingStatus.Ready, + errorCount: 0 + }, + ...(isQA + ? [ + { + errorCount: trainingDetail.errorCounts.qa, + label: t(TrainingProcess.getQA.label), + statusText: getStatusText(TrainingModeEnum.qa), + status: getTrainingStatus({ + errorCount: trainingDetail.errorCounts.qa + }) + } + ] + : []), + ...(trainingDetail?.advancedTraining.imageIndex && !isQA + ? [ + { + errorCount: trainingDetail.errorCounts.image, + label: t(TrainingProcess.imageIndex.label), + statusText: getStatusText(TrainingModeEnum.image), + status: getTrainingStatus({ + errorCount: trainingDetail.errorCounts.image + }) + } + ] + : []), + ...(trainingDetail?.advancedTraining.autoIndexes && !isQA + ? [ + { + errorCount: trainingDetail.errorCounts.auto, + label: t(TrainingProcess.autoIndex.label), + statusText: getStatusText(TrainingModeEnum.auto), + status: getTrainingStatus({ + errorCount: trainingDetail.errorCounts.auto + }) + } + ] + : []), + { + errorCount: trainingDetail.errorCounts.chunk, + label: t(TrainingProcess.vectorizing.label), + statusText: getStatusText(TrainingModeEnum.chunk), + status: getTrainingStatus({ + errorCount: trainingDetail.errorCounts.chunk + }) + }, + { + errorCount: 0, + label: t('dataset:process.Is_Ready'), + status: isReady ? TrainingStatus.Ready : TrainingStatus.NotStart, + statusText: isReady + ? 
undefined + : t('dataset:training_ready', { + count: trainingDetail.trainedCount + }) + } + ]; + + return states; + }, [trainingDetail, t, isQA]); + + return ( + + {statesArray.map((item, index) => ( + + {/* Status round */} + + {item.status === TrainingStatus.Ready && ( + + )} + + {/* Card */} + + + {t(item.label as any)} + + {item.status === TrainingStatus.Error && ( + + {t('dataset:training.Error', { count: item.errorCount })} + + )} + + {!!item.statusText && ( + + {item.statusText} + + )} + + + ))} + + ); +}; + +const ErrorView = ({ datasetId, collectionId }: { datasetId: string; collectionId: string }) => { + const { t } = useTranslation(); + const TrainingText = { + [TrainingModeEnum.chunk]: t('dataset:process.Vectorizing'), + [TrainingModeEnum.qa]: t('dataset:process.Get QA'), + [TrainingModeEnum.image]: t('dataset:process.Image_Index'), + [TrainingModeEnum.auto]: t('dataset:process.Auto_Index') + }; + + const [editChunk, setEditChunk] = useState(); + + const { + data: errorList, + ScrollData, + isLoading, + refreshList + } = useScrollPagination(getTrainingError, { + pageSize: 15, + params: { + collectionId + }, + EmptyTip: + }); + + const { runAsync: getData, loading: getDataLoading } = useRequest2( + (data: { datasetId: string; collectionId: string; dataId: string }) => { + return getTrainingDataDetail(data); + }, + { + manual: true, + onSuccess: (data) => { + setEditChunk(data); + } + } + ); + const { runAsync: deleteData, loading: deleteLoading } = useRequest2( + (data: { datasetId: string; collectionId: string; dataId: string }) => { + return deleteTrainingData(data); + }, + { + manual: true, + onSuccess: () => { + refreshList(); + } + } + ); + const { runAsync: updateData, loading: updateLoading } = useRequest2( + (data: { datasetId: string; collectionId: string; dataId: string; q?: string; a?: string }) => { + return updateTrainingData(data); + }, + { + manual: true, + onSuccess: () => { + refreshList(); + setEditChunk(undefined); + } + } + ); + + if (editChunk) { + return ( + setEditChunk(undefined)} + onSave={(data) => { + updateData({ + datasetId, + collectionId, + dataId: editChunk._id, + ...data + }); + }} + /> + ); + } + + return ( + + + + + + + + + + + + + {errorList.map((item, index) => ( + + + + + + + ))} + +
{t('dataset:dataset.Chunk_Number')}{t('dataset:dataset.Training_Status')}{t('dataset:dataset.Error_Message')}{t('dataset:dataset.Operation')}
{item.chunkIndex + 1}{TrainingText[item.mode]} + {item.errorMsg} + + + + + + + + +
+ + + ); +}; + +const EditView = ({ + editChunk, + onCancel, + onSave +}: { + editChunk: getTrainingDataDetailResponse; + onCancel: () => void; + onSave: (data: { q: string; a?: string }) => void; +}) => { + const { t } = useTranslation(); + const { register, handleSubmit } = useForm({ + defaultValues: { + q: editChunk?.q || '', + a: editChunk?.a || '' + } + }); + + return ( + + {editChunk?.a && q} + + {editChunk?.a && ( + <> + a + + + )} + + + + + + ); +}; + +const TrainingStates = ({ + datasetId, + collectionId, + defaultTab = 'states', + onClose +}: { + datasetId: string; + collectionId: string; + defaultTab?: 'states' | 'errors'; + onClose: () => void; +}) => { + const { t } = useTranslation(); + const [tab, setTab] = useState(defaultTab); + + const { data: trainingDetail, loading } = useRequest2( + () => getDatasetCollectionTrainingDetail(collectionId), + { + pollingInterval: 5000, + pollingWhenHidden: false, + manual: false + } + ); + + const errorCounts = (Object.values(trainingDetail?.errorCounts || {}) as number[]).reduce( + (acc, count) => acc + count, + 0 + ); + + return ( + + + setTab(e as 'states' | 'errors')} + list={[ + { label: t('dataset:dataset.Training Process'), value: 'states' }, + { + label: t('dataset:dataset.Training_Errors', { + count: errorCounts + }), + value: 'errors' + } + ]} + /> + {tab === 'states' && trainingDetail && } + {tab === 'errors' && } + + + ); +}; + +export default TrainingStates; diff --git a/projects/app/src/pageComponents/dataset/detail/CollectionCard/WebsiteConfig.tsx b/projects/app/src/pageComponents/dataset/detail/CollectionCard/WebsiteConfig.tsx index e66d4c658..374bf7ecf 100644 --- a/projects/app/src/pageComponents/dataset/detail/CollectionCard/WebsiteConfig.tsx +++ b/projects/app/src/pageComponents/dataset/detail/CollectionCard/WebsiteConfig.tsx @@ -1,110 +1,215 @@ -import React from 'react'; import MyModal from '@fastgpt/web/components/common/MyModal'; import { useTranslation } from 'next-i18next'; -import { Box, Button, Input, Link, ModalBody, ModalFooter } from '@chakra-ui/react'; import { strIsLink } from '@fastgpt/global/common/string/tools'; import { useToast } from '@fastgpt/web/hooks/useToast'; import { useForm } from 'react-hook-form'; import { useConfirm } from '@fastgpt/web/hooks/useConfirm'; import { getDocPath } from '@/web/common/system/doc'; import { useSystemStore } from '@/web/common/system/useSystemStore'; +import { useMyStep } from '@fastgpt/web/hooks/useStep'; +import MyDivider from '@fastgpt/web/components/common/MyDivider'; +import React, { useRef } from 'react'; +import { + Box, + Link, + Input, + Button, + ModalBody, + ModalFooter, + Textarea, + Stack +} from '@chakra-ui/react'; +import { + DataChunkSplitModeEnum, + DatasetCollectionDataProcessModeEnum +} from '@fastgpt/global/core/dataset/constants'; +import { ChunkSettingModeEnum } from '@fastgpt/global/core/dataset/constants'; +import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent'; +import { useContextSelector } from 'use-context-selector'; +import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext'; +import CollectionChunkForm, { + collectionChunkForm2StoreChunkData, + type CollectionChunkFormType +} from '../Form/CollectionChunkForm'; +import { getLLMDefaultChunkSize } from '@fastgpt/global/core/dataset/training/utils'; +import { ChunkSettingsType } from '@fastgpt/global/core/dataset/type'; -type FormType = { - url?: string | undefined; - selector?: string | undefined; +export type WebsiteConfigFormType = { + 
websiteConfig: { + url: string; + selector: string; + }; + chunkSettings: ChunkSettingsType; }; const WebsiteConfigModal = ({ onClose, - onSuccess, - defaultValue = { - url: '', - selector: '' - } + onSuccess }: { onClose: () => void; - onSuccess: (data: FormType) => void; - defaultValue?: FormType; + onSuccess: (data: WebsiteConfigFormType) => void; }) => { const { t } = useTranslation(); const { feConfigs } = useSystemStore(); const { toast } = useToast(); - const { register, handleSubmit } = useForm({ - defaultValues: defaultValue + const steps = [ + { + title: t('dataset:website_info') + }, + { + title: t('dataset:params_config') + } + ]; + + const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail); + const websiteConfig = datasetDetail.websiteConfig; + const chunkSettings = datasetDetail.chunkSettings; + + const { + register: websiteInfoForm, + handleSubmit: websiteInfoHandleSubmit, + getValues: websiteInfoGetValues + } = useForm({ + defaultValues: { + url: websiteConfig?.url || '', + selector: websiteConfig?.selector || '' + } }); - const isEdit = !!defaultValue.url; - const confirmTip = isEdit - ? t('common:core.dataset.website.Confirm Update Tips') - : t('common:core.dataset.website.Confirm Create Tips'); + + const isEdit = !!websiteConfig?.url; const { ConfirmModal, openConfirm } = useConfirm({ type: 'common' }); + const { activeStep, goToPrevious, goToNext, MyStep } = useMyStep({ + defaultStep: 0, + steps + }); + + const form = useForm({ + defaultValues: { + trainingType: chunkSettings?.trainingType || DatasetCollectionDataProcessModeEnum.chunk, + imageIndex: chunkSettings?.imageIndex || false, + autoIndexes: chunkSettings?.autoIndexes || false, + + chunkSettingMode: chunkSettings?.chunkSettingMode || ChunkSettingModeEnum.auto, + chunkSplitMode: chunkSettings?.chunkSplitMode || DataChunkSplitModeEnum.size, + embeddingChunkSize: chunkSettings?.chunkSize || 2000, + qaChunkSize: chunkSettings?.chunkSize || getLLMDefaultChunkSize(datasetDetail.agentModel), + indexSize: chunkSettings?.indexSize || datasetDetail.vectorModel?.defaultToken || 512, + + chunkSplitter: chunkSettings?.chunkSplitter || '', + qaPrompt: chunkSettings?.qaPrompt || Prompt_AgentQA.description + } + }); + return ( - - - {t('common:core.dataset.website.Config Description')} - {feConfigs?.docUrl && ( - + + + + + {activeStep == 0 && ( + <> + - {t('common:common.course.Read Course')} - - )} - - - {t('common:core.dataset.website.Base Url')} - - - - - {t('common:core.dataset.website.Selector')}({t('common:common.choosable')}) - - - + {t('common:core.dataset.website.Config Description')} + {feConfigs?.docUrl && ( + + {t('common:common.course.Read Course')} + + )} + + + {t('common:core.dataset.website.Base Url')} + + + + + {t('common:core.dataset.website.Selector')}({t('common:common.choosable')}) + + + + + )} + {activeStep == 1 && } - - + {activeStep == 0 && ( + <> + + + + )} + {activeStep == 1 && ( + <> + + + + )} @@ -112,3 +217,42 @@ const WebsiteConfigModal = ({ }; export default WebsiteConfigModal; + +const PromptTextarea = ({ + defaultValue, + onChange, + onClose +}: { + defaultValue: string; + onChange: (e: string) => void; + onClose: () => void; +}) => { + const ref = useRef(null); + const { t } = useTranslation(); + + return ( + + +