V4.9.4 feature (#4470)

* Training status (#4424)

* dataset data training state (#4311)

* dataset data training state

* fix

* fix ts

* fix

* fix api format

* fix

* fix

* perf: count training

* format

* fix: dataset training state (#4417)

* fix

* add test

* fix

* fix

* fix test

* fix test

* perf: training count

* count

* loading status

---------

Co-authored-by: heheer <heheer@sealos.io>

* doc

* website sync feature (#4429)

* perf: introduce BullMQ for website sync (#4403)

* perf: introduce BullMQ for website sync

* feat: new redis module

* fix: remove graceful shutdown

* perf: improve UI in dataset detail

- Updated the "change" icon SVG file.
- Modified i18n strings.
- Added new i18n string "immediate_sync".
- Improved UI in dataset detail page, including button icons and
background colors.

* refactor: Add chunkSettings to DatasetSchema

* perf: website sync ux

* env template

* fix: clean up website dataset when updating chunk settings (#4420)

* perf: check setting updated

* perf: worker concurrency

* feat: init script for website sync refactor (#4425)

* website feature doc

---------

Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>

* pro migration (#4388) (#4433)

* pro migration

* reuse customPdfParseType

Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>

* perf: remove loading ui

* feat: config chat file expired time

* Redis cache (#4436)

* perf: add Redis cache for vector counting (#4432)

* feat: cache

* perf: get cache key

---------

Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>

* perf: mobile voice input (#4437)

* update: Mobile voice interaction (#4362)

* Add files via upload

* Add files via upload

* Update ollama.md

* Update ollama.md

* Add files via upload

* Update useSpeech.ts

* Update ChatInput.tsx

* Update useSpeech.ts

* Update ChatInput.tsx

* Update useSpeech.ts

* Update constants.ts

* Add files via upload

* Update ChatInput.tsx

* Update useSpeech.ts

* Update useSpeech.ts

* Update useSpeech.ts

* Update ChatInput.tsx

* Add files via upload

* Update common.json

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update VoiceInput.tsx

* Update useSpeech.ts

* Update useSpeech.ts

* Update common.json

* Update common.json

* Update common.json

* Update VoiceInput.tsx

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update useSpeech.ts

* Update common.json

* Update chat.json

* Update common.json

* Update chat.json

* Update common.json

* Update chat.json

* Update VoiceInput.tsx

* Update ChatInput.tsx

* Update useSpeech.ts

* Update VoiceInput.tsx

* speech ui

* 优化语音输入组件,调整输入框显示逻辑,修复语音输入遮罩层样式,更新画布背景透明度,增强用户交互体验。 (#4435)

* perf: mobile voice input

---------

Co-authored-by: dreamer6680 <1468683855@qq.com>

* Test completion v2 (#4438)

* add v2 completions (#4364)

* add v2 completions

* completion config

* config version

* fix

* frontend

* doc

* fix

* fix: completions v2 api

---------

Co-authored-by: heheer <heheer@sealos.io>

* package

* Test mongo log (#4443)

* feat: mongodb-log (#4426)

* perf: mongo log

* feat: completions stop reasoner

* mongo db log

---------

Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>

* update doc

* Update doc

* fix external var ui (#4444)

* action

* fix: ts (#4458)

* preview doc action

add docs preview permission

update preview action

update action

* update doc (#4460)

* update preview action

* update doc

* remove

* update

* schema

* update mq export;perf: redis cache  (#4465)

* perf: redis cache

* update mq export

* perf: website sync error tip

* add error worker

* website sync ui (#4466)

* Updated the dynamic display of the voice input pop-up (#4469)

* Update VoiceInput.tsx

* Update VoiceInput.tsx

* Update VoiceInput.tsx

* fix: voice input

---------

Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
Co-authored-by: dreamer6680 <1468683855@qq.com>
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
This commit is contained in:
Archer
2025-04-08 12:05:04 +08:00
committed by GitHub
parent 5839325f77
commit f642c9603b
151 changed files with 5434 additions and 1354 deletions

View File

@@ -0,0 +1,58 @@
import handler, {
type deleteTrainingDataBody,
type deleteTrainingDataResponse
} from '@/pages/api/core/dataset/training/deleteTrainingData';
import {
DatasetCollectionTypeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constants';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { getRootUser } from '@test/datas/users';
import { Call } from '@test/utils/request';
import { describe, expect, it } from 'vitest';
describe('delete training data test', () => {
it('should delete training data', async () => {
const root = await getRootUser();
const dataset = await MongoDataset.create({
name: 'test',
teamId: root.teamId,
tmbId: root.tmbId
});
const collection = await MongoDatasetCollection.create({
name: 'test',
type: DatasetCollectionTypeEnum.file,
teamId: root.teamId,
tmbId: root.tmbId,
datasetId: dataset._id
});
const trainingData = await MongoDatasetTraining.create({
teamId: root.teamId,
tmbId: root.tmbId,
datasetId: dataset._id,
collectionId: collection._id,
mode: TrainingModeEnum.chunk,
model: 'test'
});
const res = await Call<deleteTrainingDataBody, {}, deleteTrainingDataResponse>(handler, {
auth: root,
body: {
datasetId: dataset._id,
collectionId: collection._id,
dataId: trainingData._id
}
});
const deletedTrainingData = await MongoDatasetTraining.findOne({
teamId: root.teamId,
datasetId: dataset._id,
_id: trainingData._id
});
expect(res.code).toBe(200);
expect(deletedTrainingData).toBeNull();
});
});

View File

@@ -0,0 +1,59 @@
import handler, {
type getTrainingDataDetailBody,
type getTrainingDataDetailResponse
} from '@/pages/api/core/dataset/training/getTrainingDataDetail';
import {
DatasetCollectionTypeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constants';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { getRootUser } from '@test/datas/users';
import { Call } from '@test/utils/request';
import { describe, expect, it } from 'vitest';
describe('get training data detail test', () => {
it('should return training data detail', async () => {
const root = await getRootUser();
const dataset = await MongoDataset.create({
name: 'test',
teamId: root.teamId,
tmbId: root.tmbId
});
const collection = await MongoDatasetCollection.create({
name: 'test',
type: DatasetCollectionTypeEnum.file,
teamId: root.teamId,
tmbId: root.tmbId,
datasetId: dataset._id
});
const trainingData = await MongoDatasetTraining.create({
teamId: root.teamId,
tmbId: root.tmbId,
datasetId: dataset._id,
collectionId: collection._id,
model: 'test',
mode: TrainingModeEnum.chunk,
q: 'test',
a: 'test'
});
const res = await Call<getTrainingDataDetailBody, {}, getTrainingDataDetailResponse>(handler, {
auth: root,
body: {
datasetId: dataset._id,
collectionId: collection._id,
dataId: trainingData._id
}
});
expect(res.code).toBe(200);
expect(res.data).toBeDefined();
expect(res.data?._id).toStrictEqual(trainingData._id);
expect(res.data?.datasetId).toStrictEqual(dataset._id);
expect(res.data?.mode).toBe(TrainingModeEnum.chunk);
expect(res.data?.q).toBe('test');
expect(res.data?.a).toBe('test');
});
});

View File

@@ -0,0 +1,56 @@
import handler, {
type getTrainingErrorBody,
type getTrainingErrorResponse
} from '@/pages/api/core/dataset/training/getTrainingError';
import {
DatasetCollectionTypeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constants';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { getRootUser } from '@test/datas/users';
import { Call } from '@test/utils/request';
import { describe, expect, it } from 'vitest';
describe('training error list test', () => {
it('should return training error list', async () => {
const root = await getRootUser();
const dataset = await MongoDataset.create({
name: 'test',
teamId: root.teamId,
tmbId: root.tmbId
});
const collection = await MongoDatasetCollection.create({
name: 'test',
type: DatasetCollectionTypeEnum.file,
teamId: root.teamId,
tmbId: root.tmbId,
datasetId: dataset._id
});
await MongoDatasetTraining.create(
[...Array(10).keys()].map((i) => ({
teamId: root.teamId,
tmbId: root.tmbId,
datasetId: dataset._id,
collectionId: collection._id,
mode: TrainingModeEnum.chunk,
model: 'test',
errorMsg: 'test'
}))
);
const res = await Call<getTrainingErrorBody, {}, getTrainingErrorResponse>(handler, {
auth: root,
body: {
collectionId: collection._id,
pageSize: 10,
offset: 0
}
});
expect(res.code).toBe(200);
expect(res.data.total).toBe(10);
expect(res.data.list.length).toBe(10);
});
});

View File

@@ -0,0 +1,63 @@
import handler, {
type updateTrainingDataBody,
type updateTrainingDataResponse
} from '@/pages/api/core/dataset/training/updateTrainingData';
import {
DatasetCollectionTypeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constants';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { getRootUser } from '@test/datas/users';
import { Call } from '@test/utils/request';
import { describe, expect, it } from 'vitest';
describe('update training data test', () => {
it('should update training data', async () => {
const root = await getRootUser();
const dataset = await MongoDataset.create({
name: 'test',
teamId: root.teamId,
tmbId: root.tmbId
});
const collection = await MongoDatasetCollection.create({
name: 'test',
type: DatasetCollectionTypeEnum.file,
teamId: root.teamId,
tmbId: root.tmbId,
datasetId: dataset._id
});
const trainingData = await MongoDatasetTraining.create({
teamId: root.teamId,
tmbId: root.tmbId,
datasetId: dataset._id,
collectionId: collection._id,
mode: TrainingModeEnum.chunk,
model: 'test'
});
const res = await Call<updateTrainingDataBody, {}, updateTrainingDataResponse>(handler, {
auth: root,
body: {
datasetId: dataset._id,
collectionId: collection._id,
dataId: trainingData._id,
q: 'test',
a: 'test',
chunkIndex: 1
}
});
const updatedTrainingData = await MongoDatasetTraining.findOne({
teamId: root.teamId,
datasetId: dataset._id,
_id: trainingData._id
});
expect(res.code).toBe(200);
expect(updatedTrainingData?.q).toBe('test');
expect(updatedTrainingData?.a).toBe('test');
expect(updatedTrainingData?.chunkIndex).toBe(1);
});
});