mirror of https://github.com/labring/FastGPT.git, synced 2026-05-07 01:02:55 +08:00
289da0f7b0
* sandbox-sync-agent
* refactor: host pro as submodule
* chore: checkpoint host pro restructure
* refactor workspace test layout and startup init
* chore: update next turbopack setup
* chore: snapshot current work before actions fix
* chore: update pro submodule
* chore: point pro submodule url to upstream https
* fix: Dockerfile
* chore: update pro submodule
* ci: support private pro submodule token and skip fork jobs
* fix(ci): build sdk workspace deps before code-sandbox bundle
* fix(app): exclude vitest configs from production typecheck
* fix(app-image): build sdk packages before next build
* fix(ci): align dockerfiles with workspace sdk build flow
* chore(docker): upgrade node20 docker images to node24
* fix(ci): read admin coverage output path in pro test workflow
* fix(app-image): include next-i18next config and locale assets
* chore: update pro submodule
* chore: do not specify branch for submodule
* chore: remove most ts-nocheck sign
* chore: update pro submodule
* chore: remove sandbox-agent-sync package
* chore: do not modify "pushData" file logic
* fix: health check
* chore: restore dev axios proxy state
* fix: test-fastgpt report workflow
* fix: use valid vitest coverage action inputs
216 lines
5.8 KiB
TypeScript
import { describe, it, expect, vi } from 'vitest';

import { parseS3UploadError } from '@fastgpt/global/common/error/s3';

const createTranslator = () =>
  vi.fn((key: string, params?: Record<string, string>) =>
    params ? `${key}:${JSON.stringify(params)}` : key
  );
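// The translator stub echoes its arguments: `t(key)` returns the key itself and
// `t(key, params)` returns `key:{"param":"value"}`, so every expected `result`
// string below encodes both the i18n key and the interpolation params in a
// single assertion.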
describe('parseS3UploadError', () => {
  it('should handle raw string EntityTooLarge error', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: 'EntityTooLarge: size exceeds limit',
      maxSize: 10 * 1024 * 1024
    });

    expect(result).toBe('common:error:s3_upload_file_too_large:{"max":"10 MB"}');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_file_too_large', { max: '10 MB' });
  });
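  // The same EntityTooLarge code can also arrive wrapped in an axios-style
  // response: the next two tests feed it as a raw XML body and as a proxied
  // JSON body and expect the identical translation call. Note that maxSize is
  // given in bytes and is rendered with 1024-based units ('1 KB', '2 MB').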
  it('should handle axios response EntityTooLarge error', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        response: {
          data: '<Error><Code>EntityTooLarge</Code></Error>'
        }
      },
      maxSize: 1024
    });

    expect(result).toBe('common:error:s3_upload_file_too_large:{"max":"1 KB"}');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_file_too_large', { max: '1 KB' });
  });
  it('should handle proxy json EntityTooLarge error', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        response: {
          data: {
            message: 'EntityTooLarge'
          }
        }
      },
      maxSize: 2 * 1024 * 1024
    });

    expect(result).toBe('common:error:s3_upload_file_too_large:{"max":"2 MB"}');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_file_too_large', { max: '2 MB' });
  });
  it('should handle invalid upload file type errors', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        response: {
          data: {
            message: 'UploadFileTypeMismatch'
          }
        }
      }
    });

    expect(result).toBe('common:error:s3_upload_invalid_file_type');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_invalid_file_type');
  });
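  // Authorization failures come in several shapes: a bare AccessDenied body,
  // a proxied JSON payload flagged unAuthFile, and credential errors such as
  // InvalidAccessKeyId. All of them collapse to the same
  // s3_upload_auth_failed key.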
  it('should handle AccessDenied error', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        response: {
          data: 'AccessDenied'
        }
      }
    });

    expect(result).toBe('common:error:s3_upload_auth_failed');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_auth_failed');
  });
  it('should handle proxy json unauthorized error', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        response: {
          data: {
            statusText: 'unAuthFile',
            message: 'error.unAuthFile'
          }
        }
      }
    });

    expect(result).toBe('common:error:s3_upload_auth_failed');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_auth_failed');
  });
  it('should handle invalid access key or signature errors', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        response: {
          data: 'InvalidAccessKeyId'
        }
      }
    });

    expect(result).toBe('common:error:s3_upload_auth_failed');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_auth_failed');
  });
  it('should handle NoSuchBucket error', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        response: {
          data: 'NoSuchBucket'
        }
      }
    });

    expect(result).toBe('common:error:s3_upload_bucket_not_found');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_bucket_not_found');
  });
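  // Timeouts arrive in three disguises: S3's own RequestTimeout code, axios's
  // ECONNABORTED, and the literal 'timeout of Nms exceeded' message. A refused
  // connection (ECONNREFUSED), by contrast, is reported as a plain network
  // error.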
  it('should handle RequestTimeout error', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        response: {
          data: 'RequestTimeout'
        }
      }
    });

    expect(result).toBe('common:error:s3_upload_timeout');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_timeout');
  });
  it('should handle network errors', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        code: 'ECONNREFUSED'
      }
    });

    expect(result).toBe('common:error:s3_upload_network_error');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_network_error');
  });
  it('should handle axios timeout errors', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        code: 'ECONNABORTED'
      }
    });

    expect(result).toBe('common:error:s3_upload_timeout');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_timeout');
  });
  it('should handle timeout message errors', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        message: 'timeout of 3000ms exceeded'
      }
    });

    expect(result).toBe('common:error:s3_upload_timeout');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_timeout');
  });
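  // Client-side validation can reject a file before any request is made; with
  // no maxSize supplied, the size placeholder falls back to '-'.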
  it('should handle client side file size validation errors', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        message: 'file size is too large'
      }
    });

    expect(result).toBe('common:error:s3_upload_file_too_large:{"max":"-"}');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_file_too_large', { max: '-' });
  });
  it('should return default network error for unknown error', () => {
    const t = createTranslator();
    const result = parseS3UploadError({
      t,
      error: {
        message: 'unknown error'
      }
    });

    expect(result).toBe('common:error:s3_upload_network_error');
    expect(t).toHaveBeenCalledWith('common:error:s3_upload_network_error');
  });
});
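
// ---------------------------------------------------------------------------
// For orientation, a minimal sketch of a parser that would satisfy the suite
// above. It is inferred from the expectations only, NOT the actual
// implementation in @fastgpt/global/common/error/s3; the names below
// (S3ErrorLike, formatMaxSize, extractS3Message, parseS3UploadErrorSketch) are
// hypothetical. It normalizes the three error shapes the tests exercise (a raw
// string, an axios-style `{ response: { data } }` with a string or JSON body,
// and a low-level error exposing `code`/`message`), then maps known S3 codes
// onto i18n keys.
// ---------------------------------------------------------------------------
type S3ErrorLike =
  | string
  | {
      code?: string;
      message?: string;
      response?: { data?: string | { message?: string; statusText?: string } };
    };

const formatMaxSize = (maxSize?: number): string => {
  // Assumption: 1024-based units, matching the '10 MB' / '1 KB' expectations.
  if (!maxSize) return '-';
  if (maxSize >= 1024 * 1024) return `${maxSize / (1024 * 1024)} MB`;
  if (maxSize >= 1024) return `${maxSize / 1024} KB`;
  return `${maxSize} B`;
};

const extractS3Message = (error: S3ErrorLike): string => {
  if (typeof error === 'string') return error;
  const data = error.response?.data;
  if (typeof data === 'string') return data;
  if (data?.message) return data.message;
  return [error.code, error.message].filter(Boolean).join(' ');
};

const parseS3UploadErrorSketch = ({
  t,
  error,
  maxSize
}: {
  t: (key: string, params?: Record<string, string>) => string;
  error: S3ErrorLike;
  maxSize?: number;
}): string => {
  const msg = extractS3Message(error);
  if (msg.includes('EntityTooLarge') || /file size is too large/i.test(msg)) {
    return t('common:error:s3_upload_file_too_large', { max: formatMaxSize(maxSize) });
  }
  if (msg.includes('UploadFileTypeMismatch')) {
    return t('common:error:s3_upload_invalid_file_type');
  }
  if (/AccessDenied|InvalidAccessKeyId|SignatureDoesNotMatch|unAuthFile/.test(msg)) {
    return t('common:error:s3_upload_auth_failed');
  }
  if (msg.includes('NoSuchBucket')) {
    return t('common:error:s3_upload_bucket_not_found');
  }
  // Covers 'RequestTimeout', 'ECONNABORTED', and 'timeout of Nms exceeded';
  // 'ECONNREFUSED' matches neither branch and falls through to network_error.
  if (msg.includes('ECONNABORTED') || /timeout/i.test(msg)) {
    return t('common:error:s3_upload_timeout');
  }
  return t('common:error:s3_upload_network_error');
};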