4.8.9 test fix (#2291)

* perf: read file icon

* perf: icon

* fix: i18n

* perf: hide pro api

* perf: upload expired time

* perf: upload file frequency limit

* perf: upload file ux

* perf: input file tip

* perf: qa custom chunk size

* feat: dataset openapi

* fix: auth dataset list

* fix: openapi doc

* perf: zero temperature change to 0.01

* perf: read file prompt

* perf: read file prompt

* perf: free plan tip

* feat: cron job usage
Archer authored on 2024-08-08 10:07:24 +08:00, committed by GitHub
parent 7b388b287a
commit 3ba9c21828
42 changed files with 822 additions and 813 deletions

View File

@@ -45,7 +45,7 @@ export const stream2Encoding = async (stream: NodeJS.ReadableStream) => {
})();
const enc = detectFileEncoding(buffer);
console.log('Get encoding time', Date.now() - start, enc);
return {
encoding: enc,
stream: copyStream
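
The hunk above marks a one-line change that the flattened view no longer distinguishes; what it does show is that stream2Encoding buffers the head of the stream, runs detectFileEncoding on it, and returns both the detected encoding and a copy of the stream so the caller can still read the full content. A minimal caller-side sketch, assuming the helper's import path and an iconv-lite decode step (neither appears in this diff):

import fs from 'fs';
import iconv from 'iconv-lite';
// Illustrative path; stream2Encoding is the helper shown in the hunk above.
import { stream2Encoding } from './utils';

const readTextFile = async (filePath: string) => {
  // Detect the encoding from a buffered sample, then consume the copied stream.
  const { encoding, stream } = await stream2Encoding(fs.createReadStream(filePath));

  const chunks: Buffer[] = [];
  for await (const chunk of stream) {
    chunks.push(Buffer.from(chunk));
  }
  return iconv.decode(Buffer.concat(chunks), encoding);
};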

View File

@@ -3,7 +3,6 @@ import multer from 'multer';
import path from 'path';
import { BucketNameEnum, bucketNameMap } from '@fastgpt/global/common/file/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { tmpFileDirPath } from './constants';
type FileType = {
fieldname: string;
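
This hunk only touches the import block (one import is dropped, though the flattened diff does not show which). The surviving imports suggest the upload middleware writes incoming files under tmpFileDirPath with a random basename from getNanoid. A rough sketch of such a multer setup using the standard diskStorage options; the field name and size limit are illustrative and not taken from this diff:

import multer from 'multer';
import path from 'path';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { tmpFileDirPath } from './constants';

const storage = multer.diskStorage({
  // Write uploads into the shared temporary directory.
  destination: (_req, _file, cb) => cb(null, tmpFileDirPath),
  // Random basename to avoid collisions; keep the original extension.
  filename: (_req, file, cb) => cb(null, `${getNanoid()}${path.extname(file.originalname)}`)
});

// Hypothetical 500 MB cap; the real limit is configured elsewhere.
export const uploadMiddleware = multer({
  storage,
  limits: { fileSize: 500 * 1024 * 1024 }
}).single('file');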

View File

@@ -228,7 +228,7 @@ const toolChoice = async (props: ActionProps) => {
const response = await ai.chat.completions.create({
model: extractModel.model,
- temperature: 0,
+ temperature: 0.01,
messages: filterMessages,
tools,
tool_choice: { type: 'function', function: { name: agentFunName } }
@@ -273,7 +273,7 @@ const functionCall = async (props: ActionProps) => {
const response = await ai.chat.completions.create({
model: extractModel.model,
- temperature: 0,
+ temperature: 0.01,
messages: filterMessages,
function_call: {
name: agentFunName
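
Both hunks in this file, and the matching cqModel hunks further down, replace the hard-coded temperature: 0 with temperature: 0.01 ("perf: zero temperature change to 0.01" in the commit list), presumably because some OpenAI-compatible providers reject or misbehave on an exact zero. A tiny helper in the same spirit might clamp whatever value a model config supplies; the name and bounds here are illustrative:

// Keep the sampling temperature strictly positive while staying effectively deterministic.
export const clampTemperature = (value = 0, min = 0.01, max = 2) =>
  Math.min(Math.max(value, min), max);

clampTemperature(0); // 0.01
clampTemperature(1.2); // 1.2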

View File

@@ -49,7 +49,7 @@ export const dispatchReadFiles = async (props: Props): Promise<Response> => {
chatConfig,
params: { fileUrlList = [] }
} = props;
- const maxFiles = chatConfig?.fileSelectConfig?.maxFiles || 0;
+ const maxFiles = chatConfig?.fileSelectConfig?.maxFiles || 20;
// Get files from histories
const filesFromHistories = histories
@@ -70,29 +70,35 @@ export const dispatchReadFiles = async (props: Props): Promise<Response> => {
})
.flat();
- const parseUrlList = [...fileUrlList, ...filesFromHistories].slice(0, maxFiles);
- const readFilesResult = await Promise.all(
- parseUrlList
- .map(async (url) => {
- // System file
- if (url.startsWith('/') || (requestOrigin && url.startsWith(requestOrigin))) {
- // Parse url, get filename query. Keep only documents that can be parsed
- const parseUrl = new URL(url);
- const filenameQuery = parseUrl.searchParams.get('filename');
- if (filenameQuery) {
- const extensionQuery = filenameQuery.split('.').pop()?.toLowerCase() || '';
- if (!documentFileType.includes(extensionQuery)) {
- return;
- }
- }
- // Remove the origin(Make intranet requests directly)
- if (requestOrigin && url.startsWith(requestOrigin)) {
- url = url.replace(requestOrigin, '');
+ // Concat fileUrlList and filesFromHistories; remove not supported files
+ const parseUrlList = [...fileUrlList, ...filesFromHistories]
+ .map((url) => {
+ // System file
+ if (url.startsWith('/') || (requestOrigin && url.startsWith(requestOrigin))) {
+ // Parse url, get filename query. Keep only documents that can be parsed
+ const parseUrl = new URL(url);
+ const filenameQuery = parseUrl.searchParams.get('filename');
+ if (filenameQuery) {
+ const extensionQuery = filenameQuery.split('.').pop()?.toLowerCase() || '';
+ if (!documentFileType.includes(extensionQuery)) {
+ return '';
+ }
+ }
+ // Remove the origin(Make intranet requests directly)
+ if (requestOrigin && url.startsWith(requestOrigin)) {
+ url = url.replace(requestOrigin, '');
+ }
+ }
+ return url;
+ })
+ .filter(Boolean)
+ .slice(0, maxFiles);
+ console.log(parseUrlList);
+ const readFilesResult = await Promise.all(
+ parseUrlList
+ .map(async (url) => {
// Get from buffer
const fileBuffer = await MongoRawTextBuffer.findOne({ sourceId: url }, undefined, {
...readFromSecondary
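
Taken together with the maxFiles change above, this hunk moves the "is this a parsable document?" check out of the download loop: URLs are now mapped, filtered, and only then sliced to maxFiles (default 20 instead of 0), so unsupported links no longer consume slots in the quota. A condensed sketch of the new selection logic; documentFileType and the filename query come from the hunk, everything else is simplified:

// Illustrative extension list; the real values come from the shared file-type constants.
const documentFileType = ['txt', 'md', 'html', 'pdf', 'docx', 'csv'];

export const filterParseUrls = (urls: string[], requestOrigin?: string, maxFiles = 20) =>
  urls
    .map((url) => {
      // System files are served from this host (absolute path or same origin).
      if (url.startsWith('/') || (requestOrigin && url.startsWith(requestOrigin))) {
        // Keep only documents whose filename query has a parsable extension.
        const filename = new URL(url, 'http://localhost').searchParams.get('filename');
        const extension = filename?.split('.').pop()?.toLowerCase() || '';
        if (filename && !documentFileType.includes(extension)) return '';
        // Strip the origin so intranet requests hit the service directly.
        if (requestOrigin && url.startsWith(requestOrigin)) {
          url = url.replace(requestOrigin, '');
        }
      }
      return url;
    })
    .filter(Boolean)
    .slice(0, maxFiles);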

View File

@@ -188,7 +188,7 @@ const toolChoice = async (props: ActionProps) => {
const response = await ai.chat.completions.create({
model: cqModel.model,
- temperature: 0,
+ temperature: 0.01,
messages: filterMessages,
tools,
tool_choice: { type: 'function', function: { name: agentFunName } }
@@ -235,7 +235,7 @@ const functionCall = async (props: ActionProps) => {
const response = await ai.chat.completions.create({
model: cqModel.model,
- temperature: 0,
+ temperature: 0.01,
messages: filterMessages,
function_call: {
name: agentFunName

View File

@@ -10,6 +10,8 @@ import { MongoResourcePermission } from './schema';
import { ClientSession } from 'mongoose';
import { ParentIdType } from '@fastgpt/global/common/parentFolder/type';
import { ResourcePermissionType } from '@fastgpt/global/support/permission/type';
+ import { bucketNameMap } from '@fastgpt/global/common/file/constants';
+ import { addMinutes } from 'date-fns';
export const getResourcePermission = async ({
resourceType,
@@ -250,15 +252,15 @@ export const clearCookie = (res: NextApiResponse) => {
};
/* file permission */
- export const createFileToken = ({
- expiredTime = Math.floor(Date.now() / 1000) + 60 * 30,
- ...data
- }: FileTokenQuery) => {
+ export const createFileToken = (data: FileTokenQuery) => {
if (!process.env.FILE_TOKEN_KEY) {
return Promise.reject('System unset FILE_TOKEN_KEY');
}
- const key = process.env.FILE_TOKEN_KEY as string;
+ const expireMinutes = bucketNameMap[data.bucketName].previewExpireMinutes;
+ const expiredTime = Math.floor(addMinutes(new Date(), expireMinutes).getTime() / 1000);
+ const key = (process.env.FILE_TOKEN_KEY as string) ?? 'filetoken';
const token = jwt.sign(
{
...data,
@@ -274,7 +276,7 @@ export const authFileToken = (token?: string) =>
if (!token) {
return reject(ERROR_ENUM.unAuthFile);
}
- const key = process.env.FILE_TOKEN_KEY as string;
+ const key = (process.env.FILE_TOKEN_KEY as string) ?? 'filetoken';
jwt.verify(token, key, function (err, decoded: any) {
if (err || !decoded.bucketName || !decoded?.teamId || !decoded?.tmbId || !decoded?.fileId) {
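
The last two hunks change how preview links are signed and verified: the 30-minute expiry that used to be a default parameter is now derived per bucket from bucketNameMap (a previewExpireMinutes field) via date-fns addMinutes, and FILE_TOKEN_KEY falls back to the literal 'filetoken' when unset. A simplified sketch of the signing side under those assumptions; the bucket values, the exp field in the payload, and the trimmed error handling are illustrative rather than copied from the diff:

import jwt from 'jsonwebtoken';
import { addMinutes } from 'date-fns';

// Illustrative shape; the real map lives in @fastgpt/global/common/file/constants.
const bucketNameMap = {
  dataset: { previewExpireMinutes: 30 },
  chat: { previewExpireMinutes: 7 * 24 * 60 }
} as const;

type FileTokenQuery = {
  bucketName: keyof typeof bucketNameMap;
  teamId: string;
  tmbId: string;
  fileId: string;
};

export const createFileToken = (data: FileTokenQuery) => {
  // Per-bucket preview lifetime instead of a fixed 30 minutes.
  const expireMinutes = bucketNameMap[data.bucketName].previewExpireMinutes;
  const expiredTime = Math.floor(addMinutes(new Date(), expireMinutes).getTime() / 1000);
  const key = process.env.FILE_TOKEN_KEY ?? 'filetoken';

  // jwt.verify on the read side rejects the token once exp has passed.
  return jwt.sign({ ...data, exp: expiredTime }, key);
};

The hard-coded fallback key keeps local setups working without extra configuration, but it weakens the token in production, so setting FILE_TOKEN_KEY explicitly is still advisable.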