File input (#2270)

* doc

* feat: file upload config

* perf: chat box file params

* feat: markdown show file

* feat: chat file store and clear

* perf: read file contentType

* feat: llm vision config

* feat: file url output

* perf: plugin error text

* perf: image load

* feat: ai chat document

* perf: file block ui

* feat: read file node

* feat: file read response field

* feat: simple mode support read files

* feat: tool call

* feat: read file histories

* perf: select file

* perf: select file config

* i18n

* i18n

* fix: ts; feat: tool response preview result
This commit is contained in:
Archer
2024-08-06 10:00:22 +08:00
committed by GitHub
parent 10dcdb5491
commit e36d9d794f
121 changed files with 2600 additions and 1142 deletions

View File

@@ -3,16 +3,19 @@ import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import fsp from 'fs/promises';
import fs from 'fs';
import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type';
import { MongoFileSchema } from './schema';
import { MongoChatFileSchema, MongoDatasetFileSchema } from './schema';
import { detectFileEncoding } from '@fastgpt/global/common/file/tools';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MongoRawTextBuffer } from '../../buffer/rawText/schema';
import { readRawContentByFileBuffer } from '../read/utils';
import { gridFsStream2Buffer, stream2Encoding } from './utils';
import { addLog } from '../../system/log';
import { readFromSecondary } from '../../mongo/utils';
// Return the raw MongoDB collection that backs a GridFS bucket
// (collection name is "<bucket>.files").
// The bare schema references below are expression statements with no runtime
// effect of their own — presumably they exist to force the mongoose models to
// be registered (via their module's side effects) before the raw collection is
// accessed; verify against the schema module. Do not remove them as dead code.
export function getGFSCollection(bucket: `${BucketNameEnum}`) {
MongoFileSchema;
MongoDatasetFileSchema;
MongoChatFileSchema;
return connectionMongo.connection.db.collection(`${bucket}.files`);
}
export function getGridBucket(bucket: `${BucketNameEnum}`) {
@@ -49,6 +52,7 @@ export async function uploadFile({
const { stream: readStream, encoding } = await stream2Encoding(fs.createReadStream(path));
// Add default metadata
metadata.teamId = teamId;
metadata.tmbId = tmbId;
metadata.encoding = encoding;
@@ -103,7 +107,9 @@ export async function delFileByFileIdList({
try {
const bucket = getGridBucket(bucketName);
await Promise.all(fileIdList.map((id) => bucket.delete(new Types.ObjectId(id))));
for await (const fileId of fileIdList) {
await bucket.delete(new Types.ObjectId(fileId));
}
} catch (error) {
if (retry > 0) {
return delFileByFileIdList({ bucketName, fileIdList, retry: retry - 1 });
@@ -138,7 +144,9 @@ export const readFileContentFromMongo = async ({
filename: string;
}> => {
// read buffer
const fileBuffer = await MongoRawTextBuffer.findOne({ sourceId: fileId }).lean();
const fileBuffer = await MongoRawTextBuffer.findOne({ sourceId: fileId }, undefined, {
...readFromSecondary
}).lean();
if (fileBuffer) {
return {
rawText: fileBuffer.rawText,

View File

@@ -1,13 +1,17 @@
import { connectionMongo, getMongoModel, type Model } from '../../mongo';
// Only `Schema` is needed here. The previous code destructured `Schema` twice
// (a block-scoped redeclaration error) and also pulled in `model`/`models`,
// which were never used — collapsed into a single destructure.
const { Schema } = connectionMongo;

// Empty schemas: these collections are populated by GridFS, not by mongoose.
// The Schema objects exist only to declare indexes and register the models.
const FileSchema = new Schema({});
const DatasetFileSchema = new Schema({});
const ChatFileSchema = new Schema({});

try {
  // Legacy dataset-file indexes (fields nested under GridFS `metadata`).
  FileSchema.index({ 'metadata.teamId': 1 });
  FileSchema.index({ 'metadata.uploadDate': -1 });

  DatasetFileSchema.index({ uploadDate: -1 });

  ChatFileSchema.index({ uploadDate: -1 });
  ChatFileSchema.index({ 'metadata.chatId': 1 });
} catch (error) {
  // Index declaration is best-effort; log and continue so module load never fails.
  console.log(error);
}

// NOTE(review): MongoFileSchema and MongoDatasetFileSchema both target the
// 'dataset.files' collection — presumably getMongoModel caches by collection
// name so the second call returns the already-registered model; verify against
// getMongoModel. Both exports are kept for backward compatibility.
export const MongoFileSchema = getMongoModel('dataset.files', FileSchema);
export const MongoDatasetFileSchema = getMongoModel('dataset.files', DatasetFileSchema);
export const MongoChatFileSchema = getMongoModel('chat.files', ChatFileSchema);