Mirror of https://github.com/labring/FastGPT.git (synced 2025-08-01 03:48:24 +00:00)
feat: vision model (#489)
* mongo init
* perf: mongo connect
* perf: tts
  perf: whisper and tts
  perf: tts
  whisper
  permission log
  rebase (#488)
* perf: modal
* i18n
* perf: schema lean
* feat: vision model format
* perf: tts loading
* perf: static data
* perf: tts
* feat: image
* perf: image
* perf: upload image and title
* perf: image size
* doc
* perf: color
* doc
* speaking can not select file
* doc
@@ -29,16 +29,16 @@ export async function generateQA(): Promise<any> {
     error = false
   } = await (async () => {
     try {
-      const data = (
-        await MongoDatasetTraining.findOneAndUpdate(
-          {
-            mode: TrainingModeEnum.qa,
-            lockTime: { $lte: new Date(Date.now() - 10 * 60 * 1000) }
-          },
-          {
-            lockTime: new Date()
-          }
-        ).select({
+      const data = await MongoDatasetTraining.findOneAndUpdate(
+        {
+          mode: TrainingModeEnum.qa,
+          lockTime: { $lte: new Date(Date.now() - 10 * 60 * 1000) }
+        },
+        {
+          lockTime: new Date()
+        }
+      )
+        .select({
           _id: 1,
           userId: 1,
           teamId: 1,
@@ -50,7 +50,7 @@ export async function generateQA(): Promise<any> {
           billId: 1,
           prompt: 1
         })
-      )?.toJSON();
+        .lean();
 
       // task preemption
       if (!data) {
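The net change in the two hunks above is at the tail of the query chain: instead of wrapping the query and calling ?.toJSON() on the hydrated Mongoose document, the chain now ends with .lean(), which resolves straight to a plain JavaScript object (or null) and skips document hydration. A minimal sketch of the new style, using a hypothetical Task model instead of FastGPT's real MongoDatasetTraining schema:

```ts
import mongoose, { Schema } from 'mongoose';

// Hypothetical model, only to illustrate the query shape used in the hunk above.
const Task = mongoose.model('Task', new Schema({ mode: String, lockTime: Date }));

async function claimQaTask() {
  // Before: (await Task.findOneAndUpdate(...).select(...))?.toJSON()
  // hydrated a full document and then converted it back to a plain object.
  // After: .lean() resolves directly to a plain object (or null), so the
  // extra conversion and the wrapping parentheses are no longer needed.
  const data = await Task.findOneAndUpdate(
    { mode: 'qa', lockTime: { $lte: new Date(Date.now() - 10 * 60 * 1000) } },
    { lockTime: new Date() }
  )
    .select({ _id: 1, mode: 1 })
    .lean();

  return data; // plain object, no .toJSON() required
}
```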
@@ -24,16 +24,16 @@ export async function generateVector(): Promise<any> {
     error = false
   } = await (async () => {
     try {
-      const data = (
-        await MongoDatasetTraining.findOneAndUpdate(
-          {
-            mode: TrainingModeEnum.chunk,
-            lockTime: { $lte: new Date(Date.now() - 1 * 60 * 1000) }
-          },
-          {
-            lockTime: new Date()
-          }
-        ).select({
+      const data = await MongoDatasetTraining.findOneAndUpdate(
+        {
+          mode: TrainingModeEnum.chunk,
+          lockTime: { $lte: new Date(Date.now() - 1 * 60 * 1000) }
+        },
+        {
+          lockTime: new Date()
+        }
+      )
+        .select({
           _id: 1,
           userId: 1,
           teamId: 1,
@@ -46,7 +46,7 @@ export async function generateVector(): Promise<any> {
           model: 1,
           billId: 1
         })
-      )?.toJSON();
+        .lean();
 
       // task preemption
      if (!data) {
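The same .toJSON() to .lean() change is applied to the vector worker; only the training mode (chunk instead of qa) and the lock timeout (1 minute instead of 10) differ. The context lines also show the pattern both workers rely on: findOneAndUpdate atomically claims one record whose lock has expired by bumping lockTime, and a null result means there is nothing to process. A hedged sketch of that preemption loop, with the model type and processing callback invented for illustration:

```ts
import { Model } from 'mongoose';

type TrainingTask = { _id: string; mode: string; lockTime: Date };

// Illustrative worker loop; only the findOneAndUpdate + lockTime pattern
// mirrors the diff, everything else here is invented for this sketch.
export async function vectorWorker(
  TrainingModel: Model<TrainingTask>,
  processChunk: (task: TrainingTask) => Promise<void>
) {
  while (true) {
    const task = await TrainingModel.findOneAndUpdate(
      {
        mode: 'chunk',
        // Only claim tasks whose lock is at least one minute old, so work
        // held by a crashed worker eventually becomes claimable again.
        lockTime: { $lte: new Date(Date.now() - 1 * 60 * 1000) }
      },
      { lockTime: new Date() } // atomically take (or refresh) the lock
    ).lean();

    // Task preemption: no matching record, nothing is ready to process.
    if (!task) return;

    await processChunk(task);
  }
}
```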
@@ -21,6 +21,7 @@ import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { responseWrite, responseWriteController } from '@fastgpt/service/common/response';
 import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
 import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
+import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';
 
 export type ChatProps = ModuleDispatchProps<
   AIChatProps & {
@@ -106,6 +107,21 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
   temperature = Math.max(temperature, 0.01);
   const ai = getAIApi(user.openaiAccount, 480000);
 
+  const concatMessages = [
+    ...(modelConstantsData.defaultSystemChatPrompt
+      ? [
+          {
+            role: ChatCompletionRequestMessageRoleEnum.System,
+            content: modelConstantsData.defaultSystemChatPrompt
+          }
+        ]
+      : []),
+    ...messages.map((item) => ({
+      ...item,
+      content: modelConstantsData.vision ? formatStr2ChatContent(item.content) : item.content
+    }))
+  ];
+
   const response = await ai.chat.completions.create(
     {
       model,
@@ -113,17 +129,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
       max_tokens,
       stream,
       seed: temperature < 0.3 ? 1 : undefined,
-      messages: [
-        ...(modelConstantsData.defaultSystemChatPrompt
-          ? [
-              {
-                role: ChatCompletionRequestMessageRoleEnum.System,
-                content: modelConstantsData.defaultSystemChatPrompt
-              }
-            ]
-          : []),
-        ...messages
-      ]
+      messages: concatMessages
     },
     {
       headers: {
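The functional change in this file is that message content is no longer forwarded verbatim: when the configured model has vision enabled, each message's string content is passed through formatStr2ChatContent while concatMessages is built, and that array replaces the inline messages construction in the completions call. The helper's implementation is not part of this diff; the sketch below only illustrates the general idea of turning a string with embedded image links into the content-part array that vision models accept in the OpenAI chat API. The regex and the function name are assumptions for illustration, not FastGPT's actual code:

```ts
type ChatContentPart =
  | { type: 'text'; text: string }
  | { type: 'image_url'; image_url: { url: string } };

// Hypothetical string-to-content converter: pull markdown image links
// (![alt](url)) out into image_url parts and keep the rest as text parts.
function strToVisionContent(str: string): string | ChatContentPart[] {
  const imageRegex = /!\[[^\]]*\]\((https?:\/\/[^\s)]+)\)/g;
  const parts: ChatContentPart[] = [];
  let lastIndex = 0;

  for (const match of str.matchAll(imageRegex)) {
    const idx = match.index ?? 0;
    const text = str.slice(lastIndex, idx).trim();
    if (text) parts.push({ type: 'text', text });
    parts.push({ type: 'image_url', image_url: { url: match[1] } });
    lastIndex = idx + match[0].length;
  }

  // No images found: keep the content as a plain string.
  if (parts.length === 0) return str;

  const tail = str.slice(lastIndex).trim();
  if (tail) parts.push({ type: 'text', text: tail });
  return parts;
}

// strToVisionContent('Describe this: ![](https://example.com/cat.png)')
// -> [ { type: 'text', text: 'Describe this:' },
//      { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } } ]
```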
@@ -4,6 +4,7 @@ import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
 import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
 import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
 import { addLog } from '@fastgpt/service/common/mongo/controller';
+import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
 
 type Props = {
   chatId: string;
@@ -51,12 +52,17 @@ export async function saveChat({
     )
   ];
 
+  const title =
+    chatContentReplaceBlock(content[0].value).slice(0, 20) ||
+    content[1]?.value?.slice(0, 20) ||
+    'Chat';
+
   if (chatHistory) {
     promise.push(
       MongoChat.updateOne(
         { chatId },
         {
-          title: content[0].value.slice(0, 20),
+          title,
           updateTime: new Date()
         }
       )
@@ -69,7 +75,7 @@ export async function saveChat({
       tmbId,
       appId,
       variables,
-      title: content[0].value.slice(0, 20),
+      title,
       source,
       shareId
     })
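Previously the chat title was always the first 20 characters of the first message, which breaks down once a conversation can start with an image upload rather than plain text. The new code derives the title once: it strips block markup from the first message via chatContentReplaceBlock, then falls back to the second message, and finally to the literal 'Chat'. chatContentReplaceBlock is defined elsewhere in the repo; the stand-in below is only a guess at its shape (the regex is an assumption), shown to make the fallback chain concrete:

```ts
// Hypothetical stand-in for chatContentReplaceBlock: drop markdown image
// blocks so a message that starts with an image still yields a usable title.
function replaceBlocks(content: string): string {
  return content.replace(/!\[[^\]]*\]\([^)]*\)/g, '').trim();
}

function buildChatTitle(content: { value: string }[]): string {
  return (
    replaceBlocks(content[0]?.value ?? '').slice(0, 20) ||
    content[1]?.value?.slice(0, 20) ||
    'Chat'
  );
}

// buildChatTitle([
//   { value: '![](https://example.com/photo.png)' },
//   { value: 'What is in this image?' }
// ]) -> 'What is in this imag' (first 20 characters of the second message)
```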