perf: chat completion api

archer
2023-08-07 13:18:45 +08:00
parent 2f614ac40d
commit c26be2e885
6 changed files with 47 additions and 17 deletions
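The thread running through all six files: a new `detail` flag travels from the web client, through the chat completion handler, into `dispatchModules` and each module dispatcher. With `detail: false` (the default for external callers) the endpoint degrades to a plain OpenAI-style stream: SSE chunks carry no named event, the final JSON drops FastGPT's internal `responseData`, and per-module status events are suppressed. A minimal sketch of the wire-format difference, assuming `sseResponse` omits the `event:` field when `event` is undefined; the helper itself is not part of this diff, so `writeSSE` below is a stand-in:

import type { NextApiResponse } from 'next';

// Stand-in for the project's sseResponse helper (not shown in this diff).
// detail === true  -> "event: answer\ndata: {...}\n\n"  (FastGPT web client)
// detail === false -> "data: {...}\n\n"                 (OpenAI-compatible)
function writeSSE(res: NextApiResponse, event: string | undefined, data: string) {
  if (event !== undefined) res.write(`event: ${event}\n`); // named event only in detail mode
  res.write(`data: ${data}\n\n`);
}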

View File

@@ -31,6 +31,7 @@ export const streamFetch = ({
     signal: abortSignal.signal,
     body: JSON.stringify({
       ...data,
+      detail: true,
       stream: true
     })
   });
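The built-in web client always opts into detail mode, so it keeps receiving named `answer` events and, for owners, the `appStreamResponse` trace. An external consumer that leaves the flag unset gets the compatibility behaviour instead. A hypothetical call, with the endpoint path taken from the import in the last file and the message shape assumed:

// Hypothetical external request: `detail` is omitted, so the handler defaults
// it to false and streams unnamed `data:` chunks terminated by [DONE].
const response = await fetch('/api/openapi/v1/chat/completions', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    messages: [{ role: 'user', content: 'hello' }], // message shape assumed
    stream: true
  })
});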

View File

@@ -54,7 +54,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       history: gptMessage2ChatType(history),
       userChatInput: prompt
     },
-    stream: true
+    stream: true,
+    detail: true
   });

   sseResponse({

View File

@@ -41,6 +41,7 @@ export type Props = CreateChatCompletionRequest &
   FastGptShareChatProps & {
     messages: MessageItemType[];
     stream?: boolean;
+    detail?: boolean;
     variables: Record<string, any>;
   };
 export type ChatResponseType = {
@@ -57,7 +58,15 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     res.end();
   });

-  let { chatId, appId, shareId, stream = false, messages = [], variables = {} } = req.body as Props;
+  let {
+    chatId,
+    appId,
+    shareId,
+    stream = false,
+    detail = false,
+    messages = [],
+    variables = {}
+  } = req.body as Props;

   try {
     if (!messages) {
@@ -133,7 +142,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       history: prompts,
       userChatInput: prompt.value
     },
-    stream
+    stream,
+    detail
   });

   // console.log(responseData, '===', answerText);
@@ -176,7 +186,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     if (stream) {
       sseResponse({
         res,
-        event: sseResponseEventEnum.answer,
+        event: detail ? sseResponseEventEnum.answer : undefined,
         data: textAdaptGptResponse({
           text: null,
           finish_reason: 'stop'
@@ -184,11 +194,11 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       });
       sseResponse({
         res,
-        event: sseResponseEventEnum.answer,
+        event: detail ? sseResponseEventEnum.answer : undefined,
         data: '[DONE]'
       });

-      if (isOwner) {
+      if (isOwner && detail) {
         sseResponse({
           res,
           event: sseResponseEventEnum.appStreamResponse,
@@ -199,7 +209,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       res.end();
     } else {
       res.json({
-        responseData,
+        ...(detail ? { responseData } : {}),
         id: chatId || '',
         model: '',
         usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 },
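For non-stream responses the handler now spreads `responseData` in conditionally, so the key is absent for compatibility callers rather than present-but-undefined. A small self-contained demonstration of that idiom; the values mirror the hunk above and the `responseData` payload is hypothetical:

const detail = false;
const responseData = [{ moduleName: 'Chat' }]; // hypothetical module trace
const body = {
  ...(detail ? { responseData } : {}), // spreads {} when detail is false
  id: '',
  model: '',
  usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 }
};
console.log('responseData' in body); // false: the key is omitted entirely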
@@ -244,7 +254,8 @@ export async function dispatchModules({
   user,
   params = {},
   variables = {},
-  stream = false
+  stream = false,
+  detail = false
 }: {
   res: NextApiResponse;
   modules: AppModuleItemType[];
@@ -252,6 +263,7 @@ export async function dispatchModules({
   params?: Record<string, any>;
   variables?: Record<string, any>;
   stream?: boolean;
+  detail?: boolean;
 }) {

   const runningModules = loadModules(modules, variables);
@@ -322,7 +334,7 @@ export async function dispatchModules({
     if (res.closed) return Promise.resolve();
     console.log('run=========', module.flowType);

-    if (stream && module.showStatus) {
+    if (stream && detail && module.showStatus) {
       responseStatus({
         res,
         name: module.name,
@@ -338,6 +350,7 @@ export async function dispatchModules({
     const props: Record<string, any> = {
       res,
       stream,
+      detail,
       userOpenaiAccount: user?.openaiAccount,
       ...params
     };
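`dispatchModules` gates the `responseStatus` progress events on `detail` and forwards the flag to every module through the shared `props` object, which is why the chat and answer dispatchers below can simply destructure it. A sketch of the shape each dispatcher receives, with names inferred from this diff rather than taken from a declared type:

import type { NextApiResponse } from 'next';

// Inferred from the props object assembled above; the concrete type is not in this diff.
type ModuleDispatchProps = {
  res: NextApiResponse;
  stream: boolean;
  detail: boolean; // new: threaded down from the API handler
  userOpenaiAccount?: unknown;
  [param: string]: any; // module params spread in via ...params
};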

View File

@@ -25,6 +25,7 @@ export type ChatProps = {
   history?: ChatItemType[];
   userChatInput: string;
   stream?: boolean;
+  detail?: boolean;
   quoteQA?: QuoteItemType[];
   systemPrompt?: string;
   limitPrompt?: string;
@@ -44,6 +45,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
     temperature = 0,
     maxToken = 4000,
     stream = false,
+    detail = false,
     history = [],
     quoteQA = [],
     userChatInput,
@@ -111,7 +113,11 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
   const { answerText, totalTokens, completeMessages } = await (async () => {
     if (stream) {
       // sse response
-      const { answer } = await streamResponse({ res, response });
+      const { answer } = await streamResponse({
+        res,
+        detail,
+        response
+      });
       // count tokens
       const completeMessages = filterMessages.concat({
         obj: ChatRoleEnum.AI,
@@ -282,7 +288,15 @@ function getMaxTokens({
   };
 }

-async function streamResponse({ res, response }: { res: NextApiResponse; response: any }) {
+async function streamResponse({
+  res,
+  detail,
+  response
+}: {
+  res: NextApiResponse;
+  detail: boolean;
+  response: any;
+}) {
   let answer = '';
   let error: any = null;
   const parseData = new SSEParseData();
@@ -301,7 +315,7 @@ async function streamResponse({ res, response }: { res: NextApiResponse; respons
         sseResponse({
           res,
-          event: sseResponseEventEnum.answer,
+          event: detail ? sseResponseEventEnum.answer : undefined,
           data: textAdaptGptResponse({
             text: content
           })

View File

@@ -5,6 +5,7 @@ import type { NextApiResponse } from 'next';

 export type AnswerProps = {
   res: NextApiResponse;
+  detail?: boolean;
   text: string;
   stream: boolean;
 };
@@ -13,12 +14,12 @@ export type AnswerResponse = {
 };

 export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
-  const { res, text = '', stream } = props as AnswerProps;
+  const { res, detail, text = '', stream } = props as AnswerProps;

   if (stream) {
     sseResponse({
       res,
-      event: sseResponseEventEnum.answer,
+      event: detail ? sseResponseEventEnum.answer : undefined,
       data: textAdaptGptResponse({
         text: text.replace(/\\n/g, '\n')
       })

View File

@@ -6,11 +6,11 @@ import { ChatCompletionRequestMessageRoleEnum } from 'openai';
 import { ChatRoleEnum } from '@/constants/chat';
 import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
 import type { AppModuleItemType } from '@/types/app';
-import type { FlowModuleItemType, FlowModuleTemplateType } from '@/types/flow';
+import type { FlowModuleItemType } from '@/types/flow';
 import type { Edge, Node } from 'reactflow';
 import { connectionLineStyle } from '@/constants/flow';
 import { customAlphabet } from 'nanoid';
-import { EmptyModule, ModuleTemplates, ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';
+import { EmptyModule, ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';

 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);

 export const adaptBill = (bill: BillSchema): UserBillType => {
@@ -41,7 +41,7 @@ export const gptMessage2ChatType = (messages: MessageItemType[]): ChatItemType[]

 export const textAdaptGptResponse = ({
   text,
-  model,
+  model = '',
   finish_reason = null,
   extraData = {}
 }: {
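Finally, `model` gains an empty-string default so every adapted chunk carries a string `model` field even when the caller does not pass one, which keeps strict OpenAI-style clients from tripping over `model: undefined`. A hedged reconstruction of the adapter for illustration; only the parameter list is visible in this diff, and the returned chunk layout is assumed from the standard chat-completion format:

// Hypothetical body: the real textAdaptGptResponse is not shown in this commit.
function textAdaptGptResponse({
  text,
  model = '',
  finish_reason = null,
  extraData = {}
}: {
  text: string | null;
  model?: string;
  finish_reason?: string | null;
  extraData?: Record<string, any>;
}) {
  return JSON.stringify({
    ...extraData,
    model, // always a string now, '' when unspecified
    choices: [{ delta: text === null ? {} : { content: text }, index: 0, finish_reason }]
  });
}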