Mirror of https://github.com/labring/FastGPT.git, synced 2025-07-22 20:37:48 +00:00
feat: gpt3流响应 (GPT-3 stream response)
@@ -3,6 +3,8 @@ export enum EmailTypeEnum {
  findPassword = 'findPassword'
}

export const PRICE_SCALE = 100000;

export const introPage = `
## 欢迎使用 Fast GPT

@@ -89,6 +89,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
      temperature: temperature,
      // max_tokens: modelConstantsData.maxToken,
      messages: formatPrompts,
      frequency_penalty: 0.5, // larger values mean less repeated content
      presence_penalty: -0.5, // larger values make new content more likely
      stream: true
    },
    {

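For context, OpenAI's `frequency_penalty` and `presence_penalty` both accept values between -2.0 and 2.0: a higher frequency penalty discourages repeating tokens that already appeared, while the negative presence penalty used above nudges the model toward staying on existing content. A minimal sketch of a guard that keeps these values in range before building a request; the `PenaltyOptions` type and `normalizePenalties` helper are illustrative, not part of the repository:

```ts
// Hypothetical helper: keeps penalty values inside OpenAI's documented range.
interface PenaltyOptions {
  frequency_penalty?: number; // -2.0 .. 2.0, larger = less repetition
  presence_penalty?: number; // -2.0 .. 2.0, larger = more new content
}

const clampPenalty = (value: number | undefined, fallback = 0): number =>
  Math.min(2, Math.max(-2, value ?? fallback));

export const normalizePenalties = (opts: PenaltyOptions): Required<PenaltyOptions> => ({
  frequency_penalty: clampPenalty(opts.frequency_penalty, 0.5),
  presence_penalty: clampPenalty(opts.presence_penalty, -0.5)
});

// Example: normalizePenalties({ frequency_penalty: 3 })
// -> { frequency_penalty: 2, presence_penalty: -0.5 }
```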
@@ -117,7 +119,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
      try {
        const json = JSON.parse(data);
        const content: string = json?.choices?.[0].delta.content || '';
        if (!content) return;
        if (!content || (responseContent === '' && content === '\n')) return;

        responseContent += content;
        // console.log('content:', content)
        !stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));

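The guard above drops empty deltas and a leading newline so the streamed answer does not start with a blank line, then rewrites newlines as `<br/>` before pushing them to the client. A minimal standalone sketch of that filtering step, assuming the same PassThrough-based streaming; the `appendDelta` name is illustrative:

```ts
import { PassThrough } from 'stream';

// Hypothetical helper mirroring the guard used in the handler:
// skip empty chunks, skip a newline that would open the answer,
// and convert newlines to <br/> for the client-side renderer.
export function appendDelta(stream: PassThrough, accumulated: string, delta: string): string {
  if (!delta || (accumulated === '' && delta === '\n')) return accumulated;

  if (!stream.destroyed) {
    stream.push(delta.replace(/\n/g, '<br/>'));
  }
  return accumulated + delta;
}

// Usage sketch inside the SSE parser callback:
// responseContent = appendDelta(stream, responseContent, json.choices[0].delta.content || '');
```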
@@ -144,7 +147,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
      stream.destroy();

      const promptsContent = formatPrompts.map((item) => item.content).join('');
      console.log(`responseLen: ${responseContent.length}`, `promptLen: ${promptsContent.length}`);
      // only bill when the platform's key is used
      !userApiKey &&
        pushBill({

@@ -1,20 +1,38 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { connectToDatabase } from '@/service/mongo';
import { getOpenAIApi, authChat } from '@/service/utils/chat';
import { ChatItemType } from '@/types/chat';
import { httpsAgent } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import type { ModelSchema } from '@/types/mongoSchema';
import { PassThrough } from 'stream';
import { modelList } from '@/constants/model';
import { pushBill } from '@/service/events/pushChatBill';

/* send the prompt */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
  const { prompt, chatId } = req.body as { prompt: ChatItemType[]; chatId: string };
  const { authorization } = req.headers;
  let step = 0; // step = 1 means the stream response has started
  const stream = new PassThrough();
  stream.on('error', () => {
    console.log('error: ', 'stream error');
    stream.destroy();
  });
  res.on('close', () => {
    stream.destroy();
  });
  res.on('error', () => {
    console.log('error: ', 'request error');
    stream.destroy();
  });

  if (!prompt || !chatId) {
  try {
    const { chatId, prompt } = req.body as {
      prompt: ChatItemType;
      chatId: string;
    };
    const { authorization } = req.headers;
    if (!chatId || !prompt) {
      throw new Error('缺少参数');
    }

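The new handler creates a PassThrough stream up front and tears it down whenever the HTTP response closes or errors, so a client that navigates away stops the completion stream early. A minimal sketch of that wiring in an isolated Next.js API route, assuming the same streaming headers the handler sets later; the route body itself is illustrative:

```ts
import { PassThrough } from 'stream';
import type { NextApiRequest, NextApiResponse } from 'next';

// Sketch: pipe a PassThrough to the response and destroy it when the client goes away.
export default function handler(req: NextApiRequest, res: NextApiResponse) {
  const stream = new PassThrough();

  const cleanup = () => stream.destroy();
  stream.on('error', cleanup);
  res.on('close', cleanup);
  res.on('error', cleanup);

  // Same streaming headers the chat handler uses before pushing content.
  res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
  res.setHeader('Cache-Control', 'no-cache, no-transform');
  res.setHeader('X-Accel-Buffering', 'no');
  stream.pipe(res);

  // Producer side: push chunks as they arrive, then end the stream.
  stream.push('hello ');
  stream.push('world');
  stream.push(null); // signals end-of-stream to the piped response
}
```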
@@ -22,13 +40,29 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)

    const { chat, userApiKey, systemKey, userId } = await authChat(chatId, authorization);

    const model = chat.modelId;
    const model: ModelSchema = chat.modelId;

    // get the chatAPI
    const chatAPI = getOpenAIApi(userApiKey || systemKey);
    // read the conversation content
    const prompts = [...chat.content, prompt];

    // process the prompt
    const formatPrompts = prompt.map((item) => `${item.value}\n\n###\n\n`).join('');
    // filter by context length
    const maxContext = model.security.contextMaxLen;
    const filterPrompts =
      prompts.length > maxContext ? prompts.slice(prompts.length - maxContext) : prompts;

    // format the text content
    const map = {
      Human: 'Human',
      AI: 'AI',
      SYSTEM: 'SYSTEM'
    };
    const formatPrompts: string[] = filterPrompts.map((item: ChatItemType) => item.value);
    // if there is a system prompt, insert it automatically
    if (model.systemPrompt) {
      formatPrompts.unshift(`${model.systemPrompt}`);
    }

    const promptText = formatPrompts.join('</s>');

    // calculate temperature
    const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);

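Context handling now lives on the server: the stored conversation plus the new prompt is truncated to `contextMaxLen`, an optional system prompt is pushed to the front, and everything is joined with `</s>` into a single completion prompt. A minimal sketch of that assembly, assuming a simplified `ChatItem` shape; the function name and types here are illustrative, the repository's `ChatItemType` and `ModelSchema` are richer:

```ts
// Illustrative types standing in for the project's ChatItemType.
interface ChatItem {
  obj: 'Human' | 'AI' | 'SYSTEM';
  value: string;
}

export function buildGpt3Prompt(
  history: ChatItem[],
  newPrompt: ChatItem,
  maxContext: number,
  systemPrompt?: string
): string {
  const prompts = [...history, newPrompt];

  // Keep only the most recent maxContext items.
  const filtered =
    prompts.length > maxContext ? prompts.slice(prompts.length - maxContext) : prompts;

  const values = filtered.map((item) => item.value);
  if (systemPrompt) {
    values.unshift(systemPrompt);
  }

  // The handler joins turns with </s> and also uses it as a stop sequence.
  return values.join('</s>');
}

// Example:
// buildGpt3Prompt([{ obj: 'Human', value: 'hi' }], { obj: 'Human', value: 'more' }, 4, 'Be brief')
// -> 'Be brief</s>hi</s>more'
```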
@@ -37,42 +71,95 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
    }
    const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);

    // send the request
    const response = await chatAPI.createCompletion(
    // get the chatAPI
    const chatAPI = getOpenAIApi(userApiKey || systemKey);
    let startTime = Date.now();

    // send the request
    const chatResponse = await chatAPI.createCompletion(
      {
        model: model.service.modelName,
        prompt: formatPrompts,
        model: model.service.chatModel,
        temperature: temperature,
        // max_tokens: modelConstantsData.maxToken,
        top_p: 1,
        frequency_penalty: 0,
        presence_penalty: 0.6,
        stop: ['###']
        prompt: promptText,
        stream: true,
        max_tokens: modelConstantsData.maxToken,
        presence_penalty: 0, // larger values make new content more likely
        frequency_penalty: 0, // larger values mean less repeated content
        stop: ['。!?.!.', `</s>`]
      },
      {
        timeout: 40000,
        responseType: 'stream',
        httpsAgent
      }
    );

    const responseContent = response.data.choices[0]?.text || '';
    console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);

    // create the response stream
    res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('X-Accel-Buffering', 'no');
    res.setHeader('Cache-Control', 'no-cache, no-transform');
    step = 1;

    let responseContent = '';
    stream.pipe(res);

    const onParse = async (event: ParsedEvent | ReconnectInterval) => {
      if (event.type !== 'event') return;
      const data = event.data;
      if (data === '[DONE]') return;
      try {
        const json = JSON.parse(data);
        const content: string = json?.choices?.[0].text || '';
        if (!content || (responseContent === '' && content === '\n')) return;

        responseContent += content;
        // console.log('content:', content);
        !stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));
      } catch (error) {
        error;
      }
    };

    const decoder = new TextDecoder();
    try {
      for await (const chunk of chatResponse.data as any) {
        if (stream.destroyed) {
          // the stream was interrupted, ignore the remaining content
          break;
        }
        const parser = createParser(onParse);
        parser.feed(decoder.decode(chunk));
      }
    } catch (error) {
      console.log('pipe error', error);
    }
    // close stream
    !stream.destroyed && stream.push(null);
    stream.destroy();

    console.log(`responseLen: ${responseContent.length}`, `promptLen: ${formatPrompts.length}`);
    // only bill when the platform's key is used
    !userApiKey &&
      pushBill({
        modelName: model.service.modelName,
        userId,
        chatId,
        text: formatPrompts + responseContent
        text: promptText + responseContent
      });

    jsonRes(res, {
      data: responseContent
    });
  } catch (err: any) {
    jsonRes(res, {
      code: 500,
      error: err
    });
    // console.log(err?.response);
    if (step === 1) {
      // end the stream directly
      console.log('error,结束');
      stream.destroy();
    } else {
      res.status(500);
      jsonRes(res, {
        code: 500,
        error: err
      });
    }
  }
}

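The streaming side pairs axios's `responseType: 'stream'` with `eventsource-parser`: each raw chunk is decoded and fed to a parser, which emits one event per SSE `data:` payload until `[DONE]`. A minimal standalone sketch of that consumption loop, assuming an already-created streaming axios response; variable names are illustrative, and note that this sketch keeps a single parser across chunks (the library's usual pattern), whereas the handler above recreates it inside the loop:

```ts
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { PassThrough } from 'stream';

// Sketch: forward OpenAI completion deltas from an SSE byte stream into a PassThrough.
export async function pipeCompletionStream(
  data: AsyncIterable<Buffer>, // e.g. chatResponse.data when responseType is 'stream'
  stream: PassThrough
): Promise<string> {
  let responseContent = '';
  const decoder = new TextDecoder();

  const parser = createParser((event: ParsedEvent | ReconnectInterval) => {
    if (event.type !== 'event' || event.data === '[DONE]') return;
    try {
      const json = JSON.parse(event.data);
      const content: string = json?.choices?.[0]?.text || '';
      if (!content) return;
      responseContent += content;
      if (!stream.destroyed) stream.push(content.replace(/\n/g, '<br/>'));
    } catch {
      // ignore malformed SSE payloads
    }
  });

  for await (const chunk of data) {
    if (stream.destroyed) break; // client went away, stop reading
    parser.feed(decoder.decode(chunk));
  }

  if (!stream.destroyed) stream.push(null); // end of stream
  return responseContent;
}
```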
@@ -5,7 +5,7 @@ import axios from 'axios';
import { authToken } from '@/service/utils/tools';
import { customAlphabet } from 'nanoid';
import { connectToDatabase, Pay } from '@/service/mongo';
import { PRICE_SCALE } from '@/utils/user';
import { PRICE_SCALE } from '@/constants/common';

const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 20);

@@ -197,16 +197,22 @@ const Chat = ({ chatId }: { chatId: string }) => {
    [chatId]
  );

  // chatGPT
  const chatGPTPrompt = useCallback(
    async (newChatList: ChatSiteItemType[]) => {
  // gpt chat
  const gptChatPrompt = useCallback(
    async (prompts: ChatSiteItemType) => {
      const urlMap: Record<string, string> = {
        [ChatModelNameEnum.GPT35]: '/api/chat/chatGpt',
        [ChatModelNameEnum.GPT3]: '/api/chat/gpt3'
      };
      if (!urlMap[chatData.chatModel]) return Promise.reject('找不到模型');

      const prompt = {
        obj: newChatList[newChatList.length - 1].obj,
        value: newChatList[newChatList.length - 1].value
        obj: prompts.obj,
        value: prompts.value
      };
      // stream the request to fetch the data
      const res = await streamFetch({
        url: '/api/chat/chatGpt',
        url: urlMap[chatData.chatModel],
        data: {
          prompt,
          chatId

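On the client, the request URL is now chosen from the model name, so GPT-3.5 keeps hitting `/api/chat/chatGpt` while GPT-3 style models go to `/api/chat/gpt3`. A minimal sketch of that dispatch; the enum members and the `streamFetch` call shape appear in the diff, but the enum string values, the helper signature, and the wrapper function are assumptions:

```ts
// Illustrative stand-ins for the project's enum and streamFetch helper.
enum ChatModelNameEnum {
  GPT35 = 'gpt35',
  GPT3 = 'gpt3'
}

interface StreamFetchArgs {
  url: string;
  data: { prompt: { obj: string; value: string }; chatId: string };
}

// Assumed signature: resolves with the full streamed reply.
declare function streamFetch(args: StreamFetchArgs): Promise<string>;

const urlMap: Record<string, string> = {
  [ChatModelNameEnum.GPT35]: '/api/chat/chatGpt',
  [ChatModelNameEnum.GPT3]: '/api/chat/gpt3'
};

export async function sendChatPrompt(
  chatModel: string,
  chatId: string,
  prompt: { obj: string; value: string }
): Promise<string> {
  const url = urlMap[chatModel];
  if (!url) return Promise.reject('找不到模型');
  // Stream the request; both API routes push the answer chunk by chunk.
  return streamFetch({ url, data: { prompt, chatId } });
}
```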
@@ -240,7 +246,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
      });
    } catch (err) {
      toast({
        title: '存储对话出现异常, 继续对话会导致上下文丢失,请刷新页面',
        title: '对话出现异常, 继续对话会导致上下文丢失,请刷新页面',
        status: 'warning',
        duration: 3000,
        isClosable: true

@@ -259,7 +265,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
        })
      }));
    },
    [chatId, toast]
    [chatData.chatModel, chatId, toast]
  );

  /**

@@ -272,7 +278,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
      .trim()
      .split('\n')
      .filter((val) => val)
      .join('\n\n');
      .join('\n');
    if (!chatData?.modelId || !val || !ChatBox.current || isChatting) {
      return;
    }

@@ -301,22 +307,8 @@ const Chat = ({ chatId }: { chatId: string }) => {
    resetInputVal('');
    scrollToBottom();

    const fnMap: { [key: string]: any } = {
      [ChatModelNameEnum.GPT35]: chatGPTPrompt,
      [ChatModelNameEnum.GPT3]: gpt3ChatPrompt
    };

    try {
      /* limit the context length */
      const maxContext = chatData.secret.contextMaxLen;
      const requestPrompt =
        newChatList.length > maxContext + 1
          ? newChatList.slice(newChatList.length - maxContext - 1, -1)
          : newChatList.slice(0, -1);

      if (typeof fnMap[chatData.chatModel] === 'function') {
        await fnMap[chatData.chatModel](requestPrompt);
      }
      await gptChatPrompt(newChatList[newChatList.length - 2]);

      // if this is the first Human message, insert it into the history
      const humanChat = newChatList.filter((item) => item.obj === 'Human');

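After this change the client no longer slices the history itself: the per-model `fnMap` and the `maxContext` slice are gone, and only the latest user message (`newChatList[newChatList.length - 2]`, the item just before the empty AI placeholder at the end) is sent, since the server rebuilds and truncates the context from `chat.content`. A minimal sketch of picking that message, assuming the list always ends with the placeholder AI entry; the helper name and type are illustrative:

```ts
interface ChatSiteItem {
  obj: 'Human' | 'AI';
  value: string;
}

// The chat list ends with the freshly appended, still-empty AI item,
// so the user's newest prompt sits one position before the end.
export function latestHumanPrompt(newChatList: ChatSiteItem[]): ChatSiteItem {
  const candidate = newChatList[newChatList.length - 2];
  if (!candidate || candidate.obj !== 'Human') {
    throw new Error('unexpected chat list shape');
  }
  return candidate;
}
```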
@@ -343,15 +335,12 @@ const Chat = ({ chatId }: { chatId: string }) => {
    }
  }, [
    inputVal,
    chatData.modelId,
    chatData?.modelId,
    chatData.history,
    chatData.secret.contextMaxLen,
    chatData.chatModel,
    isChatting,
    resetInputVal,
    scrollToBottom,
    chatGPTPrompt,
    gpt3ChatPrompt,
    gptChatPrompt,
    pushChatHistory,
    chatId,
    toast

@@ -34,6 +34,7 @@ const CreateModel = ({
  onSuccess: Dispatch<ModelSchema>;
}) => {
  const [requesting, setRequesting] = useState(false);
  const [refresh, setRefresh] = useState(false);
  const toast = useToast({
    duration: 2000,
    position: 'top'

@@ -95,7 +96,10 @@ const CreateModel = ({
  <Select
    placeholder="选择基础模型类型"
    {...register('serviceModelName', {
      required: '底层模型不能为空'
      required: '底层模型不能为空',
      onChange() {
        setRefresh(!refresh);
      }
    })}
  >
    {modelList.map((item) => (

@@ -110,8 +114,9 @@ const CreateModel = ({
  </FormControl>
  <Box mt={3} textAlign={'center'} fontSize={'sm'} color={'blackAlpha.600'}>
    {formatPrice(
      modelList.find((item) => item.model === getValues('serviceModelName'))?.price || 0
      modelList.find((item) => item.model === getValues('serviceModelName'))?.price || 0,
      1000
    )}
    元/1K tokens(包括上下文和标点符号)
  </Box>
</ModalBody>

@@ -107,7 +107,7 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
  {modelList.map((item, i) => (
    <Tr key={item.model}>
      <Td>{item.name}</Td>
      <Td>{formatPrice(item.price) * 1000}</Td>
      <Td>{formatPrice(item.price, 1000)}</Td>
    </Tr>
  ))}
</Tbody>

@@ -28,7 +28,9 @@ export const pushBill = async ({

  // calculate the price
  const price = unitPrice * tokens.length;
  console.log('token len:', tokens.length, 'price: ', `${formatPrice(price)}元`);
  console.log('token len:', tokens.length);
  console.log('text len: ', text.length);
  console.log('price: ', `${formatPrice(price)}元`);

  try {
    // insert the Bill record

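Billing multiplies the stored per-token unit price by the token count and only converts to yuan for logging and display. A small worked example, assuming a hypothetical stored unit price of 3 (i.e. 3 / PRICE_SCALE yuan per token) and the PRICE_SCALE of 100000 introduced in this commit:

```ts
const PRICE_SCALE = 100000; // prices are stored as integers scaled by 1e5

// Same shape as formatPrice after this commit.
const formatPrice = (val: number, multiple = 1) =>
  Number(((val / PRICE_SCALE) * multiple).toFixed(10));

// Hypothetical bill: 1200 tokens at a stored unit price of 3.
const unitPrice = 3;
const tokenCount = 1200;
const price = unitPrice * tokenCount; // 3600, still a scaled integer

console.log('price:', `${formatPrice(price)}元`); // price: 0.036元
```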
@@ -39,9 +39,9 @@ const ModelSchema = new Schema({
  },
  temperature: {
    type: Number,
    min: 1,
    min: 0,
    max: 10,
    default: 5
    default: 4
  },
  service: {
    company: {

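The schema stores temperature on a 0-10 scale (the minimum drops from 1 to 0 so a fully deterministic setting becomes possible), and the API handlers map it onto each model's real range with `maxTemperature * (model.temperature / 10)`. A small worked example, assuming a hypothetical `maxTemperature` of 2:

```ts
// Map the 0-10 value stored in Mongo onto the model's own temperature range.
const toApiTemperature = (stored: number, maxTemperature: number) =>
  maxTemperature * (stored / 10);

console.log(toApiTemperature(4, 2)); // new default 4 -> 0.8
console.log(toApiTemperature(0, 2)); // new minimum 0 -> 0, deterministic sampling
console.log(toApiTemperature(10, 2)); // 10 -> 2, the model's maximum
```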
@@ -1,6 +1,6 @@
import { Schema, model, models } from 'mongoose';
import { hashPassword } from '@/service/utils/tools';
import { PRICE_SCALE } from '@/utils/user';
import { PRICE_SCALE } from '@/constants/common';

const UserSchema = new Schema({
  email: {

@@ -28,11 +28,11 @@ export const jsonRes = <T = any>(
    } else if (openaiError[error?.response?.statusText]) {
      msg = openaiError[error.response.statusText];
    }
    // console.log(error?.response);
    console.log('error->');
    console.log('code:', error.code);
    console.log('statusText:', error?.response?.statusText);
    console.log('msg:', msg);
    error?.response && console.log('chat err:', error?.response);
  }

  res.json({

@@ -1,5 +1,5 @@
import { PRICE_SCALE } from '@/constants/common';
const tokenKey = 'fast-gpt-token';
export const PRICE_SCALE = 100000;

export const setToken = (val: string) => {
  localStorage.setItem(tokenKey, val);

@@ -14,6 +14,6 @@ export const clearToken = () => {
/**
 * Convert the price read from the database into yuan
 */
export const formatPrice = (val: number) => {
  return val / PRICE_SCALE;
export const formatPrice = (val: number, multiple = 1) => {
  return Number(((val / PRICE_SCALE) * multiple).toFixed(10));
};
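The new `multiple` argument lets callers express "price per 1K tokens" without multiplying the already-converted float, and the `toFixed(10)` pass trims the floating-point noise that `formatPrice(item.price) * 1000` could produce. A small comparison, assuming a hypothetical stored price of 3 with PRICE_SCALE = 100000:

```ts
const PRICE_SCALE = 100000;

const formatPriceOld = (val: number) => val / PRICE_SCALE;
const formatPriceNew = (val: number, multiple = 1) =>
  Number(((val / PRICE_SCALE) * multiple).toFixed(10));

const stored = 3; // 0.00003 yuan per token

console.log(formatPriceOld(stored) * 1000); // may carry floating-point noise instead of a clean 0.03
console.log(formatPriceNew(stored, 1000)); // 0.03
```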