perf: fetch error

archer
2023-06-24 21:21:53 +08:00
parent 83d755ad0e
commit 057c3411b9
4 changed files with 95 additions and 80 deletions

View File

@@ -9,90 +9,95 @@ interface StreamFetchProps {
   abortSignal: AbortController;
 }
 export const streamFetch = ({ data, onMessage, abortSignal }: StreamFetchProps) =>
-  new Promise<ChatResponseType & { responseText: string }>(async (resolve, reject) => {
-    try {
-      const response = await window.fetch('/api/openapi/v1/chat/completions', {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json'
-        },
-        signal: abortSignal.signal,
-        body: JSON.stringify({
-          ...data,
-          stream: true
-        })
-      });
-
-      if (response.status !== 200) {
-        const err = await response.json();
-        return reject(err);
-      }
-
-      if (!response?.body) {
-        throw new Error('Request Error');
-      }
-
-      const reader = response.body?.getReader();
-
-      // response data
-      let responseText = '';
-      let newChatId = '';
-      let quoteLen = 0;
-
-      const read = async () => {
-        try {
-          const { done, value } = await reader.read();
-          if (done) {
-            if (response.status === 200) {
-              return resolve({
-                responseText,
-                newChatId,
-                quoteLen
-              });
-            } else {
-              return reject('响应过程出现异常~');
-            }
-          }
-          const chunkResponse = parseStreamChunk(value);
-          chunkResponse.forEach((item) => {
-            // parse json data
-            const data = (() => {
-              try {
-                return JSON.parse(item.data);
-              } catch (error) {
-                return item.data;
-              }
-            })();
-
-            if (item.event === sseResponseEventEnum.answer && data !== '[DONE]') {
-              const answer: string = data?.choices?.[0].delta.content || '';
-              onMessage(answer);
-              responseText += answer;
-            } else if (item.event === sseResponseEventEnum.chatResponse) {
-              const chatResponse = data as ChatResponseType;
-              newChatId = chatResponse.newChatId;
-              quoteLen = chatResponse.quoteLen || 0;
-            } else if (item.event === sseResponseEventEnum.error) {
-              return reject(getErrText(data, '流响应错误'));
-            }
-          });
-          read();
-        } catch (err: any) {
-          if (err?.message === 'The user aborted a request.') {
-            return resolve({
-              responseText,
-              newChatId,
-              quoteLen
-            });
-          }
-          reject(getErrText(err, '请求异常'));
-        }
-      };
-      read();
-    } catch (err: any) {
-      console.log(err);
-      reject(getErrText(err, '请求异常'));
-    }
-  });
+  new Promise<ChatResponseType & { responseText: string; errMsg: string }>(
+    async (resolve, reject) => {
+      try {
+        const response = await window.fetch('/api/openapi/v1/chat/completions', {
+          method: 'POST',
+          headers: {
+            'Content-Type': 'application/json'
+          },
+          signal: abortSignal.signal,
+          body: JSON.stringify({
+            ...data,
+            stream: true
+          })
+        });
+
+        if (response.status !== 200) {
+          const err = await response.json();
+          return reject(err);
+        }
+
+        if (!response?.body) {
+          throw new Error('Request Error');
+        }
+
+        const reader = response.body?.getReader();
+
+        // response data
+        let responseText = '';
+        let newChatId = '';
+        let quoteLen = 0;
+        let errMsg = '';
+
+        const read = async () => {
+          try {
+            const { done, value } = await reader.read();
+            if (done) {
+              if (response.status === 200) {
+                return resolve({
+                  responseText,
+                  newChatId,
+                  quoteLen,
+                  errMsg
+                });
+              } else {
+                return reject('响应过程出现异常~');
+              }
+            }
+            const chunkResponse = parseStreamChunk(value);
+            chunkResponse.forEach((item) => {
+              // parse json data
+              const data = (() => {
+                try {
+                  return JSON.parse(item.data);
+                } catch (error) {
+                  return item.data;
+                }
+              })();
+
+              if (item.event === sseResponseEventEnum.answer && data !== '[DONE]') {
+                const answer: string = data?.choices?.[0].delta.content || '';
+                onMessage(answer);
+                responseText += answer;
+              } else if (item.event === sseResponseEventEnum.chatResponse) {
+                const chatResponse = data as ChatResponseType;
+                newChatId = chatResponse.newChatId;
+                quoteLen = chatResponse.quoteLen || 0;
+              } else if (item.event === sseResponseEventEnum.error) {
+                errMsg = getErrText(data, '流响应错误');
+              }
+            });
+            read();
+          } catch (err: any) {
+            if (err?.message === 'The user aborted a request.') {
+              return resolve({
+                responseText,
+                newChatId,
+                quoteLen,
+                errMsg
+              });
+            }
+            reject(getErrText(err, '请求异常'));
+          }
+        };
+        read();
+      } catch (err: any) {
+        console.log(err);
+        reject(getErrText(err, '请求异常'));
+      }
+    }
+  );
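
The hunk above references parseStreamChunk and sseResponseEventEnum, which this commit does not touch. Purely for orientation, a minimal sketch of the kind of SSE chunk parser the read loop appears to consume is shown below; the splitting logic and field handling are assumptions, not the repository's actual implementation, and a production parser would also need to buffer records split across chunk boundaries.

// Illustrative sketch only (not part of this commit): turn an SSE byte chunk
// into { event, data } records shaped the way chunkResponse.forEach expects.
const textDecoder = new TextDecoder();

export function parseStreamChunk(value: Uint8Array) {
  const chunk = textDecoder.decode(value);
  const records: { event: string; data: string }[] = [];
  let event = '';
  for (const line of chunk.split('\n')) {
    if (line.startsWith('event:')) {
      event = line.slice('event:'.length).trim();
    } else if (line.startsWith('data:')) {
      records.push({ event, data: line.slice('data:'.length).trim() });
      event = '';
    }
  }
  return records;
}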

View File

@@ -174,7 +174,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
const messages = adaptChatItem_openAI({ messages: prompts, reserveId: true });
// stream the request and collect the response data
- const { newChatId, quoteLen } = await streamFetch({
+ const { newChatId, quoteLen, errMsg } = await streamFetch({
data: {
messages,
chatId,
@@ -219,7 +219,9 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
...item,
status: 'finish',
quoteLen,
- systemPrompt: `${chatData.systemPrompt}\n\n${chatData.limitPrompt}`
+ systemPrompt: `${chatData.systemPrompt}${`${
+   chatData.limitPrompt ? `\n\n${chatData.limitPrompt}` : ''
+ }`}`
};
})
}));
@@ -230,6 +232,13 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
loadHistory({ pageNum: 1, init: true });
loadMyModels(true);
}, 100);
+ if (errMsg) {
+   toast({
+     status: 'warning',
+     title: errMsg
+   });
+ }
},
[
chatId,
@@ -241,7 +250,8 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
chatData.systemPrompt,
chatData.limitPrompt,
loadHistory,
- loadMyModels
+ loadMyModels,
+ toast
]
);

View File

@@ -49,8 +49,8 @@ export const chatResponse = async ({
messages: adaptMessages,
frequency_penalty: 0.5, // larger values reduce repeated content
presence_penalty: -0.5, // larger values make new content more likely
- stream,
- stop: ['.!?。']
+ stream
+ // stop: ['.!?。']
},
{
timeout: stream ? 60000 : 480000,
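
The penalty settings above sit inside a larger chat-completion request that the hunk truncates. As a rough, hedged sketch only — assuming an openai v3-style Node client, with the model id and temperature below being placeholders rather than values from this diff — the surrounding call is typically shaped like this:

// Sketch under stated assumptions; not the repository's actual chatResponse implementation.
import { Configuration, OpenAIApi } from 'openai';

const chatAPI = new OpenAIApi(new Configuration({ apiKey: process.env.OPENAI_API_KEY }));

export const demoChatResponse = (adaptMessages: any[], stream: boolean) =>
  chatAPI.createChatCompletion(
    {
      model: 'gpt-3.5-turbo', // placeholder model id
      temperature: 0, // placeholder temperature
      messages: adaptMessages,
      frequency_penalty: 0.5, // larger values reduce repeated content
      presence_penalty: -0.5, // larger values make new content more likely
      stream
    },
    {
      timeout: stream ? 60000 : 480000, // non-streaming requests get a longer timeout
      responseType: stream ? 'stream' : 'json' // axios option: stream mode yields a readable stream
    }
  );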

View File

@@ -116,7 +116,7 @@ export const voiceBroadcast = ({ text }: { text: string }) => {
};
export const getErrText = (err: any, def = '') => {
- const msg = typeof err === 'string' ? err : err?.message || def || '';
+ const msg: string = typeof err === 'string' ? err : err?.message || def || '';
msg && console.log('error =>', msg);
return msg;
};
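
getErrText accepts either a string or an Error-like object and falls back to the default message; the new : string annotation on msg just pins the return type. Illustrative calls (not part of the commit):

getErrText('boom'); // 'boom'
getErrText(new Error('timeout'), '请求异常'); // 'timeout'
getErrText(undefined, '请求异常'); // '请求异常' (falls back to the default)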