This commit is contained in:
Archer
2023-10-23 15:05:13 +08:00
committed by GitHub
parent d37433eacd
commit bf6dbfb245
9 changed files with 61 additions and 38 deletions

View File

@@ -31,4 +31,5 @@ curl --location --request POST 'https://{{host}}/api/admin/initv451' \
### FastGPT V4.5.1
1. 新增知识库文件夹管理
2. 修复了 openai 4.x sdk 无法兼容 oneapi 的智谱和阿里的接口。

View File

@@ -13,7 +13,7 @@
"winston-mongodb": "^5.1.1",
"tunnel": "^0.0.6",
"encoding": "^0.1.13",
"openai": "^4.12.1"
"openai": "^4.12.4"
},
"devDependencies": {
"@types/tunnel": "^0.0.4",

2
pnpm-lock.yaml generated
View File

@@ -76,7 +76,7 @@ importers:
specifier: ^2.1.2
version: registry.npmmirror.com/nextjs-cors@2.1.2(next@13.5.2)
openai:
specifier: ^4.12.1
specifier: ^4.12.4
version: registry.npmmirror.com/openai@4.12.4(encoding@0.1.13)
tunnel:
specifier: ^0.0.6

View File

@@ -83,6 +83,7 @@ export type ComponentRef = {
resetVariables: (data?: Record<string, any>) => void;
resetHistory: (history: ChatSiteItemType[]) => void;
scrollToBottom: (behavior?: 'smooth' | 'auto') => void;
sendPrompt: (question: string) => void;
};
enum FeedbackTypeEnum {
@@ -452,7 +453,8 @@ const ChatBox = (
setVariableInputFinish(!!e.length);
setChatHistory(e);
},
scrollToBottom
scrollToBottom,
sendPrompt: (question: string) => handleSubmit((item) => sendPrompt(item, question))()
}));
/* style start */

View File

@@ -45,8 +45,16 @@ function App({ Component, pageProps }: AppProps) {
// get init data
(async () => {
const {
feConfigs: { scripts }
feConfigs: { scripts, isPlus }
} = await clientInitData();
// log fastgpt
!isPlus &&
console.log(
'%cWelcome to FastGPT',
'font-family:Arial; color:#3370ff ; font-size:18px; font-weight:bold;',
`GitHub: https://github.com/labring/FastGPT`
);
setScripts(scripts || []);
})();
// add window error track
@@ -61,12 +69,7 @@ function App({ Component, pageProps }: AppProps) {
url
});
};
// log fastgpt
console.log(
'%cWelcome to FastGPT',
'font-family:Arial; color:#3370ff ; font-size:18px; font-weight:bold;',
`GitHub: https://github.com/labring/FastGPT`
);
return () => {
window.onerror = null;
};

View File

@@ -166,7 +166,10 @@ const OutLink = ({
});
useEffect(() => {
setIdEmbed(window !== parent);
if (window !== top) {
window.top?.postMessage({ type: 'shareChatReady' }, '*');
}
setIdEmbed(window !== top);
}, []);
return (
@@ -280,7 +283,13 @@ export async function getServerSideProps(context: any) {
const authToken = context?.query?.authToken || '';
return {
props: { shareId, chatId, showHistory, authToken, ...(await serviceSideProps(context)) }
props: {
shareId,
chatId,
showHistory,
authToken,
...(await serviceSideProps(context))
}
};
}

View File

@@ -18,7 +18,7 @@ export async function hasSameValue({
const { rows: existsRows } = await PgClient.query(`
SELECT COUNT(*) > 0 AS exists
FROM ${PgDatasetTableName}
WHERE md5(q)=md5('${q}') AND md5(a)=md5('${a}') collection_id='${collectionId}'
WHERE md5(q)=md5('${q}') AND md5(a)=md5('${a}') AND collection_id='${collectionId}'
`);
const exists = existsRows[0]?.exists || false;

View File

@@ -110,7 +110,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
temperature = Math.max(temperature, 0.01);
const ai = getAIApi(user.openaiAccount, 480000);
const response = await ai.chat.completions.create({
const response = await ai.chat.completions.create(
{
model,
temperature,
max_tokens,
@@ -126,7 +127,13 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
: []),
...messages
]
});
},
{
headers: {
Accept: 'application/json, text/plain, */*'
}
}
);
const { answerText, totalTokens, completeMessages } = await (async () => {
if (stream) {
@@ -355,7 +362,6 @@ async function streamResponse({
readStream: stream
});
let answer = '';
for await (const part of stream) {
if (res.closed) {
stream.controller?.abort();

View File

@@ -42,6 +42,7 @@ export const dispatchAppRequest = async (props: Record<string, any>): Promise<Re
return Promise.reject('App not found');
}
if (stream) {
responseWrite({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
@@ -49,6 +50,7 @@ export const dispatchAppRequest = async (props: Record<string, any>): Promise<Re
text: '\n'
})
});
}
const { responseData, answerText } = await dispatchModules({
res,