Mirror of https://github.com/labring/FastGPT.git, synced 2026-05-05 01:02:59 +08:00
fix: surrender; perf: llm response (#6190)
* feat: workflow route to detail
* llm response
* fix: surrender
* fix: surrender
* fix: surrender
* fix: test
This commit is contained in:
@@ -177,56 +177,50 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
|
||||
const write = res ? responseWriteController({ res, readStream: stream }) : undefined;
|
||||
|
||||
const {
|
||||
completeMessages,
|
||||
reasoningText,
|
||||
answerText,
|
||||
finish_reason,
|
||||
getEmptyResponseTip,
|
||||
usage
|
||||
} = await createLLMResponse({
|
||||
body: {
|
||||
model: modelConstantsData.model,
|
||||
stream,
|
||||
messages: filterMessages,
|
||||
temperature,
|
||||
max_tokens,
|
||||
top_p: aiChatTopP,
|
||||
stop: aiChatStopSign,
|
||||
response_format: {
|
||||
type: aiChatResponseFormat,
|
||||
json_schema: aiChatJsonSchema
|
||||
const { completeMessages, reasoningText, answerText, finish_reason, responseEmptyTip, usage } =
|
||||
await createLLMResponse({
|
||||
body: {
|
||||
model: modelConstantsData.model,
|
||||
stream,
|
||||
messages: filterMessages,
|
||||
temperature,
|
||||
max_tokens,
|
||||
top_p: aiChatTopP,
|
||||
stop: aiChatStopSign,
|
||||
response_format: {
|
||||
type: aiChatResponseFormat,
|
||||
json_schema: aiChatJsonSchema
|
||||
},
|
||||
retainDatasetCite,
|
||||
useVision: aiChatVision,
|
||||
requestOrigin
|
||||
},
|
||||
retainDatasetCite,
|
||||
useVision: aiChatVision,
|
||||
requestOrigin
|
||||
},
|
||||
userKey: externalProvider.openaiAccount,
|
||||
isAborted: checkIsStopping,
|
||||
onReasoning({ text }) {
|
||||
if (!aiChatReasoning) return;
|
||||
workflowStreamResponse?.({
|
||||
write,
|
||||
event: SseResponseEventEnum.answer,
|
||||
data: textAdaptGptResponse({
|
||||
reasoning_content: text
|
||||
})
|
||||
});
|
||||
},
|
||||
onStreaming({ text }) {
|
||||
if (!isResponseAnswerText) return;
|
||||
workflowStreamResponse?.({
|
||||
write,
|
||||
event: SseResponseEventEnum.answer,
|
||||
data: textAdaptGptResponse({
|
||||
text
|
||||
})
|
||||
});
|
||||
}
|
||||
});
|
||||
userKey: externalProvider.openaiAccount,
|
||||
isAborted: checkIsStopping,
|
||||
onReasoning({ text }) {
|
||||
if (!aiChatReasoning) return;
|
||||
workflowStreamResponse?.({
|
||||
write,
|
||||
event: SseResponseEventEnum.answer,
|
||||
data: textAdaptGptResponse({
|
||||
reasoning_content: text
|
||||
})
|
||||
});
|
||||
},
|
||||
onStreaming({ text }) {
|
||||
if (!isResponseAnswerText) return;
|
||||
workflowStreamResponse?.({
|
||||
write,
|
||||
event: SseResponseEventEnum.answer,
|
||||
data: textAdaptGptResponse({
|
||||
text
|
||||
})
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
if (!answerText && !reasoningText) {
|
||||
return getNodeErrResponse({ error: getEmptyResponseTip() });
|
||||
if (responseEmptyTip) {
|
||||
return getNodeErrResponse({ error: responseEmptyTip });
|
||||
}
|
||||
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
|
||||
@@ -414,7 +414,6 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
|
||||
return;
|
||||
}
|
||||
|
||||
// Thread avoidance
|
||||
await surrenderProcess();
|
||||
const nodeId = this.activeRunQueue.keys().next().value;
|
||||
const node = nodeId ? this.runtimeNodesMap.get(nodeId) : undefined;
|
||||
@@ -430,10 +429,6 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
|
||||
this.processActiveNode();
|
||||
});
|
||||
}
|
||||
// 兜底,除非极端情况,否则不可能触发
|
||||
else {
|
||||
this.processActiveNode();
|
||||
}
|
||||
}
|
||||
|
||||
private addSkipNode(node: RuntimeNodeItemType, skippedNodeIdList: Set<string>) {
|
||||
@@ -446,9 +441,8 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
|
||||
this.skipNodeQueue.set(node.nodeId, { node, skippedNodeIdList: concatSkippedNodeIdList });
|
||||
}
|
||||
private async processSkipNodes() {
|
||||
// Thread avoidance
|
||||
await surrenderProcess();
|
||||
// 取一个 node,并且从队列里删除
|
||||
await surrenderProcess();
|
||||
const skipItem = this.skipNodeQueue.values().next().value;
|
||||
if (skipItem) {
|
||||
this.skipNodeQueue.delete(skipItem.node.nodeId);
|
||||
|
||||
Reference in New Issue
Block a user