mirror of https://github.com/labring/FastGPT.git (synced 2025-07-30 10:28:42 +00:00)
feat: app detail

@@ -12,7 +12,7 @@ export const pushChatBill = async ({
  isPay,
  chatModel,
  userId,
  chatId,
  appId,
  textLen,
  tokens,
  type

@@ -20,7 +20,7 @@ export const pushChatBill = async ({
  isPay: boolean;
  chatModel: ChatModelType;
  userId: string;
  chatId?: '' | string;
  appId: string;
  textLen: number;
  tokens: number;
  type: BillTypeEnum.chat | BillTypeEnum.openapiChat;

@@ -43,7 +43,7 @@ export const pushChatBill = async ({
  userId,
  type,
  modelName: chatModel,
  chatId: chatId ? chatId : undefined,
  appId,
  textLen,
  tokenLen: tokens,
  price

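For context, a caller of the updated helper now passes appId alongside the existing billing fields. The sketch below is illustrative only: the import paths, wrapper function, and sample values are assumptions; the field names come from the hunks above.

// Illustrative sketch, not code from this commit: import paths and values are assumed.
import { pushChatBill } from '@/service/events/pushBill'; // assumed path
import { BillTypeEnum } from '@/constants/user'; // assumed path

async function billChatCall(userId: string, appId: string, chatId: string, tokens: number) {
  await pushChatBill({
    isPay: true,
    chatModel: 'gpt-3.5-turbo', // a ChatModelType value (assumed)
    userId,
    chatId, // optional: '' | string
    appId, // the app's _id, per the updated signature
    textLen: 1024, // combined prompt + answer length (placeholder)
    tokens, // token usage reported by the model
    type: BillTypeEnum.chat
  });
}
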
@@ -105,4 +105,4 @@ try {
  console.log(error);
}

-export const App: Model<AppType> = models['model'] || model('model', AppSchema);
+export const App: Model<AppType> = models['app'] || model('app', AppSchema);

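Only the mongoose registration key changes here; the models['app'] || model('app', AppSchema) pattern reuses an already-compiled model (for example on dev hot reload) instead of re-registering it and hitting an OverwriteModelError. A minimal, hedged usage sketch; the import path and query are assumptions:

// Illustrative only: the import path is an assumption; App is the export shown above.
import { App } from '@/service/models/app'; // assumed path

async function getAppDetail(appId: string) {
  // documents are now registered under the 'app' model name rather than 'model'
  return App.findById(appId);
}
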
@@ -16,12 +16,11 @@ const BillSchema = new Schema({
  },
  modelName: {
    type: String,
-   enum: [...Object.keys(ChatModelMap), embeddingModel],
-   required: true
+   enum: [...Object.keys(ChatModelMap), embeddingModel]
  },
- chatId: {
+ appId: {
    type: Schema.Types.ObjectId,
-   ref: 'chat'
+   ref: 'app'
  },
  time: {
    type: Date,

@@ -44,8 +43,9 @@ const BillSchema = new Schema({
});

try {
- BillSchema.index({ time: -1 });
  BillSchema.index({ userId: 1 });
+ // BillSchema.index({ time: -1 });
+ // BillSchema.index({ time: 1 }, { expireAfterSeconds: 90 * 24 * 60 });
} catch (error) {
  console.log(error);
}

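With the ref switched to 'app', a bill's appId can be populated against the app collection, and with the time index apparently commented out in this change, a time-sorted query is no longer index-backed. A hedged sketch of such a query; the Bill model name and import path are assumptions:

// Illustrative only: model name and import path are assumptions;
// the appId ref and field names come from the BillSchema hunks above.
import { Bill } from '@/service/models/bill'; // assumed path

async function listUserBills(userId: string) {
  return Bill.find({ userId })
    .sort({ time: -1 }) // not backed by an index if the time index stays commented out
    .populate('appId'); // resolves the referenced document via ref: 'app'
}
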
@@ -10,7 +10,7 @@ const ChatSchema = new Schema({
  },
  appId: {
    type: Schema.Types.ObjectId,
-   ref: 'model',
+   ref: 'app',
    required: true
  },
  updateTime: {

@@ -1,77 +0,0 @@
import { ChatCompletionType, StreamResponseType } from './index';
import { ChatRoleEnum } from '@/constants/chat';
import axios from 'axios';

/* model chat */
export const claudChat = async ({ apiKey, messages, stream, chatId }: ChatCompletionType) => {
  // get system prompt
  const systemPrompt = messages
    .filter((item) => item.obj === 'System')
    .map((item) => item.value)
    .join('\n');
  const systemPromptText = systemPrompt ? `你本次知识：${systemPrompt}\n下面是我的问题：` : '';

  const prompt = `${systemPromptText}'${messages[messages.length - 1].value}'`;

  const response = await axios.post(
    process.env.CLAUDE_BASE_URL || '',
    {
      prompt,
      stream,
      conversationId: chatId
    },
    {
      headers: {
        Authorization: apiKey
      },
      timeout: stream ? 60000 : 480000,
      responseType: stream ? 'stream' : 'json'
    }
  );

  const responseText = stream ? '' : response.data?.text || '';

  return {
    streamResponse: response,
    responseMessages: messages.concat({
      obj: ChatRoleEnum.AI,
      value: responseText
    }),
    responseText,
    totalTokens: 0
  };
};

/* openai stream response */
export const claudStreamResponse = async ({ res, chatResponse, prompts }: StreamResponseType) => {
  try {
    let responseContent = '';

    try {
      const decoder = new TextDecoder();
      for await (const chunk of chatResponse.data as any) {
        if (res.closed) {
          break;
        }
        const content = decoder.decode(chunk);
        responseContent += content;
        content && res.write(content);
      }
    } catch (error) {
      console.log('pipe error', error);
    }

    const finishMessages = prompts.concat({
      obj: ChatRoleEnum.AI,
      value: responseContent
    });

    return {
      responseContent,
      totalTokens: 0,
      finishMessages
    };
  } catch (error) {
    return Promise.reject(error);
  }
};

@@ -14,7 +14,7 @@ export type ChatCompletionType = {
  temperature: number;
  maxToken?: number;
  messages: ChatItemType[];
- chatId?: string;
+ historyId?: string;
  [key: string]: any;
};
export type ChatCompletionResponseType = {

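With chatId replaced by historyId on ChatCompletionType, adapters now receive the stored history record's id. A hedged sketch of building the options object; the surrounding wiring and values are placeholders, and only the field names mirror the type above:

// Illustrative only: import path and values are assumptions;
// the field names mirror the ChatCompletionType definition above.
import type { ChatItemType } from '@/types/chat'; // assumed path

function buildCompletionParams(apiKey: string, messages: ChatItemType[], historyId?: string) {
  return {
    apiKey,
    temperature: 0,
    maxToken: 4000,
    messages,
    historyId // replaces the former chatId field
  };
}
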