feat: openapi v2 chat

archer
2023-06-18 19:59:37 +08:00
parent 2b93ae2d00
commit ee9afa310a
27 changed files with 790 additions and 585 deletions

View File

@@ -2,6 +2,10 @@ import { formatPrice } from './user';
import dayjs from 'dayjs';
import type { BillSchema } from '../types/mongoSchema';
import type { UserBillType } from '@/types/user';
+import { ChatItemType } from '@/types/chat';
+import { ChatCompletionRequestMessageRoleEnum } from 'openai';
+import { ChatRoleEnum } from '@/constants/chat';
+import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';

export const adaptBill = (bill: BillSchema): UserBillType => {
  return {
@@ -14,3 +18,37 @@ export const adaptBill = (bill: BillSchema): UserBillType => {
    price: formatPrice(bill.price)
  };
};
+
+export const gptMessage2ChatType = (messages: MessageItemType[]): ChatItemType[] => {
+  const roleMap = {
+    [ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI,
+    [ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human,
+    [ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System
+  };
+  return messages.map((item) => ({
+    _id: item._id,
+    obj: roleMap[item.role],
+    value: item.content
+  }));
+};
+
+export const textAdaptGptResponse = ({
+  text,
+  model,
+  finish_reason = null,
+  extraData = {}
+}: {
+  model?: string;
+  text: string | null;
+  finish_reason?: null | 'stop';
+  extraData?: Object;
+}) => {
+  return JSON.stringify({
+    ...extraData,
+    id: '',
+    object: '',
+    created: 0,
+    model,
+    choices: [{ delta: text === null ? {} : { content: text }, index: 0, finish_reason }]
+  });
+};
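A rough usage sketch of the two new helpers, assuming MessageItemType is the OpenAI request-message shape with an optional _id (its definition lives in the new /v1/chat/completions route and is not shown in this hunk), and 'gpt-3.5-turbo' is only an illustrative model string:

// Messages arriving at the openapi chat route, converted into internal chat records.
const chatItems = gptMessage2ChatType([
  { role: ChatCompletionRequestMessageRoleEnum.System, content: 'You are a helpful assistant.' },
  { role: ChatCompletionRequestMessageRoleEnum.User, content: 'Hello' }
]);

// Streamed deltas wrapped as OpenAI-compatible chunks; finish_reason stays null until the final chunk.
const chunk = textAdaptGptResponse({ text: 'Hi there', model: 'gpt-3.5-turbo' });
const done = textAdaptGptResponse({ text: null, finish_reason: 'stop' });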

View File

@@ -1,18 +1,15 @@
-import { ClaudeEnum, OpenAiChatEnum } from '@/constants/model';
+import { OpenAiChatEnum } from '@/constants/model';
import type { ChatModelType } from '@/constants/model';
-import type { ChatItemSimpleType } from '@/types/chat';
+import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken, openAiSliceTextByToken } from './openai';
import { gpt_chatItemTokenSlice } from '@/pages/api/openapi/text/gptMessagesSlice';

export const modelToolMap: Record<
  ChatModelType,
  {
-    countTokens: (data: { messages: ChatItemSimpleType[] }) => number;
+    countTokens: (data: { messages: ChatItemType[] }) => number;
    sliceText: (data: { text: string; length: number }) => string;
-    tokenSlice: (data: {
-      messages: ChatItemSimpleType[];
-      maxToken: number;
-    }) => ChatItemSimpleType[];
+    tokenSlice: (data: { messages: ChatItemType[]; maxToken: number }) => ChatItemType[];
  }
> = {
  [OpenAiChatEnum.GPT35]: {
@@ -34,10 +31,5 @@ export const modelToolMap: Record<
    countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT432k, messages }),
    sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT432k, ...data }),
    tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT432k, ...data })
-  },
-  [ClaudeEnum.Claude]: {
-    countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT35, messages }),
-    sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT35, ...data }),
-    tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT35, ...data })
  }
};
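With the Claude entry removed, every key of modelToolMap is an OpenAiChatEnum model. A minimal sketch of the ChatItemType-based helpers, with _id assumed optional on the chat items shown here:

// Count tokens for a short exchange with the GPT-3.5 tooling.
const tokens = modelToolMap[OpenAiChatEnum.GPT35].countTokens({
  messages: [{ obj: ChatRoleEnum.Human, value: 'How many tokens is this?' }]
});

// Trim history to a token budget; the result keeps the full ChatItemType shape.
const trimmed = modelToolMap[OpenAiChatEnum.GPT35].tokenSlice({
  messages: [{ obj: ChatRoleEnum.AI, value: 'Earlier answer' }],
  maxToken: 2000
});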

View File

@@ -1,11 +1,12 @@
import { encoding_for_model, type Tiktoken } from '@dqbd/tiktoken';
-import type { ChatItemSimpleType } from '@/types/chat';
+import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
-import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai';
+import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { OpenAiChatEnum } from '@/constants/model';
import Graphemer from 'graphemer';
import axios from 'axios';
import dayjs from 'dayjs';
+import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';

const textDecoder = new TextDecoder();
const graphemer = new Graphemer();
@@ -86,16 +87,19 @@ export const getOpenAiEncMap = () => {
};

export const adaptChatItem_openAI = ({
-  messages
+  messages,
+  reserveId
}: {
-  messages: ChatItemSimpleType[];
-}): ChatCompletionRequestMessage[] => {
+  messages: ChatItemType[];
+  reserveId: boolean;
+}): MessageItemType[] => {
  const map = {
    [ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
    [ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
    [ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System
  };
  return messages.map((item) => ({
+    ...(reserveId && { _id: item._id }),
    role: map[item.obj] || ChatCompletionRequestMessageRoleEnum.System,
    content: item.value || ''
  }));
@@ -105,7 +109,7 @@ export function countOpenAIToken({
  messages,
  model
}: {
-  messages: ChatItemSimpleType[];
+  messages: ChatItemType[];
  model: `${OpenAiChatEnum}`;
}) {
  function getChatGPTEncodingText(
@@ -158,7 +162,7 @@ export function countOpenAIToken({
    return segments.reduce((memo, i) => memo + i.tokens.length, 0) ?? 0;
  }

-  const adaptMessages = adaptChatItem_openAI({ messages });
+  const adaptMessages = adaptChatItem_openAI({ messages, reserveId: true });

  return text2TokensLen(getOpenAiEncMap()[model], getChatGPTEncodingText(adaptMessages, model));
}
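The new reserveId flag decides whether the internal _id survives adaptation. A small sketch, assuming ChatItemType carries an _id and MessageItemType allows an optional one (the history item is made up):

const history: ChatItemType[] = [{ _id: 'abc123', obj: ChatRoleEnum.Human, value: 'Hi' }];

// Keep the internal _id on each adapted message (as the token counter above now does).
const withIds = adaptChatItem_openAI({ messages: history, reserveId: true });
// -> [{ _id: 'abc123', role: 'user', content: 'Hi' }]

// Drop the _id when building a spec-compliant payload for the upstream OpenAI request.
const plain = adaptChatItem_openAI({ messages: history, reserveId: false });
// -> [{ role: 'user', content: 'Hi' }]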