Mirror of https://github.com/labring/FastGPT.git, synced 2025-07-23 13:03:50 +00:00
feat: gpt3流响应 (gpt3 streaming response)
@@ -3,6 +3,8 @@ export enum EmailTypeEnum {
   findPassword = 'findPassword'
 }
 
+export const PRICE_SCALE = 100000;
+
 export const introPage = `
 ## 欢迎使用 Fast GPT
 
@@ -89,6 +89,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         temperature: temperature,
         // max_tokens: modelConstantsData.maxToken,
         messages: formatPrompts,
+        frequency_penalty: 0.5, // higher values reduce repetition
+        presence_penalty: -0.5, // higher values make new content more likely
         stream: true
       },
       {
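The two penalties added here shape the sampling distribution: a positive frequency_penalty penalizes tokens in proportion to how often they have already appeared (less repetition), while a negative presence_penalty makes tokens that have appeared at least once slightly more likely, keeping the reply close to topics already in the conversation. A minimal sketch of the resulting request, assuming the openai v3 Node SDK; the model name, temperature, and API key handling are placeholders, only the penalty values come from the diff:

```ts
import { Configuration, OpenAIApi } from 'openai';
import type { ChatCompletionRequestMessage } from 'openai';

// Sketch only: the request shape produced by the hunk above.
async function requestChatStream(messages: ChatCompletionRequestMessage[]) {
  const chatAPI = new OpenAIApi(new Configuration({ apiKey: process.env.OPENAI_API_KEY || '' }));
  return chatAPI.createChatCompletion(
    {
      model: 'gpt-3.5-turbo', // placeholder
      temperature: 0.7, // placeholder
      messages,
      frequency_penalty: 0.5, // higher values reduce repetition
      presence_penalty: -0.5, // negative values keep the reply on topics already present
      stream: true
    },
    { responseType: 'stream' } // axios option: response.data becomes a readable SSE stream
  );
}
```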
@@ -117,7 +119,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       try {
         const json = JSON.parse(data);
         const content: string = json?.choices?.[0].delta.content || '';
-        if (!content) return;
+        if (!content || (responseContent === '' && content === '\n')) return;
+
         responseContent += content;
         // console.log('content:', content)
         !stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));
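The strengthened guard still skips empty deltas, and now also drops a delta that is a bare newline while nothing has been accumulated yet, so the streamed answer never opens with a blank line (the models often emit one or two leading newline tokens). A minimal sketch of the same check, with illustrative names:

```ts
// Drop empty deltas, and drop a delta that is just "\n" when nothing has been emitted yet.
function shouldEmit(accumulated: string, delta: string): boolean {
  if (!delta) return false;
  if (accumulated === '' && delta === '\n') return false;
  return true;
}

// usage
let responseContent = '';
for (const delta of ['\n', 'Hello', '\n', 'world']) {
  if (!shouldEmit(responseContent, delta)) continue;
  responseContent += delta;
}
// responseContent === 'Hello\nworld'
```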
@@ -144,7 +147,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     stream.destroy();
 
     const promptsContent = formatPrompts.map((item) => item.content).join('');
-    console.log(`responseLen: ${responseContent.length}`, `promptLen: ${promptsContent.length}`);
     // only bill when the platform key is used
     !userApiKey &&
       pushBill({
@@ -1,20 +1,38 @@
-// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@/service/response';
+import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
 import { connectToDatabase } from '@/service/mongo';
 import { getOpenAIApi, authChat } from '@/service/utils/chat';
-import { ChatItemType } from '@/types/chat';
 import { httpsAgent } from '@/service/utils/tools';
+import { ChatItemType } from '@/types/chat';
+import { jsonRes } from '@/service/response';
+import type { ModelSchema } from '@/types/mongoSchema';
+import { PassThrough } from 'stream';
 import { modelList } from '@/constants/model';
 import { pushBill } from '@/service/events/pushChatBill';
 
 /* send the prompt */
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
-  try {
-    const { prompt, chatId } = req.body as { prompt: ChatItemType[]; chatId: string };
-    const { authorization } = req.headers;
+  let step = 0; // step=1 means the stream response has started
+  const stream = new PassThrough();
+  stream.on('error', () => {
+    console.log('error: ', 'stream error');
+    stream.destroy();
+  });
+  res.on('close', () => {
+    stream.destroy();
+  });
+  res.on('error', () => {
+    console.log('error: ', 'request error');
+    stream.destroy();
+  });
 
-    if (!prompt || !chatId) {
+  try {
+    const { chatId, prompt } = req.body as {
+      prompt: ChatItemType;
+      chatId: string;
+    };
+    const { authorization } = req.headers;
+    if (!chatId || !prompt) {
       throw new Error('缺少参数');
     }
 
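The new gpt3 route sets up a PassThrough before anything else: the completion loop later pushes text chunks into it, it is piped into `res`, and every error or client-disconnect path destroys it so the loop can check `stream.destroyed` and stop early. A minimal sketch of that wiring outside of Next.js, with illustrative names:

```ts
import { PassThrough } from 'stream';
import type { ServerResponse } from 'http';

// Sketch only: the PassThrough pattern used by the handler above.
function createResponseStream(res: ServerResponse): PassThrough {
  const stream = new PassThrough();
  // If the writable side errors or the client goes away, tear the stream down
  // so the upstream loop can notice `stream.destroyed` and stop pushing chunks.
  stream.on('error', () => stream.destroy());
  res.on('close', () => stream.destroy());
  res.on('error', () => stream.destroy());
  stream.pipe(res);
  return stream;
}
```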
@@ -22,13 +40,29 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
     const { chat, userApiKey, systemKey, userId } = await authChat(chatId, authorization);
 
-    const model = chat.modelId;
+    const model: ModelSchema = chat.modelId;
 
-    // get the chatAPI
-    const chatAPI = getOpenAIApi(userApiKey || systemKey);
+    // read the conversation content
+    const prompts = [...chat.content, prompt];
 
-    // prompt handling
-    const formatPrompts = prompt.map((item) => `${item.value}\n\n###\n\n`).join('');
+    // filter by context length
+    const maxContext = model.security.contextMaxLen;
+    const filterPrompts =
+      prompts.length > maxContext ? prompts.slice(prompts.length - maxContext) : prompts;
+
+    // format the text content
+    const map = {
+      Human: 'Human',
+      AI: 'AI',
+      SYSTEM: 'SYSTEM'
+    };
+    const formatPrompts: string[] = filterPrompts.map((item: ChatItemType) => item.value);
+    // if there is a system prompt, insert it automatically
+    if (model.systemPrompt) {
+      formatPrompts.unshift(`${model.systemPrompt}`);
+    }
+
+    const promptText = formatPrompts.join('</s>');
 
     // compute the temperature
     const modelConstantsData = modelList.find((item) => item.model === model.service.modelName);
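The handler now keeps only the last `contextMaxLen` chat items, optionally prepends the model's system prompt, and joins everything with the `</s>` separator that is later also used as a stop sequence, so the completion model sees clearly delimited turns. A worked sketch of that assembly, with simplified types and illustrative names:

```ts
type ChatItem = { obj: 'Human' | 'AI' | 'SYSTEM'; value: string };

// Keep the most recent maxContext turns, prepend the system prompt, join with </s>.
function buildPromptText(history: ChatItem[], maxContext: number, systemPrompt?: string): string {
  const filtered =
    history.length > maxContext ? history.slice(history.length - maxContext) : history;
  const values = filtered.map((item) => item.value);
  if (systemPrompt) values.unshift(systemPrompt);
  return values.join('</s>');
}

// e.g. with maxContext = 2, only the last two turns survive:
buildPromptText(
  [{ obj: 'Human', value: 'A' }, { obj: 'AI', value: 'B' }, { obj: 'Human', value: 'C' }],
  2,
  'You are helpful'
); // => 'You are helpful</s>B</s>C'
```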
@@ -37,42 +71,95 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     }
     const temperature = modelConstantsData.maxTemperature * (model.temperature / 10);
 
-    // send the request
-    const response = await chatAPI.createCompletion(
+    // get the chatAPI
+    const chatAPI = getOpenAIApi(userApiKey || systemKey);
+    let startTime = Date.now();
+
+    // send the request
+    const chatResponse = await chatAPI.createCompletion(
       {
-        model: model.service.modelName,
-        prompt: formatPrompts,
+        model: model.service.chatModel,
         temperature: temperature,
-        // max_tokens: modelConstantsData.maxToken,
-        top_p: 1,
-        frequency_penalty: 0,
-        presence_penalty: 0.6,
-        stop: ['###']
+        prompt: promptText,
+        stream: true,
+        max_tokens: modelConstantsData.maxToken,
+        presence_penalty: 0, // higher values make new content more likely
+        frequency_penalty: 0, // higher values reduce repetition
+        stop: ['。!?.!.', `</s>`]
       },
       {
+        timeout: 40000,
+        responseType: 'stream',
         httpsAgent
       }
     );
 
-    const responseContent = response.data.choices[0]?.text || '';
+    console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
+
+    // create the response stream
+    res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
+    res.setHeader('Access-Control-Allow-Origin', '*');
+    res.setHeader('X-Accel-Buffering', 'no');
+    res.setHeader('Cache-Control', 'no-cache, no-transform');
+    step = 1;
+
+    let responseContent = '';
+    stream.pipe(res);
+
+    const onParse = async (event: ParsedEvent | ReconnectInterval) => {
+      if (event.type !== 'event') return;
+      const data = event.data;
+      if (data === '[DONE]') return;
+      try {
+        const json = JSON.parse(data);
+        const content: string = json?.choices?.[0].text || '';
+        if (!content || (responseContent === '' && content === '\n')) return;
+
+        responseContent += content;
+        // console.log('content:', content);
+        !stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));
+      } catch (error) {
+        error;
+      }
+    };
+
+    const decoder = new TextDecoder();
+    try {
+      for await (const chunk of chatResponse.data as any) {
+        if (stream.destroyed) {
+          // the stream was interrupted; ignore the remaining content
+          break;
+        }
+        const parser = createParser(onParse);
+        parser.feed(decoder.decode(chunk));
+      }
+    } catch (error) {
+      console.log('pipe error', error);
+    }
+    // close stream
+    !stream.destroyed && stream.push(null);
+    stream.destroy();
 
-    console.log(`responseLen: ${responseContent.length}`, `promptLen: ${formatPrompts.length}`);
     // only bill when the platform key is used
     !userApiKey &&
       pushBill({
         modelName: model.service.modelName,
         userId,
         chatId,
-        text: formatPrompts + responseContent
+        text: promptText + responseContent
       });
-
-    jsonRes(res, {
-      data: responseContent
-    });
   } catch (err: any) {
+    // console.log(err?.response);
+    if (step === 1) {
+      // end the stream directly
+      console.log('error,结束');
+      stream.destroy();
+    } else {
+      res.status(500);
       jsonRes(res, {
         code: 500,
         error: err
       });
+    }
   }
 }
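With `responseType: 'stream'`, `chatResponse.data` is a raw byte stream of server-sent events; each chunk is decoded and fed to eventsource-parser, which invokes the callback once per complete `data:` line, and `[DONE]` marks the end of the completion. Below is a condensed sketch of that loop with illustrative names; it creates the parser once rather than per chunk as the diff does, since eventsource-parser keeps internal state to reassemble events split across chunk boundaries.

```ts
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import type { Readable } from 'stream';

// Sketch only: decode an SSE byte stream and hand each text delta to onText.
async function pipeCompletionStream(body: Readable, onText: (text: string) => void) {
  const decoder = new TextDecoder();
  const parser = createParser((event: ParsedEvent | ReconnectInterval) => {
    if (event.type !== 'event' || event.data === '[DONE]') return;
    try {
      const json = JSON.parse(event.data);
      const text: string = json?.choices?.[0]?.text || '';
      if (text) onText(text);
    } catch {
      // ignore malformed fragments
    }
  });
  for await (const chunk of body) {
    parser.feed(decoder.decode(chunk));
  }
}
```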
@@ -5,7 +5,7 @@ import axios from 'axios';
 import { authToken } from '@/service/utils/tools';
 import { customAlphabet } from 'nanoid';
 import { connectToDatabase, Pay } from '@/service/mongo';
-import { PRICE_SCALE } from '@/utils/user';
+import { PRICE_SCALE } from '@/constants/common';
 
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 20);
 
@@ -197,16 +197,22 @@ const Chat = ({ chatId }: { chatId: string }) => {
     [chatId]
   );
 
-  // chatGPT
-  const chatGPTPrompt = useCallback(
-    async (newChatList: ChatSiteItemType[]) => {
+  // gpt conversation
+  const gptChatPrompt = useCallback(
+    async (prompts: ChatSiteItemType) => {
+      const urlMap: Record<string, string> = {
+        [ChatModelNameEnum.GPT35]: '/api/chat/chatGpt',
+        [ChatModelNameEnum.GPT3]: '/api/chat/gpt3'
+      };
+      if (!urlMap[chatData.chatModel]) return Promise.reject('找不到模型');
+
       const prompt = {
-        obj: newChatList[newChatList.length - 1].obj,
-        value: newChatList[newChatList.length - 1].value
+        obj: prompts.obj,
+        value: prompts.value
       };
       // streaming request to fetch the data
       const res = await streamFetch({
-        url: '/api/chat/chatGpt',
+        url: urlMap[chatData.chatModel],
         data: {
           prompt,
           chatId
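On the client, the chat model recorded in `chatData.chatModel` now picks which API route receives the streamed request. A small sketch of that dispatch; only the route mapping is taken from the diff, the enum string values are assumptions:

```ts
// Assumed enum values for illustration only.
enum ChatModelNameEnum {
  GPT35 = 'gpt-3.5-turbo',
  GPT3 = 'text-davinci-003'
}

const urlMap: Record<string, string> = {
  [ChatModelNameEnum.GPT35]: '/api/chat/chatGpt',
  [ChatModelNameEnum.GPT3]: '/api/chat/gpt3'
};

function resolveChatUrl(chatModel: string): string {
  const url = urlMap[chatModel];
  if (!url) throw new Error('model not found');
  return url;
}

resolveChatUrl(ChatModelNameEnum.GPT3); // '/api/chat/gpt3'
```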
@@ -240,7 +246,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
         });
       } catch (err) {
         toast({
-          title: '存储对话出现异常, 继续对话会导致上下文丢失,请刷新页面',
+          title: '对话出现异常, 继续对话会导致上下文丢失,请刷新页面',
           status: 'warning',
           duration: 3000,
           isClosable: true
@@ -259,7 +265,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
         })
       }));
     },
-    [chatId, toast]
+    [chatData.chatModel, chatId, toast]
   );
 
   /**
@@ -272,7 +278,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
       .trim()
       .split('\n')
       .filter((val) => val)
-      .join('\n\n');
+      .join('\n');
     if (!chatData?.modelId || !val || !ChatBox.current || isChatting) {
       return;
     }
@@ -301,22 +307,8 @@ const Chat = ({ chatId }: { chatId: string }) => {
     resetInputVal('');
     scrollToBottom();
 
-    const fnMap: { [key: string]: any } = {
-      [ChatModelNameEnum.GPT35]: chatGPTPrompt,
-      [ChatModelNameEnum.GPT3]: gpt3ChatPrompt
-    };
-
     try {
-      /* limit the context length */
-      const maxContext = chatData.secret.contextMaxLen;
-      const requestPrompt =
-        newChatList.length > maxContext + 1
-          ? newChatList.slice(newChatList.length - maxContext - 1, -1)
-          : newChatList.slice(0, -1);
-
-      if (typeof fnMap[chatData.chatModel] === 'function') {
-        await fnMap[chatData.chatModel](requestPrompt);
-      }
-
+      await gptChatPrompt(newChatList[newChatList.length - 2]);
       // if this is Human's first message, insert it into the history
       const humanChat = newChatList.filter((item) => item.obj === 'Human');
@@ -343,15 +335,12 @@ const Chat = ({ chatId }: { chatId: string }) => {
       }
     }, [
       inputVal,
-      chatData.modelId,
+      chatData?.modelId,
       chatData.history,
-      chatData.secret.contextMaxLen,
-      chatData.chatModel,
       isChatting,
       resetInputVal,
       scrollToBottom,
-      chatGPTPrompt,
-      gpt3ChatPrompt,
+      gptChatPrompt,
       pushChatHistory,
       chatId,
       toast
@@ -34,6 +34,7 @@ const CreateModel = ({
   onSuccess: Dispatch<ModelSchema>;
 }) => {
   const [requesting, setRequesting] = useState(false);
+  const [refresh, setRefresh] = useState(false);
   const toast = useToast({
     duration: 2000,
     position: 'top'
@@ -95,7 +96,10 @@ const CreateModel = ({
             <Select
               placeholder="选择基础模型类型"
               {...register('serviceModelName', {
-                required: '底层模型不能为空'
+                required: '底层模型不能为空',
+                onChange() {
+                  setRefresh(!refresh);
+                }
               })}
             >
               {modelList.map((item) => (
@@ -110,8 +114,9 @@ const CreateModel = ({
           </FormControl>
           <Box mt={3} textAlign={'center'} fontSize={'sm'} color={'blackAlpha.600'}>
             {formatPrice(
-              modelList.find((item) => item.model === getValues('serviceModelName'))?.price || 0
-            ) * 1000}
+              modelList.find((item) => item.model === getValues('serviceModelName'))?.price || 0,
+              1000
+            )}
             元/1K tokens(包括上下文和标点符号)
           </Box>
         </ModalBody>
@@ -107,7 +107,7 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
           {modelList.map((item, i) => (
             <Tr key={item.model}>
               <Td>{item.name}</Td>
-              <Td>{formatPrice(item.price) * 1000}</Td>
+              <Td>{formatPrice(item.price, 1000)}</Td>
             </Tr>
           ))}
         </Tbody>
@@ -28,7 +28,9 @@ export const pushBill = async ({
 
     // compute the price
     const price = unitPrice * tokens.length;
-    console.log('token len:', tokens.length, 'price: ', `${formatPrice(price)}元`);
+    console.log('token len:', tokens.length);
+    console.log('text len: ', text.length);
+    console.log('price: ', `${formatPrice(price)}元`);
 
     try {
       // insert the Bill record
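The billing arithmetic itself is unchanged: the recorded price is the per-token unit price multiplied by the token count, expressed in PRICE_SCALE units (100000 per yuan) and only converted to yuan for display. A worked example, with an assumed unit price:

```ts
// PRICE_SCALE and formatPrice mirror the repo's utils; the unit price is an assumed value.
const PRICE_SCALE = 100000;
const formatPrice = (val: number, multiple = 1) =>
  Number(((val / PRICE_SCALE) * multiple).toFixed(10));

const unitPrice = 3;        // 3 / 100000 yuan per token (assumed)
const tokensLength = 1200;  // tokens consumed by prompt + completion
const price = unitPrice * tokensLength;            // 3600, in DB units
console.log('price: ', `${formatPrice(price)}元`); // "0.036元"
```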
@@ -39,9 +39,9 @@ const ModelSchema = new Schema({
   },
   temperature: {
     type: Number,
-    min: 1,
+    min: 0,
     max: 10,
-    default: 5
+    default: 4
   },
   service: {
     company: {
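Relaxing `min` to 0 matters because the stored 0 to 10 value is only a scale factor: the handler computes the API temperature as `modelConstantsData.maxTemperature * (model.temperature / 10)`, so a stored 0 now yields a fully deterministic request and the new default of 4 lands at 40% of the model's maximum. A quick illustration, where maxTemperature = 2 is an assumed value:

```ts
const maxTemperature = 2; // assumed per-model constant
const apiTemperature = (stored: number) => maxTemperature * (stored / 10);

apiTemperature(0);  // 0   deterministic sampling
apiTemperature(4);  // 0.8 the new default
apiTemperature(10); // 2   the model's maximum
```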
@@ -1,6 +1,6 @@
 import { Schema, model, models } from 'mongoose';
 import { hashPassword } from '@/service/utils/tools';
-import { PRICE_SCALE } from '@/utils/user';
+import { PRICE_SCALE } from '@/constants/common';
 
 const UserSchema = new Schema({
   email: {
@@ -28,11 +28,11 @@ export const jsonRes = <T = any>(
   } else if (openaiError[error?.response?.statusText]) {
     msg = openaiError[error.response.statusText];
   }
-  // console.log(error?.response);
   console.log('error->');
   console.log('code:', error.code);
   console.log('statusText:', error?.response?.statusText);
   console.log('msg:', msg);
+  error?.response && console.log('chat err:', error?.response);
 }
 
 res.json({
@@ -1,5 +1,5 @@
+import { PRICE_SCALE } from '@/constants/common';
 const tokenKey = 'fast-gpt-token';
-export const PRICE_SCALE = 100000;
 
 export const setToken = (val: string) => {
   localStorage.setItem(tokenKey, val);
@@ -14,6 +14,6 @@ export const clearToken = () => {
 /**
  * convert the price read from the database into yuan
  */
-export const formatPrice = (val: number) => {
-  return val / PRICE_SCALE;
+export const formatPrice = (val: number, multiple = 1) => {
+  return Number(((val / PRICE_SCALE) * multiple).toFixed(10));
 };
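`formatPrice` now takes a `multiple` so callers that display a per-1K-token price can pass 1000 instead of multiplying the already-divided result, and `toFixed(10)` trims any floating-point noise from the division before the value is shown. For example:

```ts
const PRICE_SCALE = 100000;
const formatPrice = (val: number, multiple = 1) =>
  Number(((val / PRICE_SCALE) * multiple).toFixed(10));

formatPrice(3);       // 0.00003  yuan per token
formatPrice(3, 1000); // 0.03     yuan per 1K tokens
```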