feat: support accessToken-based web api calls (#80)

* feat: support markdown rendering and images

* perf: preserve scroll position on reload

* chore: version 2.5.2

* feat: add text line wrapping

* chore: add a new cover image

* chore: update cover

* feat: support the web api (accessToken) mode

* feat: support the new model and adjust the timeout

* feat: add reverse proxy support

* chore: update README.md

* feat: display timeout and reverse proxy settings

* chore: version 2.6.0

* chore: update README
Author: Redon
Committed: 2023-02-21 15:26:23 +08:00 (via GitHub)
Parent: ac9536ab87
Commit: f40048fb08
18 changed files with 236 additions and 35 deletions
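Taken together, the diff below makes startup mode-dependent: if OPENAI_API_KEY is set the service keeps using ChatGPTAPI, otherwise it falls back to ChatGPTUnofficialProxyAPI driven by OPENAI_ACCESS_TOKEN, with an optional reverse proxy and a configurable timeout. A minimal .env sketch, using only the variable names that appear in the diff (the values are placeholders):

# Option 1: official OpenAI API
OPENAI_API_KEY=sk-xxxxxxxx

# Option 2: web api via access token (used only when no API key is set)
OPENAI_ACCESS_TOKEN=your-access-token
# Optional reverse proxy for the unofficial proxy API
API_REVERSE_PROXY=https://example.com/your-reverse-proxy
# Optional request timeout in milliseconds, defaults to 30000
TIMEOUT_MS=30000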


@@ -1,27 +1,46 @@
import * as dotenv from 'dotenv'
import 'isomorphic-fetch'
import type { ChatGPTAPI, SendMessageOptions } from 'chatgpt'
import { ChatGPTUnofficialProxyAPI } from 'chatgpt'
import { sendResponse } from './utils'
dotenv.config()
let apiModel: 'ChatGPTAPI' | 'ChatGPTUnofficialProxyAPI' | undefined
export interface ChatContext {
conversationId?: string
parentMessageId?: string
}
dotenv.config()
const timeoutMs: number = !isNaN(+process.env.TIMEOUT_MS) ? +process.env.TIMEOUT_MS : 30 * 1000
const apiKey = process.env.OPENAI_API_KEY
if (!process.env.OPENAI_API_KEY && !process.env.OPENAI_ACCESS_TOKEN)
throw new Error('Missing OPENAI_API_KEY or OPENAI_ACCESS_TOKEN environment variable')
if (apiKey === undefined)
throw new Error('OPENAI_API_KEY is not defined')
let api: ChatGPTAPI
let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
// To use ESM in CommonJS, you can use a dynamic import
(async () => {
// More Info: https://github.com/transitive-bullshit/chatgpt-api
const { ChatGPTAPI } = await import('chatgpt')
api = new ChatGPTAPI({ apiKey: process.env.OPENAI_API_KEY })
if (process.env.OPENAI_API_KEY) {
api = new ChatGPTAPI({ apiKey: process.env.OPENAI_API_KEY })
apiModel = 'ChatGPTAPI'
}
else {
let options = {}
if (process.env.API_REVERSE_PROXY)
options = { apiReverseProxyUrl: process.env.API_REVERSE_PROXY }
api = new ChatGPTUnofficialProxyAPI({
accessToken: process.env.OPENAI_ACCESS_TOKEN,
...options,
})
apiModel = 'ChatGPTUnofficialProxyAPI'
}
})()
async function chatReply(
@@ -32,7 +51,7 @@ async function chatReply(
return sendResponse({ type: 'Fail', message: 'Message is empty' })
try {
let options: SendMessageOptions = { timeoutMs: 30 * 1000 }
let options: SendMessageOptions = { timeoutMs }
if (lastContext)
options = { ...lastContext }
@@ -46,4 +65,15 @@ async function chatReply(
}
}
export { chatReply }
async function chatConfig() {
return sendResponse({
type: 'Success',
data: {
apiModel,
reverseProxy: process.env.API_REVERSE_PROXY,
timeoutMs,
},
})
}
export { chatReply, chatConfig }
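For reference, a type sketch of what the new chatConfig handler reports; it assumes sendResponse forwards type and data unchanged, since the helper's envelope is not part of this diff:

// Sketch only: field names are taken from the chatConfig call above.
interface ConfigResponse {
  type: 'Success' | 'Fail'
  message?: string
  data?: {
    apiModel: 'ChatGPTAPI' | 'ChatGPTUnofficialProxyAPI'
    reverseProxy?: string // value of API_REVERSE_PROXY, if set
    timeoutMs: number // effective request timeout in milliseconds
  }
}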


@@ -1,6 +1,6 @@
import express from 'express'
import type { ChatContext } from './chatgpt'
import { chatReply } from './chatgpt'
import { chatConfig, chatReply } from './chatgpt'
const app = express()
const router = express.Router()
@@ -26,6 +26,16 @@ router.post('/chat', async (req, res) => {
}
})
router.post('/config', async (req, res) => {
try {
const response = await chatConfig()
res.send(response)
}
catch (error) {
res.send(error)
}
})
app.use('', router)
app.use('/api', router)
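A hypothetical client call against the new endpoint; the host and port are assumptions, since the server's listen call is outside this diff. Because the router is mounted at both '' and '/api', either /config or /api/config should resolve:

// Minimal usage sketch (run inside an async function or ESM module):
const res = await fetch('http://localhost:3002/api/config', { method: 'POST' })
const { type, data } = await res.json()
if (type === 'Success')
  console.log(`model: ${data.apiModel}, timeout: ${data.timeoutMs}ms, proxy: ${data.reverseProxy ?? 'none'}`)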