[feat] Support custom model names for the three built-in models

Author: Wizerd
Date: 2023-12-17 20:40:13 +08:00
parent dc5a6f222a
commit a321f965da
2 changed files with 97 additions and 67 deletions


@@ -11,6 +11,9 @@ services:
       - PROXY_API_PREFIX=<the PROXY_API_PREFIX of Pandora-Next>
       - UPLOAD_BASE_URL=<publicly reachable address of port 50011, e.g. http://1.2.3.4:50011; if a reverse proxy is used, fill in the proxied domain, e.g. https://pandora-backend-api.com>
       - KEY_FOR_GPTS_INFO=<an fk used only for fetching GPTs info>  # can be left empty if extra GPTs are not needed
+      - GPT_4_S_New_Name=gpt-4-s  # custom model name for gpt-4-s
+      - GPT_4_MOBILE_NEW_NAME=gpt-4-mobile  # custom model name for gpt-4-mobile
+      - GPT_3_5_NEW_NAME=gpt-3.5-turbo  # custom model name for gpt-3.5-turbo
     volumes:
       - ./log:/app/log
       - ./images:/app/images
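With these variables set, clients address the built-in models by their custom names. Below is a minimal client sketch, assuming the service exposes an OpenAI-compatible /v1/chat/completions endpoint at the same address used for UPLOAD_BASE_URL and that GPT_4_S_New_Name was set to a hypothetical "my-gpt-4"; the address, key and model name are placeholders, not values from this commit:

    import requests

    # Hypothetical values: base address, fk and custom model name are illustrative only.
    BASE = "http://1.2.3.4:50011"
    resp = requests.post(
        f"{BASE}/v1/chat/completions",
        headers={"Authorization": "Bearer <fk>", "Content-Type": "application/json"},
        json={
            "model": "my-gpt-4",  # must match the name configured in GPT_4_S_New_Name
            "messages": [{"role": "user", "content": "Hello"}],
            "stream": False,
        },
        timeout=60,
    )
    print(resp.json())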

main.py

@@ -50,17 +50,7 @@ def fetch_gizmo_info(base_url, proxy_api_prefix, model_id):
     else:
         return None
 
-gpts_configurations = [
-    {
-        "name":"gpt-4-s"
-    },
-    {
-        "name":"gpt-4-mobile"
-    },
-    {
-        "name":"gpt-3.5-turbo"
-    }
-]
+# gpts_configurations = []
 
 # add configurations to the global list
 def add_config_to_global_list(base_url, proxy_api_prefix, gpts_data):
@@ -113,10 +103,14 @@ PROXY_API_PREFIX = os.getenv('PROXY_API_PREFIX', '')
 UPLOAD_BASE_URL = os.getenv('UPLOAD_BASE_URL', '')
 KEY_FOR_GPTS_INFO = os.getenv('KEY_FOR_GPTS_INFO', '')
-VERSION = '0.1.4'
-UPDATE_INFO = '合并了 Upload 容器和 Backend 容器'
+VERSION = '0.1.5'
+# VERSION = 'test'
+UPDATE_INFO = '支持自定义自带的三种模型的名称'
+# UPDATE_INFO = '【仅供临时测试使用】 '
 
 with app.app_context():
+    global gpts_configurations  # moved to the very start of this scope
+
     # print version info
     print(f"==========================================")
     print(f"Version: {VERSION}")
@@ -134,6 +128,27 @@ with app.app_context():
     print(f"==========================================")
 
+    # read the model names from environment variables
+    GPT_4_S_New_Name = os.getenv('GPT_4_S_New_Name', 'gpt-4-s')
+    GPT_4_MOBILE_NEW_NAME = os.getenv('GPT_4_MOBILE_NEW_NAME', 'gpt-4-mobile')
+    GPT_3_5_NEW_NAME = os.getenv('GPT_3_5_NEW_NAME', 'gpt-3.5-turbo')
+
+    # update the gpts_configurations list
+    gpts_configurations = [
+        {
+            "name": GPT_4_S_New_Name,
+            "ori_name": "gpt-4-s"
+        },
+        {
+            "name": GPT_4_MOBILE_NEW_NAME,
+            "ori_name": "gpt-4-mobile"
+        },
+        {
+            "name": GPT_3_5_NEW_NAME,
+            "ori_name": "gpt-3.5-turbo"
+        }
+    ]
+
     print(f"GPTS 配置信息")
 
     # load the configuration and add it to the global list
@@ -141,7 +156,13 @@ with app.app_context():
         add_config_to_global_list(BASE_URL, PROXY_API_PREFIX, gpts_data)
 
     # print("当前可用GPTS" + get_accessible_model_list())
 
     # print the currently available GPTS names
-    print(f"当前可用 GPTS 列表: {get_accessible_model_list()}")
+    # get the list of currently available GPTS models
+    accessible_model_list = get_accessible_model_list()
+    print(f"当前可用 GPTS 列表: {accessible_model_list}")
+
+    # check the list for duplicate model names
+    if len(accessible_model_list) != len(set(accessible_model_list)):
+        raise Exception("检测到重复的模型名称,请检查环境变量或配置文件。")
 
     print(f"==========================================")
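The next hunk dispatches on find_model_config, which is not part of this diff. A minimal sketch of what such a lookup presumably does over the global gpts_configurations list, assuming it matches on the (possibly customized) "name" field; this is an illustration, not the project's actual implementation:

    def find_model_config(model):
        # Look up the configuration entry whose "name" matches the requested model;
        # return None when the model is not present in gpts_configurations.
        for config in gpts_configurations:
            if config.get("name") == model:
                return config
        return None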
@@ -185,59 +206,65 @@ def send_text_prompt_and_get_response(messages, api_key, stream, model):
     payload = {}
     print(f"model: {model}")
 
-    if model == 'gpt-4-s':
-        payload = {
-            # build the payload
-            "action": "next",
-            "messages": formatted_messages,
-            "parent_message_id": str(uuid.uuid4()),
-            "model":"gpt-4",
-            "timezone_offset_min": -480,
-            "suggestions":[],
-            "history_and_training_disabled": False,
-            "conversation_mode":{"kind":"primary_assistant"},"force_paragen":False,"force_rate_limit":False
-        }
-    elif model == 'gpt-4-mobile':
-        payload = {
-            # build the payload
-            "action": "next",
-            "messages": formatted_messages,
-            "parent_message_id": str(uuid.uuid4()),
-            "model":"gpt-4-mobile",
-            "timezone_offset_min": -480,
-            "suggestions":["Give me 3 ideas about how to plan good New Years resolutions. Give me some that are personal, family, and professionally-oriented.","Write a text asking a friend to be my plus-one at a wedding next month. I want to keep it super short and casual, and offer an out.","Design a database schema for an online merch store.","Compare Gen Z and Millennial marketing strategies for sunglasses."],
-            "history_and_training_disabled": False,
-            "conversation_mode":{"kind":"primary_assistant"},"force_paragen":False,"force_rate_limit":False
-        }
-    elif model =='gpt-3.5-turbo':
-        payload = {
-            # build the payload
-            "action": "next",
-            "messages": formatted_messages,
-            "parent_message_id": str(uuid.uuid4()),
-            "model": "text-davinci-002-render-sha",
-            "timezone_offset_min": -480,
-            "suggestions": [
-                "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.",
-                "I want to cheer up my friend who's having a rough day. Can you suggest a couple short and sweet text messages to go with a kitten gif?",
-                "Come up with 5 concepts for a retro-style arcade game.",
-                "I have a photoshoot tomorrow. Can you recommend me some colors and outfit options that will look good on camera?"
-            ],
-            "history_and_training_disabled":False,
-            "arkose_token":None,
-            "conversation_mode": {
-                "kind": "primary_assistant"
-            },
-            "force_paragen":False,
-            "force_rate_limit":False
-        }
-    else:
-        payload = generate_gpts_payload(model, formatted_messages)
-        if not payload:
-            raise Exception('model is not accessible')
-    response = requests.post(url, headers=headers, json=payload, stream=True)
-    # print(response)
-    return response
+    # look up the model configuration
+    model_config = find_model_config(model)
+    if model_config:
+        # check whether an ori_name is present
+        ori_model_name = model_config.get('ori_name', model)
+        if ori_model_name == 'gpt-4-s':
+            payload = {
+                # build the payload
+                "action": "next",
+                "messages": formatted_messages,
+                "parent_message_id": str(uuid.uuid4()),
+                "model":"gpt-4",
+                "timezone_offset_min": -480,
+                "suggestions":[],
+                "history_and_training_disabled": False,
+                "conversation_mode":{"kind":"primary_assistant"},"force_paragen":False,"force_rate_limit":False
+            }
+        elif ori_model_name == 'gpt-4-mobile':
+            payload = {
+                # build the payload
+                "action": "next",
+                "messages": formatted_messages,
+                "parent_message_id": str(uuid.uuid4()),
+                "model":"gpt-4-mobile",
+                "timezone_offset_min": -480,
+                "suggestions":["Give me 3 ideas about how to plan good New Years resolutions. Give me some that are personal, family, and professionally-oriented.","Write a text asking a friend to be my plus-one at a wedding next month. I want to keep it super short and casual, and offer an out.","Design a database schema for an online merch store.","Compare Gen Z and Millennial marketing strategies for sunglasses."],
+                "history_and_training_disabled": False,
+                "conversation_mode":{"kind":"primary_assistant"},"force_paragen":False,"force_rate_limit":False
+            }
+        elif ori_model_name =='gpt-3.5-turbo':
+            payload = {
+                # build the payload
+                "action": "next",
+                "messages": formatted_messages,
+                "parent_message_id": str(uuid.uuid4()),
+                "model": "text-davinci-002-render-sha",
+                "timezone_offset_min": -480,
+                "suggestions": [
+                    "What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.",
+                    "I want to cheer up my friend who's having a rough day. Can you suggest a couple short and sweet text messages to go with a kitten gif?",
+                    "Come up with 5 concepts for a retro-style arcade game.",
+                    "I have a photoshoot tomorrow. Can you recommend me some colors and outfit options that will look good on camera?"
+                ],
+                "history_and_training_disabled":False,
+                "arkose_token":None,
+                "conversation_mode": {
+                    "kind": "primary_assistant"
+                },
+                "force_paragen":False,
+                "force_rate_limit":False
+            }
+        else:
+            payload = generate_gpts_payload(model, formatted_messages)
+            if not payload:
+                raise Exception('model is not accessible')
+    response = requests.post(url, headers=headers, json=payload, stream=True)
+    # print(response)
+    return response
 
 def delete_conversation(conversation_id, api_key):
     print(f"[{datetime.now()}] 准备删除的会话id {conversation_id}")
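For clarity, the effect of the ori_name lookup in the hunk above can be shown in isolation: a customized name resolves back to one of the three built-in payloads, while entries without ori_name (such as GPTs configurations added by add_config_to_global_list) keep their own name and are handled by generate_gpts_payload. The values below are hypothetical, illustrative only:

    # Hypothetical entries, mirroring the shape of gpts_configurations above.
    custom_entry = {"name": "my-gpt-4", "ori_name": "gpt-4-s"}
    gpts_entry = {"name": "some-gpts-model"}  # a GPTs entry carries no ori_name

    assert custom_entry.get("ori_name", custom_entry["name"]) == "gpt-4-s"
    assert gpts_entry.get("ori_name", gpts_entry["name"]) == "some-gpts-model"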