From 4075a3472bf444b5485ebb99b163f2572b5dad9e Mon Sep 17 00:00:00 2001
From: cl1107
Date: Thu, 11 Apr 2024 06:25:20 +0800
Subject: [PATCH] add lobe-chat compose yaml file (#37)

* Create compose_with_lobe_chat.yaml

* Update README.md
---
 README.md                   | 10 ++++++++++
 compose_with_lobe_chat.yaml | 18 ++++++++++++++++++
 2 files changed, 28 insertions(+)
 create mode 100644 compose_with_lobe_chat.yaml

diff --git a/README.md b/README.md
index 2bde1ef..cb65e52 100644
--- a/README.md
+++ b/README.md
@@ -51,6 +51,16 @@ docker compose up -d
 
 After deployment, you can directly access `http://[IP]:3040/v1/chat/completions` to use the API. Or use `http://[IP]:3000` to directly use **ChatGPT-Next-Web**.
 
+#### FreeGPT35 Service with [lobe-chat](https://github.com/lobehub/lobe-chat):
+
+```bash
+mkdir freegpt35 && cd freegpt35
+wget -O compose.yaml https://raw.githubusercontent.com/missuo/FreeGPT35/main/compose_with_lobe_chat.yaml
+docker compose up -d
+```
+
+After deployment, you can directly access `http://[IP]:3040/v1/chat/completions` to use the API. Or use `http://[IP]:3210` to directly use **lobe-chat**.
+
 ### Nginx Reverse Proxy
 
 ```nginx
diff --git a/compose_with_lobe_chat.yaml b/compose_with_lobe_chat.yaml
new file mode 100644
index 0000000..1ee2a8a
--- /dev/null
+++ b/compose_with_lobe_chat.yaml
@@ -0,0 +1,18 @@
+services:
+
+  lobe-chat:
+    image: lobehub/lobe-chat
+    container_name: lobe-chat
+    restart: always
+    ports:
+      - '3210:3210'
+    environment:
+      OPENAI_API_KEY: gptyyds
+      OPENAI_PROXY_URL: http://freegpt35:3040/v1
+      ACCESS_CODE: HelloGPT
+
+  freegpt35:
+    image: missuo/freegpt35:latest
+    restart: always
+    ports:
+      - "3040:3040"
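
Reviewer note: a minimal smoke test for the stack this compose file brings up, assuming the freegpt35 container accepts a standard OpenAI-style chat completions payload and does not validate the bearer token (both assumptions; the patch itself does not confirm the request format). The model name and token below are placeholders, not values taken from the patch.

```bash
# Hypothetical check after `docker compose up -d`: send a standard
# OpenAI-style chat completions request to the exposed freegpt35 port.
# "gpt-3.5-turbo" and the bearer token are placeholder values.
curl http://localhost:3040/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer placeholder-token" \
  -d '{
    "model": "gpt-3.5-turbo",
    "messages": [{"role": "user", "content": "Hello"}],
    "stream": false
  }'
```

If that returns a completion, the lobe-chat UI on port 3210 should reach the same backend via `OPENAI_PROXY_URL: http://freegpt35:3040/v1`, using the compose service name as the hostname on the shared network.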