diff --git a/deploy/args.json b/deploy/args.json index db8049fd75..92563e2beb 100644 --- a/deploy/args.json +++ b/deploy/args.json @@ -1,9 +1,9 @@ { "tags": { - "fastgpt": "v4.14.6", + "fastgpt": "v4.14.6.1", "fastgpt-sandbox": "v4.14.6", "fastgpt-mcp_server": "v4.14.6", - "fastgpt-plugin": "v0.5.0", + "fastgpt-plugin": "v0.5.1", "aiproxy": "v0.3.2", "aiproxy-pg": "0.8.0-pg15", "mongo": "5.0.32", diff --git a/deploy/dev/docker-compose.cn.yml b/deploy/dev/docker-compose.cn.yml index d39122ae49..022987b74f 100644 --- a/deploy/dev/docker-compose.cn.yml +++ b/deploy/dev/docker-compose.cn.yml @@ -153,7 +153,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always ports: diff --git a/deploy/dev/docker-compose.yml b/deploy/dev/docker-compose.yml index 2ae62743f6..291e551244 100644 --- a/deploy/dev/docker-compose.yml +++ b/deploy/dev/docker-compose.yml @@ -153,7 +153,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always ports: diff --git a/deploy/docker/cn/docker-compose.milvus.yml b/deploy/docker/cn/docker-compose.milvus.yml index 2c27830d62..7611e84170 100644 --- a/deploy/docker/cn/docker-compose.milvus.yml +++ b/deploy/docker/cn/docker-compose.milvus.yml @@ -180,7 +180,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -247,7 +247,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: 
registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/deploy/docker/cn/docker-compose.oceanbase.yml b/deploy/docker/cn/docker-compose.oceanbase.yml index adee2aacec..8d56092f64 100644 --- a/deploy/docker/cn/docker-compose.oceanbase.yml +++ b/deploy/docker/cn/docker-compose.oceanbase.yml @@ -157,7 +157,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -224,7 +224,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/deploy/docker/cn/docker-compose.pg.yml b/deploy/docker/cn/docker-compose.pg.yml index b2d07c4580..c51557cb9c 100644 --- a/deploy/docker/cn/docker-compose.pg.yml +++ b/deploy/docker/cn/docker-compose.pg.yml @@ -138,7 +138,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -205,7 +205,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/deploy/docker/cn/docker-compose.seekdb.yml b/deploy/docker/cn/docker-compose.seekdb.yml index ada8204f79..d7e1c31e71 100644 --- a/deploy/docker/cn/docker-compose.seekdb.yml +++ b/deploy/docker/cn/docker-compose.seekdb.yml @@ -144,7 +144,7 @@ services: fastgpt: container_name: fastgpt - image: 
registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -211,7 +211,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/deploy/docker/cn/docker-compose.zilliz.yml b/deploy/docker/cn/docker-compose.zilliz.yml index 85f7af9d0f..30906e2ed1 100644 --- a/deploy/docker/cn/docker-compose.zilliz.yml +++ b/deploy/docker/cn/docker-compose.zilliz.yml @@ -121,7 +121,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -188,7 +188,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/deploy/docker/global/docker-compose.milvus.yml b/deploy/docker/global/docker-compose.milvus.yml index 1d9dbb7bd8..df6a3745f2 100644 --- a/deploy/docker/global/docker-compose.milvus.yml +++ b/deploy/docker/global/docker-compose.milvus.yml @@ -180,7 +180,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -247,7 +247,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/deploy/docker/global/docker-compose.oceanbase.yml 
b/deploy/docker/global/docker-compose.oceanbase.yml index d00c604b37..cba3315c05 100644 --- a/deploy/docker/global/docker-compose.oceanbase.yml +++ b/deploy/docker/global/docker-compose.oceanbase.yml @@ -157,7 +157,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -224,7 +224,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/deploy/docker/global/docker-compose.pg.yml b/deploy/docker/global/docker-compose.pg.yml index ef52537d21..d2cca5e709 100644 --- a/deploy/docker/global/docker-compose.pg.yml +++ b/deploy/docker/global/docker-compose.pg.yml @@ -138,7 +138,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -205,7 +205,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/deploy/docker/global/docker-compose.seekdb.yml b/deploy/docker/global/docker-compose.seekdb.yml index 8e5a8a41ba..58a10e6c7e 100644 --- a/deploy/docker/global/docker-compose.seekdb.yml +++ b/deploy/docker/global/docker-compose.seekdb.yml @@ -144,7 +144,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -211,7 +211,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: 
diff --git a/deploy/docker/global/docker-compose.ziliiz.yml b/deploy/docker/global/docker-compose.ziliiz.yml index 992dfdc2dd..2dbc58a148 100644 --- a/deploy/docker/global/docker-compose.ziliiz.yml +++ b/deploy/docker/global/docker-compose.ziliiz.yml @@ -121,7 +121,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -188,7 +188,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/content/docs/upgrading/4-14/4143.mdx b/document/content/docs/upgrading/4-14/4143.mdx index 9ef11630a8..7f59ead2a0 100644 --- a/document/content/docs/upgrading/4-14/4143.mdx +++ b/document/content/docs/upgrading/4-14/4143.mdx @@ -1,5 +1,5 @@ --- -title: 'V4.14.3' +title: 'V4.14.3(包含升级脚本)' description: 'FastGPT V4.14.3 更新说明' --- diff --git a/document/content/docs/upgrading/4-14/4144.mdx b/document/content/docs/upgrading/4-14/4144.mdx index 3e937edaa5..41433785ef 100644 --- a/document/content/docs/upgrading/4-14/4144.mdx +++ b/document/content/docs/upgrading/4-14/4144.mdx @@ -1,5 +1,5 @@ --- -title: 'V4.14.4' +title: 'V4.14.4(包含升级脚本)' description: 'FastGPT V4.14.4 更新说明' --- diff --git a/document/content/docs/upgrading/4-14/4146.mdx b/document/content/docs/upgrading/4-14/4146.mdx index 680ca69b77..38786d60af 100644 --- a/document/content/docs/upgrading/4-14/4146.mdx +++ b/document/content/docs/upgrading/4-14/4146.mdx @@ -7,9 +7,9 @@ description: 'FastGPT V4.14.6 更新说明' ### 1. 
更新镜像: -- 更新 FastGPT 镜像 tag: v4.14.6 +- 更新 FastGPT 镜像 tag: v4.14.6.1 - 更新 FastGPT 商业版镜像 tag: v4.14.6 -- 更新 fastgpt-plugin 镜像 tag: v0.5.0 +- 更新 fastgpt-plugin 镜像 tag: v0.5.1 - mcp_server 无需更新 - sandbox 无需更新 - AIProxy 无需更新 diff --git a/document/data/doc-last-modified.json b/document/data/doc-last-modified.json index 816dc51771..3cd27e23ed 100644 --- a/document/data/doc-last-modified.json +++ b/document/data/doc-last-modified.json @@ -104,7 +104,7 @@ "document/content/docs/protocol/terms.en.mdx": "2025-12-15T23:36:54+08:00", "document/content/docs/protocol/terms.mdx": "2025-12-15T23:36:54+08:00", "document/content/docs/toc.en.mdx": "2025-08-04T13:42:36+08:00", - "document/content/docs/toc.mdx": "2026-01-19T19:10:54+08:00", + "document/content/docs/toc.mdx": "2026-02-02T18:48:25+08:00", "document/content/docs/upgrading/4-10/4100.mdx": "2025-08-02T19:38:37+08:00", "document/content/docs/upgrading/4-10/4101.mdx": "2025-09-08T20:07:20+08:00", "document/content/docs/upgrading/4-11/4110.mdx": "2025-08-05T23:20:39+08:00", @@ -124,7 +124,8 @@ "document/content/docs/upgrading/4-14/4144.mdx": "2025-12-16T14:56:04+08:00", "document/content/docs/upgrading/4-14/4145.mdx": "2026-01-18T23:59:15+08:00", "document/content/docs/upgrading/4-14/41451.mdx": "2026-01-20T11:53:27+08:00", - "document/content/docs/upgrading/4-14/4146.mdx": "2026-01-30T22:34:42+08:00", + "document/content/docs/upgrading/4-14/4146.mdx": "2026-02-04T14:20:54+08:00", + "document/content/docs/upgrading/4-14/4147.mdx": "2026-02-02T18:48:25+08:00", "document/content/docs/upgrading/4-8/40.mdx": "2025-08-02T19:38:37+08:00", "document/content/docs/upgrading/4-8/41.mdx": "2025-08-02T19:38:37+08:00", "document/content/docs/upgrading/4-8/42.mdx": "2025-08-02T19:38:37+08:00", diff --git a/document/public/deploy/docker/cn/docker-compose.milvus.yml b/document/public/deploy/docker/cn/docker-compose.milvus.yml index 2c27830d62..7611e84170 100644 --- a/document/public/deploy/docker/cn/docker-compose.milvus.yml +++ 
b/document/public/deploy/docker/cn/docker-compose.milvus.yml @@ -180,7 +180,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -247,7 +247,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/public/deploy/docker/cn/docker-compose.oceanbase.yml b/document/public/deploy/docker/cn/docker-compose.oceanbase.yml index adee2aacec..8d56092f64 100644 --- a/document/public/deploy/docker/cn/docker-compose.oceanbase.yml +++ b/document/public/deploy/docker/cn/docker-compose.oceanbase.yml @@ -157,7 +157,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -224,7 +224,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/public/deploy/docker/cn/docker-compose.pg.yml b/document/public/deploy/docker/cn/docker-compose.pg.yml index b2d07c4580..c51557cb9c 100644 --- a/document/public/deploy/docker/cn/docker-compose.pg.yml +++ b/document/public/deploy/docker/cn/docker-compose.pg.yml @@ -138,7 +138,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -205,7 +205,7 @@ 
services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/public/deploy/docker/cn/docker-compose.seekdb.yml b/document/public/deploy/docker/cn/docker-compose.seekdb.yml index ada8204f79..d7e1c31e71 100644 --- a/document/public/deploy/docker/cn/docker-compose.seekdb.yml +++ b/document/public/deploy/docker/cn/docker-compose.seekdb.yml @@ -144,7 +144,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -211,7 +211,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/public/deploy/docker/cn/docker-compose.zilliz.yml b/document/public/deploy/docker/cn/docker-compose.zilliz.yml index 85f7af9d0f..30906e2ed1 100644 --- a/document/public/deploy/docker/cn/docker-compose.zilliz.yml +++ b/document/public/deploy/docker/cn/docker-compose.zilliz.yml @@ -121,7 +121,7 @@ services: fastgpt: container_name: fastgpt - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6 # git + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -188,7 +188,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.0 + image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git 
a/document/public/deploy/docker/global/docker-compose.milvus.yml b/document/public/deploy/docker/global/docker-compose.milvus.yml index 1d9dbb7bd8..df6a3745f2 100644 --- a/document/public/deploy/docker/global/docker-compose.milvus.yml +++ b/document/public/deploy/docker/global/docker-compose.milvus.yml @@ -180,7 +180,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -247,7 +247,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/public/deploy/docker/global/docker-compose.oceanbase.yml b/document/public/deploy/docker/global/docker-compose.oceanbase.yml index d00c604b37..cba3315c05 100644 --- a/document/public/deploy/docker/global/docker-compose.oceanbase.yml +++ b/document/public/deploy/docker/global/docker-compose.oceanbase.yml @@ -157,7 +157,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -224,7 +224,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/public/deploy/docker/global/docker-compose.pg.yml b/document/public/deploy/docker/global/docker-compose.pg.yml index ef52537d21..d2cca5e709 100644 --- a/document/public/deploy/docker/global/docker-compose.pg.yml +++ b/document/public/deploy/docker/global/docker-compose.pg.yml @@ -138,7 +138,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ 
-205,7 +205,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/public/deploy/docker/global/docker-compose.seekdb.yml b/document/public/deploy/docker/global/docker-compose.seekdb.yml index 8e5a8a41ba..58a10e6c7e 100644 --- a/document/public/deploy/docker/global/docker-compose.seekdb.yml +++ b/document/public/deploy/docker/global/docker-compose.seekdb.yml @@ -144,7 +144,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -211,7 +211,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/document/public/deploy/docker/global/docker-compose.ziliiz.yml b/document/public/deploy/docker/global/docker-compose.ziliiz.yml index 992dfdc2dd..2dbc58a148 100644 --- a/document/public/deploy/docker/global/docker-compose.ziliiz.yml +++ b/document/public/deploy/docker/global/docker-compose.ziliiz.yml @@ -121,7 +121,7 @@ services: fastgpt: container_name: fastgpt - image: ghcr.io/labring/fastgpt:v4.14.6 # git + image: ghcr.io/labring/fastgpt:v4.14.6.1 # git ports: - 3000:3000 networks: @@ -188,7 +188,7 @@ services: environment: - FASTGPT_ENDPOINT=http://fastgpt:3000 fastgpt-plugin: - image: ghcr.io/labring/fastgpt-plugin:v0.5.0 + image: ghcr.io/labring/fastgpt-plugin:v0.5.1 container_name: fastgpt-plugin restart: always networks: diff --git a/package.json b/package.json index a5c7695331..a68109bb61 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,7 @@ "previewIcon": "node ./scripts/icon/index.js", "create:i18n": "node ./scripts/i18n/index.js", "lint": "eslint 
\"**/*.{ts,tsx}\" --fix --ignore-path .eslintignore", - "test": "vitest run", + "test": "vitest run --coverage", "test:vector": "vitest run --config test/integrationTest/vectorDB/vitest.config.mts" }, "devDependencies": { diff --git a/sdk/sandbox/README.md b/sdk/sandbox/README.md deleted file mode 100644 index e7d89604c3..0000000000 --- a/sdk/sandbox/README.md +++ /dev/null @@ -1,173 +0,0 @@ -# @fastgpt/sandbox - -A unified, high-level abstraction layer for cloud sandbox providers. It offers a consistent, vendor-agnostic interface for creating, managing, and interacting with sandboxed environments like OpenSandbox. - -> This package is ESM-only (`"type": "module"`) and requires Node.js **>= 20**. - -## Installation - -```bash -pnpm add @fastgpt/sandbox -``` - -## Quick Start - -The following example demonstrates the complete lifecycle of a sandbox: creating, executing commands, managing files, and finally, deleting it. - -```ts -import { createSandbox } from '@fastgpt/sandbox'; - -async function main() { - // 1. Create a sandbox with the OpenSandbox provider - const sandbox = createSandbox({ - provider: 'opensandbox', - connection: { - apiKey: process.env.OPEN_SANDBOX_API_KEY, - baseUrl: 'http://127.0.0.1:8080', // Your OpenSandbox server - runtime: 'kubernetes', - }, - }); - - console.log(`Provider: ${sandbox.provider}`); - console.log(`Native filesystem support: ${sandbox.capabilities.nativeFileSystem}`); - - try { - // 2. Create the sandbox instance with a specific image - await sandbox.create({ - image: { repository: 'nginx', tag: 'latest' }, - timeout: 3600, // Expiration in seconds - }); - console.log(`Sandbox created: ${sandbox.id}`); - - // 3. Wait until the sandbox is fully ready - await sandbox.waitUntilReady(60000); // 60-second timeout - console.log('Sandbox is ready.'); - - // 4. Execute a simple command - const version = await sandbox.execute('nginx -v'); - console.log(`Nginx version: ${version.stdout || version.stderr}`); - - // 5. 
Execute a command with streaming output - console.log('--- Streaming Execution ---'); - await sandbox.executeStream('for i in 1 2 3; do echo "Line $i"; sleep 0.5; done', { - onStdout: (msg) => console.log(` [stdout] ${msg.text}`), - onStderr: (msg) => console.log(` [stderr] ${msg.text}`), - onComplete: (result) => console.log(` [done] Exit code: ${result.exitCode}`), - }); - - // 6. Work with the filesystem - console.log('\n--- Filesystem Operations ---'); - // Write a file - await sandbox.writeFiles([ - { - path: '/app/hello.js', - data: `console.log('Hello from sandbox!');`, - }, - ]); - console.log('Written /app/hello.js'); - - // Read the file back - const [file] = await sandbox.readFiles(['/app/hello.js']); - if (file && !file.error) { - const content = new TextDecoder().decode(file.content); - console.log(`Read content: "${content}"`); - } - - // List directory - const entries = await sandbox.listDirectory('/app'); - console.log('Directory listing for /app:', entries.map(e => e.name)); - - - // 7. Stop and delete the sandbox - console.log('\n--- Cleanup ---'); - await sandbox.stop(); - console.log('Sandbox stopped.'); - - if (sandbox.runtime !== 'kubernetes') { - await sandbox.delete(); - console.log('Sandbox deleted.'); - } - - } catch (error) { - console.error('An error occurred:', error); - } finally { - // 8. Close the connection - await sandbox.close(); - console.log('Connection closed.'); - } -} - -main(); -``` - -## API (`ISandbox`) - -The `createSandbox(options)` function returns an instance that implements the `ISandbox` interface. - -### Lifecycle Management - -- **`create(options)`**: Creates a new sandbox instance. -- **`getInfo()`**: Retrieves detailed information about the sandbox. -- **`waitUntilReady(timeout)`**: Waits for the sandbox to become fully operational. -- **`renewExpiration(seconds)`**: Extends the sandbox's lifetime. -- **`pause()` / `resume()`**: Pauses and resumes a running sandbox (if supported). 
-- **`stop()`**: Stops the sandbox gracefully. -- **`delete()`**: Deletes the sandbox instance. -- **`close()`**: Closes the connection to the provider. - -### Command Execution - -- **`execute(command)`**: Executes a command and returns the result after completion. -- **`executeStream(command, handlers)`**: Executes a command and streams `stdout` and `stderr` in real-time. -- **`executeBackground(command)`**: Starts a command in the background and returns a session handle. - -### Filesystem Operations - -- **`writeFiles(files)`**: Writes one or more files to the sandbox. -- **`readFiles(paths)`**: Reads one or more files from the sandbox. -- **`listDirectory(path)`**: Lists the contents of a directory. -- **`createDirectories(paths)`**: Creates directories. -- **`deleteFiles(paths)`**: Deletes files. -- **`moveFiles(files)`**: Moves or renames files. - -### Health and Metrics - -- **`ping()`**: Performs a quick health check. -- **`getMetrics()`**: Retrieves CPU and memory usage statistics. - -## Provider Capabilities - -Different sandbox providers have different native capabilities. The SDK uses polyfills to provide a consistent API, but performance may vary. 
- -| Feature | OpenSandbox | MinimalProvider | -|---------|-------------|-----------------| -| Native Filesystem | ✅ | ❌ (polyfilled) | -| Streaming Output | ✅ | ❌ (fallback) | -| Background Exec | ✅ | ⚠️ (simulated) | -| Pause/Resume | ✅ | ❌ | -| Health Check | ✅ | ⚠️ (polyfilled) | -| Metrics | ✅ | ⚠️ (polyfilled) | -| File Search | ✅ | ⚠️ (polyfilled) | - -## Error Handling - -The SDK exports specific error types to facilitate robust error handling: - -- `SandboxException` -- `FeatureNotSupportedError` -- `FileOperationError` -- `CommandExecutionError` -- `TimeoutError` - -Example: -```ts -import { FileOperationError } from '@fastgpt/sandbox'; - -try { - await sandbox.readFiles(['/nonexistent-file']); -} catch (error) { - if (error instanceof FileOperationError) { - console.error(`File operation failed: ${error.message}`); - } -} -``` diff --git a/sdk/sandbox/package.json b/sdk/sandbox/package.json deleted file mode 100644 index 2a204885b7..0000000000 --- a/sdk/sandbox/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "name": "@fastgpt/sandbox", - "version": "0.1.0", - "description": "Unified abstraction layer for cloud sandbox providers with adapter pattern and feature polyfilling", - "type": "module", - "main": "./dist/index.js", - "types": "./dist/index.d.ts", - "exports": { - ".": { - "import": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "scripts": { - "build": "tsc", - "dev": "tsc --watch", - "test": "vitest run --config ./vitest.config.mts", - "test:watch": "vitest watch", - "test:coverage": "vitest run --coverage" - }, - "keywords": [ - "sandbox", - "cloud", - "adapter", - "abstraction" - ], - "author": "", - "license": "MIT", - "dependencies": { - "@alibaba-group/opensandbox": "^0.1.3" - }, - "devDependencies": { - "vitest": "^3.0.9", - "@vitest/coverage-v8": "^3.0.9", - "typescript": "^5.1.3", - "husky": "^9.1.7", - "lint-staged": "^16.2.7" - }, - "peerDependencies": { - "typescript": "^5.0.0" - } -} diff --git 
a/sdk/sandbox/src/adapters/BaseSandboxAdapter.ts b/sdk/sandbox/src/adapters/BaseSandboxAdapter.ts deleted file mode 100644 index 821309e126..0000000000 --- a/sdk/sandbox/src/adapters/BaseSandboxAdapter.ts +++ /dev/null @@ -1,621 +0,0 @@ -import { FeatureNotSupportedError, SandboxReadyTimeoutError } from '../errors'; -import type { ISandbox } from '../interfaces/ISandbox'; -import { CapabilityDetector, CommandPolyfillService } from '../polyfill'; -import type { - ContentReplaceEntry, - DirectoryEntry, - ExecuteOptions, - ExecuteResult, - FileDeleteResult, - FileInfo, - FileReadResult, - FileWriteEntry, - FileWriteResult, - MoveEntry, - PermissionEntry, - ProviderCapabilities, - ReadFileOptions, - SandboxConfig, - SandboxId, - SandboxInfo, - SandboxMetrics, - SandboxStatus, - SearchResult, - StreamHandlers -} from '../types'; - -/** - * Abstract base class for all sandbox adapters. - * - * Implements the Template Method pattern for capability-aware operations. - * Subclasses implement native methods, and this base class automatically -n * routes to polyfills when native capabilities are unavailable. - * - * Following the Open/Closed Principle: new providers are added by - * extending this class, not modifying it. - */ -export abstract class BaseSandboxAdapter implements ISandbox { - abstract readonly id: SandboxId; - abstract readonly provider: string; - abstract readonly capabilities: ProviderCapabilities; - - protected _status: SandboxStatus = { state: 'Creating' }; - protected polyfillService?: CommandPolyfillService; - protected capabilityDetector: CapabilityDetector; - - constructor() { - // Will be initialized by subclasses with their capabilities - this.capabilityDetector = new CapabilityDetector(this.getCapabilitiesForDetector()); - } - - /** - * Get capabilities for the detector (called during construction). - * Subclasses should override capabilities property. 
- */ - protected getCapabilitiesForDetector(): ProviderCapabilities { - return this.capabilities; - } - - /** - * Initialize the polyfill service if needed. - * Called by subclasses after setting up command execution. - */ - protected initializePolyfillService(executor: ISandbox): void { - if ( - !( - this.capabilities.nativeFileSystem && - this.capabilities.nativeHealthCheck && - this.capabilities.nativeMetrics && - this.capabilities.supportsSearch - ) - ) { - this.polyfillService = new CommandPolyfillService(executor); - } - } - - get status(): SandboxStatus { - return this._status; - } - - // ==================== Abstract Native Methods ==================== - // Subclasses MUST implement these - - abstract create(config: SandboxConfig): Promise; - abstract start(): Promise; - abstract stop(): Promise; - abstract pause(): Promise; - abstract resume(): Promise; - abstract delete(): Promise; - abstract getInfo(): Promise; - abstract close(): Promise; - - /** - * Native command execution - subclasses must implement. - */ - protected abstract nativeExecute( - command: string, - options?: ExecuteOptions - ): Promise; - - /** - * Native file read - implement if nativeFileSystem is true. - */ - protected abstract nativeReadFiles( - paths: string[], - options?: ReadFileOptions - ): Promise; - - /** - * Native file write - implement if nativeFileSystem is true. - */ - protected abstract nativeWriteFiles(entries: FileWriteEntry[]): Promise; - - /** - * Native file delete - implement if nativeFileSystem is true. - */ - protected abstract nativeDeleteFiles(paths: string[]): Promise; - - /** - * Native directory list - implement if nativeFileSystem is true. - */ - protected abstract nativeListDirectory(path: string): Promise; - - /** - * Native file info - implement if nativeFileSystem is true. - */ - protected abstract nativeGetFileInfo(paths: string[]): Promise>; - - /** - * Native health check - implement if nativeHealthCheck is true. 
- */ - protected abstract nativePing(): Promise; - - /** - * Native metrics - implement if nativeMetrics is true. - */ - protected abstract nativeGetMetrics(): Promise; - - // ==================== Template Methods (Capability Routing) ==================== - - async waitUntilReady(timeoutMs: number = 120000): Promise { - const startTime = Date.now(); - const checkInterval = 1000; - - while (Date.now() - startTime < timeoutMs) { - const isReady = await this.ping(); - if (isReady) { - return; - } - await this.sleep(checkInterval); - } - - throw new SandboxReadyTimeoutError(this.id, timeoutMs); - } - - async renewExpiration(additionalSeconds: number): Promise { - if (!this.capabilities.supportsRenews) { - throw new FeatureNotSupportedError( - 'Sandbox expiration renewal not supported by this provider', - 'renewExpiration', - this.provider - ); - } - await this.nativeRenewExpiration(additionalSeconds); - } - - protected abstract nativeRenewExpiration(additionalSeconds: number): Promise; - - // ==================== ICommandExecution Implementation ==================== - - async execute(command: string, options?: ExecuteOptions): Promise { - return this.nativeExecute(command, options); - } - - async executeStream( - command: string, - handlers: StreamHandlers, - options?: ExecuteOptions - ): Promise { - if (!this.capabilities.supportsStreamingOutput) { - // Fallback: execute normally and call handlers - const result = await this.execute(command, options); - - if (handlers.onStdout && result.stdout) { - await handlers.onStdout({ text: result.stdout }); - } - if (handlers.onStderr && result.stderr) { - await handlers.onStderr({ text: result.stderr }); - } - if (handlers.onComplete) { - await handlers.onComplete(result); - } - return; - } - - await this.nativeExecuteStream(command, handlers, options); - } - - protected abstract nativeExecuteStream( - command: string, - handlers: StreamHandlers, - options?: ExecuteOptions - ): Promise; - - async executeBackground( - command: 
string, - options?: ExecuteOptions - ): Promise<{ sessionId: string; kill(): Promise }> { - if (!this.capabilities.supportsBackgroundExecution) { - throw new FeatureNotSupportedError( - 'Background execution not supported by this provider', - 'executeBackground', - this.provider - ); - } - return this.nativeExecuteBackground(command, options); - } - - protected abstract nativeExecuteBackground( - command: string, - options?: ExecuteOptions - ): Promise<{ sessionId: string; kill(): Promise }>; - - async interrupt(sessionId: string): Promise { - if (!this.capabilities.supportsBackgroundExecution) { - throw new FeatureNotSupportedError( - 'Command interruption not supported by this provider', - 'interrupt', - this.provider - ); - } - return this.nativeInterrupt(sessionId); - } - - protected abstract nativeInterrupt(sessionId: string): Promise; - - // ==================== IFileSystem Implementation (with Polyfill Routing) ==================== - - async readFiles(paths: string[], options?: ReadFileOptions): Promise { - if (this.capabilities.nativeFileSystem) { - return this.nativeReadFiles(paths, options); - } - - // Use polyfill - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'File read not supported and no polyfill available', - 'readFiles', - this.provider - ); - } - - // Batch via polyfill - const results: FileReadResult[] = []; - for (const path of paths) { - try { - const content = await this.polyfillService.readFile(path); - results.push({ path, content, error: null }); - } catch (error) { - results.push({ - path, - content: new Uint8Array(), - error: error instanceof Error ? 
error : new Error(String(error)) - }); - } - } - return results; - } - - async writeFiles(entries: FileWriteEntry[]): Promise { - if (this.capabilities.nativeFileSystem) { - return this.nativeWriteFiles(entries); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'File write not supported and no polyfill available', - 'writeFiles', - this.provider - ); - } - - const results: FileWriteResult[] = []; - for (const entry of entries) { - try { - let bytesWritten: number; - - if (typeof entry.data === 'string') { - bytesWritten = await this.polyfillService.writeTextFile(entry.path, entry.data); - } else if (entry.data instanceof Uint8Array) { - bytesWritten = await this.polyfillService.writeFile(entry.path, entry.data); - } else if (entry.data instanceof ArrayBuffer) { - bytesWritten = await this.polyfillService.writeFile( - entry.path, - new Uint8Array(entry.data) - ); - } else if (entry.data instanceof Blob) { - const arrayBuffer = await entry.data.arrayBuffer(); - bytesWritten = await this.polyfillService.writeFile( - entry.path, - new Uint8Array(arrayBuffer) - ); - } else { - // ReadableStream - const chunks: Uint8Array[] = []; - const reader = entry.data.getReader(); - while (true) { - const { done, value } = await reader.read(); - if (done) { - break; - } - chunks.push(value); - } - const totalLength = chunks.reduce((sum, c) => sum + c.length, 0); - const combined = new Uint8Array(totalLength); - let offset = 0; - for (const chunk of chunks) { - combined.set(chunk, offset); - offset += chunk.length; - } - bytesWritten = await this.polyfillService.writeFile(entry.path, combined); - } - - results.push({ path: entry.path, bytesWritten, error: null }); - } catch (error) { - results.push({ - path: entry.path, - bytesWritten: 0, - error: error instanceof Error ? 
error : new Error(String(error)) - }); - } - } - return results; - } - - async deleteFiles(paths: string[]): Promise { - if (this.capabilities.nativeFileSystem) { - return this.nativeDeleteFiles(paths); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'File delete not supported and no polyfill available', - 'deleteFiles', - this.provider - ); - } - - const polyfillResults = await this.polyfillService.deleteFiles(paths); - return polyfillResults.map((r) => ({ - path: r.path, - success: r.success, - error: r.error || null - })); - } - - async moveFiles(entries: MoveEntry[]): Promise { - if (this.capabilities.nativeFileSystem) { - return this.nativeMoveFiles(entries); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'File move not supported and no polyfill available', - 'moveFiles', - this.provider - ); - } - - await this.polyfillService.moveFiles( - entries.map((e) => ({ source: e.source, destination: e.destination })) - ); - } - - protected abstract nativeMoveFiles(entries: MoveEntry[]): Promise; - - async replaceContent(entries: ContentReplaceEntry[]): Promise { - if (this.capabilities.nativeFileSystem) { - return this.nativeReplaceContent(entries); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'Content replace not supported and no polyfill available', - 'replaceContent', - this.provider - ); - } - - await this.polyfillService.replaceContent(entries); - } - - protected abstract nativeReplaceContent(entries: ContentReplaceEntry[]): Promise; - - // ==================== Directory Operations ==================== - - async createDirectories( - paths: string[], - options?: { mode?: number; owner?: string; group?: string } - ): Promise { - if (this.capabilities.nativeFileSystem) { - return this.nativeCreateDirectories(paths, options); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'Directory creation not supported and no polyfill available', - 
'createDirectories', - this.provider - ); - } - - await this.polyfillService.createDirectories(paths, options); - } - - protected abstract nativeCreateDirectories( - paths: string[], - options?: { mode?: number; owner?: string; group?: string } - ): Promise; - - async deleteDirectories( - paths: string[], - options?: { recursive?: boolean; force?: boolean } - ): Promise { - if (this.capabilities.nativeFileSystem) { - return this.nativeDeleteDirectories(paths, options); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'Directory deletion not supported and no polyfill available', - 'deleteDirectories', - this.provider - ); - } - - await this.polyfillService.deleteDirectories(paths, options); - } - - protected abstract nativeDeleteDirectories( - paths: string[], - options?: { recursive?: boolean; force?: boolean } - ): Promise; - - async listDirectory(path: string): Promise { - if (this.capabilities.nativeFileSystem) { - return this.nativeListDirectory(path); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'Directory listing not supported and no polyfill available', - 'listDirectory', - this.provider - ); - } - - return this.polyfillService.listDirectory(path); - } - - // ==================== Streaming Operations ==================== - - readFileStream(path: string): AsyncIterable { - const self = this; - return { - async *[Symbol.asyncIterator]() { - if (!self.capabilities.supportsStreamingTransfer) { - // Fallback: read entire file then stream it - const result = await self.readFiles([path]); - const fileResult = result[0]; - if (!fileResult) { - throw new Error('No file result returned'); - } - if (fileResult.error) { - throw fileResult.error; - } - yield fileResult.content; - return; - } - - const iterable = self.nativeReadFileStream(path); - for await (const chunk of iterable) { - yield chunk; - } - } - }; - } - - protected abstract nativeReadFileStream(path: string): AsyncIterable; - - async 
writeFileStream(path: string, stream: ReadableStream): Promise { - if (!this.capabilities.supportsStreamingTransfer) { - // Fallback: collect stream then write - const reader = stream.getReader(); - const chunks: Uint8Array[] = []; - while (true) { - const { done, value } = await reader.read(); - if (done) { - break; - } - chunks.push(value); - } - - const totalLength = chunks.reduce((sum, c) => sum + c.length, 0); - const combined = new Uint8Array(totalLength); - let offset = 0; - for (const chunk of chunks) { - combined.set(chunk, offset); - offset += chunk.length; - } - - await this.writeFiles([{ path, data: combined }]); - return; - } - - await this.nativeWriteFileStream(path, stream); - } - - protected abstract nativeWriteFileStream( - path: string, - stream: ReadableStream - ): Promise; - - // ==================== Metadata Operations ==================== - - async getFileInfo(paths: string[]): Promise> { - if (this.capabilities.nativeFileSystem) { - return this.nativeGetFileInfo(paths); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'File info not supported and no polyfill available', - 'getFileInfo', - this.provider - ); - } - - return this.polyfillService.getFileInfo(paths); - } - - async setPermissions(entries: PermissionEntry[]): Promise { - if (!this.capabilities.supportsPermissions) { - // Try polyfill - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'Permission setting not supported and no polyfill available', - 'setPermissions', - this.provider - ); - } - await this.polyfillService.setPermissions(entries); - return; - } - - await this.nativeSetPermissions(entries); - } - - protected abstract nativeSetPermissions(entries: PermissionEntry[]): Promise; - - // ==================== Search Operations ==================== - - async search(pattern: string, path?: string): Promise { - if (this.capabilities.supportsSearch) { - return this.nativeSearch(pattern, path); - } - - if (!this.polyfillService) { - 
throw new FeatureNotSupportedError( - 'File search not supported and no polyfill available', - 'search', - this.provider - ); - } - - return this.polyfillService.search(pattern, path); - } - - protected abstract nativeSearch(pattern: string, path?: string): Promise; - - // ==================== IHealthCheck Implementation ==================== - - async ping(): Promise { - if (this.capabilities.nativeHealthCheck) { - return this.nativePing(); - } - - if (!this.polyfillService) { - // Fallback: try to execute a simple command - try { - const result = await this.execute('echo PING'); - return result.exitCode === 0; - } catch { - return false; - } - } - - return this.polyfillService.ping(); - } - - async getMetrics(): Promise { - if (this.capabilities.nativeMetrics) { - return this.nativeGetMetrics(); - } - - if (!this.polyfillService) { - throw new FeatureNotSupportedError( - 'Metrics not supported and no polyfill available', - 'getMetrics', - this.provider - ); - } - - return this.polyfillService.getMetrics(); - } - - // ==================== Utility Methods ==================== - - protected sleep(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); - } -} diff --git a/sdk/sandbox/src/adapters/MinimalProviderAdapter.ts b/sdk/sandbox/src/adapters/MinimalProviderAdapter.ts deleted file mode 100644 index c65766ed35..0000000000 --- a/sdk/sandbox/src/adapters/MinimalProviderAdapter.ts +++ /dev/null @@ -1,360 +0,0 @@ -import { FeatureNotSupportedError, FileOperationError } from '../errors'; -import { - type ContentReplaceEntry, - createMinimalCapabilities, - type DirectoryEntry, - type ExecuteOptions, - type ExecuteResult, - type FileDeleteResult, - type FileInfo, - type FileReadResult, - type FileWriteEntry, - type FileWriteResult, - type MoveEntry, - type PermissionEntry, - type ProviderCapabilities, - type ReadFileOptions, - type SandboxConfig, - type SandboxId, - type SandboxInfo, - type SandboxMetrics, - type SandboxStatus, - type 
SearchResult, - type StreamHandlers -} from '../types'; -import { BaseSandboxAdapter } from './BaseSandboxAdapter'; - -/** - * Connection interface for minimal providers. - * Represents a provider that only supports basic command execution. - */ -export interface MinimalProviderConnection { - /** Unique identifier for the sandbox */ - id: string; - - /** Execute a command and return result */ - execute(command: string): Promise<{ - stdout: string; - stderr: string; - exitCode: number; - }>; - - /** Get current status */ - getStatus(): Promise; - - /** Close the connection */ - close(): Promise; -} - -/** - * Minimal provider adapter. - * - * This demonstrates how to adapt a provider with minimal capabilities - * (only command execution) to the full ISandbox interface using - * the CommandPolyfillService. - * - * Use case: Legacy SSH-based sandboxes, custom container providers, - * or any provider that only exposes a shell interface. - */ -export class MinimalProviderAdapter extends BaseSandboxAdapter { - readonly provider = 'minimal'; - readonly capabilities: ProviderCapabilities = createMinimalCapabilities(); - - private _id: SandboxId = ''; - private _status: SandboxStatus = { state: 'Creating' }; - private connection?: MinimalProviderConnection; - - constructor(private connectionFactory?: () => Promise) { - super(); - } - - get id(): SandboxId { - return this._id; - } - - get status(): SandboxStatus { - return this._status; - } - - // ==================== Lifecycle Methods ==================== - - async create(config: SandboxConfig): Promise { - // Minimal provider assumes sandbox is created externally - // This would typically involve calling an API to create the sandbox - if (this.connectionFactory) { - this.connection = await this.connectionFactory(); - this._id = this.connection.id; - this._status = { state: 'Running' }; - - // Initialize polyfill service for all filesystem operations - this.initializePolyfillService(this); - - // Run any setup commands from 
config - if (config.entrypoint && config.entrypoint.length > 0) { - await this.execute(config.entrypoint.join(' ')); - } - } else { - throw new Error('Connection factory not provided'); - } - } - - async connect(connection: MinimalProviderConnection): Promise { - this.connection = connection; - this._id = connection.id; - this._status = await connection.getStatus(); - this.initializePolyfillService(this); - } - - async start(): Promise { - // No-op: minimal provider doesn't support explicit start - this._status = { state: 'Running' }; - } - - async stop(): Promise { - // Execute shutdown command - await this.execute('exit 0').catch(() => { - // Expected to fail as connection closes - }); - this._status = { state: 'Deleted' }; - } - - async pause(): Promise { - throw new FeatureNotSupportedError( - 'Pause not supported by minimal provider', - 'pause', - this.provider - ); - } - - async resume(): Promise { - throw new FeatureNotSupportedError( - 'Resume not supported by minimal provider', - 'resume', - this.provider - ); - } - - async delete(): Promise { - await this.stop(); - await this.connection?.close(); - } - - async getInfo(): Promise { - return { - id: this._id, - image: { repository: 'minimal', tag: 'latest' }, - entrypoint: [], - status: this._status, - createdAt: new Date() - }; - } - - async close(): Promise { - await this.connection?.close(); - } - - protected async nativeRenewExpiration(_additionalSeconds: number): Promise { - throw new FeatureNotSupportedError( - 'Renewal not supported by minimal provider', - 'renewExpiration', - this.provider - ); - } - - // ==================== Command Execution (Native) ==================== - - protected async nativeExecute(command: string, options?: ExecuteOptions): Promise { - if (!this.connection) { - throw new Error('Not connected to minimal provider'); - } - - // Handle working directory option - let finalCommand = command; - if (options?.workingDirectory) { - finalCommand = `cd "${options.workingDirectory}" && 
${command}`; - } - - // Handle timeout via timeout command - if (options?.timeoutMs && options.timeoutMs > 0) { - const timeoutSec = Math.ceil(options.timeoutMs / 1000); - finalCommand = `timeout ${timeoutSec} sh -c '${finalCommand.replace(/'/g, "'\"'\"'")}'`; - } - - // Handle environment variables - if (options?.env && Object.keys(options.env).length > 0) { - const envVars = Object.entries(options.env) - .map(([k, v]) => `${k}="${v.replace(/"/g, '"')}"`) - .join(' '); - finalCommand = `export ${envVars} && ${finalCommand}`; - } - - const result = await this.connection.execute(finalCommand); - - return { - stdout: result.stdout, - stderr: result.stderr, - exitCode: result.exitCode - }; - } - - protected async nativeExecuteStream( - command: string, - handlers: StreamHandlers, - options?: ExecuteOptions - ): Promise { - // Minimal provider doesn't support true streaming - // Simulate by executing and calling handlers - const result = await this.nativeExecute(command, options); - - if (handlers.onStdout && result.stdout) { - await handlers.onStdout({ text: result.stdout }); - } - if (handlers.onStderr && result.stderr) { - await handlers.onStderr({ text: result.stderr }); - } - if (handlers.onComplete) { - await handlers.onComplete(result); - } - } - - protected async nativeExecuteBackground( - command: string, - options?: ExecuteOptions - ): Promise<{ sessionId: string; kill(): Promise }> { - // Simulate background execution with nohup - const sessionId = `bg-${Date.now()}`; - - let finalCommand = command; - if (options?.workingDirectory) { - finalCommand = `cd "${options.workingDirectory}" && ${command}`; - } - - // Start process in background - await this.nativeExecute( - `nohup sh -c '${finalCommand.replace( - /'/g, - "'\"'\"'" - )}' > /tmp/${sessionId}.out 2>&1 & echo $!`, - options - ); - - return { - sessionId, - kill: async () => { - await this.nativeExecute(`pkill -f "${sessionId}" || true`); - } - }; - } - - protected async nativeInterrupt(_sessionId: 
string): Promise { - // Kill all background processes - await this.nativeExecute('pkill -f "nohup" || true'); - } - - // ==================== File System Operations (All Polyfilled) ==================== - - protected async nativeReadFiles( - _paths: string[], - _options?: ReadFileOptions - ): Promise { - // This should never be called - polyfill is always used - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeWriteFiles(_entries: FileWriteEntry[]): Promise { - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeDeleteFiles(_paths: string[]): Promise { - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeListDirectory(_path: string): Promise { - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeGetFileInfo(_paths: string[]): Promise> { - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeMoveFiles(_entries: MoveEntry[]): Promise { - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeReplaceContent(_entries: ContentReplaceEntry[]): Promise { - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeCreateDirectories( - _paths: string[], - _options?: { mode?: number; owner?: string; group?: string } - ): Promise { - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeDeleteDirectories( - _paths: string[], - _options?: { recursive?: boolean; force?: boolean } - ): Promise { - throw new FileOperationError('Native filesystem not supported', '', 'TRANSFER_ERROR'); - } - - protected nativeReadFileStream(_path: string): AsyncIterable { - throw new 
FileOperationError('Native streaming not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeWriteFileStream( - _path: string, - _stream: ReadableStream - ): Promise { - throw new FileOperationError('Native streaming not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeSetPermissions(_entries: PermissionEntry[]): Promise { - throw new FileOperationError('Native permissions not supported', '', 'TRANSFER_ERROR'); - } - - protected async nativeSearch(_pattern: string, _path?: string): Promise { - throw new FileOperationError('Native search not supported', '', 'TRANSFER_ERROR'); - } - - // ==================== Health Check ==================== - - protected async nativePing(): Promise { - try { - const result = await this.execute('echo PING'); - return result.exitCode === 0; - } catch { - return false; - } - } - - protected async nativeGetMetrics(): Promise { - // Read from /proc filesystem via command - const cpuResult = await this.execute( - 'cat /proc/cpuinfo 2>/dev/null | grep processor | wc -l || echo 1' - ); - const cpuCount = Number.parseInt(cpuResult.stdout.trim(), 10) || 1; - - const memResult = await this.execute('cat /proc/meminfo 2>/dev/null || echo "MemTotal: 0 kB"'); - const memMatch = memResult.stdout.match(/MemTotal:\s+(\d+)\s+kB/); - const memoryTotalMiB = memMatch ? Math.floor(Number.parseInt(memMatch[1], 10) / 1024) : 0; - - // Estimate used memory (very rough approximation) - const memFreeMatch = memResult.stdout.match(/MemFree:\s+(\d+)\s+kB/); - const memoryUsedMiB = - memMatch && memFreeMatch - ? 
Math.floor( - (Number.parseInt(memMatch[1], 10) - Number.parseInt(memFreeMatch[1], 10)) / 1024 - ) - : 0; - - return { - cpuCount, - cpuUsedPercentage: 0, // Would need multiple samples - memoryTotalMiB, - memoryUsedMiB, - timestamp: Date.now() - }; - } -} diff --git a/sdk/sandbox/src/adapters/OpenSandboxAdapter.ts b/sdk/sandbox/src/adapters/OpenSandboxAdapter.ts deleted file mode 100644 index 6cca213145..0000000000 --- a/sdk/sandbox/src/adapters/OpenSandboxAdapter.ts +++ /dev/null @@ -1,1083 +0,0 @@ -import { ConnectionConfig, Sandbox } from '@alibaba-group/opensandbox'; -import { - CommandExecutionError, - ConnectionError, - FeatureNotSupportedError, - SandboxStateError -} from '../errors'; -import type { - ContentReplaceEntry, - ExecuteOptions, - ExecuteResult, - FileDeleteResult, - FileInfo, - FileReadResult, - FileWriteEntry, - FileWriteResult, - ImageSpec, - MoveEntry, - PermissionEntry, - ProviderCapabilities, - ReadFileOptions, - ResourceLimits, - SandboxConfig, - SandboxId, - SandboxInfo, - SandboxMetrics, - SearchResult, - StreamHandlers -} from '../types'; -import { createFullCapabilities } from '../types/capabilities'; -import { readableStreamToAsyncIterable } from '../utils/streams'; -import { BaseSandboxAdapter } from './BaseSandboxAdapter'; - -/** - * Sandbox runtime type. - * - docker: Full-featured runtime with pause/resume support - * - kubernetes: Container orchestration runtime (no pause/resume, stop = delete) - */ -export type SandboxRuntimeType = 'docker' | 'kubernetes'; - -/** - * Connection configuration options for OpenSandboxAdapter. - */ -export interface OpenSandboxConnectionConfig { - /** Base URL for the OpenSandbox API (e.g., 'https://api.opensandbox.example.com') */ - baseUrl?: string; - /** API key for authentication */ - apiKey?: string; - /** - * Sandbox runtime type. 
- * - docker: Full-featured with pause/resume support - * - kubernetes: No pause/resume, stop operation deletes the sandbox - * @default 'docker' - */ - runtime?: SandboxRuntimeType; -} - -/** - * OpenSandbox provider adapter. - * - * This is the "Gold Standard" implementation with full native - * support for all features. Uses the OpenSandbox TypeScript SDK - * for all operations. - * - * @example - * ```typescript - * const adapter = new OpenSandboxAdapter({ - * baseUrl: 'https://api.opensandbox.example.com', - * apiKey: 'your-api-key' - * }); - * - * await adapter.create({ - * image: { repository: 'node', tag: '18-alpine' } - * }); - * - * const result = await adapter.execute('node --version'); - * console.log(result.stdout); // v18.x.x - * ``` - */ -export class OpenSandboxAdapter extends BaseSandboxAdapter { - /** Provider identifier */ - readonly provider = 'opensandbox' as const; - - /** Runtime type for this adapter instance */ - readonly runtime: SandboxRuntimeType; - - /** - * Capability set - configured based on runtime type. - * - Docker: Full capabilities including pause/resume - * - Kubernetes: No pause/resume, stop = delete - * Note: nativeFileSystem is false because the SDK doesn't provide - * a native directory listing method (listDirectory uses command polyfill). - */ - readonly capabilities: ProviderCapabilities; - - /** Internal SDK sandbox instance */ - private _sandbox?: Sandbox; - - /** SDK connection configuration */ - private _connection: ConnectionConfig; - - /** Cached sandbox ID */ - private _id: SandboxId = ''; - - /** Current adapter state */ - private _connectionState: 'disconnected' | 'connecting' | 'connected' | 'closed' = 'disconnected'; - - /** - * Creates a new OpenSandboxAdapter instance. 
- * - * @param connectionConfig - Connection configuration options - */ - constructor(private connectionConfig: OpenSandboxConnectionConfig = {}) { - super(); - - // Determine runtime type (default to docker for backwards compatibility) - this.runtime = connectionConfig.runtime ?? 'docker'; - - // Configure capabilities based on runtime type - this.capabilities = this.createCapabilitiesForRuntime(this.runtime); - this._connection = this.createConnectionConfig(); - } - - /** - * Get the sandbox ID. Returns empty string if not created/connected. - */ - get id(): SandboxId { - return this._id; - } - - /** - * Get the current connection state. - */ - get connectionState(): typeof this._connectionState { - return this._connectionState; - } - - /** - * Get the underlying SDK sandbox instance. - * @throws {SandboxStateError} If sandbox is not initialized - */ - private get sandbox(): Sandbox { - if (!this._sandbox) { - throw new SandboxStateError( - 'Sandbox not initialized. Call create() or connect() first.', - this._connectionState, - 'connected' - ); - } - return this._sandbox; - } - - /** - * Create ConnectionConfig from adapter's connection options. - * Handles URL parsing with fallback to domain string. - */ - private createConnectionConfig(): ConnectionConfig { - const { baseUrl, apiKey } = this.connectionConfig; - - if (!baseUrl) { - // Default to localhost:8080 as per SDK default - return new ConnectionConfig({ apiKey }); - } - - // Pass the full URL as domain - SDK handles URL parsing internally - return new ConnectionConfig({ - domain: baseUrl, - apiKey - }); - } - - // ==================== Image and Resource Conversion ==================== - - /** - * Convert ImageSpec to SDK image format (string). 
- * Format: repository[:tag][@digest] - */ - private convertImageSpec(image: ImageSpec): string { - const parts: string[] = [image.repository]; - - if (image.tag) { - parts.push(':', image.tag); - } - if (image.digest) { - parts.push('@', image.digest); - } - - return parts.join(''); - } - - /** - * Parse SDK image string into ImageSpec. - * Handles formats: repository, repository:tag, repository@digest - */ - private parseImageSpec(image: string): ImageSpec { - const atIndex = image.indexOf('@'); - - // Handle digest format first (repository@digest) - if (atIndex > -1) { - const repository = image.slice(0, atIndex); - const digest = image.slice(atIndex + 1); - return { repository, digest }; - } - - // Handle tag format (repository:tag) - const colonIndex = image.indexOf(':'); - if (colonIndex > -1) { - const repository = image.slice(0, colonIndex); - const tag = image.slice(colonIndex + 1); - return { repository, tag }; - } - - // Just repository name - return { repository: image }; - } - - /** - * Convert ResourceLimits to SDK resource format. - * Maps cpuCount -> cpu, memoryMiB -> memory, diskGiB -> disk - */ - private convertResourceLimits( - resourceLimits?: ResourceLimits - ): Record | undefined { - if (!resourceLimits) { - return undefined; - } - - const result: Record = {}; - - if (resourceLimits.cpuCount !== undefined) { - result.cpu = resourceLimits.cpuCount.toString(); - } - if (resourceLimits.memoryMiB !== undefined) { - result.memory = `${resourceLimits.memoryMiB}Mi`; - } - if (resourceLimits.diskGiB !== undefined) { - result.disk = `${resourceLimits.diskGiB}Gi`; - } - - return result; - } - - /** - * Parse SDK resource limits (Record) to ResourceLimits. 
- * Handles memory format: 512Mi, 2Gi - */ - private parseResourceLimits(resource?: Record): ResourceLimits | undefined { - if (!resource) { - return undefined; - } - - const result: ResourceLimits = {}; - - // Parse CPU count - const cpu = resource.cpu; - if (cpu) { - const cpuCount = Number.parseInt(cpu, 10); - if (!Number.isNaN(cpuCount)) { - result.cpuCount = cpuCount; - } - } - - // Parse memory (e.g., "512Mi" or "2Gi") - const memory = resource.memory; - if (memory) { - const match = memory.match(/^(\d+)(Mi|Gi)$/); - if (match) { - const value = Number.parseInt(match[1] || '0', 10); - if (match[2] === 'Mi') { - result.memoryMiB = value; - } else { - // Convert GiB to MiB - result.memoryMiB = value * 1024; - } - } - } - - // Parse disk (e.g., "10Gi") - const disk = resource.disk; - if (disk) { - const match = disk.match(/^(\d+)Gi$/); - if (match) { - const value = Number.parseInt(match[1] || '0', 10); - result.diskGiB = value; - } - } - - return result; - } - - /** - * Create capabilities configuration based on runtime type. - * - Docker: Full capabilities including pause/resume - * - Kubernetes: No pause/resume (containers can't be paused), stop = delete - */ - private createCapabilitiesForRuntime(runtime: SandboxRuntimeType): ProviderCapabilities { - const baseCapabilities: ProviderCapabilities = { - ...createFullCapabilities(), - nativeFileSystem: false // SDK doesn't provide native directory listing - }; - - if (runtime === 'kubernetes') { - // Kubernetes-specific limitations - return { - ...baseCapabilities, - supportsPauseResume: false // Kubernetes doesn't support pausing containers - // In Kubernetes, stop() operation deletes the sandbox (no separate stop state) - // This is handled in the stop() and delete() methods - }; - } - - // Docker runtime - full capabilities - return baseCapabilities; - } - - // ==================== Lifecycle Methods ==================== - - /** - * Create a new sandbox with the given configuration. 
- * - * @param config - Sandbox configuration - * @throws {ConnectionError} If connection to the API fails - * @throws {CommandExecutionError} If sandbox creation fails - */ - async create(config: SandboxConfig): Promise { - this._connectionState = 'connecting'; - - try { - const image = this.convertImageSpec(config.image); - const resource = this.convertResourceLimits(config.resourceLimits); - - this._sandbox = await Sandbox.create({ - connectionConfig: this._connection, - image, - entrypoint: config.entrypoint, - timeoutSeconds: config.timeout, - resource, - env: config.env, - metadata: config.metadata - }); - - this._id = this._sandbox.id; - this._status = { state: 'Running' }; - this._connectionState = 'connected'; - - // Initialize polyfill service if needed (unlikely for OpenSandbox) - this.initializePolyfillService(this); - } catch (error) { - this._connectionState = 'disconnected'; - throw new ConnectionError('Failed to create sandbox', this.connectionConfig.baseUrl, error); - } - } - - /** - * Connect to an existing OpenSandbox instance. - * - * @param sandboxId - The ID of the sandbox to connect to - * @throws {ConnectionError} If connection fails or sandbox not found - */ - async connect(sandboxId: string): Promise { - this._connectionState = 'connecting'; - - try { - this._sandbox = await Sandbox.connect({ - sandboxId, - connectionConfig: this._connection - }); - - this._id = this._sandbox.id; - this._status = { state: 'Running' }; - this._connectionState = 'connected'; - - this.initializePolyfillService(this); - } catch (error) { - this._connectionState = 'disconnected'; - throw new ConnectionError( - `Failed to connect to sandbox ${sandboxId}`, - this.connectionConfig.baseUrl, - error - ); - } - } - - /** - * Start a stopped or paused sandbox. - * For OpenSandbox, this resumes from paused state if applicable. 
- * - * @throws {SandboxStateError} If sandbox is not initialized - */ - async start(): Promise { - if (this._status.state === 'Paused') { - await this.resume(); - } - } - - /** - * Stop the sandbox (graceful shutdown). - * - * @throws {SandboxStateError} If sandbox is not initialized - */ - async stop(): Promise { - try { - await this.sandbox.kill(); - this._status = { state: 'Deleted' }; - this._connectionState = 'disconnected'; - } catch (error) { - throw new CommandExecutionError( - 'Failed to stop sandbox', - 'stop', - error instanceof Error ? error : undefined - ); - } - } - - /** - * Pause a running sandbox. - * - * @throws {SandboxStateError} If sandbox is not initialized - * @throws {FeatureNotSupportedError} If pause is not supported by the runtime - * @throws {CommandExecutionError} If pause fails - */ - async pause(): Promise { - try { - await this.sandbox.pause(); - this._status = { state: 'Paused' }; - } catch (error) { - // Check if this is a "not supported" error from the SDK - if ( - error && - typeof error === 'object' && - 'code' in error && - error.code === 'SANDBOX::API_NOT_SUPPORTED' - ) { - throw new FeatureNotSupportedError( - 'Pause operation is not supported by this runtime (e.g., Kubernetes)', - 'pause', - this.provider - ); - } - throw new CommandExecutionError( - 'Failed to pause sandbox', - 'pause', - error instanceof Error ? error : undefined - ); - } - } - - /** - * Resume a paused sandbox. 
- * - * @throws {SandboxStateError} If sandbox is not initialized - * @throws {FeatureNotSupportedError} If resume is not supported by the runtime - * @throws {CommandExecutionError} If resume fails - */ - async resume(): Promise { - try { - // resume() returns a fresh Sandbox instance - this._sandbox = await this.sandbox.resume(); - this._id = this.sandbox.id; - this._status = { state: 'Running' }; - } catch (error) { - // Check if this is a "not supported" error from the SDK - if ( - error && - typeof error === 'object' && - 'code' in error && - error.code === 'SANDBOX::API_NOT_SUPPORTED' - ) { - throw new FeatureNotSupportedError( - 'Resume operation is not supported by this runtime (e.g., Kubernetes)', - 'resume', - this.provider - ); - } - throw new CommandExecutionError( - 'Failed to resume sandbox', - 'resume', - error instanceof Error ? error : undefined - ); - } - } - - /** - * Delete the sandbox permanently. - * - * @throws {SandboxStateError} If sandbox is not initialized - * @throws {CommandExecutionError} If deletion fails - */ - async delete(): Promise { - try { - await this.sandbox.kill(); - this._status = { state: 'Deleted' }; - this._connectionState = 'disconnected'; - } catch (error) { - throw new CommandExecutionError( - 'Failed to delete sandbox', - 'delete', - error instanceof Error ? error : undefined - ); - } - } - - /** - * Get detailed information about the sandbox. - * - * @returns Sandbox information - * @throws {SandboxStateError} If sandbox is not initialized - */ - async getInfo(): Promise { - try { - const info = await this.sandbox.getInfo(); - return { - id: info.id, - image: - typeof info.image === 'string' - ? this.parseImageSpec(info.image) - : 'uri' in (info.image as Record) - ? 
this.parseImageSpec((info.image as { uri: string }).uri) - : (info.image as ImageSpec), - entrypoint: info.entrypoint, - metadata: info.metadata, - status: info.status, - createdAt: info.createdAt, - expiresAt: info.expiresAt, - resourceLimits: this.parseResourceLimits( - (info as Record).resourceLimits as Record | undefined - ) - }; - } catch (error) { - throw new CommandExecutionError( - 'Failed to get sandbox info', - 'getInfo', - error instanceof Error ? error : undefined - ); - } - } - - /** - * Close the connection and release resources. - */ - async close(): Promise { - try { - await this._sandbox?.close(); - } finally { - this._sandbox = undefined; - this._id = ''; - this._connectionState = 'closed'; - this._status = { state: 'Deleted' }; - } - } - - /** - * Renew the sandbox expiration. - * - * @param additionalSeconds - Seconds to extend the expiration by - * @throws {SandboxStateError} If sandbox is not initialized - * @throws {CommandExecutionError} If renewal fails - */ - async renewExpiration(additionalSeconds: number): Promise { - try { - await this.sandbox.renew(additionalSeconds); - } catch (error) { - throw new CommandExecutionError( - 'Failed to renew sandbox expiration', - 'renew', - error instanceof Error ? error : undefined - ); - } - } - - /** - * Native implementation of expiration renewal. - */ - protected override async nativeRenewExpiration(additionalSeconds: number): Promise { - await this.renewExpiration(additionalSeconds); - } - - // ==================== Command Execution ==================== - - /** - * Execute a command and wait for completion. 
- * - * @param command - The command to execute - * @param options - Execution options - * @returns Execution result with stdout, stderr, and exit code - * @throws {SandboxStateError} If sandbox is not initialized - * @throws {CommandExecutionError} If execution fails - */ - protected override async nativeExecute( - command: string, - options?: ExecuteOptions - ): Promise { - try { - const execution = await this.sandbox.commands.run(command, { - workingDirectory: options?.workingDirectory, - background: options?.background - }); - - // Combine stdout/stderr from logs arrays - // Join with newlines to preserve line structure for parsing - const stdout = execution.logs.stdout.map((msg) => msg.text).join('\n'); - const stderr = execution.logs.stderr.map((msg) => msg.text).join('\n'); - - // Get exit code from first result, default to 0 - const exitCode = execution.result[0]?.exitCode ?? 0; - - // Determine if output was truncated by comparing content lengths - const stdoutLength = execution.logs.stdout.reduce((sum, msg) => sum + msg.text.length, 0); - const stderrLength = execution.logs.stderr.reduce((sum, msg) => sum + msg.text.length, 0); - - // OpenSandbox SDK truncates output at 1MB per stream by default - const MaxOutputSize = 1024 * 1024; - const truncated = stdoutLength >= MaxOutputSize || stderrLength >= MaxOutputSize; - - return { - stdout, - stderr, - exitCode, - truncated - }; - } catch (error) { - if (error instanceof SandboxStateError) { - throw error; - } - throw new CommandExecutionError( - `Command execution failed: ${command}`, - command, - error instanceof Error ? error : undefined - ); - } - } - - /** - * Execute a command with streaming output. 
- * - * @param command - The command to execute - * @param handlers - Stream handlers for output - * @param options - Execution options - * @throws {SandboxStateError} If sandbox is not initialized - * @throws {CommandExecutionError} If execution fails - */ - protected override async nativeExecuteStream( - command: string, - handlers: StreamHandlers, - options?: ExecuteOptions - ): Promise { - try { - // SDK types may vary, use type assertions for compatibility - const sdkHandlers: Record = {}; - if (handlers.onStdout) { - sdkHandlers.onStdout = handlers.onStdout; - } - if (handlers.onStderr) { - sdkHandlers.onStderr = handlers.onStderr; - } - if (handlers.onComplete) { - sdkHandlers.onExecutionComplete = handlers.onComplete; - } - if (handlers.onError) { - sdkHandlers.onError = handlers.onError; - } - - await this.sandbox.commands.run( - command, - { - workingDirectory: options?.workingDirectory, - background: options?.background - }, - sdkHandlers as { - onStdout?: (msg: { text: string }) => void | Promise; - onStderr?: (msg: { text: string }) => void | Promise; - onExecutionComplete?: (result: { exitCode?: number }) => void | Promise; - onError?: (err: { message: string }) => void | Promise; - } - ); - } catch (error) { - throw new CommandExecutionError( - `Streaming command execution failed: ${command}`, - command, - error instanceof Error ? error : undefined - ); - } - } - - /** - * Execute a command in the background. 
- * - * @param command - The command to execute - * @param options - Execution options - * @returns Handle with sessionId and kill function - * @throws {SandboxStateError} If sandbox is not initialized - * @throws {CommandExecutionError} If execution fails - */ - protected override async nativeExecuteBackground( - command: string, - options?: ExecuteOptions - ): Promise<{ sessionId: string; kill(): Promise }> { - try { - const execution = await this.sandbox.commands.run(command, { - workingDirectory: options?.workingDirectory, - background: true - }); - - if (!execution.id) { - throw new CommandExecutionError( - 'Background execution did not return a session ID', - command - ); - } - - const sessionId = execution.id; - const sandbox = this.sandbox; - - return { - sessionId, - kill: async (): Promise => { - try { - await sandbox.commands.interrupt(sessionId); - } catch (error) { - throw new CommandExecutionError( - `Failed to kill background session ${sessionId}`, - 'interrupt', - error instanceof Error ? error : undefined - ); - } - } - }; - } catch (error) { - if (error instanceof CommandExecutionError) { - throw error; - } - throw new CommandExecutionError( - `Background command execution failed: ${command}`, - command, - error instanceof Error ? error : undefined - ); - } - } - - /** - * Interrupt/kill a running command session. - * - * @param sessionId - The session ID from executeBackground - * @throws {SandboxStateError} If sandbox is not initialized - * @throws {CommandExecutionError} If interruption fails - */ - protected override async nativeInterrupt(sessionId: string): Promise { - try { - await this.sandbox.commands.interrupt(sessionId); - } catch (error) { - throw new CommandExecutionError( - `Failed to interrupt session ${sessionId}`, - 'interrupt', - error instanceof Error ? error : undefined - ); - } - } - - // ==================== File System Operations ==================== - - /** - * Read files from the sandbox. 
- * - * @param paths - Array of file paths to read - * @param options - Read options - * @returns Array of results (one per path) - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected override async nativeReadFiles( - paths: string[], - options?: ReadFileOptions - ): Promise { - const results: FileReadResult[] = []; - - for (const path of paths) { - try { - let content: Uint8Array; - - if (options?.range) { - content = await this.sandbox.files.readBytes(path, { - range: options.range - }); - } else { - content = await this.sandbox.files.readBytes(path); - } - - results.push({ path, content, error: null }); - } catch (error) { - results.push({ - path, - content: new Uint8Array(), - error: error instanceof Error ? error : new Error(String(error)) - }); - } - } - - return results; - } - - /** - * Write files to the sandbox. - * - * @param entries - Files to write - * @returns Array of results with bytes written - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected override async nativeWriteFiles(entries: FileWriteEntry[]): Promise { - const results: FileWriteResult[] = []; - - for (const entry of entries) { - try { - const { data, size } = this.normalizeWriteData(entry.data); - - await this.sandbox.files.writeFiles([ - { - path: entry.path, - data, - mode: entry.mode, - owner: entry.owner, - group: entry.group - } - ]); - - results.push({ - path: entry.path, - bytesWritten: size, - error: null - }); - } catch (error) { - results.push({ - path: entry.path, - bytesWritten: 0, - error: error instanceof Error ? error : new Error(String(error)) - }); - } - } - - return results; - } - - /** - * Normalize write data to Uint8Array/Blob/ReadableStream and calculate size. 
- */ - private normalizeWriteData( - data: string | Uint8Array | ArrayBuffer | Blob | ReadableStream - ): { - data: Uint8Array | Blob | ReadableStream; - size: number; - } { - if (typeof data === 'string') { - const encoded = new TextEncoder().encode(data); - return { data: encoded, size: encoded.length }; - } - - if (data instanceof Uint8Array) { - return { data, size: data.length }; - } - - if (data instanceof ArrayBuffer) { - const uint8Array = new Uint8Array(data); - return { data: uint8Array, size: uint8Array.length }; - } - - if (data instanceof Blob) { - return { data, size: data.size }; - } - - // ReadableStream - size unknown until consumed - return { data, size: 0 }; - } - - /** - * Delete files from the sandbox. - * - * @param paths - Files to delete - * @returns Array of results - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeDeleteFiles(paths: string[]): Promise { - const results: FileDeleteResult[] = []; - - for (const path of paths) { - try { - await this.sandbox.files.deleteFiles([path]); - results.push({ path, success: true, error: null }); - } catch (error) { - results.push({ - path, - success: false, - error: error instanceof Error ? error : new Error(String(error)) - }); - } - } - - return results; - } - - /** - * Get file/directory information. - * - * @param paths - Paths to query - * @returns Map of path to file info - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeGetFileInfo(paths: string[]): Promise> { - const infos = await this.sandbox.files.getFileInfo(paths); - const infoMap = new Map(); - - for (const [path, info] of Object.entries(infos)) { - infoMap.set(path, info); - } - - return infoMap; - } - - /** - * Move/rename files. 
- * - * @param entries - Move operations to perform - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeMoveFiles(entries: MoveEntry[]): Promise { - for (const entry of entries) { - await this.sandbox.files.moveFiles([ - { source: entry.source, destination: entry.destination } - ]); - } - } - - /** - * Replace content within files. - * - * @param entries - Replacement operations - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeReplaceContent(entries: ContentReplaceEntry[]): Promise { - for (const entry of entries) { - await ( - this.sandbox.files as unknown as { - replaceContents(path: string, oldContent: string, newContent: string): Promise; - } - ).replaceContents(entry.path, entry.oldContent, entry.newContent); - } - } - - /** - * Create directories. - * - * @param paths - Directories to create - * @param options - Directory options (mode, owner, group) - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeCreateDirectories( - paths: string[], - options?: { mode?: number; owner?: string; group?: string } - ): Promise { - for (const path of paths) { - await ( - this.sandbox.files as unknown as { - createDirectories( - path: string, - options?: { mode?: number; owner?: string; group?: string } - ): Promise; - } - ).createDirectories(path, { - mode: options?.mode, - owner: options?.owner, - group: options?.group - }); - } - } - - /** - * Delete directories. 
- * - * @param paths - Directories to delete - * @param options - Options (recursive, force) - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeDeleteDirectories( - paths: string[], - options?: { recursive?: boolean; force?: boolean } - ): Promise { - for (const path of paths) { - await ( - this.sandbox.files as unknown as { - deleteDirectories( - path: string, - options?: { recursive?: boolean; force?: boolean } - ): Promise; - } - ).deleteDirectories(path, { - recursive: options?.recursive, - force: options?.force - }); - } - } - - /** - * Read a file as a stream. - * - * @param path - File path - * @returns Async iterable of file chunks - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected nativeReadFileStream(path: string): AsyncIterable { - const stream = ( - this.sandbox.files as unknown as { readStream(path: string): ReadableStream } - ).readStream(path); - return readableStreamToAsyncIterable(stream); - } - - /** - * Write a file from a stream. - * - * @param path - File path - * @param stream - Data stream - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeWriteFileStream( - path: string, - stream: ReadableStream - ): Promise { - await this.sandbox.files.writeFiles([{ path, data: stream }]); - } - - /** - * Set file permissions. - * - * @param entries - Permission changes to apply - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeSetPermissions(entries: PermissionEntry[]): Promise { - for (const entry of entries) { - await this.sandbox.files.setPermissions(entry.path, { - mode: entry.mode, - owner: entry.owner, - group: entry.group - }); - } - } - - /** - * Search for files. 
- * - * @param pattern - Search pattern - * @param path - Directory to search in - * @returns Array of matching results - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeSearch(pattern: string, path?: string): Promise { - return ( - this.sandbox.files as unknown as { - search(options: { pattern: string; path?: string }): Promise; - } - ).search({ pattern, path }); - } - - // ==================== Health Check ==================== - - /** - * Check if the sandbox is healthy. - * - * @returns true if healthy, false otherwise - */ - protected async nativePing(): Promise { - try { - return await this.sandbox.health.ping(); - } catch { - return false; - } - } - - /** - * Get current resource metrics. - * - * @returns Current metrics - * @throws {SandboxStateError} If sandbox is not initialized - */ - protected async nativeGetMetrics(): Promise { - return this.sandbox.metrics.getMetrics(); - } -} diff --git a/sdk/sandbox/src/adapters/index.ts b/sdk/sandbox/src/adapters/index.ts deleted file mode 100644 index 350c7175e1..0000000000 --- a/sdk/sandbox/src/adapters/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export { BaseSandboxAdapter } from './BaseSandboxAdapter'; -export type { MinimalProviderConnection } from './MinimalProviderAdapter'; -export { MinimalProviderAdapter } from './MinimalProviderAdapter'; -export type { OpenSandboxConnectionConfig, SandboxRuntimeType } from './OpenSandboxAdapter'; -export { OpenSandboxAdapter } from './OpenSandboxAdapter'; diff --git a/sdk/sandbox/src/errors/CommandExecutionError.ts b/sdk/sandbox/src/errors/CommandExecutionError.ts deleted file mode 100644 index bb8e6cbf5f..0000000000 --- a/sdk/sandbox/src/errors/CommandExecutionError.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { SandboxException } from './SandboxException'; - -/** - * Thrown when command execution fails. 
- */ -export class CommandExecutionError extends SandboxException { - public readonly exitCode?: number; - public readonly stdout?: string; - public readonly stderr?: string; - public readonly commandError?: Error; - - constructor( - message: string, - public readonly command: string, - exitCodeOrCause?: number | Error, - stdout?: string, - stderr?: string - ) { - super( - message, - 'COMMAND_FAILED', - exitCodeOrCause instanceof Error ? exitCodeOrCause : undefined - ); - this.name = 'CommandExecutionError'; - Object.setPrototypeOf(this, CommandExecutionError.prototype); - - if (exitCodeOrCause instanceof Error) { - this.commandError = exitCodeOrCause; - } else { - this.exitCode = exitCodeOrCause; - this.stdout = stdout; - this.stderr = stderr; - } - } - - /** - * Returns the combined output (stdout + stderr). - */ - getCombinedOutput(): string { - let output = this.stdout || ''; - if (this.stderr) { - output += output ? `\n${this.stderr}` : this.stderr; - } - return output; - } -} diff --git a/sdk/sandbox/src/errors/ConnectionError.ts b/sdk/sandbox/src/errors/ConnectionError.ts deleted file mode 100644 index ed5477c1de..0000000000 --- a/sdk/sandbox/src/errors/ConnectionError.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { SandboxException } from './SandboxException'; - -/** - * Thrown when connection to a sandbox fails. 
- */ -export class ConnectionError extends SandboxException { - constructor( - message: string, - public readonly endpoint?: string, - cause?: unknown - ) { - super(message, 'CONNECTION_ERROR', cause); - this.name = 'ConnectionError'; - Object.setPrototypeOf(this, ConnectionError.prototype); - } -} diff --git a/sdk/sandbox/src/errors/FeatureNotSupportedError.ts b/sdk/sandbox/src/errors/FeatureNotSupportedError.ts deleted file mode 100644 index 28a1386890..0000000000 --- a/sdk/sandbox/src/errors/FeatureNotSupportedError.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { SandboxException } from './SandboxException'; - -/** - * Thrown when a provider does not natively support a feature - * and no polyfill is available. - */ -export class FeatureNotSupportedError extends SandboxException { - constructor( - message: string, - public readonly feature: string, - public readonly provider: string - ) { - super(`Feature not supported by ${provider}: ${message}`, 'FEATURE_NOT_SUPPORTED'); - this.name = 'FeatureNotSupportedError'; - Object.setPrototypeOf(this, FeatureNotSupportedError.prototype); - } -} diff --git a/sdk/sandbox/src/errors/FileOperationError.ts b/sdk/sandbox/src/errors/FileOperationError.ts deleted file mode 100644 index 2be6c6f277..0000000000 --- a/sdk/sandbox/src/errors/FileOperationError.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { SandboxException } from './SandboxException'; - -/** - * Error codes specific to file operations. - */ -export type FileErrorCode = - | 'FILE_NOT_FOUND' - | 'FILE_ALREADY_EXISTS' - | 'PERMISSION_DENIED' - | 'PATH_IS_DIRECTORY' - | 'PATH_NOT_DIRECTORY' - | 'INVALID_PATH' - | 'QUOTA_EXCEEDED' - | 'TRANSFER_ERROR'; - -/** - * Thrown when a file operation fails. 
- */ -export class FileOperationError extends SandboxException { - constructor( - message: string, - public readonly path: string, - public readonly fileErrorCode: FileErrorCode, - cause?: unknown - ) { - super(message, fileErrorCode, cause); - this.name = 'FileOperationError'; - Object.setPrototypeOf(this, FileOperationError.prototype); - } -} diff --git a/sdk/sandbox/src/errors/SandboxException.ts b/sdk/sandbox/src/errors/SandboxException.ts deleted file mode 100644 index e0571fe83e..0000000000 --- a/sdk/sandbox/src/errors/SandboxException.ts +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Base exception class for all sandbox-related errors. - * Provides structured error information with codes and optional metadata. - */ -export class SandboxException extends Error { - constructor( - message: string, - public readonly code: SandboxErrorCode = 'INTERNAL_UNKNOWN_ERROR', - cause?: unknown - ) { - // @ts-expect-error - cause is a valid Error option in ES2022 - super(message, { cause }); - this.name = 'SandboxException'; - Object.setPrototypeOf(this, SandboxException.prototype); - } - - /** - * Returns a structured representation of the error for logging. - */ - toJSON(): Record { - return { - name: this.name, - message: this.message, - code: this.code, - cause: this.cause, - stack: this.stack - }; - } -} - -/** - * Error codes for sandbox exceptions. - * Extensible via string intersection. 
- */ -export type SandboxErrorCode = - | 'INTERNAL_UNKNOWN_ERROR' - | 'CONNECTION_ERROR' - | 'TIMEOUT' - | 'READY_TIMEOUT' - | 'UNHEALTHY' - | 'INVALID_ARGUMENT' - | 'UNEXPECTED_RESPONSE' - | 'FEATURE_NOT_SUPPORTED' - | 'SANDBOX_NOT_FOUND' - | 'PERMISSION_DENIED' - | 'FILE_NOT_FOUND' - | 'FILE_ALREADY_EXISTS' - | 'COMMAND_FAILED' - | (string & {}); diff --git a/sdk/sandbox/src/errors/SandboxStateError.ts b/sdk/sandbox/src/errors/SandboxStateError.ts deleted file mode 100644 index 739ba594c7..0000000000 --- a/sdk/sandbox/src/errors/SandboxStateError.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { SandboxException } from './SandboxException'; - -/** - * Thrown when an operation is attempted in an invalid sandbox state. - */ -export class SandboxStateError extends SandboxException { - constructor( - message: string, - public readonly currentState: string, - public readonly requiredState?: string - ) { - super( - `Invalid sandbox state: ${message} (current: ${currentState}${requiredState ? `, required: ${requiredState}` : ''})`, - 'INVALID_STATE' - ); - this.name = 'SandboxStateError'; - Object.setPrototypeOf(this, SandboxStateError.prototype); - } -} diff --git a/sdk/sandbox/src/errors/TimeoutError.ts b/sdk/sandbox/src/errors/TimeoutError.ts deleted file mode 100644 index 8d0ea12197..0000000000 --- a/sdk/sandbox/src/errors/TimeoutError.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { SandboxException } from './SandboxException'; - -/** - * Thrown when an operation times out. - */ -export class TimeoutError extends SandboxException { - constructor( - message: string, - public readonly timeoutMs: number, - public readonly operation: string - ) { - super(message, 'TIMEOUT'); - this.name = 'TimeoutError'; - Object.setPrototypeOf(this, TimeoutError.prototype); - } -} - -/** - * Thrown when waiting for sandbox readiness times out. 
- */ -export class SandboxReadyTimeoutError extends SandboxException { - constructor(sandboxId: string, timeoutMs: number) { - super(`Sandbox ${sandboxId} did not become ready within ${timeoutMs}ms`, 'READY_TIMEOUT'); - this.name = 'SandboxReadyTimeoutError'; - Object.setPrototypeOf(this, SandboxReadyTimeoutError.prototype); - } -} diff --git a/sdk/sandbox/src/errors/index.ts b/sdk/sandbox/src/errors/index.ts deleted file mode 100644 index 84f5114267..0000000000 --- a/sdk/sandbox/src/errors/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -export { CommandExecutionError } from './CommandExecutionError'; -export { ConnectionError } from './ConnectionError'; -export { FeatureNotSupportedError } from './FeatureNotSupportedError'; -export { type FileErrorCode, FileOperationError } from './FileOperationError'; -export { type SandboxErrorCode, SandboxException } from './SandboxException'; -export { SandboxStateError } from './SandboxStateError'; -export { SandboxReadyTimeoutError, TimeoutError } from './TimeoutError'; diff --git a/sdk/sandbox/src/factory/SandboxProviderFactory.ts b/sdk/sandbox/src/factory/SandboxProviderFactory.ts deleted file mode 100644 index cfe1a7c946..0000000000 --- a/sdk/sandbox/src/factory/SandboxProviderFactory.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { MinimalProviderAdapter, OpenSandboxAdapter } from '../adapters'; -import type { ISandbox } from '../interfaces'; - -/** - * Configuration for creating a sandbox provider. - */ -export interface ProviderConfig { - /** Provider type */ - provider: 'opensandbox' | 'minimal' | string; - - /** Connection configuration (provider-specific) */ - connection?: { - baseUrl?: string; - apiKey?: string; - [key: string]: unknown; - }; - - /** Provider-specific options */ - options?: Record; -} - -/** - * Factory for creating sandbox provider instances. - * - * Following the Factory Pattern, this centralizes provider - * creation and configuration. 
- * - * Example: - * ```typescript - * const sandbox = await SandboxProviderFactory.create({ - * provider: 'opensandbox', - * connection: { apiKey: 'xxx' } - * }); - * - * await sandbox.create({ image: { repository: 'node', tag: '18' } }); - * ``` - */ -const customProviders = new Map ISandbox>(); - -/** - * Create a sandbox provider instance. - * - * @param config Provider configuration - * @returns Configured sandbox instance - * @throws Error if provider type is unknown - */ -function createProvider(config: ProviderConfig): ISandbox { - switch (config.provider) { - case 'opensandbox': - return new OpenSandboxAdapter({ - baseUrl: config.connection?.baseUrl, - apiKey: config.connection?.apiKey, - runtime: config.connection?.runtime as 'docker' | 'kubernetes' | undefined - }); - - case 'minimal': - return new MinimalProviderAdapter(); - - default: { - // Check custom providers - const customFactory = customProviders.get(config.provider); - if (customFactory) { - return customFactory(config); - } - throw new Error(`Unknown provider: ${config.provider}`); - } - } -} - -/** - * Register a custom provider adapter. - * - * @param name Provider name - * @param factory Function that creates the adapter - */ -function registerProvider(name: string, factory: (config: ProviderConfig) => ISandbox): void { - customProviders.set(name, factory); -} - -/** - * Get list of available providers. - */ -function getAvailableProviders(): string[] { - return ['opensandbox', 'minimal', ...customProviders.keys()]; -} - -/** - * Factory for creating sandbox provider instances. - * - * Following the Factory Pattern, this centralizes provider - * creation and configuration. 
- * - * Example: - * ```typescript - * const sandbox = await SandboxProviderFactory.create({ - * provider: 'opensandbox', - * connection: { apiKey: 'xxx' } - * }); - * - * await sandbox.create({ image: { repository: 'node', tag: '18' } }); - * ``` - */ -export const SandboxProviderFactory = { - create: createProvider, - registerProvider, - getAvailableProviders -}; - -/** - * Convenience function for creating sandboxes. - * - * Shorthand for SandboxProviderFactory.create() - */ -export function createSandbox(config: ProviderConfig): ISandbox { - return SandboxProviderFactory.create(config); -} diff --git a/sdk/sandbox/src/factory/index.ts b/sdk/sandbox/src/factory/index.ts deleted file mode 100644 index 8996771bda..0000000000 --- a/sdk/sandbox/src/factory/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export type { ProviderConfig } from './SandboxProviderFactory'; -export { createSandbox, SandboxProviderFactory } from './SandboxProviderFactory'; diff --git a/sdk/sandbox/src/index.ts b/sdk/sandbox/src/index.ts deleted file mode 100644 index e6ee77b2ec..0000000000 --- a/sdk/sandbox/src/index.ts +++ /dev/null @@ -1,16 +0,0 @@ -// Export adapters -export * from './adapters'; -// Export errors -export * from './errors'; -// Export factory -export * from './factory'; -// Export interfaces -export * from './interfaces'; - -// Export polyfill services -export * from './polyfill'; -// Export types -export * from './types'; - -// Export utilities -export * from './utils'; diff --git a/sdk/sandbox/src/interfaces/ICommandExecution.ts b/sdk/sandbox/src/interfaces/ICommandExecution.ts deleted file mode 100644 index fd4887caec..0000000000 --- a/sdk/sandbox/src/interfaces/ICommandExecution.ts +++ /dev/null @@ -1,45 +0,0 @@ -import type { ExecuteOptions, ExecuteResult, StreamHandlers } from '../types'; - -/** - * Interface for command execution within a sandbox. - * Follows Interface Segregation Principle. 
- */ -export interface ICommandExecution { - /** - * Execute a command and wait for completion. - * @param command The command to execute - * @param options Execution options - * @returns Execution result with stdout, stderr, and exit code - * @throws {CommandExecutionError} If command fails - * @throws {TimeoutError} If execution times out - */ - execute(command: string, options?: ExecuteOptions): Promise; - - /** - * Execute a command with streaming output. - * Provides real-time access to stdout/stderr via handlers. - * @param command The command to execute - * @param handlers Stream handlers for output - * @param options Execution options - * @throws {CommandExecutionError} If command fails - */ - executeStream(command: string, handlers: StreamHandlers, options?: ExecuteOptions): Promise; - - /** - * Execute a command in the background. - * Returns immediately with a handle to control the execution. - * @param command The command to execute - * @param options Execution options - * @returns Handle for background execution - */ - executeBackground( - command: string, - options?: ExecuteOptions - ): Promise<{ sessionId: string; kill(): Promise }>; - - /** - * Interrupt/kill a running command session. - * @param sessionId The session ID from executeBackground - */ - interrupt(sessionId: string): Promise; -} diff --git a/sdk/sandbox/src/interfaces/IFileSystem.ts b/sdk/sandbox/src/interfaces/IFileSystem.ts deleted file mode 100644 index 6a219226da..0000000000 --- a/sdk/sandbox/src/interfaces/IFileSystem.ts +++ /dev/null @@ -1,133 +0,0 @@ -import type { - ContentReplaceEntry, - DirectoryEntry, - FileDeleteResult, - FileInfo, - FileReadResult, - FileWriteEntry, - FileWriteResult, - MoveEntry, - PermissionEntry, - ReadFileOptions, - SearchResult -} from '../types'; - -/** - * Interface for filesystem operations within a sandbox. - * Follows Interface Segregation Principle. - * - * All methods support batch operations for efficiency. 
- * Providers without native batch support will have operations - * automatically parallelized by the base adapter. - */ -export interface IFileSystem { - // ==================== File Operations ==================== - - /** - * Read files from the sandbox. - * @param paths Array of file paths to read - * @param options Read options - * @returns Array of results (one per path, may include errors) - */ - readFiles(paths: string[], options?: ReadFileOptions): Promise; - - /** - * Write files to the sandbox. - * Supports strings, bytes, and streams. - * @param entries Files to write - * @returns Array of results with bytes written - */ - writeFiles(entries: FileWriteEntry[]): Promise; - - /** - * Delete files from the sandbox. - * @param paths Files to delete - * @returns Array of results - */ - deleteFiles(paths: string[]): Promise; - - /** - * Move/rename files within the sandbox. - * @param entries Move operations to perform - */ - moveFiles(entries: MoveEntry[]): Promise; - - /** - * Replace content within files. - * @param entries Replacement operations - */ - replaceContent(entries: ContentReplaceEntry[]): Promise; - - // ==================== Streaming Operations ==================== - - /** - * Read a file as a stream. - * Efficient for large files. - * @param path File path - * @returns Async iterable of file chunks - */ - readFileStream(path: string): AsyncIterable; - - /** - * Write a file from a stream. - * Efficient for large files. - * @param path File path - * @param stream Data stream - */ - writeFileStream(path: string, stream: ReadableStream): Promise; - - // ==================== Directory Operations ==================== - - /** - * Create directories. - * Creates parent directories as needed. 
- * @param paths Directories to create - * @param options Directory options (mode, owner, group) - */ - createDirectories( - paths: string[], - options?: { mode?: number; owner?: string; group?: string } - ): Promise; - - /** - * Delete directories and their contents. - * @param paths Directories to delete - * @param options Options (recursive, force) - */ - deleteDirectories( - paths: string[], - options?: { recursive?: boolean; force?: boolean } - ): Promise; - - /** - * List directory contents. - * @param path Directory path - * @returns Array of directory entries - */ - listDirectory(path: string): Promise; - - // ==================== Metadata Operations ==================== - - /** - * Get file/directory information. - * @param paths Paths to query - * @returns Map of path to file info - */ - getFileInfo(paths: string[]): Promise>; - - /** - * Set file permissions. - * @param entries Permission changes to apply - */ - setPermissions(entries: PermissionEntry[]): Promise; - - // ==================== Search Operations ==================== - - /** - * Search for files matching a pattern. - * @param pattern Search pattern (glob or regex, provider-dependent) - * @param path Directory to search in - * @returns Array of matching results - */ - search(pattern: string, path?: string): Promise; -} diff --git a/sdk/sandbox/src/interfaces/IHealthCheck.ts b/sdk/sandbox/src/interfaces/IHealthCheck.ts deleted file mode 100644 index bcfed6f982..0000000000 --- a/sdk/sandbox/src/interfaces/IHealthCheck.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type { SandboxMetrics } from '../types'; - -/** - * Interface for health checking and metrics. - * Follows Interface Segregation Principle. - */ -export interface IHealthCheck { - /** - * Check if the sandbox is healthy and responsive. - * @returns true if healthy, false otherwise - */ - ping(): Promise; - - /** - * Get current resource metrics. 
- * @returns Current metrics (CPU, memory usage) - */ - getMetrics(): Promise; - - /** - * Stream metrics in real-time. - * Not all providers support this. - * @returns Async iterable of metric snapshots - */ - streamMetrics?(): AsyncIterable; -} diff --git a/sdk/sandbox/src/interfaces/ISandbox.ts b/sdk/sandbox/src/interfaces/ISandbox.ts deleted file mode 100644 index 2f9acee7be..0000000000 --- a/sdk/sandbox/src/interfaces/ISandbox.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type { ProviderCapabilities } from '../types'; -import type { ICommandExecution } from './ICommandExecution'; -import type { IFileSystem } from './IFileSystem'; -import type { IHealthCheck } from './IHealthCheck'; -import type { ISandboxLifecycle } from './ISandboxLifecycle'; - -/** - * Unified sandbox interface. - * Composes all sandbox capabilities into a single interface. - * - * This is the primary interface that consumers interact with. - * All concrete adapters must implement this interface. - * - * Following Interface Segregation Principle, this interface - * is composed of smaller, focused interfaces. - */ -export interface ISandbox extends ISandboxLifecycle, ICommandExecution, IFileSystem, IHealthCheck { - /** Provider name (e.g., 'opensandbox') */ - readonly provider: string; - - /** Provider capability flags */ - readonly capabilities: ProviderCapabilities; - - /** - * Close the connection and release resources. - * Should be called when done with the sandbox. - */ - close(): Promise; -} diff --git a/sdk/sandbox/src/interfaces/ISandboxLifecycle.ts b/sdk/sandbox/src/interfaces/ISandboxLifecycle.ts deleted file mode 100644 index 49e8641fa1..0000000000 --- a/sdk/sandbox/src/interfaces/ISandboxLifecycle.ts +++ /dev/null @@ -1,65 +0,0 @@ -import type { SandboxConfig, SandboxId, SandboxInfo, SandboxStatus } from '../types'; - -/** - * Interface for sandbox lifecycle operations. - * Follows Interface Segregation Principle - only lifecycle methods. 
- */ -export interface ISandboxLifecycle { - /** Unique identifier for this sandbox */ - readonly id: SandboxId; - - /** Current status of the sandbox */ - readonly status: SandboxStatus; - - /** - * Create a new sandbox with the given configuration. - * The sandbox ID is assigned after creation. - */ - create(config: SandboxConfig): Promise; - - /** - * Start a stopped sandbox. - */ - start(): Promise; - - /** - * Stop a running sandbox (graceful shutdown). - */ - stop(): Promise; - - /** - * Pause a running sandbox. - * Not all providers support this. - */ - pause(): Promise; - - /** - * Resume a paused sandbox. - * Not all providers support this. - */ - resume(): Promise; - - /** - * Delete the sandbox permanently. - */ - delete(): Promise; - - /** - * Get detailed information about the sandbox. - */ - getInfo(): Promise; - - /** - * Wait until the sandbox is ready (healthy and responsive). - * @param timeoutMs Maximum time to wait in milliseconds - * @throws {SandboxReadyTimeoutError} If timeout is exceeded - */ - waitUntilReady(timeoutMs?: number): Promise; - - /** - * Renew the sandbox expiration, extending its lifetime. - * Not all providers support this. 
- * @param additionalSeconds Seconds to extend - */ - renewExpiration(additionalSeconds: number): Promise; -} diff --git a/sdk/sandbox/src/interfaces/index.ts b/sdk/sandbox/src/interfaces/index.ts deleted file mode 100644 index ea7b2a3934..0000000000 --- a/sdk/sandbox/src/interfaces/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export type { ICommandExecution } from './ICommandExecution'; -export type { IFileSystem } from './IFileSystem'; -export type { IHealthCheck } from './IHealthCheck'; -export type { ISandbox } from './ISandbox'; -export type { ISandboxLifecycle } from './ISandboxLifecycle'; diff --git a/sdk/sandbox/src/polyfill/CapabilityDetector.ts b/sdk/sandbox/src/polyfill/CapabilityDetector.ts deleted file mode 100644 index e5b70c1d7d..0000000000 --- a/sdk/sandbox/src/polyfill/CapabilityDetector.ts +++ /dev/null @@ -1,97 +0,0 @@ -import type { ProviderCapabilities } from '../types'; - -/** - * Detects and reports on provider capabilities. - * - * This class can perform runtime capability detection by testing - * specific features, or use static capability declarations. - */ -export class CapabilityDetector { - /** - * Create a static detector with known capabilities. - */ - static fromCapabilities(capabilities: ProviderCapabilities): CapabilityDetector { - return new CapabilityDetector(capabilities); - } - - constructor(private readonly capabilities: ProviderCapabilities) {} - - /** - * Get the full capability set. - */ - getCapabilities(): ProviderCapabilities { - return { ...this.capabilities }; - } - - /** - * Check if a specific capability is supported. - */ - hasCapability(capability: K): ProviderCapabilities[K] { - return this.capabilities[capability]; - } - - /** - * Check if filesystem operations need polyfilling. - */ - needsFileSystemPolyfill(): boolean { - return !this.capabilities.nativeFileSystem; - } - - /** - * Check if health check needs polyfilling. 
- */ - needsHealthCheckPolyfill(): boolean { - return !this.capabilities.nativeHealthCheck; - } - - /** - * Check if metrics need polyfilling. - */ - needsMetricsPolyfill(): boolean { - return !this.capabilities.nativeMetrics; - } - - /** - * Check if search needs polyfilling. - */ - needsSearchPolyfill(): boolean { - return !this.capabilities.supportsSearch; - } - - /** - * Get a summary of which features are native vs polyfilled. - */ - getFeatureSummary(): { - native: string[]; - polyfilled: string[]; - unsupported: string[]; - } { - const native: string[] = []; - const polyfilled: string[] = []; - const unsupported: string[] = []; - - const checkCapability = (name: keyof ProviderCapabilities, needsPolyfill?: () => boolean) => { - if (this.capabilities[name]) { - native.push(name); - } else if (needsPolyfill?.()) { - polyfilled.push(name); - } else if (needsPolyfill) { - unsupported.push(name); - } - }; - - checkCapability('supportsPauseResume'); - checkCapability('supportsRenews'); - checkCapability('supportsStreamingOutput'); - checkCapability('supportsBackgroundExecution'); - checkCapability('nativeFileSystem', () => this.needsFileSystemPolyfill()); - checkCapability('supportsBatchOperations'); - checkCapability('supportsStreamingTransfer'); - checkCapability('supportsPermissions'); - checkCapability('supportsSearch', () => this.needsSearchPolyfill()); - checkCapability('nativeHealthCheck', () => this.needsHealthCheckPolyfill()); - checkCapability('nativeMetrics', () => this.needsMetricsPolyfill()); - - return { native, polyfilled, unsupported }; - } -} diff --git a/sdk/sandbox/src/polyfill/CommandPolyfillService.ts b/sdk/sandbox/src/polyfill/CommandPolyfillService.ts deleted file mode 100644 index 0a2fb721df..0000000000 --- a/sdk/sandbox/src/polyfill/CommandPolyfillService.ts +++ /dev/null @@ -1,503 +0,0 @@ -import { CommandExecutionError, FileOperationError } from '../errors'; -import type { ICommandExecution } from '../interfaces'; -import type { 
DirectoryEntry, FileInfo, SearchResult } from '../types'; -import { base64ToBytes, bytesToBase64 } from '../utils/base64'; - -/** - * Service that implements filesystem operations via command execution. - * - * This is the core polyfill mechanism that enables feature parity - * for providers that only offer raw command execution without - * native filesystem APIs. - * - * All commands use POSIX-compliant syntax for maximum compatibility. - */ -export class CommandPolyfillService { - constructor(private readonly executor: ICommandExecution) {} - - // ==================== File Read Operations ==================== - - /** - * Read a file via base64 encoding. - * Uses: cat | base64 - */ - async readFile(path: string): Promise { - try { - const result = await this.executor.execute(`cat "${this.escapePath(path)}" | base64 -w 0`); - if (result.exitCode !== 0) { - throw this.createFileError(path, result.stderr); - } - return base64ToBytes(result.stdout); - } catch (error) { - if (error instanceof FileOperationError) { - throw error; - } - if (error instanceof CommandExecutionError) { - throw this.createFileError(path, error.stderr); - } - throw error; - } - } - - /** - * Read a portion of a file via dd + base64. - */ - async readFileRange(path: string, start: number, end?: number): Promise { - const length = end ? end - start : ''; - const cmd = `dd if="${this.escapePath( - path - )}" bs=1 skip=${start} count=${length} 2>/dev/null | base64 -w 0`; - - const result = await this.executor.execute(cmd); - if (result.exitCode !== 0) { - throw this.createFileError(path, result.stderr); - } - return base64ToBytes(result.stdout); - } - - // ==================== File Write Operations ==================== - - /** - * Write a file via base64 decoding. 
- * Uses: echo | base64 -d > - */ - async writeFile(path: string, data: Uint8Array): Promise { - const base64 = bytesToBase64(data); - const chunkSize = 1024; // Avoid command line length limits - - // Ensure parent directory exists - await this.createParentDirectory(path); - - // Write in chunks to avoid command length limits - let first = true; - for (let i = 0; i < base64.length; i += chunkSize) { - const chunk = base64.slice(i, i + chunkSize); - const redirect = first ? '>' : '>>'; - const result = await this.executor.execute( - `echo "${chunk}" | base64 -d ${redirect} "${this.escapePath(path)}"` - ); - if (result.exitCode !== 0) { - throw this.createFileError(path, result.stderr); - } - first = false; - } - - return data.length; - } - - /** - * Write a text file directly. - */ - async writeTextFile(path: string, content: string): Promise { - await this.createParentDirectory(path); - - // Use heredoc for text content to avoid escaping issues - const escapedContent = content.replace(/\\/g, '\\\\').replace(/\$/g, '\\$'); - const result = await this.executor.execute( - `cat > "${this.escapePath(path)}" << 'POLYFILL_EOF'\n${escapedContent}\nPOLYFILL_EOF` - ); - - if (result.exitCode !== 0) { - throw this.createFileError(path, result.stderr); - } - - return content.length; - } - - // ==================== File Delete Operations ==================== - - /** - * Delete files via rm command. - */ - async deleteFiles(paths: string[]): Promise<{ path: string; success: boolean; error?: Error }[]> { - const results: { path: string; success: boolean; error?: Error }[] = []; - - for (const path of paths) { - try { - const result = await this.executor.execute(`rm -f "${this.escapePath(path)}"`); - results.push({ - path, - success: result.exitCode === 0, - error: result.exitCode !== 0 ? new Error(result.stderr) : undefined - }); - } catch (error) { - results.push({ - path, - success: false, - error: error instanceof Error ? 
error : new Error(String(error)) - }); - } - } - - return results; - } - - // ==================== Directory Operations ==================== - - /** - * Create directories via mkdir -p. - */ - async createDirectories( - paths: string[], - options?: { mode?: number; owner?: string; group?: string } - ): Promise { - for (const path of paths) { - const result = await this.executor.execute(`mkdir -p "${this.escapePath(path)}"`); - if (result.exitCode !== 0) { - throw new FileOperationError( - `Failed to create directory: ${result.stderr}`, - path, - 'PATH_NOT_DIRECTORY' - ); - } - - // Set permissions if specified - if (options?.mode) { - await this.executor.execute(`chmod ${options.mode.toString(8)} "${this.escapePath(path)}"`); - } - - // Set ownership if specified - if (options?.owner || options?.group) { - const owner = options.owner || ''; - const group = options.group ? `:${options.group}` : ''; - await this.executor.execute( - `chown ${owner}${group} "${this.escapePath(path)}" 2>/dev/null || true` - ); - } - } - } - - /** - * Delete directories via rm -rf. - */ - async deleteDirectories( - paths: string[], - options?: { recursive?: boolean; force?: boolean } - ): Promise { - const flags = [options?.recursive !== false ? '-r' : '', options?.force !== false ? '-f' : ''] - .filter(Boolean) - .join(''); - - for (const path of paths) { - const result = await this.executor.execute(`rm ${flags} "${this.escapePath(path)}"`); - if (result.exitCode !== 0) { - throw new FileOperationError( - `Failed to delete directory: ${result.stderr}`, - path, - 'PATH_IS_DIRECTORY' - ); - } - } - } - - /** - * List directory contents via ls -la. 
- */ - async listDirectory(path: string): Promise { - const result = await this.executor.execute( - `ls -la "${this.escapePath(path)}" --time-style=+"%Y-%m-%dT%H:%M:%S" 2>/dev/null || echo "DIRECTORY_NOT_FOUND"` - ); - - if (result.stdout.includes('DIRECTORY_NOT_FOUND')) { - throw new FileOperationError('Directory not found', path, 'FILE_NOT_FOUND'); - } - - return this.parseLsOutput(result.stdout, path); - } - - /** - * Create parent directory for a file path. - */ - private async createParentDirectory(filePath: string): Promise { - const lastSlash = filePath.lastIndexOf('/'); - if (lastSlash > 0) { - const parentDir = filePath.slice(0, lastSlash); - await this.executor.execute(`mkdir -p "${this.escapePath(parentDir)}"`); - } - } - - // ==================== Metadata Operations ==================== - - /** - * Get file information via stat. - */ - async getFileInfo(paths: string[]): Promise> { - const infoMap = new Map(); - - for (const path of paths) { - try { - // Use stat for detailed info - const result = await this.executor.execute( - `stat -c '%s|%Y|%W|%a|%U|%G|%F' "${this.escapePath(path)}" 2>/dev/null || echo "STAT_FAILED"` - ); - - if (result.stdout.includes('STAT_FAILED')) { - continue; // File doesn't exist - } - - const parts = result.stdout.trim().split('|'); - if (parts.length >= 7) { - infoMap.set(path, { - path, - size: Number.parseInt(parts[0], 10) || undefined, - modifiedAt: parts[1] ? new Date(Number.parseInt(parts[1], 10) * 1000) : undefined, - createdAt: parts[2] ? new Date(Number.parseInt(parts[2], 10) * 1000) : undefined, - mode: Number.parseInt(parts[3], 8) || undefined, - owner: parts[4] || undefined, - group: parts[5] || undefined, - isDirectory: parts[6].includes('directory'), - isFile: parts[6].includes('regular file'), - isSymlink: parts[6].includes('symbolic link') - }); - } - } catch (_error) { - // Skip files that can't be stat'd - } - } - - return infoMap; - } - - /** - * Set file permissions via chmod. 
- */ - async setPermissions( - entries: { path: string; mode?: number; owner?: string; group?: string }[] - ): Promise { - for (const entry of entries) { - if (entry.mode !== undefined) { - await this.executor.execute( - `chmod ${entry.mode.toString(8)} "${this.escapePath(entry.path)}"` - ); - } - - if (entry.owner || entry.group) { - const owner = entry.owner || ''; - const group = entry.group ? `:${entry.group}` : ''; - await this.executor.execute( - `chown ${owner}${group} "${this.escapePath(entry.path)}" 2>/dev/null || true` - ); - } - } - } - - // ==================== Search Operations ==================== - - /** - * Search for files via find command. - */ - async search(pattern: string, path: string = '.'): Promise { - // Escape pattern for shell but allow glob characters - const escapedPattern = pattern.replace(/'/g, "'\"'\"'"); - const escapedPath = this.escapePath(path); - - // Try find command first - let result = await this.executor.execute( - `find "${escapedPath}" -name '${escapedPattern}' -print 2>/dev/null || echo "FIND_FAILED"` - ); - - if (!result.stdout.includes('FIND_FAILED')) { - return result.stdout - .split('\n') - .filter((p) => p.trim()) - .map((p) => ({ path: p })); - } - - // Fallback to ls + grep if find not available - result = await this.executor.execute( - `ls -R "${escapedPath}" 2>/dev/null | grep -E "${escapedPattern}" || true` - ); - - return result.stdout - .split('\n') - .filter((p) => p.trim()) - .map((p) => ({ path: `${path}/${p}` })); - } - - /** - * Move/rename files via mv command. 
- */ - async moveFiles(entries: { source: string; destination: string }[]): Promise { - for (const { source, destination } of entries) { - const result = await this.executor.execute( - `mv "${this.escapePath(source)}" "${this.escapePath(destination)}"` - ); - if (result.exitCode !== 0) { - throw new FileOperationError( - `Failed to move file: ${result.stderr}`, - source, - 'TRANSFER_ERROR' - ); - } - } - } - - /** - * Replace content in files via sed. - */ - async replaceContent( - entries: { path: string; oldContent: string; newContent: string }[] - ): Promise { - for (const { path, oldContent, newContent } of entries) { - // Escape sed special characters - const escapedOld = oldContent - .replace(/\\/g, '\\\\\\\\') - .replace(/\//g, '\\/') - .replace(/&/g, '\\&'); - const escapedNew = newContent - .replace(/\\/g, '\\\\\\\\') - .replace(/\//g, '\\/') - .replace(/&/g, '\\&'); - - const result = await this.executor.execute( - `sed -i 's/${escapedOld}/${escapedNew}/g' "${this.escapePath(path)}"` - ); - - if (result.exitCode !== 0) { - throw new FileOperationError( - `Failed to replace content: ${result.stderr}`, - path, - 'INVALID_PATH' - ); - } - } - } - - // ==================== Health Check Polyfill ==================== - - /** - * Simple health check via echo command. - */ - async ping(): Promise { - try { - const result = await this.executor.execute('echo "PING"'); - return result.exitCode === 0 && result.stdout.includes('PING'); - } catch { - return false; - } - } - - /** - * Get metrics via /proc filesystem. 
- */ - async getMetrics(): Promise<{ - cpuCount: number; - cpuUsedPercentage: number; - memoryTotalMiB: number; - memoryUsedMiB: number; - timestamp: number; - }> { - const timestamp = Date.now(); - - // Get CPU count - const cpuResult = await this.executor.execute('nproc 2>/dev/null || echo "1"'); - const cpuCount = Number.parseInt(cpuResult.stdout.trim(), 10) || 1; - - // Get memory info from /proc/meminfo - const memResult = await this.executor.execute('cat /proc/meminfo 2>/dev/null || echo "FAILED"'); - - let memoryTotalMiB = 0; - let memoryUsedMiB = 0; - - if (!memResult.stdout.includes('FAILED')) { - const totalMatch = memResult.stdout.match(/MemTotal:\s+(\d+)\s+kB/); - const availableMatch = memResult.stdout.match(/MemAvailable:\s+(\d+)\s+kB/); - - if (totalMatch) { - memoryTotalMiB = Math.floor(Number.parseInt(totalMatch[1], 10) / 1024); - } - if (totalMatch && availableMatch) { - const total = Number.parseInt(totalMatch[1], 10); - const available = Number.parseInt(availableMatch[1], 10); - memoryUsedMiB = Math.floor((total - available) / 1024); - } - } - - // Estimate CPU usage (simplified) - const cpuUsedPercentage = 0; // Would require multiple samples - - return { - cpuCount, - cpuUsedPercentage, - memoryTotalMiB, - memoryUsedMiB, - timestamp - }; - } - - // ==================== Private Helpers ==================== - - /** - * Escape a path for safe shell usage. - */ - private escapePath(path: string): string { - // Replace " with \" for shell safety - return path.replace(/"/g, '\\"'); - } - - /** - * Parse ls -la output into DirectoryEntry objects. 
- */ - private parseLsOutput(output: string, basePath: string): DirectoryEntry[] { - const lines = output.split('\n'); - const entries: DirectoryEntry[] = []; - - for (const line of lines) { - // Skip total line and empty lines - if (line.startsWith('total') || !line.trim()) { - continue; - } - - // Parse ls -la format: - // drwxr-xr-x 2 user group 4096 2024-01-15T10:30:00 filename - const match = line.match( - /^([-dl])([-rwxsStT]{9})\s+\d+\s+\S+\s+\S+\s+(\d+)\s+(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2})\s+(.+)$/ - ); - - if (match) { - const [, type, , size, dateStr, name] = match; - - // Skip . and .. - if (name === '.' || name === '..') { - continue; - } - - const isDirectory = type === 'd'; - const isSymlink = type === 'l'; - - entries.push({ - name, - path: `${basePath}/${name}`, - isDirectory: isDirectory || isSymlink, // Treat symlinks as directories for safety - isFile: type === '-', - size: Number.parseInt(size, 10) || undefined, - modifiedAt: new Date(dateStr) - }); - } - } - - return entries; - } - - /** - * Create a FileOperationError from stderr. 
- */ - private createFileError(path: string, stderr: string): FileOperationError { - const lowerStderr = stderr.toLowerCase(); - - if (lowerStderr.includes('no such file') || lowerStderr.includes('does not exist')) { - return new FileOperationError(stderr, path, 'FILE_NOT_FOUND'); - } - if (lowerStderr.includes('permission denied')) { - return new FileOperationError(stderr, path, 'PERMISSION_DENIED'); - } - if (lowerStderr.includes('is a directory')) { - return new FileOperationError(stderr, path, 'PATH_IS_DIRECTORY'); - } - if (lowerStderr.includes('not a directory')) { - return new FileOperationError(stderr, path, 'PATH_NOT_DIRECTORY'); - } - - return new FileOperationError(stderr, path, 'TRANSFER_ERROR'); - } -} diff --git a/sdk/sandbox/src/polyfill/index.ts b/sdk/sandbox/src/polyfill/index.ts deleted file mode 100644 index fdb115094a..0000000000 --- a/sdk/sandbox/src/polyfill/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { CapabilityDetector } from './CapabilityDetector'; -export { CommandPolyfillService } from './CommandPolyfillService'; diff --git a/sdk/sandbox/src/types/capabilities.ts b/sdk/sandbox/src/types/capabilities.ts deleted file mode 100644 index 96817867bd..0000000000 --- a/sdk/sandbox/src/types/capabilities.ts +++ /dev/null @@ -1,76 +0,0 @@ -/** - * Provider capability flags. - * Used for feature detection and polyfill routing. 
- */ -export interface ProviderCapabilities { - /** Provider supports pausing and resuming sandboxes */ - supportsPauseResume: boolean; - - /** Provider supports extending sandbox expiration */ - supportsRenews: boolean; - - /** Provider supports real-time streaming command output */ - supportsStreamingOutput: boolean; - - /** Provider supports background/long-running execution */ - supportsBackgroundExecution: boolean; - - /** Provider has native filesystem API (not just command-based) */ - nativeFileSystem: boolean; - - /** Provider supports batch file operations */ - supportsBatchOperations: boolean; - - /** Provider supports streaming file transfers */ - supportsStreamingTransfer: boolean; - - /** Provider supports file permission operations */ - supportsPermissions: boolean; - - /** Provider supports file search functionality */ - supportsSearch: boolean; - - /** Provider has native health check endpoint */ - nativeHealthCheck: boolean; - - /** Provider has native metrics endpoint */ - nativeMetrics: boolean; -} - -/** - * Helper to create full capability set (for fully-featured providers). - */ -export function createFullCapabilities(): ProviderCapabilities { - return { - supportsPauseResume: true, - supportsRenews: true, - supportsStreamingOutput: true, - supportsBackgroundExecution: true, - nativeFileSystem: true, - supportsBatchOperations: true, - supportsStreamingTransfer: true, - supportsPermissions: true, - supportsSearch: true, - nativeHealthCheck: true, - nativeMetrics: true - }; -} - -/** - * Helper to create minimal capability set (command-only providers). 
- */ -export function createMinimalCapabilities(): ProviderCapabilities { - return { - supportsPauseResume: false, - supportsRenews: false, - supportsStreamingOutput: false, - supportsBackgroundExecution: false, - nativeFileSystem: false, - supportsBatchOperations: false, - supportsStreamingTransfer: false, - supportsPermissions: false, - supportsSearch: false, - nativeHealthCheck: false, - nativeMetrics: false - }; -} diff --git a/sdk/sandbox/src/types/execution.ts b/sdk/sandbox/src/types/execution.ts deleted file mode 100644 index d8b7c6e783..0000000000 --- a/sdk/sandbox/src/types/execution.ts +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Options for executing commands. - */ -export interface ExecuteOptions { - /** Working directory for execution */ - workingDirectory?: string; - - /** Run in background (don't wait for completion) */ - background?: boolean; - - /** Timeout in milliseconds */ - timeoutMs?: number; - - /** Environment variables to set */ - env?: Record; - - /** Abort signal for cancellation */ - signal?: AbortSignal; -} - -/** - * Result of command execution. - */ -export interface ExecuteResult { - /** Standard output */ - stdout: string; - - /** Standard error */ - stderr: string; - - /** Exit code (null if not completed) */ - exitCode: number | null; - - /** Whether output was truncated */ - truncated?: boolean; - - /** Execution duration in milliseconds */ - durationMs?: number; -} - -/** - * Output message from streaming execution. - */ -export interface OutputMessage { - /** Message content */ - text: string; - - /** Timestamp (Unix milliseconds) */ - timestamp?: number; -} - -/** - * Handlers for streaming command output. 
- */ -export interface StreamHandlers { - /** Called for each stdout message */ - onStdout?: (msg: OutputMessage) => void | Promise; - - /** Called for each stderr message */ - onStderr?: (msg: OutputMessage) => void | Promise; - - /** Called when execution completes */ - onComplete?: (result: ExecuteResult) => void | Promise; - - /** Called on error */ - onError?: (error: Error) => void | Promise; -} - -/** - * Background execution handle. - */ -export interface BackgroundExecution { - /** Session ID for the background execution */ - sessionId: string; - - /** Kill the background execution */ - kill(): Promise; -} diff --git a/sdk/sandbox/src/types/filesystem.ts b/sdk/sandbox/src/types/filesystem.ts deleted file mode 100644 index 3887060250..0000000000 --- a/sdk/sandbox/src/types/filesystem.ts +++ /dev/null @@ -1,121 +0,0 @@ -/** - * File information/metadata. - */ -export interface FileInfo { - path: string; - size?: number; - modifiedAt?: Date; - createdAt?: Date; - mode?: number; - owner?: string; - group?: string; - isDirectory?: boolean; - isFile?: boolean; - isSymlink?: boolean; -} - -/** - * Directory entry. - */ -export interface DirectoryEntry { - name: string; - path: string; - isDirectory: boolean; - isFile: boolean; - size?: number; - modifiedAt?: Date; -} - -/** - * Entry for writing a file. - */ -export interface FileWriteEntry { - /** File path */ - path: string; - - /** File content (various types supported) */ - data: string | Uint8Array | ArrayBuffer | Blob | ReadableStream; - - /** File permissions (octal) */ - mode?: number; - - /** Owner */ - owner?: string; - - /** Group */ - group?: string; -} - -/** - * Entry for permission changes. - */ -export interface PermissionEntry { - path: string; - mode?: number; - owner?: string; - group?: string; -} - -/** - * Result of reading a file. - */ -export interface FileReadResult { - path: string; - content: Uint8Array; - error: Error | null; -} - -/** - * Result of writing a file. 
- */ -export interface FileWriteResult { - path: string; - bytesWritten: number; - error: Error | null; -} - -/** - * Result of deleting a file. - */ -export interface FileDeleteResult { - path: string; - success: boolean; - error: Error | null; -} - -/** - * Search result. - */ -export interface SearchResult { - path: string; - isDirectory?: boolean; - isFile?: boolean; -} - -/** - * Move/rename entry. - */ -export interface MoveEntry { - source: string; - destination: string; -} - -/** - * Content replacement entry. - */ -export interface ContentReplaceEntry { - path: string; - oldContent: string; - newContent: string; -} - -/** - * File read options. - */ -export interface ReadFileOptions { - /** Character encoding (default: binary/Uint8Array) */ - encoding?: 'utf-8' | 'base64' | 'binary'; - - /** Byte range to read (format: "start-end" or "start-") */ - range?: string; -} diff --git a/sdk/sandbox/src/types/index.ts b/sdk/sandbox/src/types/index.ts deleted file mode 100644 index 16eb347064..0000000000 --- a/sdk/sandbox/src/types/index.ts +++ /dev/null @@ -1,38 +0,0 @@ -// Re-export from capabilities -export type { ProviderCapabilities } from './capabilities'; -export { createFullCapabilities, createMinimalCapabilities } from './capabilities'; -// Re-export from execution -export type { - BackgroundExecution, - ExecuteOptions, - ExecuteResult, - OutputMessage, - StreamHandlers -} from './execution'; -// Re-export from filesystem -export type { - ContentReplaceEntry, - DirectoryEntry, - FileDeleteResult, - FileInfo, - FileReadResult, - FileWriteEntry, - FileWriteResult, - MoveEntry, - PermissionEntry, - ReadFileOptions, - SearchResult -} from './filesystem'; -// Re-export from sandbox -export type { - Endpoint, - ImageSpec, - NetworkPolicy, - ResourceLimits, - SandboxConfig, - SandboxId, - SandboxInfo, - SandboxMetrics, - SandboxState, - SandboxStatus -} from './sandbox'; diff --git a/sdk/sandbox/src/types/sandbox.ts b/sdk/sandbox/src/types/sandbox.ts deleted file 
mode 100644 index d520cb781d..0000000000 --- a/sdk/sandbox/src/types/sandbox.ts +++ /dev/null @@ -1,117 +0,0 @@ -/** - * Unique identifier for a sandbox. - */ -export type SandboxId = string; - -/** - * Sandbox status states. - */ -export type SandboxState = - | 'Creating' - | 'Running' - | 'Pausing' - | 'Paused' - | 'Resuming' - | 'Deleting' - | 'Deleted' - | 'Error' - | string; // Extensible for provider-specific states - -/** - * Sandbox status information. - */ -export interface SandboxStatus { - state: SandboxState; - reason?: string; - message?: string; -} - -/** - * Resource limits for a sandbox. - */ -export interface ResourceLimits { - cpuCount?: number; - memoryMiB?: number; - diskGiB?: number; -} - -/** - * Image specification for sandbox creation. - */ -export interface ImageSpec { - repository: string; - tag?: string; - digest?: string; -} - -/** - * Network policy for sandbox. - */ -export interface NetworkPolicy { - allowEgress?: boolean; - allowedHosts?: string[]; -} - -/** - * Configuration for creating a sandbox. - */ -export interface SandboxConfig { - /** Container image specification */ - image: ImageSpec; - - /** Entrypoint command */ - entrypoint?: string[]; - - /** Timeout in seconds (0 for no timeout) */ - timeout?: number; - - /** Resource limits */ - resourceLimits?: ResourceLimits; - - /** Environment variables */ - env?: Record; - - /** Metadata for the sandbox */ - metadata?: Record; - - /** Network access policy */ - networkPolicy?: NetworkPolicy; - - /** Provider-specific extensions */ - extensions?: Record; -} - -/** - * Information about a sandbox. - */ -export interface SandboxInfo { - id: SandboxId; - image: ImageSpec; - entrypoint: string[]; - metadata?: Record; - status: SandboxStatus; - createdAt: Date; - expiresAt?: Date; - resourceLimits?: ResourceLimits; -} - -/** - * Sandbox metrics. 
- */ -export interface SandboxMetrics { - cpuCount: number; - cpuUsedPercentage: number; - memoryTotalMiB: number; - memoryUsedMiB: number; - timestamp: number; -} - -/** - * Endpoint information for accessing sandbox services. - */ -export interface Endpoint { - host: string; - port: number; - protocol: 'http' | 'https'; - url: string; -} diff --git a/sdk/sandbox/src/utils/base64.ts b/sdk/sandbox/src/utils/base64.ts deleted file mode 100644 index 8044b57e3b..0000000000 --- a/sdk/sandbox/src/utils/base64.ts +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Base64 encoding/decoding utilities. - * Works in both Node.js and browser environments. - */ - -/** - * Encode a Uint8Array to base64 string. - */ -export function bytesToBase64(bytes: Uint8Array): string { - // Use built-in btoa for browser compatibility - const binary = Array.from(bytes) - .map((b) => String.fromCharCode(b)) - .join(''); - return btoa(binary); -} - -/** - * Decode a base64 string to Uint8Array. - */ -export function base64ToBytes(base64: string): Uint8Array { - const binary = atob(base64.trim()); - const bytes = new Uint8Array(binary.length); - for (let i = 0; i < binary.length; i++) { - bytes[i] = binary.charCodeAt(i); - } - return bytes; -} - -/** - * Encode a string to base64. - */ -export function stringToBase64(str: string): string { - return btoa(str); -} - -/** - * Decode a base64 string to utf-8 string. 
- */ -export function base64ToString(base64: string): string { - return atob(base64.trim()); -} diff --git a/sdk/sandbox/src/utils/index.ts b/sdk/sandbox/src/utils/index.ts deleted file mode 100644 index 571d9ce650..0000000000 --- a/sdk/sandbox/src/utils/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -export { base64ToBytes, base64ToString, bytesToBase64, stringToBase64 } from './base64'; - -export { - asyncIterableToBuffer, - bufferToReadableStream, - readableStreamToAsyncIterable, - streamToString, - stringToReadableStream -} from './streams'; diff --git a/sdk/sandbox/src/utils/streams.ts b/sdk/sandbox/src/utils/streams.ts deleted file mode 100644 index eb146302b4..0000000000 --- a/sdk/sandbox/src/utils/streams.ts +++ /dev/null @@ -1,94 +0,0 @@ -/** - * Stream utilities for working with ReadableStream and AsyncIterable. - */ - -/** - * Convert an AsyncIterable to a Uint8Array. - * Collects all chunks into a single buffer. - */ -export async function asyncIterableToBuffer( - iterable: AsyncIterable -): Promise { - const chunks: Uint8Array[] = []; - let totalLength = 0; - - for await (const chunk of iterable) { - chunks.push(chunk); - totalLength += chunk.length; - } - - const result = new Uint8Array(totalLength); - let offset = 0; - for (const chunk of chunks) { - result.set(chunk, offset); - offset += chunk.length; - } - - return result; -} - -/** - * Convert a Uint8Array to a ReadableStream. - */ -export function bufferToReadableStream(buffer: Uint8Array): ReadableStream { - return new ReadableStream({ - start(controller) { - controller.enqueue(buffer); - controller.close(); - } - }); -} - -/** - * Convert a string to a ReadableStream. - */ -export function stringToReadableStream(str: string): ReadableStream { - const encoder = new TextEncoder(); - return bufferToReadableStream(encoder.encode(str)); -} - -/** - * Convert a ReadableStream to an AsyncIterable. - * (Native ReadableStream is already async iterable in modern environments, - * but this ensures compatibility.) 
- */ -export function readableStreamToAsyncIterable( - stream: ReadableStream -): AsyncIterable { - // If stream already has Symbol.asyncIterator, use it - if (stream[Symbol.asyncIterator]) { - return stream as AsyncIterable; - } - // Otherwise create an async iterable - return { - [Symbol.asyncIterator]: async function* () { - const reader = stream.getReader(); - try { - while (true) { - const { done, value } = await reader.read(); - if (done) { - break; - } - if (value) { - yield value; - } - } - } finally { - reader.releaseLock(); - } - } - }; -} - -/** - * Read a stream and convert to string. - */ -export async function streamToString( - stream: ReadableStream | AsyncIterable -): Promise { - const iterable = - stream instanceof ReadableStream ? readableStreamToAsyncIterable(stream) : stream; - - const buffer = await asyncIterableToBuffer(iterable); - return new TextDecoder().decode(buffer); -} diff --git a/sdk/sandbox/tests/integration/command-execution.test.ts b/sdk/sandbox/tests/integration/command-execution.test.ts deleted file mode 100644 index 7d2a96cec0..0000000000 --- a/sdk/sandbox/tests/integration/command-execution.test.ts +++ /dev/null @@ -1,504 +0,0 @@ -import { afterEach, beforeEach, describe, expect, it } from 'vitest'; -import type { MinimalProviderConnection } from '../../src/adapters/MinimalProviderAdapter'; -import { MinimalProviderAdapter } from '../../src/adapters/MinimalProviderAdapter'; -import type { ExecuteOptions, StreamHandlers } from '../../src/types'; -import { MockSandboxAdapter } from '../mocks/MockSandboxAdapter'; - -/** - * Integration tests for command execution operations. - * - * These tests cover the ICommandExecution interface implementations: - * - Standard command execution (execute) - * - Streaming command execution (executeStream) - * - Background command execution (executeBackground) - * - Command interruption (interrupt) - * - Execution options (working directory, timeout, etc.) 
- * - * Tests are run against both native and polyfilled implementations. - */ -describe('Command Execution', () => { - describe('Native Command Execution (MockSandboxAdapter)', () => { - let adapter: MockSandboxAdapter; - - beforeEach(() => { - adapter = new MockSandboxAdapter(); - }); - - afterEach(async () => { - await adapter.close(); - }); - - describe('Standard Execution', () => { - it('should execute simple command', async () => { - const result = await adapter.execute('echo hello'); - - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('Executed:'); - expect(result.stderr).toBe(''); - }); - - it('should execute command with options', async () => { - const options: ExecuteOptions = { - workingDirectory: '/app', - timeout: 5000 - }; - - const result = await adapter.execute('pwd', options); - expect(result.exitCode).toBe(0); - }); - - it('should handle command with special characters', async () => { - const result = await adapter.execute('echo "Hello, World!"'); - expect(result.exitCode).toBe(0); - }); - - it('should return truncated flag for large output', async () => { - // Mock adapter doesn't truncate, but we verify the field exists - const result = await adapter.execute('cat large-file'); - expect(typeof result.truncated).toBe('boolean'); - }); - - it('should handle multiple commands in sequence', async () => { - const result1 = await adapter.execute('echo first'); - expect(result1.exitCode).toBe(0); - - const result2 = await adapter.execute('echo second'); - expect(result2.exitCode).toBe(0); - - const result3 = await adapter.execute('echo third'); - expect(result3.exitCode).toBe(0); - }); - - it('should handle commands with exit code 0', async () => { - const result = await adapter.execute('true'); - expect(result.exitCode).toBe(0); - }); - }); - - describe('Streaming Execution', () => { - it('should stream stdout', async () => { - const stdoutChunks: string[] = []; - - const handlers: StreamHandlers = { - onStdout: async (msg) => { - 
stdoutChunks.push(msg.text); - } - }; - - await adapter.executeStream('echo streamed', handlers); - - expect(stdoutChunks.length).toBeGreaterThan(0); - expect(stdoutChunks[0]).toContain('Streamed:'); - }); - - it('should stream stderr', async () => { - const stderrChunks: string[] = []; - - const handlers: StreamHandlers = { - onStderr: async (msg) => { - stderrChunks.push(msg.text); - } - }; - - await adapter.executeStream('echo error >&2', handlers); - - // May or may not have stderr depending on mock implementation - expect(stderrChunks).toBeDefined(); - }); - - it('should call onComplete with result', async () => { - let completedResult: { exitCode?: number } | undefined; - - const handlers: StreamHandlers = { - onComplete: async (result) => { - completedResult = result; - } - }; - - await adapter.executeStream('echo complete', handlers); - - expect(completedResult).toBeDefined(); - expect(completedResult?.exitCode).toBe(0); - }); - - it('should handle streaming with options', async () => { - const stdoutChunks: string[] = []; - - const handlers: StreamHandlers = { - onStdout: async (msg) => { - stdoutChunks.push(msg.text); - } - }; - - const options: ExecuteOptions = { - workingDirectory: '/tmp' - }; - - await adapter.executeStream('pwd', handlers, options); - expect(stdoutChunks.length).toBeGreaterThan(0); - }); - - it('should handle streaming errors', async () => { - const errors: { message: string }[] = []; - - const handlers: StreamHandlers = { - onError: async (err) => { - errors.push(err); - } - }; - - // Even if no error occurs, handlers should be valid - await adapter.executeStream('echo test', handlers); - expect(errors).toBeDefined(); - }); - }); - - describe('Background Execution', () => { - it('should execute command in background', async () => { - const handle = await adapter.executeBackground('sleep 10'); - - expect(handle.sessionId).toBeDefined(); - expect(typeof handle.sessionId).toBe('string'); - expect(typeof handle.kill).toBe('function'); - 
}); - - it('should kill background execution', async () => { - const handle = await adapter.executeBackground('long-running-process'); - - // Should not throw - await handle.kill(); - }); - - it('should execute background with options', async () => { - const options: ExecuteOptions = { - workingDirectory: '/app' - }; - - const handle = await adapter.executeBackground('node server.js', options); - expect(handle.sessionId).toBeDefined(); - }); - - it('should handle multiple background executions', async () => { - const handle1 = await adapter.executeBackground('process1'); - const handle2 = await adapter.executeBackground('process2'); - - expect(handle1.sessionId).not.toBe(handle2.sessionId); - - await handle1.kill(); - await handle2.kill(); - }); - }); - - describe('Command Interruption', () => { - it('should interrupt running command', async () => { - const handle = await adapter.executeBackground('sleep 100'); - - // Should not throw - await adapter.interrupt(handle.sessionId); - }); - - it('should handle interrupt for non-existent session', async () => { - // Should handle gracefully (may throw or not depending on implementation) - try { - await adapter.interrupt('non-existent-session'); - } catch { - // Expected in some implementations - } - }); - }); - - describe('Complex Command Scenarios', () => { - it('should handle command chaining', async () => { - const result = await adapter.execute('echo "first" && echo "second"'); - expect(result.exitCode).toBe(0); - }); - - it('should handle pipes', async () => { - const result = await adapter.execute('echo "hello" | tr a-z A-Z'); - expect(result.exitCode).toBe(0); - }); - - it('should handle environment variables', async () => { - const result = await adapter.execute('FOO=bar echo $FOO'); - expect(result.exitCode).toBe(0); - }); - - it('should handle multiline commands', async () => { - const result = await adapter.execute('echo line1\necho line2'); - expect(result.exitCode).toBe(0); - }); - }); - }); - - 
describe('Polyfilled Command Execution (MinimalProviderAdapter)', () => { - function createMockConnection(): MinimalProviderConnection { - const runningCommands = new Set(); - let commandId = 0; - - return { - id: 'cmd-test-sandbox', - - async execute(command: string) { - // Ping - if (command.includes('echo "PING"')) { - return { stdout: 'PING', stderr: '', exitCode: 0 }; - } - - // Background command simulation - if (command.includes('background-process')) { - const id = `cmd-${++commandId}`; - runningCommands.add(id); - return { stdout: '', stderr: '', exitCode: 0, backgroundId: id }; - } - - // Simulate command execution - if (command.includes('echo')) { - const match = command.match(/echo "(.+?)"/); - const text = match ? match[1] : ''; - return { stdout: text, stderr: '', exitCode: 0 }; - } - - if (command.includes('error')) { - return { stdout: '', stderr: 'error occurred', exitCode: 1 }; - } - - return { stdout: `Executed: ${command}`, stderr: '', exitCode: 0 }; - }, - - async getStatus() { - return { state: 'Running' as const }; - }, - - async close() { - runningCommands.clear(); - } - }; - } - - it('should execute command via polyfill', async () => { - const connection = createMockConnection(); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - const result = await adapter.execute('echo "test"'); - expect(result.exitCode).toBe(0); - expect(result.stdout).toBe('test'); - } finally { - await adapter.close(); - } - }); - - it('should handle working directory option', async () => { - const connection = createMockConnection(); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - const result = await adapter.execute('pwd', { workingDirectory: '/app' }); - expect(result.exitCode).toBe(0); - } finally { - await adapter.close(); - } - }); - - it('should handle command with error exit code', async () => { - const connection = createMockConnection(); - const adapter = new 
MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - const result = await adapter.execute('error'); - expect(result.exitCode).toBe(1); - expect(result.stderr).toContain('error'); - } finally { - await adapter.close(); - } - }); - - it('should stream output via polyfill', async () => { - const connection = createMockConnection(); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - const outputs: string[] = []; - - await adapter.executeStream('echo "streamed output"', { - onStdout: async (msg) => { - outputs.push(msg.text); - } - }); - - // Polyfill should collect and emit output - expect(outputs.length).toBeGreaterThan(0); - } finally { - await adapter.close(); - } - }); - - it('should throw FeatureNotSupportedError for background execution', async () => { - const connection = createMockConnection(); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - // MinimalProvider does not support background execution - await expect(adapter.executeBackground('background-process')).rejects.toThrow(); - } finally { - await adapter.close(); - } - }); - }); - - describe('Cross-Provider Execution Parity', () => { - it('should produce similar results for same command', async () => { - const nativeAdapter = new MockSandboxAdapter(); - - const polyConnection: MinimalProviderConnection = { - id: 'parity-cmd-test', - async execute(command: string) { - if (command.includes('echo "PING"')) { - return { stdout: 'PING', stderr: '', exitCode: 0 }; - } - return { stdout: 'parity-output', stderr: '', exitCode: 0 }; - }, - async getStatus() { - return { state: 'Running' as const }; - }, - async close() {} - }; - - const polyAdapter = new MinimalProviderAdapter(); - await polyAdapter.connect(polyConnection); - - // Both should return similar structure - const nativeResult = await nativeAdapter.execute('test-cmd'); - const polyResult = await polyAdapter.execute('test-cmd'); - - 
expect(typeof nativeResult.exitCode).toBe('number'); - expect(typeof polyResult.exitCode).toBe('number'); - expect(typeof nativeResult.stdout).toBe('string'); - expect(typeof polyResult.stdout).toBe('string'); - expect(typeof nativeResult.stderr).toBe('string'); - expect(typeof polyResult.stderr).toBe('string'); - - await nativeAdapter.close(); - await polyAdapter.close(); - }); - - it('should handle executeBackground with native adapter', async () => { - const nativeAdapter = new MockSandboxAdapter(); - - const nativeBg = await nativeAdapter.executeBackground('sleep 10'); - - // Should have sessionId and kill function - expect(typeof nativeBg.sessionId).toBe('string'); - expect(typeof nativeBg.kill).toBe('function'); - - await nativeBg.kill(); - await nativeAdapter.close(); - }); - - it('should throw error for background execution with minimal provider', async () => { - const polyConnection: MinimalProviderConnection = { - id: 'bg-parity-test', - async execute(command: string) { - if (command.includes('echo "PING"')) { - return { stdout: 'PING', stderr: '', exitCode: 0 }; - } - return { stdout: '', stderr: '', exitCode: 0 }; - }, - async getStatus() { - return { state: 'Running' as const }; - }, - async close() {} - }; - - const polyAdapter = new MinimalProviderAdapter(); - await polyAdapter.connect(polyConnection); - - // Minimal provider does not support background execution - await expect(polyAdapter.executeBackground('sleep 10')).rejects.toThrow(); - - await polyAdapter.close(); - }); - }); - - describe('Execution Edge Cases', () => { - it('should handle empty command', async () => { - const adapter = new MockSandboxAdapter(); - const result = await adapter.execute(''); - expect(result.exitCode).toBe(0); - await adapter.close(); - }); - - it('should handle very long command', async () => { - const adapter = new MockSandboxAdapter(); - const longCommand = `echo ${'a'.repeat(1000)}`; - const result = await adapter.execute(longCommand); - 
expect(result.exitCode).toBe(0); - await adapter.close(); - }); - - it('should handle command with special shell characters', async () => { - const adapter = new MockSandboxAdapter(); - const specialChars = [ - 'echo "hello; world"', - 'echo "hello && world"', - 'echo "hello || world"', - 'echo "hello | world"', - 'echo "$HOME"', - 'echo "`date`"' - ]; - - for (const cmd of specialChars) { - const result = await adapter.execute(cmd); - expect(result.exitCode).toBe(0); - } - - await adapter.close(); - }); - - it('should handle unicode in commands', async () => { - const adapter = new MockSandboxAdapter(); - const result = await adapter.execute('echo "Hello 世界 🌍"'); - expect(result.exitCode).toBe(0); - await adapter.close(); - }); - }); - - describe('OpenSandbox Lifecycle Integration', () => { - it('should execute commands after create', async () => { - const adapter = new MockSandboxAdapter(); - - await adapter.create({ - image: { repository: 'alpine', tag: 'latest' } - }); - - const result = await adapter.execute('whoami'); - expect(result.exitCode).toBe(0); - - await adapter.close(); - }); - - it('should handle execution after pause/resume', async () => { - const adapter = new MockSandboxAdapter(); - - await adapter.create({ image: { repository: 'alpine' } }); - - await adapter.pause(); - await adapter.resume(); - - const result = await adapter.execute('echo resumed'); - expect(result.exitCode).toBe(0); - - await adapter.close(); - }); - }); -}); diff --git a/sdk/sandbox/tests/integration/filesystem-operations.test.ts b/sdk/sandbox/tests/integration/filesystem-operations.test.ts deleted file mode 100644 index d773dcf517..0000000000 --- a/sdk/sandbox/tests/integration/filesystem-operations.test.ts +++ /dev/null @@ -1,587 +0,0 @@ -import { afterEach, beforeEach, describe, expect, it } from 'vitest'; -import type { MinimalProviderConnection } from '../../src/adapters/MinimalProviderAdapter'; -import { MinimalProviderAdapter } from 
'../../src/adapters/MinimalProviderAdapter'; -import { MockSandboxAdapter } from '../mocks/MockSandboxAdapter'; - -/** - * Integration tests for filesystem operations. - * - * These tests cover the IFileSystem interface implementations: - * - File operations (read, write, delete, move, replace) - * - Streaming operations (read/write streams) - * - Directory operations (create, delete, list) - * - Metadata operations (get info, set permissions) - * - Search operations - * - * Tests are run against both native (MockSandboxAdapter) and polyfilled - * (MinimalProviderAdapter) implementations to ensure feature parity. - */ -describe('Filesystem Operations', () => { - describe('Native Filesystem (MockSandboxAdapter)', () => { - let adapter: MockSandboxAdapter; - - beforeEach(() => { - adapter = new MockSandboxAdapter(); - }); - - afterEach(async () => { - await adapter.close(); - }); - - describe('File Operations', () => { - it('should write and read single file', async () => { - const content = 'Hello, World!'; - const writeResult = await adapter.writeFiles([{ path: '/test.txt', data: content }]); - - expect(writeResult[0].error).toBeNull(); - expect(writeResult[0].bytesWritten).toBe(content.length); - - const readResult = await adapter.readFiles(['/test.txt']); - expect(readResult[0].error).toBeNull(); - expect(new TextDecoder().decode(readResult[0].content)).toBe(content); - }); - - it('should write and read multiple files', async () => { - const files = [ - { path: '/file1.txt', data: 'Content 1' }, - { path: '/file2.txt', data: 'Content 2' }, - { path: '/file3.txt', data: 'Content 3' } - ]; - - const writeResults = await adapter.writeFiles(files); - expect(writeResults.every((r) => r.error === null)).toBe(true); - - const readResults = await adapter.readFiles(files.map((f) => f.path)); - expect(readResults.every((r) => r.error === null)).toBe(true); - expect(readResults.map((r) => new TextDecoder().decode(r.content))).toEqual( - files.map((f) => f.data) - ); - }); - 
- it('should handle Uint8Array data', async () => { - const data = new Uint8Array([0x48, 0x65, 0x6c, 0x6c, 0x6f]); // "Hello" - - const writeResult = await adapter.writeFiles([{ path: '/binary.bin', data }]); - expect(writeResult[0].bytesWritten).toBe(5); - - const readResult = await adapter.readFiles(['/binary.bin']); - expect(readResult[0].content).toEqual(data); - }); - - it('should handle ArrayBuffer data', async () => { - const buffer = new ArrayBuffer(4); - new Uint8Array(buffer).set([1, 2, 3, 4]); - - const writeResult = await adapter.writeFiles([{ path: '/buffer.bin', data: buffer }]); - expect(writeResult[0].bytesWritten).toBe(4); - - const readResult = await adapter.readFiles(['/buffer.bin']); - expect(readResult[0].content).toEqual(new Uint8Array([1, 2, 3, 4])); - }); - - it('should delete files', async () => { - await adapter.writeFiles([ - { path: '/delete1.txt', data: 'content1' }, - { path: '/delete2.txt', data: 'content2' } - ]); - - const deleteResults = await adapter.deleteFiles(['/delete1.txt', '/delete2.txt']); - expect(deleteResults.every((r) => r.success)).toBe(true); - - // Verify deletion - const readResults = await adapter.readFiles(['/delete1.txt']); - expect(readResults[0].error).toBeDefined(); - }); - - it('should move files', async () => { - await adapter.writeFiles([{ path: '/source.txt', data: 'movable content' }]); - - await adapter.moveFiles([{ source: '/source.txt', destination: '/dest.txt' }]); - - // Old path should not exist - const oldRead = await adapter.readFiles(['/source.txt']); - expect(oldRead[0].error).toBeDefined(); - - // New path should exist - const newRead = await adapter.readFiles(['/dest.txt']); - expect(newRead[0].error).toBeNull(); - expect(new TextDecoder().decode(newRead[0].content)).toBe('movable content'); - }); - - it('should replace content in files', async () => { - await adapter.writeFiles([{ path: '/replace.txt', data: 'Hello, World! Hello!' 
}]); - - await adapter.replaceContent([ - { path: '/replace.txt', oldContent: 'Hello', newContent: 'Hi' } - ]); - - const readResult = await adapter.readFiles(['/replace.txt']); - const content = new TextDecoder().decode(readResult[0].content); - expect(content).toBe('Hi, World! Hi!'); - }); - - it('should handle read errors for non-existent files', async () => { - const readResults = await adapter.readFiles(['/non-existent.txt']); - - expect(readResults[0].error).toBeDefined(); - expect(readResults[0].content.length).toBe(0); - }); - - it('should handle file paths with special characters', async () => { - const paths = [ - '/path with spaces/file.txt', - '/path-with-dashes/file.txt', - '/path_with_underscores/file.txt', - '/nested/deeply/file.txt' - ]; - - for (const path of paths) { - await adapter.writeFiles([{ path, data: `content for ${path}` }]); - const readResult = await adapter.readFiles([path]); - expect(readResult[0].error).toBeNull(); - } - }); - }); - - describe('Directory Operations', () => { - it('should create directories', async () => { - await adapter.createDirectories(['/dir1', '/dir2', '/nested/dir3']); - - // Should be able to write to nested directory - await adapter.writeFiles([{ path: '/nested/dir3/file.txt', data: 'nested content' }]); - const readResult = await adapter.readFiles(['/nested/dir3/file.txt']); - expect(readResult[0].error).toBeNull(); - }); - - it('should list directory contents', async () => { - await adapter.writeFiles([ - { path: '/listdir/file1.txt', data: '1' }, - { path: '/listdir/file2.txt', data: '2' }, - { path: '/listdir/subdir/file3.txt', data: '3' } - ]); - - const entries = await adapter.listDirectory('/listdir'); - expect(entries.length).toBeGreaterThan(0); - - const names = entries.map((e) => e.name); - expect(names).toContain('file1.txt'); - expect(names).toContain('file2.txt'); - }); - - it('should delete directories', async () => { - await adapter.writeFiles([{ path: '/deldir/file.txt', data: 'to be deleted' 
}]); - - await adapter.deleteDirectories(['/deldir'], { recursive: true }); - - // Directory contents should be gone - const readResult = await adapter.readFiles(['/deldir/file.txt']); - expect(readResult[0].error).toBeDefined(); - }); - - it('should delete directories recursively', async () => { - await adapter.writeFiles([ - { path: '/recursive/nested/deep/file.txt', data: 'deep content' } - ]); - - await adapter.deleteDirectories(['/recursive'], { recursive: true }); - - const readResult = await adapter.readFiles(['/recursive/nested/deep/file.txt']); - expect(readResult[0].error).toBeDefined(); - }); - }); - - describe('File Metadata Operations', () => { - it('should get file info', async () => { - const content = 'test content for info'; - await adapter.writeFiles([{ path: '/info.txt', data: content }]); - - const info = await adapter.getFileInfo(['/info.txt']); - const fileInfo = info.get('/info.txt'); - - expect(fileInfo).toBeDefined(); - expect(fileInfo?.isFile).toBe(true); - expect(fileInfo?.size).toBe(content.length); - }); - - it('should set file permissions', async () => { - await adapter.writeFiles([{ path: '/perms.txt', data: 'content' }]); - - // Should not throw - await adapter.setPermissions([{ path: '/perms.txt', mode: 0o755 }]); - }); - }); - - describe('Streaming Operations', () => { - it('should read file as stream', async () => { - const content = 'streaming content here'; - await adapter.writeFiles([{ path: '/stream.txt', data: content }]); - - const stream = await adapter.readFileStream('/stream.txt'); - const chunks: Uint8Array[] = []; - - for await (const chunk of stream) { - chunks.push(chunk); - } - - const combined = new Uint8Array(chunks.reduce((sum, c) => sum + c.length, 0)); - let offset = 0; - for (const chunk of chunks) { - combined.set(chunk, offset); - offset += chunk.length; - } - - expect(new TextDecoder().decode(combined)).toBe(content); - }); - - it('should write file from stream', async () => { - const content = new 
Uint8Array([0x48, 0x69, 0x21]); // "Hi!" - const stream = new ReadableStream({ - start(controller) { - controller.enqueue(content); - controller.close(); - } - }); - - await adapter.writeFileStream('/from-stream.bin', stream); - - const readResult = await adapter.readFiles(['/from-stream.bin']); - expect(readResult[0].content).toEqual(content); - }); - }); - - describe('Search Operations', () => { - it('should search for files by pattern', async () => { - await adapter.writeFiles([ - { path: '/search/test.txt', data: '1' }, - { path: '/search/test.js', data: '2' }, - { path: '/search/other.txt', data: '3' } - ]); - - const results = await adapter.search('*.txt', '/search'); - expect(results.length).toBeGreaterThan(0); - - const txtFiles = results.filter((r) => r.path.endsWith('.txt')); - expect(txtFiles.length).toBeGreaterThan(0); - }); - - it('should search recursively', async () => { - await adapter.writeFiles([ - { path: '/deep/file1.txt', data: '1' }, - { path: '/deep/nested/file2.txt', data: '2' } - ]); - - const results = await adapter.search('*.txt'); - expect(results.length).toBeGreaterThanOrEqual(2); - }); - }); - }); - - describe('Polyfilled Filesystem (MinimalProviderAdapter)', () => { - function createMockConnection(mockFs: Map): MinimalProviderConnection { - return { - id: 'fs-test-sandbox', - - async execute(command: string) { - // Ping - if (command.includes('echo "PING"')) { - return { stdout: 'PING', stderr: '', exitCode: 0 }; - } - - // Mkdir - if (command.startsWith('mkdir -p')) { - return { stdout: '', stderr: '', exitCode: 0 }; - } - - // Heredoc write - if (command.includes("<< 'POLYFILL_EOF'")) { - const pathMatch = command.match(/cat > "(.+?)" << 'POLYFILL_EOF'/); - if (pathMatch) { - const path = pathMatch[1]; - const lines = command.split('\n'); - const contentLines: string[] = []; - let inContent = false; - for (const line of lines) { - if (line.includes("<< 'POLYFILL_EOF'")) { - inContent = true; - continue; - } - if (line.trim() === 
'POLYFILL_EOF') { - break; - } - if (inContent) { - contentLines.push(line); - } - } - mockFs.set(path, contentLines.join('\n')); - } - return { stdout: '', stderr: '', exitCode: 0 }; - } - - // Base64 write - if (command.includes('base64 -d')) { - const pathMatch = command.match(/> "(.+?)"$/); - const dataMatch = command.match(/echo "(.+?)" \| base64 -d/); - if (pathMatch && dataMatch) { - const path = pathMatch[1]?.replace(/\\"/g, '"'); - const base64Data = dataMatch[1] || ''; - try { - const decoded = atob(base64Data); - mockFs.set(path, decoded); - } catch { - // Invalid base64 - } - } - return { stdout: '', stderr: '', exitCode: 0 }; - } - - // Base64 read - if (command.includes('base64 -w 0')) { - const match = command.match(/cat "(.+?)" \| base64 -w 0/); - const path = match?.[1]?.replace(/\\"/g, '"'); - if (path && mockFs.has(path)) { - const content = mockFs.get(path) || ''; - const binary = Array.from(content) - .map((b) => String.fromCharCode(b.charCodeAt(0))) - .join(''); - return { stdout: btoa(binary), stderr: '', exitCode: 0 }; - } - return { stdout: '', stderr: 'cat: No such file', exitCode: 1 }; - } - - // List directory - if (command.includes('ls -la')) { - const match = command.match(/ls -la "?([^"]+)"?/); - const path = match?.[1] || '.'; - - const entries = Array.from(mockFs.keys()) - .filter((f) => f.startsWith(path)) - .map((f) => f.slice(path.length + 1).split('/')[0]) - .filter((f) => f); - - const uniqueEntries = [...new Set(entries)]; - const output = uniqueEntries - .map((e) => `-rw-r--r-- 1 root root 100 2024-01-15T10:00:00 ${e}`) - .join('\n'); - - return { stdout: output, stderr: '', exitCode: 0 }; - } - - // Delete file (rm) - if (command.startsWith('rm -f')) { - const match = command.match(/rm -f "(.+?)"/); - if (match) { - const path = match[1]; - mockFs.delete(path); - } - return { stdout: '', stderr: '', exitCode: 0 }; - } - - // Move file (mv) - if (command.startsWith('mv')) { - const match = command.match(/mv "(.+?)" "(.+?)"/); 
- if (match) { - const source = match[1]; - const dest = match[2]; - if (mockFs.has(source)) { - const content = mockFs.get(source); - if (content !== undefined) { - mockFs.set(dest, content); - mockFs.delete(source); - } - } - } - return { stdout: '', stderr: '', exitCode: 0 }; - } - - return { stdout: '', stderr: '', exitCode: 0 }; - }, - - async getStatus() { - return { state: 'Running' as const }; - }, - - async close() { - // No-op - } - }; - } - - it('should write and read files via polyfill', async () => { - const mockFs = new Map(); - const connection = createMockConnection(mockFs); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - const content = 'polyfilled content'; - const writeResult = await adapter.writeFiles([{ path: '/poly.txt', data: content }]); - expect(writeResult[0].error).toBeNull(); - - const readResult = await adapter.readFiles(['/poly.txt']); - expect(readResult[0].error).toBeNull(); - expect(new TextDecoder().decode(readResult[0].content)).toBe(content); - } finally { - await adapter.close(); - } - }); - - it('should list directories via polyfill', async () => { - const mockFs = new Map(); - mockFs.set('/workspace/file1.txt', 'content1'); - mockFs.set('/workspace/file2.txt', 'content2'); - - const connection = createMockConnection(mockFs); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - const entries = await adapter.listDirectory('/workspace'); - expect(entries.length).toBe(2); - expect(entries.map((e) => e.name).sort()).toEqual(['file1.txt', 'file2.txt']); - } finally { - await adapter.close(); - } - }); - - it('should delete files via polyfill', async () => { - const mockFs = new Map(); - mockFs.set('/delete.txt', 'to delete'); - - const connection = createMockConnection(mockFs); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - expect(mockFs.has('/delete.txt')).toBe(true); - await 
adapter.deleteFiles(['/delete.txt']); - expect(mockFs.has('/delete.txt')).toBe(false); - } finally { - await adapter.close(); - } - }); - - it('should move files via polyfill', async () => { - const mockFs = new Map(); - mockFs.set('/source.txt', 'movable'); - - const connection = createMockConnection(mockFs); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - await adapter.moveFiles([{ source: '/source.txt', destination: '/dest.txt' }]); - expect(mockFs.has('/source.txt')).toBe(false); - expect(mockFs.has('/dest.txt')).toBe(true); - expect(mockFs.get('/dest.txt')).toBe('movable'); - } finally { - await adapter.close(); - } - }); - - it('should create directories via polyfill', async () => { - const mockFs = new Map(); - const connection = createMockConnection(mockFs); - const adapter = new MinimalProviderAdapter(); - - await adapter.connect(connection); - - try { - // Should not throw - await adapter.createDirectories(['/newdir', '/another/dir']); - } finally { - await adapter.close(); - } - }); - }); - - describe('Cross-Provider File Operation Parity', () => { - it('should produce equivalent results for same operations', async () => { - // Native filesystem adapter - const nativeAdapter = new MockSandboxAdapter(); - - // Polyfilled filesystem adapter - const mockFs = new Map(); - const polyConnection: MinimalProviderConnection = { - id: 'parity-test', - async execute(command: string) { - if (command.includes('echo "PING"')) { - return { stdout: 'PING', stderr: '', exitCode: 0 }; - } - if (command.startsWith('mkdir -p')) { - return { stdout: '', stderr: '', exitCode: 0 }; - } - if (command.includes("<< 'POLYFILL_EOF'")) { - const pathMatch = command.match(/cat > "(.+?)" << 'POLYFILL_EOF'/); - if (pathMatch) { - const path = pathMatch[1]; - const lines = command.split('\n'); - const contentLines: string[] = []; - let inContent = false; - for (const line of lines) { - if (line.includes("<< 'POLYFILL_EOF'")) { - 
inContent = true; - continue; - } - if (line.trim() === 'POLYFILL_EOF') { - break; - } - if (inContent) { - contentLines.push(line); - } - } - mockFs.set(path, contentLines.join('\n')); - } - return { stdout: '', stderr: '', exitCode: 0 }; - } - if (command.includes('base64 -w 0')) { - const match = command.match(/cat "(.+?)" \| base64 -w 0/); - const path = match?.[1]?.replace(/\\"/g, '"'); - if (path && mockFs.has(path)) { - const content = mockFs.get(path) || ''; - const binary = Array.from(content) - .map((b) => String.fromCharCode(b.charCodeAt(0))) - .join(''); - return { stdout: btoa(binary), stderr: '', exitCode: 0 }; - } - return { stdout: '', stderr: 'No such file', exitCode: 1 }; - } - return { stdout: '', stderr: '', exitCode: 0 }; - }, - async getStatus() { - return { state: 'Running' as const }; - }, - async close() {} - }; - - const polyAdapter = new MinimalProviderAdapter(); - await polyAdapter.connect(polyConnection); - - // Perform same operations on both - const testData = 'parity test data'; - const testPath = '/parity.txt'; - - // Write - await nativeAdapter.writeFiles([{ path: testPath, data: testData }]); - await polyAdapter.writeFiles([{ path: testPath, data: testData }]); - - // Read - const nativeRead = await nativeAdapter.readFiles([testPath]); - const polyRead = await polyAdapter.readFiles([testPath]); - - // Results should be equivalent - expect(nativeRead[0].error).toBeNull(); - expect(polyRead[0].error).toBeNull(); - expect(new TextDecoder().decode(nativeRead[0].content)).toBe( - new TextDecoder().decode(polyRead[0].content) - ); - - // Cleanup - await nativeAdapter.close(); - await polyAdapter.close(); - }); - }); -}); diff --git a/sdk/sandbox/tests/integration/minimal-provider.test.ts b/sdk/sandbox/tests/integration/minimal-provider.test.ts deleted file mode 100644 index 48758d3c58..0000000000 --- a/sdk/sandbox/tests/integration/minimal-provider.test.ts +++ /dev/null @@ -1,211 +0,0 @@ -import { describe, expect, it } from 'vitest'; 
-import { - MinimalProviderAdapter, - type MinimalProviderConnection -} from '../../src/adapters/MinimalProviderAdapter'; - -interface MockExecutionResult { - stdout: string; - stderr: string; - exitCode: number; -} - -function handlePing(): MockExecutionResult { - return { stdout: 'PING', stderr: '', exitCode: 0 }; -} - -function handleMkdir(): MockExecutionResult { - return { stdout: '', stderr: '', exitCode: 0 }; -} - -function handleHeredoc(command: string, mockFs: Map): MockExecutionResult { - const pathMatch = command.match(/cat > "(.+?)" << 'POLYFILL_EOF'/); - if (pathMatch) { - const path = pathMatch[1]; - const lines = command.split('\n'); - const contentLines: string[] = []; - let inContent = false; - for (const line of lines) { - if (line.includes("<< 'POLYFILL_EOF'")) { - inContent = true; - continue; - } - if (line.trim() === 'POLYFILL_EOF') { - break; - } - if (inContent) { - contentLines.push(line); - } - } - mockFs.set(path, contentLines.join('\n')); - } - return { stdout: '', stderr: '', exitCode: 0 }; -} - -function handleBase64Read(command: string, mockFs: Map): MockExecutionResult { - const match = command.match(/cat "(.+?)" \| base64 -w 0/); - const path = match?.[1]?.replace(/\\"/g, '"'); - if (path && mockFs.has(path)) { - const content = mockFs.get(path); - if (!content) { - return { stdout: '', stderr: 'cat: No such file', exitCode: 1 }; - } - const binary = Array.from(content) - .map((b) => String.fromCharCode(b.charCodeAt(0))) - .join(''); - const base64 = btoa(binary); - return { stdout: base64, stderr: '', exitCode: 0 }; - } - return { stdout: '', stderr: 'cat: No such file', exitCode: 1 }; -} - -function handleBase64Write(command: string, mockFs: Map): MockExecutionResult { - const pathMatch = command.match(/> "(.+?)"$/); - const dataMatch = command.match(/echo "(.+?)" \| base64 -d/); - if (pathMatch && dataMatch) { - const path = pathMatch[1]?.replace(/\\"/g, '"'); - const base64Data = dataMatch[1] || ''; - try { - const decoded = 
atob(base64Data); - mockFs.set(path, decoded); - } catch { - // Invalid base64, ignore - } - } - return { stdout: '', stderr: '', exitCode: 0 }; -} - -function handleLs(command: string, mockFs: Map): MockExecutionResult { - const match = command.match(/ls -la "([^"]+)"/); - const path = match?.[1] || '.'; - - const entries = Array.from(mockFs.keys()) - .filter((f) => f.startsWith(path)) - .map((f) => f.slice(path.length + 1).split('/')[0]) - .filter((f) => f); - - const uniqueEntries = [...new Set(entries)]; - const output = uniqueEntries - .map((e) => `-rw-r--r-- 1 user group 100 2024-01-15T10:00:00 ${e}`) - .join('\n'); - - return { stdout: output, stderr: '', exitCode: 0 }; -} - -function createMockConnection(mockFs: Map): MinimalProviderConnection { - return { - id: 'integration-test-sandbox', - - async execute(command: string) { - if (command.includes('echo "PING"')) { - return handlePing(); - } - - if (command.includes('mkdir -p')) { - return handleMkdir(); - } - - if (command.includes("<< 'POLYFILL_EOF'")) { - return handleHeredoc(command, mockFs); - } - - if (command.includes('base64 -w 0')) { - return handleBase64Read(command, mockFs); - } - - if (command.includes('base64 -d')) { - return handleBase64Write(command, mockFs); - } - - if (command.includes('ls -la')) { - return handleLs(command, mockFs); - } - - return { stdout: `Executed: ${command}`, stderr: '', exitCode: 0 }; - }, - - async getStatus() { - return { state: 'Running' as const }; - }, - - async close() { - // No-op - } - }; -} - -/** - * Integration test demonstrating end-to-end usage of MinimalProviderAdapter. - * - * This test simulates a real minimal provider (e.g., SSH connection) - * and verifies that filesystem operations work via polyfills. 
- */ -describe('MinimalProvider Integration', () => { - it('should perform full workflow with polyfilled filesystem', async () => { - // Simulate a minimal connection - const mockFs = new Map(); - - const connection = createMockConnection(mockFs); - - // Create adapter and connect - const adapter = new MinimalProviderAdapter(); - await adapter.connect(connection); - - // Verify capabilities - expect(adapter.capabilities.nativeFileSystem).toBe(false); - expect(adapter.provider).toBe('minimal'); - - // Test ping - const pingResult = await adapter.ping(); - expect(pingResult).toBe(true); - - // Test file write (via polyfill) - const writeResults = await adapter.writeFiles([ - { path: '/workspace/test.txt', data: 'Hello, Integration Test!' } - ]); - expect(writeResults[0].error).toBeNull(); - - // Test file read (via polyfill) - const readResults = await adapter.readFiles(['/workspace/test.txt']); - expect(readResults[0].error).toBeNull(); - const content = new TextDecoder().decode(readResults[0].content); - expect(content).toBe('Hello, Integration Test!'); - - // Test directory listing (via polyfill) - const entries = await adapter.listDirectory('/workspace'); - expect(entries.length).toBeGreaterThan(0); - expect(entries[0].name).toBe('test.txt'); - - // Cleanup - await adapter.close(); - }); - - it('should demonstrate feature parity between adapters', async () => { - /** - * This test demonstrates that both OpenSandbox (native) and - * MinimalProvider (polyfilled) expose the same interface, - * enabling provider-agnostic code. 
- */ - - // Both adapters implement ISandbox - const providers = [ - { - name: 'minimal', - adapter: new MinimalProviderAdapter(), - expectedNativeFs: false - } - ]; - - for (const { name, adapter, expectedNativeFs } of providers) { - // Same interface, different implementations - expect(adapter.provider).toBe(name); - expect(adapter.capabilities.nativeFileSystem).toBe(expectedNativeFs); - - // All ISandbox methods are available - expect(typeof adapter.execute).toBe('function'); - expect(typeof adapter.readFiles).toBe('function'); - expect(typeof adapter.writeFiles).toBe('function'); - expect(typeof adapter.ping).toBe('function'); - } - }); -}); diff --git a/sdk/sandbox/tests/mocks/MockCommandExecution.ts b/sdk/sandbox/tests/mocks/MockCommandExecution.ts deleted file mode 100644 index b14b8c92b9..0000000000 --- a/sdk/sandbox/tests/mocks/MockCommandExecution.ts +++ /dev/null @@ -1,93 +0,0 @@ -import type { ICommandExecution } from '../../src/interfaces'; -import type { ExecuteOptions, ExecuteResult, StreamHandlers } from '../../src/types'; - -/** - * Mock implementation of ICommandExecution for testing. - */ -export class MockCommandExecution implements ICommandExecution { - private commands: Map = new Map(); - private executedCommands: { command: string; options?: ExecuteOptions }[] = []; - - /** - * Register a mock response for a command. - */ - mockCommand(command: string, result: ExecuteResult): void { - this.commands.set(command, result); - } - - /** - * Get list of executed commands for verification. - */ - getExecutedCommands(): { command: string; options?: ExecuteOptions }[] { - return [...this.executedCommands]; - } - - /** - * Clear all mock commands and execution history. 
- */ - clear(): void { - this.commands.clear(); - this.executedCommands = []; - } - - async execute(command: string, options?: ExecuteOptions): Promise { - this.executedCommands.push({ command, options }); - - // Check for exact match - if (this.commands.has(command)) { - const result = this.commands.get(command); - if (result) { - return result; - } - } - - // Check for partial match (for commands with dynamic parts) - for (const [key, result] of this.commands) { - if (command.includes(key) || key.includes(command)) { - return result; - } - } - - // Default response - return { - stdout: '', - stderr: '', - exitCode: 0 - }; - } - - async executeStream( - command: string, - handlers: StreamHandlers, - options?: ExecuteOptions - ): Promise { - const result = await this.execute(command, options); - - if (handlers.onStdout && result.stdout) { - await handlers.onStdout({ text: result.stdout }); - } - if (handlers.onStderr && result.stderr) { - await handlers.onStderr({ text: result.stderr }); - } - if (handlers.onComplete) { - await handlers.onComplete(result); - } - } - - async executeBackground( - command: string, - options?: ExecuteOptions - ): Promise<{ sessionId: string; kill(): Promise }> { - await this.execute(command, options); - return { - sessionId: `mock-${Date.now()}`, - kill: async () => { - // No-op - } - }; - } - - async interrupt(_sessionId: string): Promise { - // No-op in mock - } -} diff --git a/sdk/sandbox/tests/mocks/MockSandboxAdapter.ts b/sdk/sandbox/tests/mocks/MockSandboxAdapter.ts deleted file mode 100644 index 57cb8bae8a..0000000000 --- a/sdk/sandbox/tests/mocks/MockSandboxAdapter.ts +++ /dev/null @@ -1,397 +0,0 @@ -import { BaseSandboxAdapter } from '../../src/adapters/BaseSandboxAdapter'; -import { FeatureNotSupportedError } from '../../src/errors'; -import type { - ContentReplaceEntry, - DirectoryEntry, - ExecuteOptions, - ExecuteResult, - FileDeleteResult, - FileInfo, - FileReadResult, - FileWriteEntry, - FileWriteResult, - MoveEntry, - 
PermissionEntry, - ProviderCapabilities, - ReadFileOptions, - SandboxConfig, - SandboxId, - SandboxInfo, - SandboxMetrics, - SandboxStatus, - SearchResult, - StreamHandlers -} from '../../src/types'; -import { createFullCapabilities } from '../../src/types'; - -/** - * Mock adapter for testing the base class behavior. - * Uses full capabilities by default. - */ -export class MockSandboxAdapter extends BaseSandboxAdapter { - readonly provider = 'mock'; - readonly capabilities: ProviderCapabilities; - - _id: SandboxId = 'mock-sandbox-id'; - _status: SandboxStatus = { state: 'Running' }; - - // Storage for mock filesystem - private files = new Map(); - private directories = new Set(); - - constructor(capabilities: ProviderCapabilities = createFullCapabilities()) { - super(); - this.capabilities = capabilities; - - // Initialize polyfill if needed - if (!capabilities.nativeFileSystem) { - this.initializePolyfillService(this); - } - } - - get id(): SandboxId { - return this._id; - } - - get status(): SandboxStatus { - return this._status; - } - - // Mock control methods - setFile(path: string, content: Uint8Array | string): void { - this.files.set(path, typeof content === 'string' ? 
new TextEncoder().encode(content) : content); - } - - getFile(path: string): Uint8Array | undefined { - return this.files.get(path); - } - - clearFiles(): void { - this.files.clear(); - this.directories.clear(); - } - - // Lifecycle methods (stubs) - async create(_config: SandboxConfig): Promise { - this._status = { state: 'Running' }; - } - - async start(): Promise { - this._status = { state: 'Running' }; - } - - async stop(): Promise { - this._status = { state: 'Deleted' }; - } - - async pause(): Promise { - if (!this.capabilities.supportsPauseResume) { - throw new FeatureNotSupportedError( - 'Pause not supported by mock provider', - 'pause', - this.provider - ); - } - this._status = { state: 'Paused' }; - } - - async resume(): Promise { - if (!this.capabilities.supportsPauseResume) { - throw new FeatureNotSupportedError( - 'Resume not supported by mock provider', - 'resume', - this.provider - ); - } - this._status = { state: 'Running' }; - } - - async delete(): Promise { - this._status = { state: 'Deleted' }; - } - - async getInfo(): Promise { - return { - id: this._id, - image: { repository: 'mock', tag: 'latest' }, - entrypoint: [], - status: this._status, - createdAt: new Date() - }; - } - - async close(): Promise { - // No-op - } - - protected async nativeRenewExpiration(_additionalSeconds: number): Promise { - // No-op - } - - // Command execution (native) - private sessionCounter = 0; - - protected async nativeExecute( - command: string, - _options?: ExecuteOptions - ): Promise { - // Handle specific commands for polyfill tests - if (command.includes('nproc')) { - return { stdout: '2', stderr: '', exitCode: 0, truncated: false }; - } - if (command.includes('/proc/meminfo')) { - const stdout = 'MemTotal: 4096000 kB\nMemFree: 2048000 kB\nMemAvailable: 3072000 kB'; - return { stdout, stderr: '', exitCode: 0, truncated: false }; - } - if (command.includes('echo "PING"')) { - return { stdout: 'PING', stderr: '', exitCode: 0, truncated: false }; - } - // Make 
cat write commands fail (cat with > or <<) - if (command.includes('cat ') && (command.includes('>') || command.includes('<<'))) { - return { stdout: '', stderr: 'mock: write not implemented', exitCode: 1, truncated: false }; - } - // Default success for other commands - return { - stdout: `Executed: ${command}`, - stderr: '', - exitCode: 0, - truncated: false - }; - } - - protected async nativeExecuteStream( - command: string, - handlers: StreamHandlers, - options?: ExecuteOptions - ): Promise { - if (handlers.onStdout) { - await handlers.onStdout({ text: `Streamed: ${command}` }); - } - if (handlers.onComplete) { - await handlers.onComplete(await this.nativeExecute(command, options)); - } - } - - protected async nativeExecuteBackground( - _command: string, - _options?: ExecuteOptions - ): Promise<{ sessionId: string; kill(): Promise }> { - const sessionId = `mock-session-${++this.sessionCounter}`; - return { - sessionId, - kill: async () => { - // No-op - } - }; - } - - protected async nativeInterrupt(_sessionId: string): Promise { - // No-op - } - - // Filesystem (native when capabilities allow) - protected async nativeReadFiles( - paths: string[], - _options?: ReadFileOptions - ): Promise { - return paths.map((path) => { - const content = this.files.get(path); - if (content) { - return { path, content, error: null }; - } - return { - path, - content: new Uint8Array(), - error: new Error(`File not found: ${path}`) - }; - }); - } - - protected async nativeWriteFiles(entries: FileWriteEntry[]): Promise { - return entries.map((entry) => { - try { - let data: Uint8Array; - if (typeof entry.data === 'string') { - data = new TextEncoder().encode(entry.data); - } else if (entry.data instanceof Uint8Array) { - data = entry.data; - } else if (entry.data instanceof ArrayBuffer) { - data = new Uint8Array(entry.data); - } else { - throw new Error('Stream/Blob not supported in mock'); - } - - this.files.set(entry.path, data); - return { path: entry.path, bytesWritten: 
data.length, error: null }; - } catch (error) { - return { - path: entry.path, - bytesWritten: 0, - error: error instanceof Error ? error : new Error(String(error)) - }; - } - }); - } - - protected async nativeDeleteFiles(paths: string[]): Promise { - return paths.map((path) => { - const existed = this.files.has(path); - this.files.delete(path); - return { path, success: existed, error: null }; - }); - } - - protected async nativeListDirectory(path: string): Promise { - const entries: DirectoryEntry[] = []; - const seen = new Set(); - - for (const filePath of this.files.keys()) { - if (filePath.startsWith(`${path}/`) || filePath.startsWith(path)) { - const relativePath = filePath.slice(path.length).replace(/^\//, ''); - const name = relativePath.split('/')[0]; - if (!seen.has(name)) { - seen.add(name); - const isDir = relativePath.includes('/'); - entries.push({ - name, - path: `${path}/${name}`, - isDirectory: isDir, - isFile: !isDir - }); - } - } - } - - return entries; - } - - protected async nativeGetFileInfo(paths: string[]): Promise> { - const info = new Map(); - for (const path of paths) { - const content = this.files.get(path); - if (content) { - info.set(path, { - path, - size: content.length, - isFile: true, - isDirectory: false - }); - } - } - return info; - } - - protected async nativeMoveFiles(entries: MoveEntry[]): Promise { - for (const { source, destination } of entries) { - const content = this.files.get(source); - if (content) { - this.files.set(destination, content); - this.files.delete(source); - } - } - } - - protected async nativeReplaceContent(entries: ContentReplaceEntry[]): Promise { - for (const { path, oldContent, newContent } of entries) { - const content = this.files.get(path); - if (content) { - const text = new TextDecoder().decode(content); - const replaced = text.replace(new RegExp(oldContent, 'g'), newContent); - this.files.set(path, new TextEncoder().encode(replaced)); - } - } - } - - protected async nativeCreateDirectories( - 
paths: string[], - _options?: { mode?: number; owner?: string; group?: string } - ): Promise { - for (const path of paths) { - this.directories.add(path); - } - } - - protected async nativeDeleteDirectories( - paths: string[], - options?: { recursive?: boolean; force?: boolean } - ): Promise { - for (const path of paths) { - this.directories.delete(path); - if (options?.recursive) { - for (const filePath of this.files.keys()) { - if (filePath.startsWith(`${path}/`)) { - this.files.delete(filePath); - } - } - } - } - } - - protected nativeReadFileStream(path: string): AsyncIterable { - const content = this.files.get(path); - if (!content) { - throw new Error(`File not found: ${path}`); - } - return { - [Symbol.asyncIterator]: async function* () { - yield content; - } - }; - } - - protected async nativeWriteFileStream( - path: string, - stream: ReadableStream - ): Promise { - const reader = stream.getReader(); - const chunks: Uint8Array[] = []; - while (true) { - const { done, value } = await reader.read(); - if (done) { - break; - } - chunks.push(value); - } - const totalLength = chunks.reduce((sum, c) => sum + c.length, 0); - const combined = new Uint8Array(totalLength); - let offset = 0; - for (const chunk of chunks) { - combined.set(chunk, offset); - offset += chunk.length; - } - this.files.set(path, combined); - } - - protected async nativeSetPermissions(_entries: PermissionEntry[]): Promise { - // No-op in mock - } - - protected async nativeSearch(pattern: string, path?: string): Promise { - const results: SearchResult[] = []; - const regex = new RegExp(pattern.replace(/\*/g, '.*')); - for (const filePath of this.files.keys()) { - if (!path || filePath.startsWith(path)) { - const basename = filePath.split('/').pop() || ''; - if (regex.test(basename)) { - results.push({ path: filePath, isFile: true }); - } - } - } - return results; - } - - // Health check (native) - protected async nativePing(): Promise { - return true; - } - - protected async 
nativeGetMetrics(): Promise { - return { - cpuCount: 2, - cpuUsedPercentage: 10, - memoryTotalMiB: 4096, - memoryUsedMiB: 1024, - timestamp: Date.now() - }; - } -} diff --git a/sdk/sandbox/tests/mocks/index.ts b/sdk/sandbox/tests/mocks/index.ts deleted file mode 100644 index b9c624b993..0000000000 --- a/sdk/sandbox/tests/mocks/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { MockCommandExecution } from './MockCommandExecution'; -export { MockSandboxAdapter } from './MockSandboxAdapter'; diff --git a/sdk/sandbox/tests/unit/adapters/BaseSandboxAdapter.test.ts b/sdk/sandbox/tests/unit/adapters/BaseSandboxAdapter.test.ts deleted file mode 100644 index 5bce78bf42..0000000000 --- a/sdk/sandbox/tests/unit/adapters/BaseSandboxAdapter.test.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { beforeEach, describe, expect, it } from 'vitest'; -import { FeatureNotSupportedError } from '../../../src/errors'; -import { createFullCapabilities, createMinimalCapabilities } from '../../../src/types'; -import { MockSandboxAdapter } from '../../mocks/MockSandboxAdapter'; - -describe('BaseSandboxAdapter', () => { - describe('with full capabilities (native filesystem)', () => { - let adapter: MockSandboxAdapter; - - beforeEach(() => { - adapter = new MockSandboxAdapter(createFullCapabilities()); - adapter.setFile('/test.txt', new TextEncoder().encode('Hello')); - }); - - it('should report full capabilities', () => { - expect(adapter.capabilities.nativeFileSystem).toBe(true); - expect(adapter.capabilities.supportsStreamingOutput).toBe(true); - expect(adapter.capabilities.supportsBatchOperations).toBe(true); - }); - - it('should use native readFiles', async () => { - const results = await adapter.readFiles(['/test.txt']); - expect(results).toHaveLength(1); - expect(results[0].error).toBeNull(); - expect(new TextDecoder().decode(results[0].content)).toBe('Hello'); - }); - - it('should use native writeFiles', async () => { - const results = await adapter.writeFiles([{ path: '/new.txt', data: 
'World' }]); - expect(results[0].error).toBeNull(); - expect(results[0].bytesWritten).toBe(5); - - const readBack = await adapter.readFiles(['/new.txt']); - expect(new TextDecoder().decode(readBack[0].content)).toBe('World'); - }); - - it('should execute commands natively', async () => { - const result = await adapter.execute('echo test'); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('echo test'); - }); - - it('should support streaming when capability is present', async () => { - const stdoutChunks: string[] = []; - - await adapter.executeStream('echo streaming', { - onStdout: (msg) => stdoutChunks.push(msg.text) - }); - - expect(stdoutChunks.length).toBeGreaterThan(0); - }); - - it('should throw FeatureNotSupportedError for unsupported pause', async () => { - // Create adapter with pause disabled - const caps = createFullCapabilities(); - caps.supportsPauseResume = false; - const limitedAdapter = new MockSandboxAdapter(caps); - - try { - await limitedAdapter.pause(); - expect(false).toBe(true); // Should not reach here - } catch (error) { - expect(error).toBeInstanceOf(FeatureNotSupportedError); - expect((error as FeatureNotSupportedError).feature).toBe('pause'); - } - }); - - it('should throw FeatureNotSupportedError for unsupported background execution', async () => { - const caps = createFullCapabilities(); - caps.supportsBackgroundExecution = false; - const limitedAdapter = new MockSandboxAdapter(caps); - - try { - await limitedAdapter.executeBackground('sleep 10'); - expect(false).toBe(true); - } catch (error) { - expect(error).toBeInstanceOf(FeatureNotSupportedError); - } - }); - }); - - describe('with minimal capabilities (polyfilled filesystem)', () => { - let adapter: MockSandboxAdapter; - - beforeEach(() => { - adapter = new MockSandboxAdapter(createMinimalCapabilities()); - }); - - it('should report no native filesystem', () => { - expect(adapter.capabilities.nativeFileSystem).toBe(false); - 
expect(adapter.capabilities.supportsStreamingTransfer).toBe(false); - }); - - it('should route readFiles through polyfill', async () => { - // With minimal capabilities, polyfill service should be used - // The polyfill will try to execute cat commands - const result = await adapter.readFiles(['/any.txt']); - // Polyfill will fail because no mock command is set up - expect(result[0].error).not.toBeNull(); - }); - - it('should route writeFiles through polyfill', async () => { - const results = await adapter.writeFiles([{ path: '/test.txt', data: 'content' }]); - // Polyfill will fail because no mock command is set up - expect(results[0].error).not.toBeNull(); - }); - - it('should use fallback for streaming when not supported', async () => { - const stdoutChunks: string[] = []; - - await adapter.executeStream('echo test', { - onStdout: (msg) => stdoutChunks.push(msg.text) - }); - - // Should still work via fallback (execute + call handlers) - expect(stdoutChunks.length).toBeGreaterThan(0); - }); - - it('should ping via polyfill', async () => { - // With minimal capabilities, ping goes through polyfill - const result = await adapter.ping(); - // Should work via the echo PING fallback - expect(typeof result).toBe('boolean'); - }); - }); - - describe('waitUntilReady', () => { - it('should resolve when sandbox is ready', async () => { - const adapter = new MockSandboxAdapter(); - // Mock adapter's nativePing always returns true - await adapter.waitUntilReady(5000); - // Should not throw - expect(true).toBe(true); - }); - }); - - describe('capabilities', () => { - it('should allow checking individual capabilities', () => { - const adapter = new MockSandboxAdapter(); - expect(adapter.capabilities.nativeFileSystem).toBe(true); - expect(adapter.capabilities.nativeHealthCheck).toBe(true); - }); - }); -}); diff --git a/sdk/sandbox/tests/unit/adapters/MinimalProviderAdapter.test.ts b/sdk/sandbox/tests/unit/adapters/MinimalProviderAdapter.test.ts deleted file mode 100644 index 
95d4306659..0000000000 --- a/sdk/sandbox/tests/unit/adapters/MinimalProviderAdapter.test.ts +++ /dev/null @@ -1,218 +0,0 @@ -import { beforeEach, describe, expect, it } from 'vitest'; -import { - MinimalProviderAdapter, - type MinimalProviderConnection -} from '../../../src/adapters/MinimalProviderAdapter'; -import { FeatureNotSupportedError } from '../../../src/errors'; - -// Mock connection for testing -class MockConnection implements MinimalProviderConnection { - id = 'mock-minimal-id'; - private shouldFail = false; - - setShouldFail(fail: boolean): void { - this.shouldFail = fail; - } - - async execute(command: string): Promise<{ stdout: string; stderr: string; exitCode: number }> { - if (this.shouldFail) { - return { stdout: '', stderr: 'Connection failed', exitCode: 1 }; - } - - // Simulate various command responses - if (command.includes('echo PING')) { - return { stdout: 'PING', stderr: '', exitCode: 0 }; - } - - if (command.includes('nproc')) { - return { stdout: '2', stderr: '', exitCode: 0 }; - } - - if (command.includes('/proc/meminfo')) { - const stdout = 'MemTotal: 4096000 kB\nMemFree: 2048000 kB\nMemAvailable: 3072000 kB'; - return { - stdout, - stderr: '', - exitCode: 0 - }; - } - - if (command.includes('cat ')) { - // Simulate file read via base64 - if (command.includes('test.txt')) { - // "Hello" in base64 - return { stdout: 'SGVsbG8=', stderr: '', exitCode: 0 }; - } - return { stdout: '', stderr: 'cat: No such file', exitCode: 1 }; - } - - if (command.includes('mkdir -p')) { - return { stdout: '', stderr: '', exitCode: 0 }; - } - - if (command.includes('base64 -d')) { - // Simulate write success - return { stdout: '', stderr: '', exitCode: 0 }; - } - - if (command.includes('ls -la')) { - return { - stdout: `total 8 -drwxr-xr-x 2 user group 4096 2024-01-15T10:00:00 . -drwxr-xr-x 3 user group 4096 2024-01-15T10:00:00 .. 
--rw-r--r-- 1 user group 100 2024-01-15T10:30:00 file.txt`, - stderr: '', - exitCode: 0 - }; - } - - // Default response - return { stdout: `Executed: ${command}`, stderr: '', exitCode: 0 }; - } - - async getStatus() { - return { state: 'Running' as const }; - } - - async close(): Promise { - // No-op - } -} - -describe('MinimalProviderAdapter', () => { - let adapter: MinimalProviderAdapter; - let mockConnection: MockConnection; - - beforeEach(() => { - mockConnection = new MockConnection(); - adapter = new MinimalProviderAdapter(); - }); - - describe('capabilities', () => { - it('should report minimal capabilities', () => { - expect(adapter.capabilities.nativeFileSystem).toBe(false); - expect(adapter.capabilities.supportsStreamingOutput).toBe(false); - expect(adapter.capabilities.supportsBackgroundExecution).toBe(false); - expect(adapter.capabilities.nativeHealthCheck).toBe(false); - expect(adapter.capabilities.nativeMetrics).toBe(false); - }); - }); - - describe('connect', () => { - it('should connect and initialize polyfill', async () => { - await adapter.connect(mockConnection); - - expect(adapter.id).toBe('mock-minimal-id'); - expect(adapter.status.state).toBe('Running'); - }); - }); - - describe('execute', () => { - beforeEach(async () => { - await adapter.connect(mockConnection); - }); - - it('should execute commands through connection', async () => { - const result = await adapter.execute('echo hello'); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('Executed'); - }); - - it('should handle workingDirectory option', async () => { - const result = await adapter.execute('pwd', { workingDirectory: '/tmp' }); - expect(result.exitCode).toBe(0); - }); - }); - - describe('filesystem operations (via polyfill)', () => { - beforeEach(async () => { - await adapter.connect(mockConnection); - }); - - it('should read files via polyfill', async () => { - const results = await adapter.readFiles(['/test.txt']); - // The polyfill will execute cat + 
base64 - expect(results).toBeDefined(); - }); - - it('should list directories via polyfill', async () => { - const entries = await adapter.listDirectory('/home'); - expect(entries.length).toBeGreaterThan(0); - expect(entries[0].name).toBe('file.txt'); - }); - - it('should write files via polyfill', async () => { - const results = await adapter.writeFiles([{ path: '/test.txt', data: 'content' }]); - // Polyfill service handles the write - expect(results).toBeDefined(); - }); - }); - - describe('unsupported operations', () => { - beforeEach(async () => { - await adapter.connect(mockConnection); - }); - - it('should throw FeatureNotSupportedError for pause', async () => { - try { - await adapter.pause(); - expect(false).toBe(true); - } catch (error) { - expect(error).toBeInstanceOf(FeatureNotSupportedError); - expect((error as FeatureNotSupportedError).feature).toBe('pause'); - } - }); - - it('should throw FeatureNotSupportedError for resume', async () => { - try { - await adapter.resume(); - expect(false).toBe(true); - } catch (error) { - expect(error).toBeInstanceOf(FeatureNotSupportedError); - expect((error as FeatureNotSupportedError).feature).toBe('resume'); - } - }); - - it('should throw FeatureNotSupportedError for renewExpiration', async () => { - try { - await adapter.renewExpiration(3600); - expect(false).toBe(true); - } catch (error) { - expect(error).toBeInstanceOf(FeatureNotSupportedError); - } - }); - }); - - describe('health check (via polyfill)', () => { - beforeEach(async () => { - await adapter.connect(mockConnection); - }); - - it('should ping via polyfill', async () => { - const result = await adapter.ping(); - expect(result).toBe(true); - }); - - it('should get metrics via polyfill', async () => { - const metrics = await adapter.getMetrics(); - expect(metrics.cpuCount).toBe(2); - expect(metrics.memoryTotalMiB).toBe(4000); - }); - }); - - describe('executeStream fallback', () => { - beforeEach(async () => { - await adapter.connect(mockConnection); 
- }); - - it('should fallback to execute when streaming not supported', async () => { - const stdoutChunks: string[] = []; - - await adapter.executeStream('echo test', { - onStdout: (msg) => stdoutChunks.push(msg.text) - }); - - expect(stdoutChunks.length).toBeGreaterThan(0); - }); - }); -}); diff --git a/sdk/sandbox/tests/unit/adapters/OpenSandboxAdapter.test.ts b/sdk/sandbox/tests/unit/adapters/OpenSandboxAdapter.test.ts deleted file mode 100644 index 2f2d6f81bd..0000000000 --- a/sdk/sandbox/tests/unit/adapters/OpenSandboxAdapter.test.ts +++ /dev/null @@ -1,463 +0,0 @@ -import { describe, expect, it } from 'vitest'; -import { OpenSandboxAdapter } from '../../../src/adapters/OpenSandboxAdapter'; -import { ConnectionError, SandboxStateError } from '../../../src/errors'; -import type { ImageSpec, ResourceLimits, SandboxConfig } from '../../../src/types'; - -/** - * Unit tests for OpenSandboxAdapter. - * - * These tests verify the OpenSandboxAdapter lifecycle, filesystem operations, - * command execution, and health checks using mocked SDK behavior. 
- */ -describe('OpenSandboxAdapter', () => { - describe('Lifecycle Methods', () => { - it('should initialize with correct default values', () => { - const adapter = new OpenSandboxAdapter(); - - expect(adapter.provider).toBe('opensandbox'); - expect(adapter.id).toBe(''); - expect(adapter.connectionState).toBe('disconnected'); - expect(adapter.capabilities.nativeFileSystem).toBe(false); - expect(adapter.capabilities.supportsStreamingOutput).toBe(true); - expect(adapter.capabilities.supportsPauseResume).toBe(true); - }); - - it('should initialize with custom connection config', () => { - const adapter = new OpenSandboxAdapter({ - baseUrl: 'https://api.example.com', - apiKey: 'test-api-key' - }); - - expect(adapter.provider).toBe('opensandbox'); - expect(adapter.connectionState).toBe('disconnected'); - }); - - it('should handle connection state transitions', async () => { - const adapter = new OpenSandboxAdapter(); - - // Initially disconnected - expect(adapter.connectionState).toBe('disconnected'); - - // After creation, should be connected (mocked) - // Note: Actual SDK calls are mocked in integration tests - }); - - it('should throw SandboxStateError when accessing sandbox before initialization', async () => { - const adapter = new OpenSandboxAdapter(); - - // Attempting operations before create/connect should throw - await expect(adapter.execute('echo test')).rejects.toThrow(SandboxStateError); - }); - - it('should handle connection errors gracefully', async () => { - // Test with a URL that will fail - using a reserved port that won't have a server - const adapter = new OpenSandboxAdapter({ - baseUrl: 'http://localhost:65530' - }); - - const config: SandboxConfig = { - image: { repository: 'nginx', tag: 'latest' } - }; - - // Should throw an error when SDK fails - try { - await adapter.create(config); - // If we reach here without throwing, that's unexpected - expect(true).toBe(false); // Force failure if no error thrown - } catch (error) { - expect(error 
instanceof ConnectionError || error instanceof Error).toBe(true); - } - }); - - it('should handle connect errors gracefully', async () => { - const adapter = new OpenSandboxAdapter({ - baseUrl: 'http://localhost:65530' - }); - - try { - await adapter.connect('non-existent-sandbox-id'); - expect(true).toBe(false); - } catch (error) { - expect(error instanceof ConnectionError || error instanceof Error).toBe(true); - } - }); - - it('should update status after lifecycle operations', async () => { - const adapter = new OpenSandboxAdapter(); - - // Status should be accessible - expect(adapter.status).toBeDefined(); - expect(['Creating', 'Running', 'Stopped', 'Paused', 'Deleted', 'Error']).toContain( - adapter.status.state - ); - }); - }); - - describe('Image and Resource Conversion', () => { - it('should convert ImageSpec to SDK format', () => { - const adapter = new OpenSandboxAdapter(); - - // Test tag format - const imageWithTag: ImageSpec = { repository: 'nginx', tag: 'latest' }; - // Access private method through type assertion for testing - const convertImageSpec = ( - adapter as unknown as { convertImageSpec(image: ImageSpec): string } - ).convertImageSpec; - expect(convertImageSpec(imageWithTag)).toBe('nginx:latest'); - - // Test digest format - const imageWithDigest: ImageSpec = { - repository: 'nginx', - digest: 'sha256:abc123' - }; - expect(convertImageSpec(imageWithDigest)).toBe('nginx@sha256:abc123'); - - // Test tag and digest - const imageWithBoth: ImageSpec = { - repository: 'nginx', - tag: '1.0', - digest: 'sha256:abc123' - }; - expect(convertImageSpec(imageWithBoth)).toBe('nginx:1.0@sha256:abc123'); - - // Test just repository - const imageRepoOnly: ImageSpec = { repository: 'nginx' }; - expect(convertImageSpec(imageRepoOnly)).toBe('nginx'); - }); - - it('should parse SDK image string to ImageSpec', () => { - const adapter = new OpenSandboxAdapter(); - const parseImageSpec = (adapter as unknown as { parseImageSpec(image: string): ImageSpec }) - 
.parseImageSpec; - - // Test tag format - const withTag = parseImageSpec('nginx:latest'); - expect(withTag.repository).toBe('nginx'); - expect(withTag.tag).toBe('latest'); - - // Test digest format - const withDigest = parseImageSpec('nginx@sha256:abc123'); - expect(withDigest.repository).toBe('nginx'); - expect(withDigest.digest).toBe('sha256:abc123'); - - // Test repository only - const repoOnly = parseImageSpec('nginx'); - expect(repoOnly.repository).toBe('nginx'); - expect(repoOnly.tag).toBeUndefined(); - expect(repoOnly.digest).toBeUndefined(); - }); - - it('should convert ResourceLimits to SDK format', () => { - const adapter = new OpenSandboxAdapter(); - const convertResourceLimits = ( - adapter as unknown as { - convertResourceLimits(limits?: ResourceLimits): Record | undefined; - } - ).convertResourceLimits; - - // Full limits - const limits: ResourceLimits = { - cpuCount: 2, - memoryMiB: 512, - diskGiB: 10 - }; - const converted = convertResourceLimits(limits); - expect(converted).toEqual({ - cpu: '2', - memory: '512Mi', - disk: '10Gi' - }); - - // Partial limits - const partial: ResourceLimits = { cpuCount: 4 }; - expect(convertResourceLimits(partial)).toEqual({ cpu: '4' }); - - // Empty limits - expect(convertResourceLimits({})).toEqual({}); - - // Undefined - expect(convertResourceLimits(undefined)).toBeUndefined(); - }); - - it('should parse SDK resource limits to ResourceLimits', () => { - const adapter = new OpenSandboxAdapter(); - const parseResourceLimits = ( - adapter as unknown as { - parseResourceLimits(resource?: Record): ResourceLimits | undefined; - } - ).parseResourceLimits; - - // Full resource limits - const sdkLimits = { - cpu: '2', - memory: '512Mi', - disk: '10Gi' - }; - const parsed = parseResourceLimits(sdkLimits); - expect(parsed).toEqual({ - cpuCount: 2, - memoryMiB: 512, - diskGiB: 10 - }); - - // GiB memory conversion - const gibMemory = { memory: '2Gi' }; - expect(parseResourceLimits(gibMemory)).toEqual({ memoryMiB: 2048 }); - - 
// Empty object - expect(parseResourceLimits({})).toEqual({}); - - // Undefined - expect(parseResourceLimits(undefined)).toBeUndefined(); - }); - }); - - describe('Sandbox Configuration', () => { - it('should handle SandboxConfig with all options', () => { - const _adapter = new OpenSandboxAdapter(); - - const fullConfig: SandboxConfig = { - image: { repository: 'node', tag: '18-alpine' }, - entrypoint: ['node', 'app.js'], - timeout: 3600, - resourceLimits: { - cpuCount: 2, - memoryMiB: 1024, - diskGiB: 20 - }, - env: { NODE_ENV: 'production', PORT: '3000' }, - metadata: { project: 'test', version: '1.0' } - }; - - // Config should be valid - expect(fullConfig.image.repository).toBe('node'); - expect(fullConfig.timeout).toBe(3600); - expect(fullConfig.resourceLimits?.cpuCount).toBe(2); - }); - - it('should handle minimal SandboxConfig', () => { - const minimalConfig: SandboxConfig = { - image: { repository: 'alpine' } - }; - - expect(minimalConfig.image.repository).toBe('alpine'); - expect(minimalConfig.timeout).toBeUndefined(); - }); - }); - - describe('Lifecycle State Management', () => { - it('should track connection state correctly', () => { - const adapter = new OpenSandboxAdapter(); - - // Initial state - expect(adapter.connectionState).toBe('disconnected'); - - // States should be one of the valid values - const validStates = ['disconnected', 'connecting', 'connected', 'closed']; - expect(validStates).toContain(adapter.connectionState); - }); - - it('should reset state on close', async () => { - const adapter = new OpenSandboxAdapter(); - - // Before close - expect(adapter.id).toBe(''); - - // After close should reset - await adapter.close(); - expect(adapter.connectionState).toBe('closed'); - }); - }); - - describe('Error Handling', () => { - it('should wrap SDK errors in ConnectionError for create', async () => { - const adapter = new OpenSandboxAdapter({ - baseUrl: 'http://localhost:1' // Invalid port - }); - - try { - await adapter.create({ image: { 
repository: 'test' } }); - } catch (error) { - // Should be a connection-related error - expect(error instanceof Error).toBe(true); - } - }); - - it('should wrap SDK errors in ConnectionError for connect', async () => { - const adapter = new OpenSandboxAdapter({ - baseUrl: 'http://localhost:1' - }); - - try { - await adapter.connect('invalid-id'); - } catch (error) { - expect(error instanceof Error).toBe(true); - } - }); - - it('should provide meaningful error messages', () => { - const connectionError = new ConnectionError( - 'Failed to create sandbox', - 'http://example.com', - new Error('Network timeout') - ); - - expect(connectionError.message).toContain('Failed to create sandbox'); - expect(connectionError.endpoint).toBe('http://example.com'); - expect(connectionError.cause).toBeDefined(); - }); - - it('should create SandboxStateError with expected state', () => { - const stateError = new SandboxStateError( - 'Sandbox not initialized', - 'disconnected', - 'connected' - ); - - expect(stateError.message).toContain('Sandbox not initialized'); - expect(stateError.currentState).toBe('disconnected'); - expect(stateError.requiredState).toBe('connected'); - }); - }); - - describe('Capabilities', () => { - it('should report full capabilities', () => { - const adapter = new OpenSandboxAdapter(); - const caps = adapter.capabilities; - - expect(caps.nativeFileSystem).toBe(false); - expect(caps.supportsBackgroundExecution).toBe(true); - expect(caps.supportsStreamingOutput).toBe(true); - expect(caps.supportsPauseResume).toBe(true); - expect(caps.supportsStreamingTransfer).toBe(true); - expect(caps.supportsBatchOperations).toBe(true); - expect(caps.supportsPermissions).toBe(true); - expect(caps.supportsSearch).toBe(true); - expect(caps.supportsRenews).toBe(true); - expect(caps.nativeHealthCheck).toBe(true); - expect(caps.nativeMetrics).toBe(true); - }); - - it('should have unique provider name', () => { - const adapter = new OpenSandboxAdapter(); - 
expect(adapter.provider).toBe('opensandbox'); - expect(adapter.provider).not.toBe('minimal'); - }); - }); - - describe('Wait Until Ready', () => { - it('should timeout when sandbox not ready', async () => { - const adapter = new OpenSandboxAdapter(); - - // Without proper initialization, should timeout or error - try { - await adapter.waitUntilReady(100); // Short timeout - } catch (error) { - // Expected to throw since sandbox not created - expect(error instanceof Error).toBe(true); - } - }); - }); - - describe('Runtime Configuration', () => { - it('should default to docker runtime', () => { - const adapter = new OpenSandboxAdapter(); - - expect(adapter.runtime).toBe('docker'); - }); - - it('should accept docker runtime explicitly', () => { - const adapter = new OpenSandboxAdapter({ runtime: 'docker' }); - - expect(adapter.runtime).toBe('docker'); - }); - - it('should accept kubernetes runtime', () => { - const adapter = new OpenSandboxAdapter({ runtime: 'kubernetes' }); - - expect(adapter.runtime).toBe('kubernetes'); - }); - - it('should have full capabilities in docker runtime', () => { - const adapter = new OpenSandboxAdapter({ runtime: 'docker' }); - - expect(adapter.capabilities.supportsPauseResume).toBe(true); - expect(adapter.capabilities.supportsBackgroundExecution).toBe(true); - expect(adapter.capabilities.supportsStreamingOutput).toBe(true); - }); - - it('should disable pause/resume in kubernetes runtime', () => { - const adapter = new OpenSandboxAdapter({ runtime: 'kubernetes' }); - - expect(adapter.capabilities.supportsPauseResume).toBe(false); - expect(adapter.capabilities.supportsBackgroundExecution).toBe(true); - expect(adapter.capabilities.supportsStreamingOutput).toBe(true); - }); - - it('should preserve common capabilities across runtimes', () => { - const dockerAdapter = new OpenSandboxAdapter({ runtime: 'docker' }); - const k8sAdapter = new OpenSandboxAdapter({ runtime: 'kubernetes' }); - - // Both should have these capabilities - const 
commonCapabilities = [ - 'nativeHealthCheck', - 'nativeMetrics', - 'supportsStreamingTransfer', - 'supportsBatchOperations', - 'supportsPermissions', - 'supportsSearch', - 'supportsRenews' - ] as const; - - for (const cap of commonCapabilities) { - expect(dockerAdapter.capabilities[cap]).toBe(true); - expect(k8sAdapter.capabilities[cap]).toBe(true); - } - }); - - it('should report correct provider for both runtimes', () => { - const dockerAdapter = new OpenSandboxAdapter({ runtime: 'docker' }); - const k8sAdapter = new OpenSandboxAdapter({ runtime: 'kubernetes' }); - - expect(dockerAdapter.provider).toBe('opensandbox'); - expect(k8sAdapter.provider).toBe('opensandbox'); - }); - - it('should handle runtime with other connection config options', () => { - const adapter = new OpenSandboxAdapter({ - baseUrl: 'https://api.example.com', - apiKey: 'test-api-key', - runtime: 'kubernetes' - }); - - expect(adapter.runtime).toBe('kubernetes'); - expect(adapter.connectionState).toBe('disconnected'); - }); - }); - - describe('Runtime State Transitions', () => { - it('should track runtime type independently of connection state', () => { - const dockerAdapter = new OpenSandboxAdapter({ runtime: 'docker' }); - const k8sAdapter = new OpenSandboxAdapter({ runtime: 'kubernetes' }); - - // Both start disconnected - expect(dockerAdapter.connectionState).toBe('disconnected'); - expect(k8sAdapter.connectionState).toBe('disconnected'); - - // Runtime types are preserved - expect(dockerAdapter.runtime).toBe('docker'); - expect(k8sAdapter.runtime).toBe('kubernetes'); - }); - - it('should maintain runtime through lifecycle operations', async () => { - const adapter = new OpenSandboxAdapter({ runtime: 'kubernetes' }); - - // Runtime is immutable - expect(adapter.runtime).toBe('kubernetes'); - - // After close, runtime should still be preserved - await adapter.close(); - expect(adapter.runtime).toBe('kubernetes'); - }); - }); -}); diff --git 
a/sdk/sandbox/tests/unit/factory/SandboxProviderFactory.test.ts b/sdk/sandbox/tests/unit/factory/SandboxProviderFactory.test.ts deleted file mode 100644 index adb55dedf9..0000000000 --- a/sdk/sandbox/tests/unit/factory/SandboxProviderFactory.test.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { describe, expect, it } from 'vitest'; -import { MinimalProviderAdapter, OpenSandboxAdapter } from '../../../src/adapters'; -import { createSandbox, SandboxProviderFactory } from '../../../src/factory/SandboxProviderFactory'; - -describe('SandboxProviderFactory', () => { - describe('create', () => { - it('should create OpenSandbox adapter', () => { - const sandbox = SandboxProviderFactory.create({ - provider: 'opensandbox', - connection: { - baseUrl: 'http://localhost:8080', - apiKey: 'test-key' - } - }); - - expect(sandbox).toBeInstanceOf(OpenSandboxAdapter); - expect(sandbox.provider).toBe('opensandbox'); - expect(sandbox.capabilities.nativeFileSystem).toBe(false); - }); - - it('should create minimal provider adapter', () => { - const sandbox = SandboxProviderFactory.create({ - provider: 'minimal' - }); - - expect(sandbox).toBeInstanceOf(MinimalProviderAdapter); - expect(sandbox.provider).toBe('minimal'); - expect(sandbox.capabilities.nativeFileSystem).toBe(false); - }); - - it('should throw error for unknown provider', () => { - try { - SandboxProviderFactory.create({ - provider: 'unknown' - }); - expect(false).toBe(true); // Should not reach here - } catch (error) { - expect(error).toBeInstanceOf(Error); - expect((error as Error).message).toContain('Unknown provider'); - } - }); - }); - - describe('getAvailableProviders', () => { - it('should list available providers', () => { - const providers = SandboxProviderFactory.getAvailableProviders(); - - expect(providers).toContain('opensandbox'); - expect(providers).toContain('minimal'); - }); - }); - - describe('registerProvider', () => { - it('should allow registering custom providers', () => { - const customFactory = () => new 
MinimalProviderAdapter(); - - SandboxProviderFactory.registerProvider('custom', customFactory); - - const sandbox = SandboxProviderFactory.create({ - provider: 'custom' - }); - - expect(sandbox).toBeDefined(); - - // Should now be in available providers - const providers = SandboxProviderFactory.getAvailableProviders(); - expect(providers).toContain('custom'); - }); - }); -}); - -describe('createSandbox convenience function', () => { - it('should work as shorthand for factory.create', () => { - const sandbox = createSandbox({ - provider: 'opensandbox' - }); - - expect(sandbox).toBeInstanceOf(OpenSandboxAdapter); - }); -}); diff --git a/sdk/sandbox/tests/unit/polyfill/CommandPolyfillService.test.ts b/sdk/sandbox/tests/unit/polyfill/CommandPolyfillService.test.ts deleted file mode 100644 index 08ca7c3987..0000000000 --- a/sdk/sandbox/tests/unit/polyfill/CommandPolyfillService.test.ts +++ /dev/null @@ -1,332 +0,0 @@ -import { beforeEach, describe, expect, it } from 'vitest'; -import { FileOperationError } from '../../../src/errors'; -import { CommandPolyfillService } from '../../../src/polyfill/CommandPolyfillService'; -import { bytesToBase64 } from '../../../src/utils/base64'; -import { MockCommandExecution } from '../../mocks/MockCommandExecution'; - -describe('CommandPolyfillService', () => { - let mockExecutor: MockCommandExecution; - let polyfill: CommandPolyfillService; - - beforeEach(() => { - mockExecutor = new MockCommandExecution(); - polyfill = new CommandPolyfillService(mockExecutor); - }); - - describe('readFile', () => { - it('should read file via base64 encoding', async () => { - const expectedContent = 'Hello, World!'; - const base64Content = bytesToBase64(new TextEncoder().encode(expectedContent)); - - mockExecutor.mockCommand('cat "/test/file.txt" | base64 -w 0', { - stdout: base64Content, - stderr: '', - exitCode: 0 - }); - - const result = await polyfill.readFile('/test/file.txt'); - const decoded = new TextDecoder().decode(result); - - 
expect(decoded).toBe(expectedContent); - }); - - it('should throw FileOperationError for non-existent file', async () => { - mockExecutor.mockCommand('cat "/nonexistent" | base64 -w 0', { - stdout: '', - stderr: 'cat: /nonexistent: No such file or directory', - exitCode: 1 - }); - - try { - await polyfill.readFile('/nonexistent'); - expect(false).toBe(true); // Should not reach here - } catch (error) { - expect(error).toBeInstanceOf(FileOperationError); - expect((error as FileOperationError).fileErrorCode).toBe('FILE_NOT_FOUND'); - } - }); - - it('should throw FileOperationError for permission denied', async () => { - mockExecutor.mockCommand('cat "/secret" | base64 -w 0', { - stdout: '', - stderr: 'cat: /secret: Permission denied', - exitCode: 1 - }); - - try { - await polyfill.readFile('/secret'); - expect(false).toBe(true); - } catch (error) { - expect(error).toBeInstanceOf(FileOperationError); - expect((error as FileOperationError).fileErrorCode).toBe('PERMISSION_DENIED'); - } - }); - }); - - describe('writeFile', () => { - it('should write file via base64 encoding', async () => { - const content = new TextEncoder().encode('Test content'); - const base64Content = bytesToBase64(content); - - mockExecutor.mockCommand('mkdir -p "/test"', { - stdout: '', - stderr: '', - exitCode: 0 - }); - - mockExecutor.mockCommand(`echo "${base64Content}" | base64 -d > "/test/output.txt"`, { - stdout: '', - stderr: '', - exitCode: 0 - }); - - const bytesWritten = await polyfill.writeFile('/test/output.txt', content); - expect(bytesWritten).toBe(content.length); - - const commands = mockExecutor.getExecutedCommands(); - expect(commands.some((c) => c.command.includes('mkdir -p'))).toBe(true); - expect(commands.some((c) => c.command.includes('base64 -d'))).toBe(true); - }); - - it('should write text file directly', async () => { - mockExecutor.mockCommand('mkdir -p "/test"', { - stdout: '', - stderr: '', - exitCode: 0 - }); - - mockExecutor.mockCommand('cat > "/test/text.txt" << 
\'POLYFILL_EOF\'', { - stdout: '', - stderr: '', - exitCode: 0 - }); - - const bytesWritten = await polyfill.writeTextFile('/test/text.txt', 'Hello World'); - expect(bytesWritten).toBe(11); - }); - }); - - describe('deleteFiles', () => { - it('should delete multiple files', async () => { - mockExecutor.mockCommand('rm -f "/file1.txt"', { - stdout: '', - stderr: '', - exitCode: 0 - }); - - mockExecutor.mockCommand('rm -f "/file2.txt"', { - stdout: '', - stderr: '', - exitCode: 0 - }); - - const results = await polyfill.deleteFiles(['/file1.txt', '/file2.txt']); - - expect(results).toHaveLength(2); - expect(results[0].success).toBe(true); - expect(results[1].success).toBe(true); - }); - - it('should report failures for files that cannot be deleted', async () => { - mockExecutor.mockCommand('rm -f "/protected"', { - stdout: '', - stderr: 'rm: cannot remove', - exitCode: 1 - }); - - const results = await polyfill.deleteFiles(['/protected']); - - expect(results[0].success).toBe(false); - expect(results[0].error).toBeDefined(); - }); - }); - - describe('createDirectories', () => { - it('should create directories with mkdir -p', async () => { - mockExecutor.mockCommand('mkdir -p "/new/dir"', { - stdout: '', - stderr: '', - exitCode: 0 - }); - - await polyfill.createDirectories(['/new/dir']); - - const commands = mockExecutor.getExecutedCommands(); - expect(commands[0].command).toBe('mkdir -p "/new/dir"'); - }); - - it('should set permissions when specified', async () => { - mockExecutor.mockCommand('mkdir -p "/new/dir"', { - stdout: '', - stderr: '', - exitCode: 0 - }); - - mockExecutor.mockCommand('chmod 755 "/new/dir"', { - stdout: '', - stderr: '', - exitCode: 0 - }); - - await polyfill.createDirectories(['/new/dir'], { mode: 0o755 }); - - const commands = mockExecutor.getExecutedCommands(); - expect(commands.some((c) => c.command.includes('chmod 755'))).toBe(true); - }); - }); - - describe('listDirectory', () => { - it('should parse ls -la output', async () => { - 
const lsOutput = `total 12 -drwxr-xr-x 2 user group 4096 2024-01-15T10:30:00 . -drwxr-xr-x 3 user group 4096 2024-01-15T10:30:00 .. --rw-r--r-- 1 user group 123 2024-01-15T11:00:00 file.txt -drwxr-xr-x 2 user group 4096 2024-01-15T10:45:00 subdir`; - - mockExecutor.mockCommand( - 'ls -la "/test" --time-style=+"%Y-%m-%dT%H:%M:%S" 2>/dev/null || echo "DIRECTORY_NOT_FOUND"', - { - stdout: lsOutput, - stderr: '', - exitCode: 0 - } - ); - - const entries = await polyfill.listDirectory('/test'); - - expect(entries).toHaveLength(2); // . and .. excluded - expect(entries.find((e) => e.name === 'file.txt')?.isFile).toBe(true); - expect(entries.find((e) => e.name === 'subdir')?.isDirectory).toBe(true); - }); - - it('should throw FileOperationError for non-existent directory', async () => { - mockExecutor.mockCommand( - 'ls -la "/nonexistent" --time-style=+"%Y-%m-%dT%H:%M:%S" 2>/dev/null || echo "DIRECTORY_NOT_FOUND"', - { - stdout: 'DIRECTORY_NOT_FOUND', - stderr: '', - exitCode: 0 - } - ); - - try { - await polyfill.listDirectory('/nonexistent'); - expect(false).toBe(true); - } catch (error) { - expect(error).toBeInstanceOf(FileOperationError); - } - }); - }); - - describe('getFileInfo', () => { - it('should parse stat output', async () => { - // Format: size|mtime|ctime|mode|user|group|type - mockExecutor.mockCommand( - 'stat -c \'%s|%Y|%W|%a|%U|%G|%F\' "/test/file.txt" 2>/dev/null || echo "STAT_FAILED"', - { - stdout: '1234|1705312200|1705311000|644|user|group|regular file', - stderr: '', - exitCode: 0 - } - ); - - const info = await polyfill.getFileInfo(['/test/file.txt']); - - expect(info.has('/test/file.txt')).toBe(true); - const fileInfo = info.get('/test/file.txt'); - expect(fileInfo).toBeDefined(); - expect(fileInfo?.size).toBe(1234); - expect(fileInfo?.isFile).toBe(true); - expect(fileInfo?.mode).toBe(0o644); - }); - }); - - describe('search', () => { - it('should use find command for search', async () => { - mockExecutor.mockCommand( - 'find "/home" -name 
\'*.txt\' -print 2>/dev/null || echo "FIND_FAILED"', - { - stdout: '/home/file1.txt\n/home/file2.txt', - stderr: '', - exitCode: 0 - } - ); - - const results = await polyfill.search('*.txt', '/home'); - - expect(results).toHaveLength(2); - expect(results[0].path).toBe('/home/file1.txt'); - expect(results[1].path).toBe('/home/file2.txt'); - }); - - it('should fallback to ls + grep if find not available', async () => { - mockExecutor.mockCommand( - 'find "/home" -name \'*.log\' -print 2>/dev/null || echo "FIND_FAILED"', - { - stdout: 'FIND_FAILED', - stderr: '', - exitCode: 0 - } - ); - - mockExecutor.mockCommand('ls -R "/home" 2>/dev/null | grep -E "*.log" || true', { - stdout: 'app.log\nerror.log', - stderr: '', - exitCode: 0 - }); - - const results = await polyfill.search('*.log', '/home'); - - expect(results.length).toBeGreaterThan(0); - }); - }); - - describe('ping', () => { - it('should return true for healthy sandbox', async () => { - mockExecutor.mockCommand('echo "PING"', { - stdout: 'PING', - stderr: '', - exitCode: 0 - }); - - const result = await polyfill.ping(); - expect(result).toBe(true); - }); - - it('should return false for unhealthy sandbox', async () => { - mockExecutor.mockCommand('echo "PING"', { - stdout: '', - stderr: 'Connection refused', - exitCode: 1 - }); - - const result = await polyfill.ping(); - expect(result).toBe(false); - }); - }); - - describe('getMetrics', () => { - it('should parse /proc information', async () => { - mockExecutor.mockCommand('nproc 2>/dev/null || echo "1"', { - stdout: '4', - stderr: '', - exitCode: 0 - }); - - mockExecutor.mockCommand('cat /proc/meminfo 2>/dev/null || echo "FAILED"', { - stdout: `MemTotal: 8192000 kB -MemFree: 4096000 kB -MemAvailable: 6144000 kB`, - stderr: '', - exitCode: 0 - }); - - const metrics = await polyfill.getMetrics(); - - expect(metrics.cpuCount).toBe(4); - expect(metrics.memoryTotalMiB).toBe(8000); - expect(metrics.memoryUsedMiB).toBe(2000); - 
expect(metrics.timestamp).toBeGreaterThan(0); - }); - }); -}); diff --git a/sdk/sandbox/tsconfig.json b/sdk/sandbox/tsconfig.json deleted file mode 100644 index acd142a756..0000000000 --- a/sdk/sandbox/tsconfig.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "compilerOptions": { - "target": "esnext", - "module": "esnext", - "moduleResolution": "bundler", - "allowJs": true, - "skipLibCheck": true, - "strict": true, - "forceConsistentCasingInFileNames": true, - "noEmit": true, - "esModuleInterop": true, - "resolveJsonModule": true, - "isolatedModules": true - } -} diff --git a/sdk/sandbox/vitest.config.mts b/sdk/sandbox/vitest.config.mts deleted file mode 100644 index 05d795b8ae..0000000000 --- a/sdk/sandbox/vitest.config.mts +++ /dev/null @@ -1,10 +0,0 @@ -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - test: { - // This configuration runs the sandbox tests in isolation, - // without the global setup (e.g., MongoDB connection) from the root config. - dir: 'tests', - testTimeout: 30000, - }, -}); \ No newline at end of file diff --git a/test/integrationTest/vectorDB/.env.test.tempalte b/test/.env.test.tempalte similarity index 100% rename from test/integrationTest/vectorDB/.env.test.tempalte rename to test/.env.test.tempalte diff --git a/test/integrationTest/vectorDB/globalSetup.ts b/test/integrationTest/vectorDB/globalSetup.ts deleted file mode 100644 index a50e6e8b9f..0000000000 --- a/test/integrationTest/vectorDB/globalSetup.ts +++ /dev/null @@ -1,12 +0,0 @@ -// Load vector database environment variables before tests run -export default async function setup() { - console.log('Vector DB integration tests - environment loaded'); - console.log('PG_URL configured:', Boolean(process.env.PG_URL)); - console.log('OCEANBASE_URL configured:', Boolean(process.env.OCEANBASE_URL)); - console.log('MILVUS_ADDRESS configured:', Boolean(process.env.MILVUS_ADDRESS)); - console.log('SEEKDB_URL configured:', Boolean(process.env.SEEKDB_URL)); - - return 
async () => { - // Cleanup if needed - }; -} diff --git a/test/integrationTest/vectorDB/milvus/index.integration.test.ts b/test/integrationTest/vectorDB/milvus/index.integration.test.ts index 66498de7a0..c234951c5a 100644 --- a/test/integrationTest/vectorDB/milvus/index.integration.test.ts +++ b/test/integrationTest/vectorDB/milvus/index.integration.test.ts @@ -1,11 +1,15 @@ -import { describe } from 'vitest'; -import { MilvusCtrl } from '@fastgpt/service/common/vectorDB/milvus'; +import { describe, vi } from 'vitest'; import { createVectorDBTestSuite } from '../testSuites'; -const isEnabled = Boolean(process.env.MILVUS_ADDRESS); -const describePg = isEnabled ? describe : describe.skip; +// Unmock vector controllers for integration tests +vi.unmock('@fastgpt/service/common/vectorDB/milvus'); +vi.unmock('@fastgpt/service/common/vectorDB/constants'); -describePg('Milvus Vector Integration', () => { +import { MilvusCtrl } from '@fastgpt/service/common/vectorDB/milvus'; + +const isEnabled = Boolean(process.env.MILVUS_ADDRESS); + +describe.skipIf(!isEnabled)('Milvus Vector Integration', () => { const vectorCtrl = new MilvusCtrl(); createVectorDBTestSuite(vectorCtrl); }); diff --git a/test/integrationTest/vectorDB/oceanbase/index.integration.test.ts b/test/integrationTest/vectorDB/oceanbase/index.integration.test.ts index 3c2f98cec0..8b5e659547 100644 --- a/test/integrationTest/vectorDB/oceanbase/index.integration.test.ts +++ b/test/integrationTest/vectorDB/oceanbase/index.integration.test.ts @@ -1,11 +1,15 @@ -import { describe } from 'vitest'; -import { ObVectorCtrl } from '@fastgpt/service/common/vectorDB/oceanbase'; +import { describe, vi } from 'vitest'; import { createVectorDBTestSuite } from '../testSuites'; -const isEnabled = Boolean(process.env.OCEANBASE_URL); -const describePg = isEnabled ? 
describe : describe.skip; +// Unmock vector controllers for integration tests +vi.unmock('@fastgpt/service/common/vectorDB/oceanbase'); +vi.unmock('@fastgpt/service/common/vectorDB/constants'); -describePg('Oceanbase Vector Integration', () => { +import { ObVectorCtrl } from '@fastgpt/service/common/vectorDB/oceanbase'; + +const isEnabled = Boolean(process.env.OCEANBASE_URL); + +describe.skipIf(!isEnabled)('Oceanbase Vector Integration', () => { const vectorCtrl = new ObVectorCtrl({ type: 'oceanbase' }); createVectorDBTestSuite(vectorCtrl); }); diff --git a/test/integrationTest/vectorDB/pg/index.integration.test.ts b/test/integrationTest/vectorDB/pg/index.integration.test.ts index 81218d09a7..864cd01218 100644 --- a/test/integrationTest/vectorDB/pg/index.integration.test.ts +++ b/test/integrationTest/vectorDB/pg/index.integration.test.ts @@ -1,11 +1,15 @@ -import { describe } from 'vitest'; -import { PgVectorCtrl } from '@fastgpt/service/common/vectorDB/pg'; +import { describe, vi } from 'vitest'; import { createVectorDBTestSuite } from '../testSuites'; -const isEnabled = Boolean(process.env.PG_URL); -const describePg = isEnabled ? 
describe : describe.skip; +// Unmock vector controllers for integration tests +vi.unmock('@fastgpt/service/common/vectorDB/pg'); +vi.unmock('@fastgpt/service/common/vectorDB/constants'); -describePg('PG Vector Integration', () => { +import { PgVectorCtrl } from '@fastgpt/service/common/vectorDB/pg'; + +const isEnabled = Boolean(process.env.PG_URL); + +describe.skipIf(!isEnabled)('PG Vector Integration', () => { const vectorCtrl = new PgVectorCtrl(); createVectorDBTestSuite(vectorCtrl); }); diff --git a/test/integrationTest/vectorDB/seekdb/index.integration.test.ts b/test/integrationTest/vectorDB/seekdb/index.integration.test.ts index c1ef238a21..b66b9b33cd 100644 --- a/test/integrationTest/vectorDB/seekdb/index.integration.test.ts +++ b/test/integrationTest/vectorDB/seekdb/index.integration.test.ts @@ -1,11 +1,15 @@ -import { describe } from 'vitest'; -import { SeekVectorCtrl } from '@fastgpt/service/common/vectorDB/seekdb'; +import { describe, vi } from 'vitest'; import { createVectorDBTestSuite } from '../testSuites'; -const isEnabled = Boolean(process.env.SEEKDB_URL); -const describePg = isEnabled ? 
describe : describe.skip; +// Unmock vector controllers for integration tests +vi.unmock('@fastgpt/service/common/vectorDB/seekdb'); +vi.unmock('@fastgpt/service/common/vectorDB/constants'); -describePg('Seekdb Vector Integration', () => { +import { SeekVectorCtrl } from '@fastgpt/service/common/vectorDB/seekdb'; + +const isEnabled = Boolean(process.env.SEEKDB_URL); + +describe.skipIf(!isEnabled)('Seekdb Vector Integration', () => { const vectorCtrl = new SeekVectorCtrl({ type: 'seekdb' }); createVectorDBTestSuite(vectorCtrl); }); diff --git a/test/integrationTest/vectorDB/setup.ts b/test/integrationTest/vectorDB/setup.ts deleted file mode 100644 index bf065576f9..0000000000 --- a/test/integrationTest/vectorDB/setup.ts +++ /dev/null @@ -1,4 +0,0 @@ -import { loadVectorDBEnv } from './utils'; - -// Load env before any modules that read process.env -loadVectorDBEnv({ envFileNames: ['.env.test.local'] }); diff --git a/test/integrationTest/vectorDB/vitest.config.mts b/test/integrationTest/vectorDB/vitest.config.mts deleted file mode 100644 index d80302bac8..0000000000 --- a/test/integrationTest/vectorDB/vitest.config.mts +++ /dev/null @@ -1,23 +0,0 @@ -import { resolve } from 'path'; -import { defineConfig } from 'vitest/config'; - -export default defineConfig({ - resolve: { - alias: { - '@': resolve(__dirname, '../../../projects/app/src'), - '@fastgpt': resolve(__dirname, '../../../packages'), - '@test': resolve(__dirname, '../..') - } - }, - test: { - name: 'vectorDB', - root: resolve(__dirname), - setupFiles: './setup.ts', - include: ['**/*.test.ts'], - exclude: ['node_modules', 'dist'], - testTimeout: 60000, - hookTimeout: 60000, - fileParallelism: false, - reporters: ['verbose'] - } -}); diff --git a/test/setup.ts b/test/setup.ts index 2581e2b809..81b5634850 100644 --- a/test/setup.ts +++ b/test/setup.ts @@ -8,9 +8,12 @@ import setupModels from './setupModels'; import { clean } from './datas/users'; import { connectionLogMongo, connectionMongo } from 
'@fastgpt/service/common/mongo'; import { delay } from '@fastgpt/global/common/system/utils'; +import { loadVectorDBEnv } from './utils/env'; vi.stubEnv('NODE_ENV', 'test'); +loadVectorDBEnv({ envFileNames: ['.env.test.local'] }); + beforeAll(async () => { vi.stubEnv('MONGODB_URI', inject('MONGODB_URI')); await connectMongo({ db: connectionMongo, url: inject('MONGODB_URI') }); diff --git a/test/integrationTest/vectorDB/utils.ts b/test/utils/env.ts similarity index 90% rename from test/integrationTest/vectorDB/utils.ts rename to test/utils/env.ts index 76196acf9e..da49c037e3 100644 --- a/test/integrationTest/vectorDB/utils.ts +++ b/test/utils/env.ts @@ -26,7 +26,8 @@ const parseEnvFile = (filePath: string) => { export const loadVectorDBEnv = (options: LoadVectorEnvOptions = {}) => { const envFileNames = options.envFileNames ?? ['.env.test.local']; - const baseDir = resolve(__dirname); + // __dirname is test/utils/, go up one level to test/ + const baseDir = resolve(__dirname, '..'); for (const envFileName of envFileNames) { const filePath = resolve(baseDir, envFileName); diff --git a/vitest.config.mts b/vitest.config.mts index 7514b8252b..8b31494fe2 100644 --- a/vitest.config.mts +++ b/vitest.config.mts @@ -26,13 +26,11 @@ export default defineConfig({ maxConcurrency: 10, pool: 'threads', include: [ - 'test/test.ts', - 'test/cases/**/*.test.ts', + 'test/**/*.test.ts', 'projects/app/test/**/*.test.ts', 'projects/sandbox/test/**/*.test.ts', 'projects/marketplace/test/**/*.test.ts' ], - exclude: ['test/vectorDB/**'], testTimeout: 20000, hookTimeout: 30000, reporters: ['github-actions', 'default']