diff --git a/bun.lock b/bun.lock index f2cc38f1..f9588ab6 100644 --- a/bun.lock +++ b/bun.lock @@ -4,26 +4,23 @@ "": { "name": "dockstatapi", "dependencies": { - "@elysiajs/server-timing": "^1.3.0", - "@elysiajs/static": "^1.3.0", - "@elysiajs/swagger": "^1.3.0", "chalk": "^5.4.1", "date-fns": "^4.1.0", "docker-compose": "^1.2.0", - "dockerode": "^4.0.6", - "elysia": "latest", - "elysia-remote-dts": "^1.0.2", + "dockerode": "^4.0.7", + "js-yaml": "^4.1.0", "knip": "latest", - "logestic": "^1.2.4", "split2": "^4.2.0", "winston": "^3.17.0", - "yaml": "^2.7.1", + "yaml": "^2.8.0", }, "devDependencies": { "@biomejs/biome": "1.9.4", + "@its_4_nik/gitai": "^1.1.14", "@types/bun": "latest", - "@types/dockerode": "^3.3.38", - "@types/node": "^22.15.17", + "@types/dockerode": "^3.3.42", + "@types/js-yaml": "^4.0.9", + "@types/node": "^22.16.0", "@types/split2": "^4.2.3", "bun-types": "latest", "cross-env": "^7.0.3", @@ -61,24 +58,100 @@ "@dabh/diagnostics": ["@dabh/diagnostics@2.0.3", "", { "dependencies": { "colorspace": "1.1.x", "enabled": "2.0.x", "kuler": "^2.0.0" } }, "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA=="], - "@elysiajs/server-timing": ["@elysiajs/server-timing@1.3.0", "", { "peerDependencies": { "elysia": ">= 1.3.0" } }, "sha512-c5Ay0Va7gIWjJ9CawHx05UtKP6UQVkMKCFnf16eBG0G/GgUkrMMGHWD/duCBaDbeRwbbb7IwHDoaFvStWrB2IQ=="], + "@emnapi/core": ["@emnapi/core@1.4.3", "", { "dependencies": { "@emnapi/wasi-threads": "1.0.2", "tslib": "^2.4.0" } }, "sha512-4m62DuCE07lw01soJwPiBGC0nAww0Q+RY70VZ+n49yDIO13yyinhbWCeNnaob0lakDtWQzSdtNWzJeOJt2ma+g=="], - "@elysiajs/static": ["@elysiajs/static@1.3.0", "", { "dependencies": { "node-cache": "^5.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "sha512-7mWlj2U/AZvH27IfRKqpUjDP1W9ZRldF9NmdnatFEtx0AOy7YYgyk0rt5hXrH6wPcR//2gO2Qy+k5rwswpEhJA=="], + "@emnapi/runtime": ["@emnapi/runtime@1.4.3", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ=="], - "@elysiajs/swagger": ["@elysiajs/swagger@1.3.0", "", { "dependencies": { "@scalar/themes": "^0.9.52", "@scalar/types": "^0.0.12", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.3.0" } }, "sha512-0fo3FWkDRPNYpowJvLz3jBHe9bFe6gruZUyf+feKvUEEMG9ZHptO1jolSoPE0ffFw1BgN1/wMsP19p4GRXKdfg=="], + "@emnapi/wasi-threads": ["@emnapi/wasi-threads@1.0.2", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-5n3nTJblwRi8LlXkJ9eBzu+kZR8Yxcc7ubakyQTFzPMtIhFpUBRbsnc2Dv88IZDIbCDlBiWrknhB4Lsz7mg6BA=="], - "@grpc/grpc-js": ["@grpc/grpc-js@1.13.3", "", { "dependencies": { "@grpc/proto-loader": "^0.7.13", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-FTXHdOoPbZrBjlVLHuKbDZnsTxXv2BlHF57xw6LuThXacXvtkahEPED0CKMk6obZDf65Hv4k3z62eyPNpvinIg=="], + "@google/generative-ai": ["@google/generative-ai@0.24.1", "", {}, "sha512-MqO+MLfM6kjxcKoy0p1wRzG3b4ZZXtPI+z2IE26UogS2Cm/XHO+7gGRBh6gcJsOiIVoH93UwKvW4HdgiOZCy9Q=="], + + "@grpc/grpc-js": ["@grpc/grpc-js@1.13.4", "", { "dependencies": { "@grpc/proto-loader": "^0.7.13", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg=="], "@grpc/proto-loader": ["@grpc/proto-loader@0.7.15", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.2.5", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, 
"sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ=="], + "@inquirer/checkbox": ["@inquirer/checkbox@4.1.9", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-DBJBkzI5Wx4jFaYm221LHvAhpKYkhVS0k9plqHwaHhofGNxvYB7J3Bz8w+bFJ05zaMb0sZNHo4KdmENQFlNTuQ=="], + + "@inquirer/confirm": ["@inquirer/confirm@5.1.13", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/type": "^3.0.7" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-EkCtvp67ICIVVzjsquUiVSd+V5HRGOGQfsqA4E4vMWhYnB7InUL0pa0TIWt1i+OfP16Gkds8CdIu6yGZwOM1Yw=="], + + "@inquirer/core": ["@inquirer/core@10.1.14", "", { "dependencies": { "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "cli-width": "^4.1.0", "mute-stream": "^2.0.0", "signal-exit": "^4.1.0", "wrap-ansi": "^6.2.0", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-Ma+ZpOJPewtIYl6HZHZckeX1STvDnHTCB2GVINNUlSEn2Am6LddWwfPkIGY0IUFVjUUrr/93XlBwTK6mfLjf0A=="], + + "@inquirer/editor": ["@inquirer/editor@4.2.14", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/type": "^3.0.7", "external-editor": "^3.1.0" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-yd2qtLl4QIIax9DTMZ1ZN2pFrrj+yL3kgIWxm34SS6uwCr0sIhsNyudUjAo5q3TqI03xx4SEBkUJqZuAInp9uA=="], + + "@inquirer/expand": ["@inquirer/expand@4.0.16", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-oiDqafWzMtofeJyyGkb1CTPaxUkjIcSxePHHQCfif8t3HV9pHcw1Kgdw3/uGpDvaFfeTluwQtWiqzPVjAqS3zA=="], + + "@inquirer/figures": ["@inquirer/figures@1.0.12", "", {}, "sha512-MJttijd8rMFcKJC8NYmprWr6hD3r9Gd9qUC0XwPNwoEPWSMVJwA2MlXxF+nhZZNMY+HXsWa+o7KY2emWYIn0jQ=="], + + "@inquirer/input": ["@inquirer/input@4.2.0", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/type": "^3.0.7" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-opqpHPB1NjAmDISi3uvZOTrjEEU5CWVu/HBkDby8t93+6UxYX0Z7Ps0Ltjm5sZiEbWenjubwUkivAEYQmy9xHw=="], + + "@inquirer/number": ["@inquirer/number@3.0.16", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/type": "^3.0.7" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-kMrXAaKGavBEoBYUCgualbwA9jWUx2TjMA46ek+pEKy38+LFpL9QHlTd8PO2kWPUgI/KB+qi02o4y2rwXbzr3Q=="], + + "@inquirer/password": ["@inquirer/password@4.0.16", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-g8BVNBj5Zeb5/Y3cSN+hDUL7CsIFDIuVxb9EPty3lkxBaYpjL5BNRKSYOF9yOLe+JOcKFd+TSVeADQ4iSY7rbg=="], + + "@inquirer/prompts": ["@inquirer/prompts@7.6.0", "", { "dependencies": { "@inquirer/checkbox": "^4.1.9", "@inquirer/confirm": "^5.1.13", "@inquirer/editor": "^4.2.14", "@inquirer/expand": "^4.0.16", "@inquirer/input": "^4.2.0", "@inquirer/number": "^3.0.16", "@inquirer/password": "^4.0.16", "@inquirer/rawlist": "^4.1.4", "@inquirer/search": "^3.0.16", "@inquirer/select": "^4.2.4" }, "peerDependencies": { 
"@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-jAhL7tyMxB3Gfwn4HIJ0yuJ5pvcB5maYUcouGcgd/ub79f9MqZ+aVnBtuFf+VC2GTkCBF+R+eo7Vi63w5VZlzw=="], + + "@inquirer/rawlist": ["@inquirer/rawlist@4.1.4", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-5GGvxVpXXMmfZNtvWw4IsHpR7RzqAR624xtkPd1NxxlV5M+pShMqzL4oRddRkg8rVEOK9fKdJp1jjVML2Lr7TQ=="], + + "@inquirer/search": ["@inquirer/search@3.0.16", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-POCmXo+j97kTGU6aeRjsPyuCpQQfKcMXdeTMw708ZMtWrj5aykZvlUxH4Qgz3+Y1L/cAVZsSpA+UgZCu2GMOMg=="], + + "@inquirer/select": ["@inquirer/select@4.2.4", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-unTppUcTjmnbl/q+h8XeQDhAqIOmwWYWNyiiP2e3orXrg6tOaa5DHXja9PChCSbChOsktyKgOieRZFnajzxoBg=="], + + "@inquirer/type": ["@inquirer/type@3.0.7", "", { "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-PfunHQcjwnju84L+ycmcMKB/pTPIngjUJvfnRhKY6FKPuYXlM4aQCb/nIdTFR6BEhMjFvngzvng/vBAJMZpLSA=="], + + "@its_4_nik/gitai": ["@its_4_nik/gitai@1.1.14", "", { "dependencies": { "@google/generative-ai": "^0.24.1", "commander": "^14.0.0", "ignore": "^7.0.5", "inquirer": "^12.6.3", "ollama": "^0.5.16" }, "peerDependencies": { "typescript": "^5.8.3" }, "bin": { "gitai": "dist/gitai.js" } }, "sha512-vpZnCWtgMcfqPNpkjOpEG3+dEr+t87C0wlH+FOiHDiLVw2ebZir9QJiw7yOl75hhkxHqXVDnluj6U0e3yAfzqA=="], + "@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="], + "@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@0.2.11", "", { "dependencies": { "@emnapi/core": "^1.4.3", "@emnapi/runtime": "^1.4.3", "@tybys/wasm-util": "^0.9.0" } }, "sha512-9DPkXtvHydrcOsopiYpUgPHpmj0HWZKMUnL2dZqpvC42lsratuBG06V5ipyno0fUek5VlFsNQ+AcFATSrJXgMA=="], + "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], "@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="], "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], + "@oxc-resolver/binding-darwin-arm64": ["@oxc-resolver/binding-darwin-arm64@11.5.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-IQZZP6xjGvVNbXVPEwZeCDTkG7iajFsVZSaq7QwxuiJqkcE/GKd0GxGQMs6jjE72nrgSGVHQD/yws1PNzP9j5w=="], + + "@oxc-resolver/binding-darwin-x64": ["@oxc-resolver/binding-darwin-x64@11.5.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-nY15IBY5NjOPKIDRJ2sSLr0GThFXz4J4lgIo4fmnXanJjeeXaM5aCOL3oIxT7RbONqyMki0lzMkbX7PWqW3/lw=="], + + "@oxc-resolver/binding-freebsd-x64": ["@oxc-resolver/binding-freebsd-x64@11.5.0", "", { "os": "freebsd", "cpu": "x64" }, 
"sha512-WQibNtsWiJZ36Q2QKYSedN6c4xoZtLhU7UOFPGTMaw/J8eb+WYh5pfzTtZR9WGZQRoS3kj0E/9683Wuskz5mMQ=="], + + "@oxc-resolver/binding-linux-arm-gnueabihf": ["@oxc-resolver/binding-linux-arm-gnueabihf@11.5.0", "", { "os": "linux", "cpu": "arm" }, "sha512-oZj20OTnjGn1qnBGYTjRXEMyd0inlw127s+DTC+Y0kdxoz5BUMqUhq5M9mZ1BH4c1qPlRto6shOFVrK4hNkhhA=="], + + "@oxc-resolver/binding-linux-arm64-gnu": ["@oxc-resolver/binding-linux-arm64-gnu@11.5.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-zxFuO4Btd1BSFjuaO0mnIA9XRWP4FX3bTbVO9KjKvO8MX6Ig2+ZDNHpzzK2zkOunHGc4sJQm5oDTcMvww+hyag=="], + + "@oxc-resolver/binding-linux-arm64-musl": ["@oxc-resolver/binding-linux-arm64-musl@11.5.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-mmDNrt2yyEnsPrmq3wzRsqEYM+cpVuv8itgYU++BNJrfzdJpK+OpvR3rPToTZSOZQt3iYLfqQ2hauIIraJnJGw=="], + + "@oxc-resolver/binding-linux-riscv64-gnu": ["@oxc-resolver/binding-linux-riscv64-gnu@11.5.0", "", { "os": "linux", "cpu": "none" }, "sha512-CxW3/uVUlSpIEJ3sLi5Q+lk7SVgQoxUKBTsMwpY2nFiCmtzHBOuwMMKES1Hk+w/Eirz09gDjoIrxkzg3ETDSGQ=="], + + "@oxc-resolver/binding-linux-s390x-gnu": ["@oxc-resolver/binding-linux-s390x-gnu@11.5.0", "", { "os": "linux", "cpu": "s390x" }, "sha512-RxfVqJnmO7uEGzpEgEzVb5Sxjy8NAYpQj+7JZZunxIyJiDK1KgOJqVJ0NZnRC1UAe/yyEpO82wQIOInaLqFBgA=="], + + "@oxc-resolver/binding-linux-x64-gnu": ["@oxc-resolver/binding-linux-x64-gnu@11.5.0", "", { "os": "linux", "cpu": "x64" }, "sha512-Ri36HuV91PVXFw1BpTisJOZ2x9dkfgsvrjVa3lPX+QS6QRvvcdogGjPTTqgg8WkzCh6RTzd7Lx9mCZQdw06HTQ=="], + + "@oxc-resolver/binding-linux-x64-musl": ["@oxc-resolver/binding-linux-x64-musl@11.5.0", "", { "os": "linux", "cpu": "x64" }, "sha512-xskd2J4Jnfuze2jYKiZx4J+PY4hJ5Z0MuVh8JPNvu/FY1+SAdRei9S95dhc399Nw6eINre7xOrsugr11td3k4Q=="], + + "@oxc-resolver/binding-wasm32-wasi": ["@oxc-resolver/binding-wasm32-wasi@11.5.0", "", { "dependencies": { "@napi-rs/wasm-runtime": "^0.2.11" }, "cpu": "none" }, "sha512-ZAHTs0MzHUlHAqKffvutprVhO7OlENWisu1fW/bVY6r+TPxsl25Q0lzbOUhrxTIJ9f0Sl5meCI2fkPeovZA7bQ=="], + + "@oxc-resolver/binding-win32-arm64-msvc": ["@oxc-resolver/binding-win32-arm64-msvc@11.5.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-4/3RJnkrKo7EbBdWAYsSHZEjgZ8TYYAt/HrHDo5yy/5dUvxvPoetNtAudCiYKNgJOlFLzmzIXyn713MljEy6RA=="], + + "@oxc-resolver/binding-win32-x64-msvc": ["@oxc-resolver/binding-win32-x64-msvc@11.5.0", "", { "os": "win32", "cpu": "x64" }, "sha512-poXrxQLJA770Xy3gAS9mrC/dp6GatYdvNlwCWwjL6lzBNToEK66kx3tgqIaOYIqtjJDKYR58P3jWgmwJyJxEAQ=="], + + "@oxlint/darwin-arm64": ["@oxlint/darwin-arm64@1.6.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-m3wyqBh1TOHjpr/dXeIZY7OoX+MQazb+bMHQdDtwUvefrafUx+5YHRvulYh1sZSQ449nQ3nk3qj5qj535vZRjg=="], + + "@oxlint/darwin-x64": ["@oxlint/darwin-x64@1.6.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-75fJfF/9xNypr7cnOYoZBhfmG1yP7ex3pUOeYGakmtZRffO9z1i1quLYhjZsmaDXsAIZ3drMhenYHMmFKS3SRg=="], + + "@oxlint/linux-arm64-gnu": ["@oxlint/linux-arm64-gnu@1.6.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-YhXGf0FXa72bEt4F7eTVKx5X3zWpbAOPnaA/dZ6/g8tGhw1m9IFjrabVHFjzcx3dQny4MgA59EhyElkDvpUe8A=="], + + "@oxlint/linux-arm64-musl": ["@oxlint/linux-arm64-musl@1.6.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-T3JDhx8mjGjvh5INsPZJrlKHmZsecgDYvtvussKRdkc1Nnn7WC+jH9sh5qlmYvwzvmetlPVNezAoNvmGO9vtMg=="], + + "@oxlint/linux-x64-gnu": ["@oxlint/linux-x64-gnu@1.6.0", "", { "os": "linux", "cpu": "x64" }, "sha512-Dx7ghtAl8aXBdqofJpi338At6lkeCtTfoinTYQXd9/TEJx+f+zCGNlQO6nJz3ydJBX48FDuOFKkNC+lUlWrd8w=="], + + "@oxlint/linux-x64-musl": ["@oxlint/linux-x64-musl@1.6.0", "", { "os": "linux", "cpu": 
"x64" }, "sha512-7KvMGdWmAZtAtg6IjoEJHKxTXdAcrHnUnqfgs0JpXst7trquV2mxBeRZusQXwxpu4HCSomKMvJfsp1qKaqSFDg=="], + + "@oxlint/win32-arm64": ["@oxlint/win32-arm64@1.6.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-iSGC9RwX+dl7o5KFr5aH7Gq3nFbkq/3Gda6mxNPMvNkWrgXdIyiINxpyD8hJu566M+QSv1wEAu934BZotFDyoQ=="], + + "@oxlint/win32-x64": ["@oxlint/win32-x64@1.6.0", "", { "os": "win32", "cpu": "x64" }, "sha512-jOj3L/gfLc0IwgOTkZMiZ5c673i/hbAmidlaylT0gE6H18hln9HxPgp5GCf4E4y6mwEJlW8QC5hQi221+9otdA=="], + "@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="], "@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="], @@ -99,25 +172,19 @@ "@protobufjs/utf8": ["@protobufjs/utf8@1.1.0", "", {}, "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="], - "@scalar/openapi-types": ["@scalar/openapi-types@0.1.1", "", {}, "sha512-NMy3QNk6ytcCoPUGJH0t4NNr36OWXgZhA3ormr3TvhX1NDgoF95wFyodGVH8xiHeUyn2/FxtETm8UBLbB5xEmg=="], - - "@scalar/themes": ["@scalar/themes@0.9.86", "", { "dependencies": { "@scalar/types": "0.1.7" } }, "sha512-QUHo9g5oSWi+0Lm1vJY9TaMZRau8LHg+vte7q5BVTBnu6NuQfigCaN+ouQ73FqIVd96TwMO6Db+dilK1B+9row=="], - - "@scalar/types": ["@scalar/types@0.0.12", "", { "dependencies": { "@scalar/openapi-types": "0.1.1", "@unhead/schema": "^1.9.5" } }, "sha512-XYZ36lSEx87i4gDqopQlGCOkdIITHHEvgkuJFrXFATQs9zHARop0PN0g4RZYWj+ZpCUclOcaOjbCt8JGe22mnQ=="], - - "@sinclair/typebox": ["@sinclair/typebox@0.34.33", "", {}, "sha512-5HAV9exOMcXRUxo+9iYB5n09XxzCXnfy4VTNW4xnDv+FgjzAGY989C28BIdljKqmF+ZltUwujE3aossvcVtq6g=="], + "@tybys/wasm-util": ["@tybys/wasm-util@0.9.0", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw=="], - "@tokenizer/inflate": ["@tokenizer/inflate@0.2.7", "", { "dependencies": { "debug": "^4.4.0", "fflate": "^0.8.2", "token-types": "^6.0.0" } }, "sha512-MADQgmZT1eKjp06jpI2yozxaU9uVs4GzzgSL+uEq7bVcJ9V1ZXQkeGNql1fsSI0gMy1vhvNTNbUqrx+pZfJVmg=="], + "@types/bun": ["@types/bun@1.2.18", "", { "dependencies": { "bun-types": "1.2.18" } }, "sha512-Xf6RaWVheyemaThV0kUfaAUvCNokFr+bH8Jxp+tTZfx7dAPA8z9ePnP9S9+Vspzuxxx9JRAXhnyccRj3GyCMdQ=="], - "@tokenizer/token": ["@tokenizer/token@0.3.0", "", {}, "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="], + "@types/docker-modem": ["@types/docker-modem@3.0.6", "", { "dependencies": { "@types/node": "*", "@types/ssh2": "*" } }, "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg=="], - "@types/bun": ["@types/bun@1.2.13", "", { "dependencies": { "bun-types": "1.2.13" } }, "sha512-u6vXep/i9VBxoJl3GjZsl/BFIsvML8DfVDO0RYLEwtSZSp981kEO1V5NwRcO1CPJ7AmvpbnDCiMKo3JvbDEjAg=="], + "@types/dockerode": ["@types/dockerode@3.3.42", "", { "dependencies": { "@types/docker-modem": "*", "@types/node": "*", "@types/ssh2": "*" } }, "sha512-U1jqHMShibMEWHdxYhj3rCMNCiLx5f35i4e3CEUuW+JSSszc/tVqc6WCAPdhwBymG5R/vgbcceagK0St7Cq6Eg=="], - "@types/docker-modem": ["@types/docker-modem@3.0.6", "", { "dependencies": { "@types/node": "*", "@types/ssh2": "*" } }, "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg=="], + "@types/js-yaml": ["@types/js-yaml@4.0.9", "", {}, 
"sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="], - "@types/dockerode": ["@types/dockerode@3.3.38", "", { "dependencies": { "@types/docker-modem": "*", "@types/node": "*", "@types/ssh2": "*" } }, "sha512-nnrcfUe2iR+RyOuz0B4bZgQwD9djQa9ADEjp7OAgBs10pYT0KSCtplJjcmBDJz0qaReX5T7GbE5i4VplvzUHvA=="], + "@types/node": ["@types/node@22.16.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-B2egV9wALML1JCpv3VQoQ+yesQKAmNMBIAY7OteVrikcOcAkWm+dGL6qpeCktPjAv6N1JLnhbNiqS35UpFyBsQ=="], - "@types/node": ["@types/node@22.15.17", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-wIX2aSZL5FE+MR0JlvF87BNVrtFWf6AE6rxSE9X7OwnVvoyCQjpzSRJ+M87se/4QCkCiebQAqrJ0y6fwIyi7nw=="], + "@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="], "@types/split2": ["@types/split2@4.2.3", "", { "dependencies": { "@types/node": "*" } }, "sha512-59OXIlfUsi2k++H6CHgUQKEb2HKRokUA39HY1i1dS8/AIcqVjtAAFdf8u+HxTWK/4FUHMJQlKSZ4I6irCBJ1Zw=="], @@ -125,7 +192,7 @@ "@types/triple-beam": ["@types/triple-beam@1.3.5", "", {}, "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw=="], - "@unhead/schema": ["@unhead/schema@1.11.20", "", { "dependencies": { "hookable": "^5.5.3", "zhead": "^2.2.4" } }, "sha512-0zWykKAaJdm+/Y7yi/Yds20PrUK7XabLe9c3IRcjnwYmSWY6z0Cr19VIs3ozCj8P+GhR+/TI2mwtGlueCEYouA=="], + "ansi-escapes": ["ansi-escapes@4.3.2", "", { "dependencies": { "type-fest": "^0.21.3" } }, "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ=="], "ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], @@ -149,15 +216,17 @@ "buildcheck": ["buildcheck@0.0.6", "", {}, "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A=="], - "bun-types": ["bun-types@1.2.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-rRjA1T6n7wto4gxhAO/ErZEtOXyEZEmnIHQfl0Dt1QQSB4QV0iP6BZ9/YB5fZaHFQ2dwHFrmPaRQ9GGMX01k9Q=="], + "bun-types": ["bun-types@1.2.18", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-04+Eha5NP7Z0A9YgDAzMk5PHR16ZuLVa83b26kH5+cp1qZW4F6FmAURngE7INf4tKOvCE69vYvDEwoNl1tGiWw=="], "chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="], + "chardet": ["chardet@0.7.0", "", {}, "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA=="], + "chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="], - "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="], + "cli-width": ["cli-width@4.1.0", "", {}, "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ=="], - "clone": ["clone@2.1.2", "", {}, "sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w=="], + "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, 
"sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="], "color": ["color@3.2.1", "", { "dependencies": { "color-convert": "^1.9.3", "color-string": "^1.6.0" } }, "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA=="], @@ -169,7 +238,7 @@ "colorspace": ["colorspace@1.1.4", "", { "dependencies": { "color": "^3.1.3", "text-hex": "1.0.x" } }, "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w=="], - "cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="], + "commander": ["commander@14.0.0", "", {}, "sha512-2uM9rYjPvyq39NwLRqaiLtWHyDC1FvryJDa2ATTVims5YAS4PupsEQsDvP14FqhFr0P49CYDugi59xaxJlTXRA=="], "cpu-features": ["cpu-features@0.0.10", "", { "dependencies": { "buildcheck": "~0.0.6", "nan": "^2.19.0" } }, "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA=="], @@ -177,51 +246,41 @@ "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], + "csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="], + "date-fns": ["date-fns@4.1.0", "", {}, "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg=="], - "debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="], + "debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], "docker-compose": ["docker-compose@1.2.0", "", { "dependencies": { "yaml": "^2.2.2" } }, "sha512-wIU1eHk3Op7dFgELRdmOYlPYS4gP8HhH1ZmZa13QZF59y0fblzFDFmKPhyc05phCy2hze9OEvNZAsoljrs+72w=="], "docker-modem": ["docker-modem@5.0.6", "", { "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", "ssh2": "^1.15.0" } }, "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ=="], - "dockerode": ["dockerode@4.0.6", "", { "dependencies": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", "docker-modem": "^5.0.6", "protobufjs": "^7.3.2", "tar-fs": "~2.1.2", "uuid": "^10.0.0" } }, "sha512-FbVf3Z8fY/kALB9s+P9epCpWhfi/r0N2DgYYcYpsAUlaTxPjdsitsFobnltb+lyCgAIvf9C+4PSWlTnHlJMf1w=="], - - "elysia": ["elysia@1.3.1", "", { "dependencies": { "cookie": "^1.0.2", "exact-mirror": "0.1.2", "fast-decode-uri-component": "^1.0.1" }, "optionalDependencies": { "@sinclair/typebox": "^0.34.33", "openapi-types": "^12.1.3" }, "peerDependencies": { "file-type": ">= 20.0.0", "typescript": ">= 5.0.0" } }, "sha512-En41P6cDHcHtQ0nvfsn9ayB+8ahQJqG1nzvPX8FVZjOriFK/RtZPQBtXMfZDq/AsVIk7JFZGFEtAVEmztNJVhQ=="], - - "elysia-remote-dts": ["elysia-remote-dts@1.0.2", "", { "dependencies": { "debug": "4.4.0", "get-tsconfig": "4.10.0" }, "peerDependencies": { "elysia": ">= 1.0.0", "typescript": ">=5" } }, "sha512-ktRxKGozPDW24d3xbUS2sMLNsRHHX/a4Pgqyzv2O0X4HsDrD+agoUYL/PvYQrGJKPSc3xzvU5uvhNHFhEql6aw=="], + "dockerode": ["dockerode@4.0.7", "", { "dependencies": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", 
"docker-modem": "^5.0.6", "protobufjs": "^7.3.2", "tar-fs": "~2.1.2", "uuid": "^10.0.0" } }, "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA=="], "emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], "enabled": ["enabled@2.0.0", "", {}, "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ=="], - "end-of-stream": ["end-of-stream@1.4.4", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q=="], - - "enhanced-resolve": ["enhanced-resolve@5.18.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg=="], + "end-of-stream": ["end-of-stream@1.4.5", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg=="], "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="], - "exact-mirror": ["exact-mirror@0.1.2", "", { "peerDependencies": { "@sinclair/typebox": "^0.34.15" }, "optionalPeers": ["@sinclair/typebox"] }, "sha512-wFCPCDLmHbKGUb8TOi/IS7jLsgR8WVDGtDK3CzcB4Guf/weq7G+I+DkXiRSZfbemBFOxOINKpraM6ml78vo8Zw=="], - - "fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="], + "external-editor": ["external-editor@3.1.0", "", { "dependencies": { "chardet": "^0.7.0", "iconv-lite": "^0.4.24", "tmp": "^0.0.33" } }, "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew=="], "fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="], "fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="], - "fd-package-json": ["fd-package-json@1.2.0", "", { "dependencies": { "walk-up-path": "^3.0.1" } }, "sha512-45LSPmWf+gC5tdCQMNH4s9Sr00bIkiD9aN7dc5hqkrEw1geRYyDQS1v1oMHAW3ysfxfndqGsrDREHHjNNbKUfA=="], + "fd-package-json": ["fd-package-json@2.0.0", "", { "dependencies": { "walk-up-path": "^4.0.0" } }, "sha512-jKmm9YtsNXN789RS/0mSzOC1NUq9mkVd65vbSSVsKdjGvYXBuE4oWe2QOEoFeRmJg+lPuZxpmrfFclNhoRMneQ=="], "fecha": ["fecha@4.2.3", "", {}, "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw=="], - "fflate": ["fflate@0.8.2", "", {}, "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A=="], - - "file-type": ["file-type@20.5.0", "", { "dependencies": { "@tokenizer/inflate": "^0.2.6", "strtok3": "^10.2.0", "token-types": "^6.0.0", "uint8array-extras": "^1.4.0" } }, "sha512-BfHZtG/l9iMm4Ecianu7P8HRD2tBHLtjXinm4X62XBOYzi7CYA7jyqfJzOvXHqzVrVPYqBo2/GvbARMaaJkKVg=="], - "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="], "fn.name": ["fn.name@1.1.0", "", {}, 
"sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw=="], - "formatly": ["formatly@0.2.3", "", { "dependencies": { "fd-package-json": "^1.2.0" }, "bin": { "formatly": "bin/index.mjs" } }, "sha512-WH01vbXEjh9L3bqn5V620xUAWs32CmK4IzWRRY6ep5zpa/mrisL4d9+pRVuETORVDTQw8OycSO1WC68PL51RaA=="], + "formatly": ["formatly@0.2.4", "", { "dependencies": { "fd-package-json": "^2.0.0" }, "bin": { "formatly": "bin/index.mjs" } }, "sha512-lIN7GpcvX/l/i24r/L9bnJ0I8Qn01qijWpQpDDvTLL29nKqSaJJu4h20+7VJ6m2CAhQ2/En/GbxDiHCzq/0MyA=="], "fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="], @@ -229,18 +288,18 @@ "get-east-asian-width": ["get-east-asian-width@1.3.0", "", {}, "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ=="], - "get-tsconfig": ["get-tsconfig@4.10.0", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A=="], - "glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], - "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], - - "hookable": ["hookable@5.5.3", "", {}, "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ=="], + "iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="], "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], + "ignore": ["ignore@7.0.5", "", {}, "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg=="], + "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + "inquirer": ["inquirer@12.7.0", "", { "dependencies": { "@inquirer/core": "^10.1.14", "@inquirer/prompts": "^7.6.0", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "mute-stream": "^2.0.0", "run-async": "^4.0.4", "rxjs": "^7.8.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-KKFRc++IONSyE2UYw9CJ1V0IWx5yQKomwB+pp3cWomWs+v2+ZsG11G2OVfAjFS6WWCppKw+RfKmpqGfSzD5QBQ=="], + "is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="], "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="], @@ -259,14 +318,12 @@ "js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], - "knip": ["knip@5.55.1", "", { "dependencies": { "@nodelib/fs.walk": "^1.2.3", "enhanced-resolve": "^5.18.1", "fast-glob": "^3.3.3", "formatly": "^0.2.3", "jiti": "^2.4.2", "js-yaml": "^4.1.0", "minimist": "^1.2.8", "picocolors": "^1.1.0", "picomatch": "^4.0.1", "smol-toml": "^1.3.1", "strip-json-comments": "5.0.1", "zod": "^3.22.4", "zod-validation-error": "^3.0.3" }, "peerDependencies": { "@types/node": 
">=18", "typescript": ">=5.0.4" }, "bin": { "knip": "bin/knip.js", "knip-bun": "bin/knip-bun.js" } }, "sha512-NYXjgGrXgMdabUKCP2TlBH/e83m9KnLc1VLyWHUtoRrCEJ/C15YtbafrpTvm3td+jE4VdDPgudvXT1IMtCx8lw=="], + "knip": ["knip@5.61.3", "", { "dependencies": { "@nodelib/fs.walk": "^1.2.3", "fast-glob": "^3.3.3", "formatly": "^0.2.4", "jiti": "^2.4.2", "js-yaml": "^4.1.0", "minimist": "^1.2.8", "oxc-resolver": "^11.1.0", "picocolors": "^1.1.1", "picomatch": "^4.0.1", "smol-toml": "^1.3.4", "strip-json-comments": "5.0.2", "zod": "^3.22.4", "zod-validation-error": "^3.0.3" }, "peerDependencies": { "@types/node": ">=18", "typescript": ">=5.0.4" }, "bin": { "knip": "bin/knip.js", "knip-bun": "bin/knip-bun.js" } }, "sha512-8iSz8i8ufIjuUwUKzEwye7ROAW0RzCze7T770bUiz0PKL+SSwbs4RS32fjMztLwcOzSsNPlXdUAeqmkdzXxJ1Q=="], "kuler": ["kuler@2.0.0", "", {}, "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A=="], "lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="], - "logestic": ["logestic@1.2.4", "", { "dependencies": { "chalk": "^5.3.0" }, "peerDependencies": { "elysia": "^1.1.3", "typescript": "^5.0.0" } }, "sha512-Wka/xFdKgqU6JBk8yxAUsqcUjPA/aExpcnm7KnOAxlLo1U71kuWGeEjPw8XVLZzLleTWwmRqJUb2yI5XZP+vAA=="], - "logform": ["logform@2.7.0", "", { "dependencies": { "@colors/colors": "1.6.0", "@types/triple-beam": "^1.3.2", "fecha": "^4.2.0", "ms": "^2.1.1", "safe-stable-stringify": "^2.3.1", "triple-beam": "^1.3.0" } }, "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ=="], "long": ["long@5.3.2", "", {}, "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="], @@ -281,31 +338,33 @@ "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], - "nan": ["nan@2.22.2", "", {}, "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ=="], + "mute-stream": ["mute-stream@2.0.0", "", {}, "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA=="], - "nanoid": ["nanoid@5.1.5", "", { "bin": { "nanoid": "bin/nanoid.js" } }, "sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw=="], + "nan": ["nan@2.22.2", "", {}, "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ=="], - "node-cache": ["node-cache@5.1.2", "", { "dependencies": { "clone": "2.x" } }, "sha512-t1QzWwnk4sjLWaQAS8CHgOJ+RAfmHpxFWmc36IWTiWHQfs0w5JDMBS1b1ZxQteo0vVVuWJvIUKHDkkeK7vIGCg=="], + "ollama": ["ollama@0.5.16", "", { "dependencies": { "whatwg-fetch": "^3.6.20" } }, "sha512-OEbxxOIUZtdZgOaTPAULo051F5y+Z1vosxEYOoABPnQKeW7i4O8tJNlxCB+xioyoorVqgjkdj+TA1f1Hy2ug/w=="], "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], "one-time": ["one-time@1.0.0", "", { "dependencies": { "fn.name": "1.x.x" } }, "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g=="], - "openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="], + "os-tmpdir": ["os-tmpdir@1.0.2", "", {}, "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g=="], - "path-key": 
["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], + "oxc-resolver": ["oxc-resolver@11.5.0", "", { "optionalDependencies": { "@oxc-resolver/binding-darwin-arm64": "11.5.0", "@oxc-resolver/binding-darwin-x64": "11.5.0", "@oxc-resolver/binding-freebsd-x64": "11.5.0", "@oxc-resolver/binding-linux-arm-gnueabihf": "11.5.0", "@oxc-resolver/binding-linux-arm64-gnu": "11.5.0", "@oxc-resolver/binding-linux-arm64-musl": "11.5.0", "@oxc-resolver/binding-linux-riscv64-gnu": "11.5.0", "@oxc-resolver/binding-linux-s390x-gnu": "11.5.0", "@oxc-resolver/binding-linux-x64-gnu": "11.5.0", "@oxc-resolver/binding-linux-x64-musl": "11.5.0", "@oxc-resolver/binding-wasm32-wasi": "11.5.0", "@oxc-resolver/binding-win32-arm64-msvc": "11.5.0", "@oxc-resolver/binding-win32-x64-msvc": "11.5.0" } }, "sha512-lG/AiquYQP/4OOXaKmlPvLeCOxtlZ535489H3yk4euimwnJXIViQus2Y9Mc4c45wFQ0UYM1rFduiJ8+RGjUtTQ=="], - "pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="], + "oxlint": ["oxlint@1.6.0", "", { "optionalDependencies": { "@oxlint/darwin-arm64": "1.6.0", "@oxlint/darwin-x64": "1.6.0", "@oxlint/linux-arm64-gnu": "1.6.0", "@oxlint/linux-arm64-musl": "1.6.0", "@oxlint/linux-x64-gnu": "1.6.0", "@oxlint/linux-x64-musl": "1.6.0", "@oxlint/win32-arm64": "1.6.0", "@oxlint/win32-x64": "1.6.0" }, "bin": { "oxlint": "bin/oxlint", "oxc_language_server": "bin/oxc_language_server" } }, "sha512-jtaD65PqzIa1udvSxxscTKBxYKuZoFXyKGLiU1Qjo1ulq3uv/fQDtoV1yey1FrQZrQjACGPi1Widsy1TucC7Jg=="], - "peek-readable": ["peek-readable@7.0.0", "", {}, "sha512-nri2TO5JE3/mRryik9LlHFT53cgHfRK0Lt0BAZQXku/AW3E6XLt2GaY8siWi7dvW/m1z0ecn+J+bpDa9ZN3IsQ=="], + "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], - "protobufjs": ["protobufjs@7.5.1", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-3qx3IRjR9WPQKagdwrKjO3Gu8RgQR2qqw+1KnigWhoVjFqegIj1K3bP11sGqhxrO46/XL7lekuG4jmjL+4cLsw=="], + "prettier": ["prettier@3.6.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ=="], + + "protobufjs": ["protobufjs@7.5.3", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw=="], - "pump": ["pump@3.0.2", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, 
"sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw=="], + "pump": ["pump@3.0.3", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA=="], "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], @@ -313,12 +372,14 @@ "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="], - "resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="], - "reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="], + "run-async": ["run-async@4.0.4", "", { "dependencies": { "oxlint": "^1.2.0", "prettier": "^3.5.3" } }, "sha512-2cgeRHnV11lSXBEhq7sN7a5UVjTKm9JTb9x8ApIT//16D7QL96AgnNeWSGoB4gIHc0iYw/Ha0Z+waBaCYZVNhg=="], + "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], + "rxjs": ["rxjs@7.8.2", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA=="], + "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], "safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="], @@ -329,9 +390,11 @@ "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], + "signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], + "simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="], - "smol-toml": ["smol-toml@1.3.4", "", {}, "sha512-UOPtVuYkzYGee0Bd2Szz8d2G3RfMfJ2t3qVdZUAozZyAk+a0Sxa+QKix0YCwjL/A1RR0ar44nCxaoN9FxdJGwA=="], + "smol-toml": ["smol-toml@1.4.1", "", {}, "sha512-CxdwHXyYTONGHThDbq5XdwbFsuY4wlClRGejfE2NtwUtiHYsP1QtNsHb/hnj31jKYSchztJsaA8pSQoVzkfCFg=="], "split-ca": ["split-ca@1.0.1", "", {}, "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="], @@ -347,39 +410,37 @@ "strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - "strip-json-comments": ["strip-json-comments@5.0.1", "", {}, "sha512-0fk9zBqO67Nq5M/m45qHCJxylV/DhBlIOVExqgOMiCCrzrhU6tCibRXNqE3jwJLftzE9SNuZtYbpzcO+i9FiKw=="], - - "strtok3": ["strtok3@10.2.2", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^7.0.0" } }, "sha512-Xt18+h4s7Z8xyZ0tmBoRmzxcop97R4BAh+dXouUDCYn+Em+1P3qpkUfI5ueWLT8ynC5hZ+q4iPEmGG1urvQGBg=="], - - "tapable": ["tapable@2.2.1", "", {}, "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ=="], + "strip-json-comments": ["strip-json-comments@5.0.2", "", {}, 
"sha512-4X2FR3UwhNUE9G49aIsJW5hRRR3GXGTBTZRMfv568O60ojM8HcWjV/VxAxCDW3SUND33O6ZY66ZuRcdkj73q2g=="], - "tar-fs": ["tar-fs@2.1.2", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA=="], + "tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="], "tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="], "text-hex": ["text-hex@1.0.0", "", {}, "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg=="], - "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], + "tmp": ["tmp@0.0.33", "", { "dependencies": { "os-tmpdir": "~1.0.2" } }, "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw=="], - "token-types": ["token-types@6.0.0", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-lbDrTLVsHhOMljPscd0yitpozq7Ga2M5Cvez5AjGg8GASBjtt6iERCAJ93yommPmz62fb45oFIXHEZ3u9bfJEA=="], + "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], "triple-beam": ["triple-beam@1.4.1", "", {}, "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg=="], + "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "tweetnacl": ["tweetnacl@0.14.5", "", {}, "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="], - "type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], + "type-fest": ["type-fest@0.21.3", "", {}, "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w=="], "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], - "uint8array-extras": ["uint8array-extras@1.4.0", "", {}, "sha512-ZPtzy0hu4cZjv3z5NW9gfKnNLjoz4y6uv4HlelAjDK7sY/xOkKZv9xK/WQpcsBB3jEybChz9DPC2U/+cusjJVQ=="], - "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], "uuid": ["uuid@10.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="], - "walk-up-path": ["walk-up-path@3.0.1", "", {}, "sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA=="], + "walk-up-path": ["walk-up-path@4.0.0", "", {}, 
"sha512-3hu+tD8YzSLGuFYtPRb48vdhKMi0KQV5sn+uWr8+7dMEq/2G/dtLrdDinkLjqq5TIbIBjYJ4Ax/n3YiaW7QM8A=="], + + "whatwg-fetch": ["whatwg-fetch@3.6.20", "", {}, "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg=="], "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], @@ -393,21 +454,21 @@ "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="], - "yaml": ["yaml@2.7.1", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ=="], + "yaml": ["yaml@2.8.0", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ=="], "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="], - "zhead": ["zhead@2.2.4", "", {}, "sha512-8F0OI5dpWIA5IGG5NHUg9staDwz/ZPxZtvGVf01j7vHqSyZ0raHY+78atOVxRqb73AotX22uV1pXt3gYSstGag=="], + "yoctocolors-cjs": ["yoctocolors-cjs@2.1.2", "", {}, "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA=="], - "zod": ["zod@3.24.4", "", {}, "sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg=="], + "zod": ["zod@3.25.75", "", {}, "sha512-OhpzAmVzabPOL6C3A3gpAifqr9MqihV/Msx3gor2b2kviCgcb+HM9SEOpMWwwNp9MRunWnhtAKUoo0AHhjyPPg=="], - "zod-validation-error": ["zod-validation-error@3.4.1", "", { "peerDependencies": { "zod": "^3.24.4" } }, "sha512-1KP64yqDPQ3rupxNv7oXhf7KdhHHgaqbKuspVoiN93TT0xrBjql+Svjkdjq/Qh/7GSMmgQs3AfvBT0heE35thw=="], + "zod-validation-error": ["zod-validation-error@3.5.2", "", { "peerDependencies": { "zod": "^3.25.0" } }, "sha512-mdi7YOLtram5dzJ5aDtm1AG9+mxRma1iaMrZdYIpFO7epdKBUwLHIxTF8CPDeCQ828zAXYtizrKlEJAtzgfgrw=="], - "@scalar/themes/@scalar/types": ["@scalar/types@0.1.7", "", { "dependencies": { "@scalar/openapi-types": "0.2.0", "@unhead/schema": "^1.11.11", "nanoid": "^5.1.5", "type-fest": "^4.20.0", "zod": "^3.23.8" } }, "sha512-irIDYzTQG2KLvFbuTI8k2Pz/R4JR+zUUSykVTbEMatkzMmVFnn1VzNSMlODbadycwZunbnL2tA27AXed9URVjw=="], + "@inquirer/core/wrap-ansi": ["wrap-ansi@6.2.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA=="], - "@types/ssh2/@types/node": ["@types/node@18.19.100", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-ojmMP8SZBKprc3qGrGk8Ujpo80AXkrP7G2tOT4VWr5jlr5DHjsJF+emXJz+Wm0glmy4Js62oKMdZZ6B9Y+tEcA=="], + "@types/ssh2/@types/node": ["@types/node@18.19.115", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-kNrFiTgG4a9JAn1LMQeLOv3MvXIPokzXziohMrMsvpYgLpdEt/mMiVYc4sGKtDfyxM5gIDF4VgrPRyCw4fHOYg=="], "cliui/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], @@ -415,13 +476,15 @@ "cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - "color-string/color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], - "micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], "yargs/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], - "@scalar/themes/@scalar/types/@scalar/openapi-types": ["@scalar/openapi-types@0.2.0", "", { "dependencies": { "zod": "^3.23.8" } }, "sha512-waiKk12cRCqyUCWTOX0K1WEVX46+hVUK+zRPzAahDJ7G0TApvbNkuy5wx7aoUyEk++HHde0XuQnshXnt8jsddA=="], + "@inquirer/core/wrap-ansi/ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], + + "@inquirer/core/wrap-ansi/string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], + + "@inquirer/core/wrap-ansi/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], "@types/ssh2/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="], @@ -435,10 +498,18 @@ "yargs/string-width/strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], + "@inquirer/core/wrap-ansi/ansi-styles/color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], + + "@inquirer/core/wrap-ansi/string-width/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], + + "@inquirer/core/wrap-ansi/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], + "cliui/wrap-ansi/ansi-styles/color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], "yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], + "@inquirer/core/wrap-ansi/ansi-styles/color-convert/color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], + "cliui/wrap-ansi/ansi-styles/color-convert/color-name": 
["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], } } diff --git a/data/.gitignore b/data/.gitignore deleted file mode 100644 index aed31992..00000000 --- a/data/.gitignore +++ /dev/null @@ -1 +0,0 @@ -./dockstatapi* diff --git a/dependency-graph.mmd b/dependency-graph.mmd index db1c046d..60821b11 100644 --- a/dependency-graph.mmd +++ b/dependency-graph.mmd @@ -8,247 +8,240 @@ flowchart LR subgraph 0["src"] 1["index.ts"] -subgraph 6["core"] -subgraph 7["stacks"] -8["checker.ts"] -1R["controller.ts"] -subgraph 1T["operations"] -1U["runStackCommand.ts"] -1V["stackHelpers.ts"] -1W["stackStatus.ts"] +subgraph 2["handlers"] +3["index.ts"] +4["config.ts"] +subgraph P["modules"] +Q["logs-socket.ts"] +1I["starter.ts"] +1J["docker-socket.ts"] end +1F["database.ts"] +1G["docker.ts"] +1H["logs.ts"] +1L["stacks.ts"] +1U["store.ts"] +1V["themes.ts"] +1W["utils.ts"] end -subgraph 9["database"] -A["index.ts"] -B["backup.ts"] -E["_dbState.ts"] -F["database.ts"] -K["helper.ts"] -P["config.ts"] -Q["containerStats.ts"] -R["dockerHosts.ts"] -S["hostStats.ts"] -U["logs.ts"] -V["stacks.ts"] +subgraph B["core"] +subgraph C["database"] +D["index.ts"] +E["backup.ts"] +G["_dbState.ts"] +H["database.ts"] +M["helper.ts"] +S["config.ts"] +T["containerStats.ts"] +U["dockerHosts.ts"] +V["hostStats.ts"] +W["logs.ts"] +X["stacks.ts"] +Z["stores.ts"] +10["themes.ts"] end -subgraph L["utils"] -M["logger.ts"] -W["helpers.ts"] -18["calculations.ts"] +subgraph N["utils"] +O["logger.ts"] +Y["helpers.ts"] +15["calculations.ts"] 1C["change-me-checker.ts"] 1D["package-json.ts"] -1F["swagger-readme.ts"] -1K["response-handler.ts"] end -subgraph Z["docker"] -10["monitor.ts"] -15["client.ts"] -16["scheduler.ts"] -17["store-container-stats.ts"] -19["store-host-stats.ts"] +subgraph 11["docker"] +12["scheduler.ts"] +13["store-container-stats.ts"] +14["client.ts"] +16["store-host-stats.ts"] end -subgraph 11["plugins"] -12["plugin-manager.ts"] +subgraph 18["plugins"] +19["plugin-manager.ts"] 1B["loader.ts"] end +subgraph 1M["stacks"] +1N["controller.ts"] +1P["checker.ts"] +subgraph 1Q["operations"] +1R["runStackCommand.ts"] +1S["stackHelpers.ts"] +1T["stackStatus.ts"] end -subgraph N["routes"] -O["live-logs.ts"] -X["live-stacks.ts"] -1J["api-config.ts"] -1L["docker-manager.ts"] -1M["docker-stats.ts"] -1N["docker-websocket.ts"] -1P["logs.ts"] -1Q["stacks.ts"] end -subgraph 1G["middleware"] -1H["auth.ts"] end end -subgraph 2["~"] -subgraph 3["typings"] -4["database"] -C["misc"] -T["docker"] -Y["websocket"] -13["plugin"] -1A["dockerode"] -1I["elysiajs"] -1S["docker-compose"] +subgraph 5["~"] +subgraph 6["typings"] +7["database"] +8["docker"] +9["plugin"] +F["misc"] +17["dockerode"] +1K["websocket"] +1O["docker-compose"] end end -5["elysia-remote-dts"] -subgraph D["fs"] -H["promises"] +subgraph A["fs"] +J["promises"] end -G["bun:sqlite"] -I["os"] -J["path"] -14["events"] +I["bun:sqlite"] +K["os"] +L["path"] +R["stream"] +1A["events"] 1E["package.json"] -1O["stream"] -1-->8 -1-->X -1-->A -1-->10 -1-->16 -1-->1B -1-->M -1-->1D -1-->1F -1-->1H -1-->1J -1-->1L -1-->1M -1-->1N -1-->O -1-->1P -1-->1Q -1-->4 -1-->5 -8-->A -8-->M -A-->B -A-->P -A-->Q -A-->F -A-->R -A-->S -A-->U -A-->V -B-->E -B-->F -B-->K -B-->M -B-->C -B-->D -F-->G -F-->D -F-->H -F-->I -F-->J -K-->E -K-->M -M-->E -M-->A +1-->3 +3-->4 +3-->1F +3-->1G +3-->1H +3-->1I +3-->1L +3-->1U +3-->1V +3-->1W +4-->D +4-->E +4-->12 +4-->19 +4-->O +4-->1D +4-->7 +4-->8 +4-->9 +4-->A +D-->E +D-->S +D-->T +D-->H +D-->U 
+D-->V +D-->W +D-->X +D-->Z +D-->10 +E-->G +E-->H +E-->M +E-->O +E-->F +E-->A +H-->I +H-->A +H-->J +H-->K +H-->L +M-->G M-->O -M-->4 -M-->J -O-->M -O-->4 -P-->F -P-->K -Q-->F -Q-->K -R-->F -R-->K -S-->F -S-->K -S-->T -U-->F -U-->K -U-->4 -V-->W -V-->F -V-->K -V-->4 +O-->Q +O-->G +O-->D +O-->7 +O-->L +Q-->O +Q-->7 +Q-->R +S-->H +S-->M +T-->H +T-->M +T-->7 +U-->H +U-->M +U-->8 +V-->H +V-->M +V-->8 +W-->H W-->M -X-->M +W-->7 X-->Y -10-->12 -10-->A -10-->15 +X-->H +X-->M +X-->7 +Y-->O +Z-->H +Z-->M +10-->O +10-->H 10-->M -10-->T -12-->M -12-->T +10-->7 +12-->D 12-->13 -12-->14 -15-->M -15-->T -16-->A +12-->16 +12-->O +12-->7 +13-->O +13-->D +13-->14 +13-->15 +13-->7 +14-->O +14-->8 +16-->D +16-->14 +16-->O +16-->8 16-->17 -16-->19 -16-->M -16-->4 -17-->M -17-->A -17-->15 -17-->18 -19-->A -19-->15 -19-->W -19-->M -19-->T +19-->O +19-->1B +19-->8 +19-->9 19-->1A 1B-->1C -1B-->M -1B-->12 -1B-->D -1B-->J -1C-->M -1C-->H +1B-->O +1B-->19 +1B-->A +1B-->L +1C-->O +1C-->J 1D-->1E -1H-->A -1H-->M -1H-->4 -1H-->1I -1J-->A -1J-->B -1J-->12 -1J-->M -1J-->1D -1J-->1K -1J-->1H -1J-->4 +1F-->D +1G-->D +1G-->14 +1G-->O +1G-->8 +1G-->17 +1H-->D +1H-->O +1I-->1J +1I-->12 +1I-->19 +1J-->Q 1J-->D -1K-->M -1K-->1I -1L-->A -1L-->M -1L-->1K -1L-->T -1M-->A -1M-->15 -1M-->18 -1M-->W -1M-->M -1M-->1K -1M-->T -1M-->1A -1N-->A -1N-->15 -1N-->18 -1N-->M -1N-->1K +1J-->14 +1J-->15 +1J-->O +1J-->7 +1J-->8 +1J-->1K +1L-->D +1L-->1N +1L-->O +1L-->7 +1N-->1J +1N-->1P +1N-->1R +1N-->1S +1N-->1T +1N-->D +1N-->O +1N-->7 1N-->1O -1P-->A -1P-->M -1Q-->A -1Q-->1R -1Q-->M -1Q-->1K -1Q-->4 -1R-->8 -1R-->1U -1R-->1V -1R-->1W -1R-->A -1R-->M -1R-->X -1R-->4 +1N-->J +1P-->D +1P-->O +1R-->1J 1R-->1S -1R-->H -1U-->1V -1U-->M -1U-->X -1U-->1S -1V-->A -1V-->W -1V-->M -1V-->1S -1W-->1U -1W-->A -1W-->M +1R-->O +1R-->1O +1S-->D +1S-->Y +1S-->O +1S-->1O +1T-->1R +1T-->D +1T-->O +1U-->Z +1V-->D +1V-->7 +1W-->O diff --git a/dependency-graph.svg b/dependency-graph.svg index 54234f89..495b4a21 100644 --- a/dependency-graph.svg +++ b/dependency-graph.svg @@ -4,1608 +4,1578 @@ - - + + dependency-cruiser output - + cluster_fs - -fs + +fs cluster_src - -src + +src cluster_src/core - -core + +core cluster_src/core/database - -database + +database cluster_src/core/docker - -docker + +docker cluster_src/core/plugins - -plugins + +plugins cluster_src/core/stacks - -stacks + +stacks cluster_src/core/stacks/operations - -operations + +operations cluster_src/core/utils - -utils + +utils -cluster_src/middleware - -middleware +cluster_src/handlers + +handlers -cluster_src/routes - -routes +cluster_src/handlers/modules + +modules cluster_~ - -~ + +~ cluster_~/typings - -typings + +typings bun:sqlite - -bun:sqlite - - - - - -elysia-remote-dts - - -elysia-remote-dts + +bun:sqlite - + events - - -events + + +events - + fs - - -fs + + +fs - + fs/promises - - -promises + + +promises - + os - - -os + + +os - + package.json - - -package.json + + +package.json - + path - - -path + + +path - + src/core/database/_dbState.ts - - -_dbState.ts + + +_dbState.ts - + src/core/database/backup.ts - - -backup.ts + + +backup.ts src/core/database/backup.ts->fs - - + + src/core/database/backup.ts->src/core/database/_dbState.ts - - + + - + src/core/database/database.ts - - -database.ts + + +database.ts src/core/database/backup.ts->src/core/database/database.ts - - + + - + src/core/database/helper.ts - - -helper.ts + + +helper.ts src/core/database/backup.ts->src/core/database/helper.ts - - - - + + + + - + src/core/utils/logger.ts - - -logger.ts + + +logger.ts 
[dependency-graph.svg: regenerated dependency-cruiser graph; the raw SVG markup is not reproduced here, and the node/edge changes mirror the dependency-graph.mmd diff above]
-src/routes/docker-websocket.ts->src/core/utils/calculations.ts - - +src/handlers/stacks.ts->src/core/stacks/controller.ts + + - - -src/routes/docker-websocket.ts->src/core/utils/response-handler.ts - - + + +src/handlers/store.ts->src/core/database/stores.ts + + - - -stream - - -stream + + +src/handlers/themes.ts->~/typings/database + + + + + +src/handlers/themes.ts->src/core/database/index.ts + + + + + +src/handlers/utils.ts->src/core/utils/logger.ts + + + + + +src/index.ts + + +index.ts - - -src/routes/docker-websocket.ts->stream - - - - - -src/routes/logs.ts->src/core/utils/logger.ts - - - - - -src/routes/logs.ts->src/core/database/index.ts - - - - - -src/routes/stacks.ts->src/core/utils/logger.ts - - - - - -src/routes/stacks.ts->src/core/database/index.ts - - - - - -src/routes/stacks.ts->~/typings/database - - - - - -src/routes/stacks.ts->src/core/stacks/controller.ts - - - - - -src/routes/stacks.ts->src/core/utils/response-handler.ts - - + + +src/index.ts->src/handlers/index.ts + + diff --git a/docker/docker-compose.dev.yaml b/docker/docker-compose.dev.yaml index f302c585..7d4e6ca8 100644 --- a/docker/docker-compose.dev.yaml +++ b/docker/docker-compose.dev.yaml @@ -5,7 +5,7 @@ services: image: lscr.io/linuxserver/socket-proxy:latest volumes: - /var/run/docker.sock:/var/run/docker.sock:ro - restart: unless-stopped + restart: never read_only: true tmpfs: - /run @@ -44,9 +44,10 @@ services: sqlite-web: container_name: sqlite-web image: ghcr.io/coleifer/sqlite-web:latest + restart: never ports: - 8080:8080 volumes: - - ../data:/data:ro + - /home/nik/Documents/Code-local/dockstat-project/DockStat/data:/data:ro environment: - SQLITE_DATABASE=dockstatapi.db diff --git a/package.json b/package.json index 0b7a5fb9..94eec8d0 100644 --- a/package.json +++ b/package.json @@ -24,20 +24,12 @@ "lint": "biome check --formatter-enabled=true --linter-enabled=true --organize-imports-enabled=true --fix src" }, "dependencies": { - "@elysiajs/cors": "^1.3.3", - "@elysiajs/html": "^1.3.0", - "@elysiajs/server-timing": "^1.3.0", - "@elysiajs/static": "^1.3.0", - "@elysiajs/swagger": "^1.3.0", "chalk": "^5.4.1", "date-fns": "^4.1.0", "docker-compose": "^1.2.0", "dockerode": "^4.0.7", - "elysia": "latest", - "elysia-remote-dts": "^1.0.3", "js-yaml": "^4.1.0", "knip": "latest", - "logestic": "^1.2.4", "split2": "^4.2.0", "winston": "^3.17.0", "yaml": "^2.8.0" @@ -46,9 +38,9 @@ "@biomejs/biome": "1.9.4", "@its_4_nik/gitai": "^1.1.14", "@types/bun": "latest", - "@types/dockerode": "^3.3.41", + "@types/dockerode": "^3.3.42", "@types/js-yaml": "^4.0.9", - "@types/node": "^22.15.32", + "@types/node": "^22.16.0", "@types/split2": "^4.2.3", "bun-types": "latest", "cross-env": "^7.0.3", diff --git a/src/core/database/backup.ts b/src/core/database/backup.ts index 4efa130c..df6a744a 100644 --- a/src/core/database/backup.ts +++ b/src/core/database/backup.ts @@ -60,9 +60,9 @@ export async function backupDatabase(): Promise { copyFileSync(`${backupDir}dockstatapi.db`, backupFilename); logger.info(`Backup created successfully: ${backupFilename}`); logger.debug("File copy operation completed without errors"); - } catch (e) { - logger.error(`Failed to create backup file: ${(e as Error).message}`); - throw e; + } catch (error) { + logger.error(`Failed to create backup file: ${(error as Error).message}`); + throw new Error(error as string); } return backupFilename; @@ -97,9 +97,9 @@ export function restoreDatabase(backupFilename: string): void { copyFileSync(backupFile, `${backupDir}dockstatapi.db`); logger.info(`Database 
restored successfully from: ${backupFilename}`); logger.debug("Database file replacement completed"); - } catch (e) { - logger.error(`Restore failed: ${(e as Error).message}`); - throw e; + } catch (error) { + logger.error(`Restore failed: ${(error as Error).message}`); + throw new Error(error as string); } }, () => { diff --git a/src/core/database/config.ts b/src/core/database/config.ts index f2460e06..0fa66da7 100644 --- a/src/core/database/config.ts +++ b/src/core/database/config.ts @@ -3,11 +3,9 @@ import { executeDbOperation } from "./helper"; const stmt = { update: db.prepare( - "UPDATE config SET fetching_interval = ?, keep_data_for = ?, api_key = ?", - ), - select: db.prepare( - "SELECT keep_data_for, fetching_interval, api_key FROM config", + "UPDATE config SET fetching_interval = ?, keep_data_for = ?", ), + select: db.prepare("SELECT keep_data_for, fetching_interval FROM config"), deleteOld: db.prepare( `DELETE FROM container_stats WHERE timestamp < datetime('now', '-' || ? || ' days')`, ), @@ -16,14 +14,10 @@ const stmt = { ), }; -export function updateConfig( - fetching_interval: number, - keep_data_for: number, - api_key: string, -) { +export function updateConfig(fetching_interval: number, keep_data_for: number) { return executeDbOperation( "Update Config", - () => stmt.update.run(fetching_interval, keep_data_for, api_key), + () => stmt.update.run(fetching_interval, keep_data_for), () => { if ( typeof fetching_interval !== "number" || diff --git a/src/core/database/containerStats.ts b/src/core/database/containerStats.ts index a50ea4c2..a8466701 100644 --- a/src/core/database/containerStats.ts +++ b/src/core/database/containerStats.ts @@ -1,4 +1,4 @@ -import type { containerStatistics } from "~/typings/database"; +import type { container_stats } from "~/typings/database"; import { db } from "./database"; import { executeDbOperation } from "./helper"; @@ -9,35 +9,26 @@ const insert = db.prepare(` const get = db.prepare("SELECT * FROM container_stats"); -export function addContainerStats( - id: string, - hostId: string, - name: string, - image: string, - status: string, - state: string, - cpu_usage: number, - memory_usage: number, -) { +export function addContainerStats(stats: container_stats) { return executeDbOperation( "Add Container Stats", () => insert.run( - id, - hostId, - name, - image, - status, - state, - cpu_usage, - memory_usage, + stats.id, + stats.hostId, + stats.name, + stats.image, + stats.status, + stats.state, + stats.cpu_usage, + stats.memory_usage, ), () => { if ( - typeof id !== "string" || - typeof hostId !== "string" || - typeof cpu_usage !== "number" || - typeof memory_usage !== "number" + typeof stats.id !== "string" || + typeof stats.hostId !== "number" || + typeof stats.cpu_usage !== "number" || + typeof stats.memory_usage !== "number" ) { throw new TypeError("Invalid container stats parameters"); } @@ -45,8 +36,8 @@ export function addContainerStats( ); } -export function getContainerStats(): containerStatistics[] { +export function getContainerStats(): container_stats[] { return executeDbOperation("Get Container Stats", () => get.all(), - ) as containerStatistics[]; + ) as container_stats[]; } diff --git a/src/core/database/database.ts b/src/core/database/database.ts index 204666ef..f5949b41 100644 --- a/src/core/database/database.ts +++ b/src/core/database/database.ts @@ -13,26 +13,26 @@ const uid = userInfo().uid; export let db: Database; try { - const databasePath = path.join(dataFolder, "dockstatapi.db"); - console.log("Database path:", 
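For reference, the object-style call that replaces the old positional addContainerStats signature looks like this in use; the container_stats field names are read off the hunk above, and the literal values are made up for illustration.

```ts
// Illustrative call of the refactored addContainerStats(stats: container_stats).
// Field names come from the insert statement above; hostId is now a numeric host id.
import { dbFunctions } from "~/core/database";
import type { container_stats } from "~/typings/database";

const sample: container_stats = {
  id: "a1b2c3d4e5f6", // hypothetical container id
  hostId: 1,
  name: "nginx",
  image: "nginx:latest",
  status: "Up 2 hours",
  state: "running",
  cpu_usage: 3.2,
  memory_usage: 52428800, // bytes
};

dbFunctions.addContainerStats(sample);
```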
databasePath); - console.log(`Running as: ${username} (${uid}:${gid})`); + const databasePath = path.join(dataFolder, "dockstatapi.db"); + console.log("Database path:", databasePath); + console.log(`Running as: ${username} (${uid}:${gid})`); - if (!existsSync(dataFolder)) { - await mkdir(dataFolder, { recursive: true, mode: 0o777 }); - console.log("Created data directory:", dataFolder); - } + if (!existsSync(dataFolder)) { + await mkdir(dataFolder, { recursive: true, mode: 0o777 }); + console.log("Created data directory:", dataFolder); + } - db = new Database(databasePath, { create: true }); - console.log("Database opened successfully"); + db = new Database(databasePath, { create: true }); + console.log("Database opened successfully"); - db.exec("PRAGMA journal_mode = WAL;"); + db.exec("PRAGMA journal_mode = WAL;"); } catch (error) { - console.error(`Cannot start DockStatAPI: ${error}`); - process.exit(500); + console.error(`Cannot start DockStatAPI: ${error}`); + process.exit(500); } export function init() { - db.exec(` + db.exec(` CREATE TABLE IF NOT EXISTS backend_log_entries ( timestamp STRING NOT NULL, level TEXT NOT NULL, @@ -90,30 +90,83 @@ export function init() { CREATE TABLE IF NOT EXISTS config ( keep_data_for NUMBER NOT NULL, - fetching_interval NUMBER NOT NULL, - api_key TEXT NOT NULL + fetching_interval NUMBER NOT NULL ); + + CREATE TABLE IF NOT EXISTS store_repos ( + slug TEXT NOT NULL, + base TEXT NOT NULL ); + + CREATE TABLE IF NOT EXISTS themes ( + name TEXT PRIMARY KEY, + creator TEXT NOT NULL, + vars TEXT NOT NULL, + tags TEXT NOT NULL + ) `); - const configRow = db - .prepare("SELECT COUNT(*) AS count FROM config") - .get() as { count: number }; - - if (configRow.count === 0) { - db.prepare( - 'INSERT INTO config (keep_data_for, fetching_interval, api_key) VALUES (7, 5, "changeme")', - ).run(); - } - - const hostRow = db - .prepare("SELECT COUNT(*) AS count FROM docker_hosts") - .get() as { count: number }; - - if (hostRow.count === 0) { - db.prepare( - "INSERT INTO docker_hosts (name, hostAddress, secure) VALUES (?, ?, ?)", - ).run("Localhost", "localhost:2375", false); - } + const themeRows = db + .prepare("SELECT COUNT(*) AS count FROM themes") + .get() as { count: number }; + + const defaultCss = ` + .root, + #root, + #docs-root { + --accent: #818cf9; + --muted-bg: #0f172a; + --gradient-from: #1e293b; + --gradient-to: #334155; + --border: #334155; + --border-accent: rgba(129, 140, 249, 0.3); + --text-primary: #f8fafc; + --text-secondary: #94a3b8; + --text-tertiary: #64748b; + --state-success: #4ade80; + --state-warning: #facc15; + --state-error: #f87171; + --state-info: #38bdf8; + --shadow-glow: 0 0 15px rgba(129, 140, 249, 0.5); + --background-gradient: linear-gradient(145deg, #0f172a 0%, #1e293b 100%); + } + `; + + if (themeRows.count === 0) { + db.prepare( + "INSERT INTO themes (name, creator, vars, tags) VALUES (?,?,?,?)", + ).run("default", "Its4Nik", defaultCss, "[default]"); + } + + const configRow = db + .prepare("SELECT COUNT(*) AS count FROM config") + .get() as { count: number }; + + if (configRow.count === 0) { + db.prepare( + "INSERT INTO config (keep_data_for, fetching_interval) VALUES (7, 5)", + ).run(); + } + + const hostRow = db + .prepare("SELECT COUNT(*) AS count FROM docker_hosts") + .get() as { count: number }; + + if (hostRow.count === 0) { + db.prepare( + "INSERT INTO docker_hosts (name, hostAddress, secure) VALUES (?, ?, ?)", + ).run("Localhost", "localhost:2375", false); + } + + const storeRow = db + .prepare("SELECT COUNT(*) AS count FROM 
store_repos") + .get() as { count: number }; + + if (storeRow.count === 0) { + db.prepare("INSERT INTO store_repos (slug, base) VALUES (?, ?)").run( + "DockStacks", + "https://raw.githubusercontent.com/Its4Nik/DockStacks/refs/heads/main/Index.json", + ); + } } init(); diff --git a/src/core/database/index.ts b/src/core/database/index.ts index c381e7a6..7bc61473 100644 --- a/src/core/database/index.ts +++ b/src/core/database/index.ts @@ -9,6 +9,8 @@ import * as dockerHosts from "~/core/database/dockerHosts"; import * as hostStats from "~/core/database/hostStats"; import * as logs from "~/core/database/logs"; import * as stacks from "~/core/database/stacks"; +import * as stores from "~/core/database/stores"; +import * as themes from "~/core/database/themes"; export const dbFunctions = { ...dockerHosts, @@ -18,6 +20,8 @@ export const dbFunctions = { ...hostStats, ...stacks, ...backup, + ...stores, + ...themes, }; export type dbFunctions = typeof dbFunctions; diff --git a/src/core/database/stores.ts b/src/core/database/stores.ts new file mode 100644 index 00000000..c8a330cb --- /dev/null +++ b/src/core/database/stores.ts @@ -0,0 +1,31 @@ +import { db } from "./database"; +import { executeDbOperation } from "./helper"; + +const stmt = { + insert: db.prepare(` + INSERT INTO store_repos (slug, base) VALUES (?, ?) + `), + selectAll: db.prepare(` + SELECT slug, base FROM store_repos + `), + delete: db.prepare(` + DELETE FROM store_repos WHERE slug = ? + `), +}; + +export function getStoreRepos() { + return executeDbOperation("Get Store Repos", () => stmt.selectAll.all()) as { + slug: string; + base: string; + }[]; +} + +export function addStoreRepo(slug: string, base: string) { + return executeDbOperation("Add Store Repo", () => + stmt.insert.run(slug, base), + ); +} + +export function deleteStoreRepo(slug: string) { + return executeDbOperation("Delete Store Repo", () => stmt.delete.run(slug)); +} diff --git a/src/core/database/themes.ts b/src/core/database/themes.ts new file mode 100644 index 00000000..08f245dd --- /dev/null +++ b/src/core/database/themes.ts @@ -0,0 +1,34 @@ +import type { Theme } from "~/typings/database"; +import { db } from "./database"; +import { executeDbOperation } from "./helper"; +import { logger } from "../utils/logger"; + +const stmt = { + insert: db.prepare(` + INSERT INTO themes (name, creator, vars, tags) VALUES (?, ?, ?, ?) 
+ `), + remove: db.prepare("DELETE FROM themes WHERE name = ?"), + read: db.prepare("SELECT * FROM themes WHERE name = ?"), + readAll: db.prepare("SELECT * FROM themes"), +}; + +export function getThemes() { + return executeDbOperation("Get Themes", () => stmt.readAll.all()) as Theme[]; +} + +export function addTheme({ name, creator, vars, tags }: Theme) { + return executeDbOperation("Save Theme", () => + stmt.insert.run(name, creator, vars, tags.toString()), + ); +} +export function getSpecificTheme(name: string): Theme { + return executeDbOperation( + "Getting specific Theme", + () => stmt.read.get(name) as Theme, + ); +} + +export function deleteTheme(name: string) { + logger.debug(`Removing ${name} from themes `); + return executeDbOperation("Remove Theme", () => stmt.remove.run(name)); +} diff --git a/src/core/docker/client.ts b/src/core/docker/client.ts index ad65540b..788a910c 100644 --- a/src/core/docker/client.ts +++ b/src/core/docker/client.ts @@ -4,6 +4,8 @@ import type { DockerHost } from "~/typings/docker"; export const getDockerClient = (host: DockerHost): Docker => { try { + logger.info(`Setting up host: ${JSON.stringify(host)}`); + const inputUrl = host.hostAddress.includes("://") ? host.hostAddress : `${host.secure ? "https" : "http"}://${host.hostAddress}`; diff --git a/src/core/docker/monitor.ts b/src/core/docker/monitor.ts index d10c3c65..e4a2510c 100644 --- a/src/core/docker/monitor.ts +++ b/src/core/docker/monitor.ts @@ -68,7 +68,7 @@ async function startFor(host: DockerHost) { if (event.Type === "container") { const containerInfo: ContainerInfo = { id: event.Actor?.ID || event.id || "", - hostId: host.name, + hostId: host.id, name: event.Actor?.Attributes?.name || "", image: event.Actor?.Attributes?.image || event.from || "", status: event.status || event.Actor?.Attributes?.status || "", diff --git a/src/core/docker/scheduler.ts b/src/core/docker/scheduler.ts index 8682411b..fa6da95c 100644 --- a/src/core/docker/scheduler.ts +++ b/src/core/docker/scheduler.ts @@ -5,111 +5,146 @@ import { logger } from "~/core/utils/logger"; import type { config } from "~/typings/database"; function convertFromMinToMs(minutes: number): number { - return minutes * 60 * 1000; + return minutes * 60 * 1000; } async function initialRun( - scheduleName: string, - scheduleFunction: Promise | void, - isAsync: boolean, + scheduleName: string, + scheduleFunction: Promise | void, + isAsync: boolean, ) { - try { - if (isAsync) { - await scheduleFunction; - } else { - scheduleFunction; - } - logger.info(`Startup run success for: ${scheduleName}`); - } catch (error) { - logger.error(`Startup run failed for ${scheduleName}, ${error as string}`); - } + try { + if (isAsync) { + await scheduleFunction; + } else { + scheduleFunction; + } + logger.info(`Startup run success for: ${scheduleName}`); + } catch (error) { + logger.error(`Startup run failed for ${scheduleName}, ${error as string}`); + } +} + +type CancelFn = () => void; +let cancelFunctions: CancelFn[] = []; + +async function reloadSchedules() { + logger.info("Reloading schedules..."); + + cancelFunctions.forEach((cancel) => cancel()); + cancelFunctions = []; + + await setSchedules(); +} + +function scheduledJob( + name: string, + jobFn: () => Promise, + intervalMs: number, +): CancelFn { + let stopped = false; + + async function run() { + if (stopped) return; + const start = Date.now(); + logger.info(`Task Start: ${name}`); + try { + await jobFn(); + logger.info(`Task End: ${name} succeeded.`); + } catch (e) { + logger.error(`Task End: ${name} 
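To make the two new tables concrete, here is a rough illustrative sketch of driving the store_repos and themes helpers through the shared dbFunctions export; the Theme fields are taken from the hunks above, and treating tags as a string array is an inference from the tags.toString() call.

```ts
// Hypothetical usage of the new store/theme helpers re-exported by dbFunctions.
import { dbFunctions } from "~/core/database";

// Register a second stack store next to the seeded "DockStacks" repo (example URL).
dbFunctions.addStoreRepo("my-store", "https://example.com/Index.json");
console.log(dbFunctions.getStoreRepos()); // [{ slug, base }, ...]

// Persist a theme; vars carries the raw CSS custom-property block.
dbFunctions.addTheme({
  name: "midnight",
  creator: "example",
  vars: ".root { --accent: #818cf9; }",
  tags: ["dark"], // stored as text via tags.toString()
});
console.log(dbFunctions.getSpecificTheme("midnight").creator);
dbFunctions.deleteTheme("midnight");
```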
failed:`, e); + } + const elapsed = Date.now() - start; + const delay = Math.max(0, intervalMs - elapsed); + setTimeout(run, delay); + } + + run(); + + return () => { + stopped = true; + }; } async function setSchedules() { - try { - const rawConfigData: unknown[] = dbFunctions.getConfig(); - const configData = rawConfigData[0]; - - if ( - !configData || - typeof (configData as config).keep_data_for !== "number" || - typeof (configData as config).fetching_interval !== "number" - ) { - logger.error("Invalid configuration data:", configData); - throw new Error("Invalid configuration data"); - } - - const { keep_data_for, fetching_interval } = configData as config; - - if (keep_data_for === undefined) { - const errMsg = "keep_data_for is undefined"; - logger.error(errMsg); - throw new Error(errMsg); - } - - if (fetching_interval === undefined) { - const errMsg = "fetching_interval is undefined"; - logger.error(errMsg); - throw new Error(errMsg); - } - - logger.info( - `Scheduling: Fetching container statistics every ${fetching_interval} minutes`, - ); - - logger.info( - `Scheduling: Updating host statistics every ${fetching_interval} minutes`, - ); - - logger.info( - `Scheduling: Cleaning up Database every hour and deleting data older then ${keep_data_for} days`, - ); - - // Schedule container data fetching - await initialRun("storeContainerData", storeContainerData(), true); - setInterval(async () => { - try { - logger.info("Task Start: Fetching container data."); - await storeContainerData(); - logger.info("Task End: Container data fetched successfully."); - } catch (error) { - logger.error("Error in fetching container data:", error); - } - }, convertFromMinToMs(fetching_interval)); - - // Schedule Host statistics updates - await initialRun("storeHostData", storeHostData(), true); - setInterval(async () => { - try { - logger.info("Task Start: Updating host stats."); - await storeHostData(); - logger.info("Task End: Updating host stats successfully."); - } catch (error) { - logger.error("Error in updating host stats:", error); - } - }, convertFromMinToMs(fetching_interval)); - - // Schedule database cleanup - await initialRun( - "dbFunctions.deleteOldData", - dbFunctions.deleteOldData(keep_data_for), - false, - ); - setInterval(() => { - try { - logger.info("Task Start: Cleaning up old database data."); - dbFunctions.deleteOldData(keep_data_for); - logger.info("Task End: Database cleanup completed."); - } catch (error) { - logger.error("Error in database cleanup task:", error); - } - }, convertFromMinToMs(60)); - - logger.info("Schedules have been set successfully."); - } catch (error) { - logger.error("Error setting schedules:", error); - throw error; - } + logger.info("Starting DockStatAPI"); + try { + const rawConfigData: unknown[] = dbFunctions.getConfig(); + const configData = rawConfigData[0]; + + if ( + !configData || + typeof (configData as config).keep_data_for !== "number" || + typeof (configData as config).fetching_interval !== "number" + ) { + logger.error("Invalid configuration data:", configData); + throw new Error("Invalid configuration data"); + } + + const { keep_data_for, fetching_interval } = configData as config; + + if (keep_data_for === undefined) { + const errMsg = "keep_data_for is undefined"; + logger.error(errMsg); + throw new Error(errMsg); + } + + if (fetching_interval === undefined) { + const errMsg = "fetching_interval is undefined"; + logger.error(errMsg); + throw new Error(errMsg); + } + + logger.info( + `Scheduling: Fetching container statistics every 
${fetching_interval} minutes`, + ); + + logger.info( + `Scheduling: Updating host statistics every ${fetching_interval} minutes`, + ); + + logger.info( + `Scheduling: Cleaning up Database every hour and deleting data older then ${keep_data_for} days`, + ); + // Schedule container data fetching + await initialRun("storeContainerData", storeContainerData(), true); + cancelFunctions.push( + scheduledJob( + "storeContainerData", + storeContainerData, + convertFromMinToMs(fetching_interval), + ), + ); + + // Schedule Host statistics updates + await initialRun("storeHostData", storeHostData(), true); + cancelFunctions.push( + scheduledJob( + "storeHostData", + storeHostData, + convertFromMinToMs(fetching_interval), + ), + ); + + // Schedule database cleanup + await initialRun( + "dbFunctions.deleteOldData", + dbFunctions.deleteOldData(keep_data_for), + false, + ); + cancelFunctions.push( + scheduledJob( + "cleanupOldData", + () => Promise.resolve(dbFunctions.deleteOldData(keep_data_for)), + convertFromMinToMs(60), + ), + ); + + logger.info("Schedules have been set successfully."); + } catch (error) { + logger.error("Error setting schedules:", error); + throw new Error(error as string); + } } -export { setSchedules }; +export { setSchedules, reloadSchedules }; diff --git a/src/core/docker/store-container-stats.ts b/src/core/docker/store-container-stats.ts index 33b9c0fb..a2778777 100644 --- a/src/core/docker/store-container-stats.ts +++ b/src/core/docker/store-container-stats.ts @@ -5,6 +5,7 @@ import { calculateCpuPercent, calculateMemoryUsage, } from "~/core/utils/calculations"; +import type { container_stats } from "~/typings/database"; import { logger } from "../utils/logger"; async function storeContainerData() { @@ -68,16 +69,18 @@ async function storeContainerData() { }, ); - dbFunctions.addContainerStats( - containerInfo.Id, - host.name, - containerName, - containerInfo.Image, - containerInfo.Status, - containerInfo.State, - calculateCpuPercent(stats), - calculateMemoryUsage(stats), - ); + const parsed: container_stats = { + cpu_usage: calculateCpuPercent(stats), + hostId: host.id, + id: containerInfo.Id, + image: containerInfo.Image, + memory_usage: calculateMemoryUsage(stats), + name: containerName, + state: containerInfo.State, + status: containerInfo.Status, + }; + + dbFunctions.addContainerStats(parsed); } catch (error) { const errMsg = error instanceof Error ? 
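Restated outside the patch, the scheduling pattern introduced above boils down to a self-rescheduling setTimeout loop that subtracts each run's duration from the interval and hands back a cancel function; reloadSchedules() only works because every job registers such a cancel. A minimal standalone sketch:

```ts
// Standalone restatement of the scheduler's self-rescheduling pattern:
// each run subtracts its own duration from the interval so jobs stay roughly
// on a fixed cadence, and the returned function stops any future runs.
type CancelFn = () => void;

function scheduleEvery(intervalMs: number, job: () => Promise<void>): CancelFn {
  let stopped = false;

  async function tick() {
    if (stopped) return;
    const start = Date.now();
    try {
      await job();
    } catch (error) {
      console.error("scheduled job failed:", error);
    }
    // Compensate for the time the job itself took; never go negative.
    const delay = Math.max(0, intervalMs - (Date.now() - start));
    setTimeout(tick, delay);
  }

  tick();
  return () => {
    stopped = true;
  };
}

// Usage: the cancel function is what a reload relies on to tear down old jobs.
const cancel = scheduleEvery(5 * 60 * 1000, async () => console.log("fetch stats"));
// ...later, e.g. after a config change:
cancel();
```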
error.message : String(error); diff --git a/src/core/plugins/loader.ts b/src/core/plugins/loader.ts index 3c058e7c..c6da8764 100644 --- a/src/core/plugins/loader.ts +++ b/src/core/plugins/loader.ts @@ -38,12 +38,12 @@ export async function loadPlugins(pluginDir: string) { logger.info(`Loading plugin: ${absolutePath}`); try { await checkFileForChangeMe(absolutePath); - const module = await import(absolutePath); + const module = await import(/* @vite-ignore */ absolutePath); const plugin = module.default; pluginManager.register(plugin); pluginCount++; } catch (error) { - pluginManager.fail({ name: file }); + pluginManager.fail({ name: file, version: "0.0.0" }); logger.error( `Error while registering plugin ${absolutePath}: ${error as string}`, ); diff --git a/src/core/plugins/plugin-manager.ts b/src/core/plugins/plugin-manager.ts index c22c3d59..f68b80d8 100644 --- a/src/core/plugins/plugin-manager.ts +++ b/src/core/plugins/plugin-manager.ts @@ -1,7 +1,8 @@ import { EventEmitter } from "node:events"; import type { ContainerInfo } from "~/typings/docker"; -import type { Hooks, Plugin, PluginInfo } from "~/typings/plugin"; +import type { Plugin, PluginInfo } from "~/typings/plugin"; import { logger } from "../utils/logger"; +import { loadPlugins } from "./loader"; function getHooks(plugin: Plugin) { return { @@ -27,6 +28,16 @@ class PluginManager extends EventEmitter { private plugins: Map = new Map(); private failedPlugins: Map = new Map(); + async start() { + try { + await loadPlugins("./server/src/plugins"); + return; + } catch (error) { + logger.error(`Failed to init plugin manager: ${error}`); + return; + } + } + fail(plugin: Plugin) { try { this.failedPlugins.set(plugin.name, plugin); @@ -52,29 +63,31 @@ class PluginManager extends EventEmitter { } getPlugins(): PluginInfo[] { - const loadedPlugins = Array.from(this.plugins.values()).map((plugin) => { - const hooks: Hooks = getHooks(plugin); + const plugins: PluginInfo[] = []; - return { + for (const plugin of this.plugins.values()) { + logger.debug(`Loaded plugin: ${JSON.stringify(plugin)}`); + const hooks = getHooks(plugin); + plugins.push({ name: plugin.name, + version: plugin.version, status: "active", usedHooks: hooks, - }; - }); - - const failedPlugins = Array.from(this.failedPlugins.values()).map( - (plugin) => { - const hooks: Hooks = getHooks(plugin); - - return { - name: plugin.name, - status: "inactive", - usedHooks: hooks, - }; - }, - ); - - return loadedPlugins.concat(failedPlugins); + }); + } + + for (const plugin of this.failedPlugins.values()) { + logger.debug(`Loaded plugin: ${JSON.stringify(plugin)}`); + const hooks = getHooks(plugin); + plugins.push({ + name: plugin.name, + version: plugin.version, + status: "inactive", + usedHooks: hooks, + }); + } + + return plugins; } // Trigger plugin flows: diff --git a/src/core/stacks/controller.ts b/src/core/stacks/controller.ts index 90f7c671..0b0b5174 100644 --- a/src/core/stacks/controller.ts +++ b/src/core/stacks/controller.ts @@ -2,10 +2,10 @@ import { rm } from "node:fs/promises"; import DockerCompose from "docker-compose"; import { dbFunctions } from "~/core/database"; import { logger } from "~/core/utils/logger"; -import { postToClient } from "~/routes/live-stacks"; import type { stacks_config } from "~/typings/database"; import type { Stack } from "~/typings/docker-compose"; import type { ComposeSpec } from "~/typings/docker-compose"; +import { broadcast } from "../../handlers/modules/docker-socket"; import { checkStacks } from "./checker"; import { runStackCommand } 
from "./operations/runStackCommand"; import { wrapProgressCallback } from "./operations/runStackCommand"; @@ -33,12 +33,17 @@ export async function deployStack(stack_config: stacks_config): Promise { throw new Error("Failed to add stack to database"); } - postToClient({ - type: "stack-status", + // Broadcast pending status + broadcast({ + topic: "stack", data: { - stack_id: stackId, - status: "pending", - message: "Creating stack configuration", + timestamp: new Date(), + type: "stack-status", + data: { + stack_id: stackId, + status: "pending", + message: "Creating stack configuration", + }, }, }); @@ -64,12 +69,17 @@ export async function deployStack(stack_config: stacks_config): Promise { "deploying", ); - postToClient({ - type: "stack-status", + // Broadcast deployed status + broadcast({ + topic: "stack", data: { - stack_id: stackId, - status: "deployed", - message: "Stack deployed successfully", + timestamp: new Date(), + type: "stack-status", + data: { + stack_id: stackId, + status: "deployed", + message: "Stack deployed successfully", + }, }, }); @@ -107,13 +117,17 @@ export async function deployStack(stack_config: stacks_config): Promise { } } - postToClient({ - type: "stack-error", + // Broadcast deployment error + broadcast({ + topic: "stack", data: { - stack_id: stackId ?? 0, - action: "deploying", - message: errorMsg, - timestamp: new Date().toISOString(), + timestamp: new Date(), + type: "stack-error", + data: { + stack_id: stackId ?? 0, + action: "deploying", + message: errorMsg, + }, }, }); throw new Error(errorMsg); @@ -208,13 +222,17 @@ export async function removeStack(stack_id: number): Promise { } catch (error) { const errorMsg = error instanceof Error ? error.message : String(error); logger.error(errorMsg); - postToClient({ - type: "stack-error", + // Broadcast removal error + broadcast({ + topic: "stack", data: { - stack_id, - action: "removing", - message: `Directory removal failed: ${errorMsg}`, - timestamp: new Date().toISOString(), + timestamp: new Date(), + type: "stack-error", + data: { + stack_id, + action: "removing", + message: `Directory removal failed: ${errorMsg}`, + }, }, }); throw new Error(errorMsg); @@ -222,23 +240,32 @@ export async function removeStack(stack_id: number): Promise { dbFunctions.deleteStack(stack_id); - postToClient({ - type: "stack-removed", + // Broadcast successful removal + broadcast({ + topic: "stack", data: { - stack_id, - message: "Stack removed successfully", + timestamp: new Date(), + type: "stack-removed", + data: { + stack_id, + message: "Stack removed successfully", + }, }, }); } catch (error: unknown) { const errorMsg = error instanceof Error ? 
error.message : String(error); logger.error(errorMsg); - postToClient({ - type: "stack-error", + // Broadcast removal error + broadcast({ + topic: "stack", data: { - stack_id, - action: "removing", - message: errorMsg, - timestamp: new Date().toISOString(), + timestamp: new Date(), + type: "stack-error", + data: { + stack_id, + action: "removing", + message: errorMsg, + }, }, }); throw new Error(errorMsg); diff --git a/src/core/stacks/operations/runStackCommand.ts b/src/core/stacks/operations/runStackCommand.ts index 818e6499..d613a7c7 100644 --- a/src/core/stacks/operations/runStackCommand.ts +++ b/src/core/stacks/operations/runStackCommand.ts @@ -1,6 +1,6 @@ import { logger } from "~/core/utils/logger"; -import { postToClient } from "~/routes/live-stacks"; import type { Stack } from "~/typings/docker-compose"; +import { broadcast } from "../../../handlers/modules/docker-socket"; import { getStackName, getStackPath } from "./stackHelpers"; export function wrapProgressCallback(progressCallback?: (log: string) => void) { @@ -49,13 +49,17 @@ export async function runStackCommand( } } - postToClient({ - type: "stack-progress", + // Broadcast progress + broadcast({ + topic: "stack", data: { - stack_id, - action, - message, - timestamp: new Date().toISOString(), + timestamp: new Date(), + type: "stack-progress", + data: { + stack_id, + message, + action, + }, }, }); }; @@ -68,6 +72,21 @@ export async function runStackCommand( `Successfully completed command for stack_id=${stack_id}, action="${action}"`, ); + // Optionally broadcast status on completion + broadcast({ + topic: "stack", + data: { + timestamp: new Date(), + type: "stack-status", + data: { + stack_id, + status: "completed", + message: `Completed ${action}`, + action, + }, + }, + }); + return result; } catch (error: unknown) { const errorMsg = @@ -75,15 +94,21 @@ export async function runStackCommand( logger.debug( `Error occurred for stack_id=${stack_id}, action="${action}": ${errorMsg}`, ); - postToClient({ - type: "stack-error", + + // Broadcast error + broadcast({ + topic: "stack", data: { - stack_id, - action, - message: errorMsg, - timestamp: new Date().toISOString(), + timestamp: new Date(), + type: "stack-error", + data: { + stack_id, + action, + message: errorMsg, + }, }, }); + throw new Error(`Error while ${action} stack "${stack_id}": ${errorMsg}`); } } diff --git a/src/core/utils/logger.ts b/src/core/utils/logger.ts index f9304ab1..483d73cf 100644 --- a/src/core/utils/logger.ts +++ b/src/core/utils/logger.ts @@ -1,18 +1,19 @@ import path from "node:path"; -import chalk, { type ChalkInstance } from "chalk"; +import chalk from "chalk"; +import type { ChalkInstance } from "chalk"; import type { TransformableInfo } from "logform"; import { createLogger, format, transports } from "winston"; import wrapAnsi from "wrap-ansi"; import { dbFunctions } from "~/core/database"; -import { logToClients } from "~/routes/live-logs"; +import { logToClients } from "../../handlers/modules/logs-socket"; import type { log_message } from "~/typings/database"; import { backupInProgress } from "../database/_dbState"; -const padNewlines = process.env.PAD_NEW_LINES !== "false"; +const padNewlines = true; //process.env.PAD_NEW_LINES !== "false"; type LogLevel = | "error" @@ -24,7 +25,7 @@ type LogLevel = | "task" | "ut"; -// biome-ignore lint/suspicious/noControlCharactersInRegex: +// biome-ignore lint/suspicious/noControlCharactersInRegex: const ansiRegex = /\x1B\[[0-?9;]*[mG]/g; const formatTerminalMessage = (message: string, prefix: string): 
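The broadcast() calls above all wrap their payload in a topic envelope; a hypothetical client for those stack events could look like the sketch below. The ws://localhost:4837/ws address is the one advertised later in handlers/index.ts, and JSON framing plus the serialized timestamp arriving as a string are assumptions, not something this patch shows.

```ts
// Hypothetical consumer of the broadcast envelope used above:
// { topic, data: { timestamp, type, data } }.
type StackEvent = {
  topic: "stack";
  data: {
    timestamp: string; // assumed: Date serialized by JSON.stringify
    type: "stack-status" | "stack-progress" | "stack-error" | "stack-removed";
    data: { stack_id: number; message: string; status?: string; action?: string };
  };
};

const socket = new WebSocket("ws://localhost:4837/ws"); // endpoint from handlers/index.ts
socket.addEventListener("message", (event) => {
  const parsed = JSON.parse(String(event.data)) as StackEvent;
  if (parsed.topic !== "stack") return;
  const { type, data } = parsed.data;
  if (type === "stack-error") {
    console.error(`stack ${data.stack_id} failed while ${data.action}: ${data.message}`);
  } else {
    console.log(`stack ${data.stack_id}: ${type}: ${data.message}`);
  }
});
```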
string => { @@ -127,7 +128,7 @@ export const logger = createLogger({ const lineStr = stack[i].trim(); if ( !lineStr.includes("node_modules") && - !lineStr.includes(path.basename(__filename)) + !lineStr.includes(path.basename(import.meta.url)) ) { const matches = lineStr.match(/\(?(.+):(\d+):(\d+)\)?$/); if (matches) { diff --git a/src/core/utils/package-json.ts b/src/core/utils/package-json.ts index 20958a4c..86f9287f 100644 --- a/src/core/utils/package-json.ts +++ b/src/core/utils/package-json.ts @@ -1,4 +1,4 @@ -import packageJson from "~/../package.json"; +import packageJson from "../../../package.json"; const { version, description, license, dependencies, devDependencies } = packageJson; diff --git a/src/core/utils/response-handler.ts b/src/core/utils/response-handler.ts deleted file mode 100644 index 00d5b464..00000000 --- a/src/core/utils/response-handler.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { logger } from "~/core/utils/logger"; -import type { set } from "~/typings/elysiajs"; - -export const responseHandler = { - error( - set: set, - error: string, - response_message: string, - error_code?: number, - ) { - set.status = error_code || 500; - logger.error(`${response_message} - ${error}`); - return { success: false, message: response_message, error: String(error) }; - }, - - ok(set: set, response_message: string) { - set.status = 200; - logger.debug(response_message); - return { success: true, message: response_message }; - }, - - simple_error(set: set, response_message: string, status_code?: number) { - set.status = status_code || 502; - logger.warn(response_message); - return { success: false, message: response_message }; - }, - - reject( - set: set, - reject: CallableFunction, - response_message: string, - error?: string, - ) { - set.status = 501; - if (error) { - logger.error(`${response_message} - ${error}`); - } else { - logger.error(response_message); - } - return reject(new Error(response_message)); - }, -}; diff --git a/src/core/utils/swagger-readme.ts b/src/core/utils/swagger-readme.ts deleted file mode 100644 index c1457c68..00000000 --- a/src/core/utils/swagger-readme.ts +++ /dev/null @@ -1,66 +0,0 @@ -export const swaggerReadme: string = ` -[Download API type sheet](/server.d.ts) - -![Docker](https://img.shields.io/badge/Docker-2CA5E0?style=flat&logo=docker&logoColor=white) -![TypeScript](https://img.shields.io/badge/TypeScript-3178C6?style=flat&logo=typescript&logoColor=white) - -Docker infrastructure management API with real-time monitoring and orchestration capabilities. - -## Key Features - -- **Stack Orchestration** - Deploy/update Docker stacks (compose v3+) with custom configurations -- **Container Monitoring** - Real-time metrics (CPU/RAM/status) across multiple Docker hosts -- **Centralized Logging** - Structured log management with retention policies and filtering -- **Host Management** - Multi-host configuration with connection health checks -- **Plugin System** - Extensible architecture for custom monitoring integrations - -## Installation & Setup - -**Prerequisites**: -- Node.js 18+ -- Docker Engine 23+ -- Bun runtime - -\`\`\`bash -# Clone repo -git clone https://github.com/Its4Nik/DockStatAPI.git -cd DockStatAPI -# Install dependencies -bun install - -# Start development server -bun run dev -\`\`\` - -## Configuration - -**Environment Variables**: -\`\`\`ini -PAD_NEW_LINES=true -NODE_ENV=production -LOG_LEVEL=info -\`\`\` - -## Security - -1. Always use HTTPS in production -2. Rotate API keys regularly -3. 
Restrict host connections to trusted networks -4. Enable Docker Engine TLS authentication - -## Contributing - -1. Fork repository -2. Create feature branch (\`feat/my-feature\`) -3. Submit PR with detailed description - -**Code Style**: -- TypeScript strict mode -- Elysia framework conventions -- Prettier formatting -`; diff --git a/src/handlers/config.ts b/src/handlers/config.ts new file mode 100644 index 00000000..5f713f04 --- /dev/null +++ b/src/handlers/config.ts @@ -0,0 +1,201 @@ +import { existsSync, readdirSync, unlinkSync } from "node:fs"; +import { dbFunctions } from "~/core/database"; +import { backupDir } from "~/core/database/backup"; +import { reloadSchedules } from "~/core/docker/scheduler"; +import { pluginManager } from "~/core/plugins/plugin-manager"; +import { logger } from "~/core/utils/logger"; +import { + authorEmail, + authorName, + authorWebsite, + contributors, + dependencies, + description, + devDependencies, + license, + version, +} from "~/core/utils/package-json"; +import type { config } from "~/typings/database"; +import type { DockerHost } from "~/typings/docker"; +import type { PluginInfo } from "~/typings/plugin"; + +class apiHandler { + getConfig(): config { + try { + const data = dbFunctions.getConfig() as config[]; + const distinct = data[0]; + + logger.debug("Fetched backend config"); + return distinct; + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + async updateConfig(fetching_interval: number, keep_data_for: number) { + try { + logger.debug( + `Updated config: fetching_interval: ${fetching_interval} - keep_data_for: ${keep_data_for}`, + ); + dbFunctions.updateConfig(fetching_interval, keep_data_for); + await reloadSchedules(); + return "Updated DockStatAPI config"; + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + getPlugins(): PluginInfo[] { + try { + logger.debug("Gathering plugins"); + return pluginManager.getPlugins(); + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + getPackage() { + try { + logger.debug("Fetching package.json"); + const data: { + version: string; + description: string; + license: string; + authorName: string; + authorEmail: string; + authorWebsite: string; + contributors: string[]; + dependencies: Record; + devDependencies: Record; + } = { + version: version, + description: description, + license: license, + authorName: authorName, + authorEmail: authorEmail, + authorWebsite: authorWebsite, + contributors: contributors, + dependencies: dependencies, + devDependencies: devDependencies, + }; + + logger.debug( + `Received: ${JSON.stringify(data).length} chars in package.json`, + ); + + if (JSON.stringify(data).length <= 10) { + throw new Error("Failed to read package.json"); + } + + return data; + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + async createbackup(): Promise { + try { + const backupFilename = await dbFunctions.backupDatabase(); + return backupFilename; + } catch (error) { + const errMsg = error instanceof Error ? 
error.message : String(error); + throw new Error(errMsg); + } + } + + async listBackups() { + try { + const backupFiles = readdirSync(backupDir); + + const filteredFiles = backupFiles.filter((file: string) => { + return !( + file.startsWith(".") || + file.endsWith(".db") || + file.endsWith(".db-shm") || + file.endsWith(".db-wal") + ); + }); + + return filteredFiles; + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + async downloadbackup(downloadFile?: string) { + try { + const filename: string = downloadFile || dbFunctions.findLatestBackup(); + const filePath = `${backupDir}/${filename}`; + + if (!existsSync(filePath)) { + throw new Error("Backup file not found"); + } + + return Bun.file(filePath); + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + async restoreBackup(file: File) { + try { + if (!file) { + throw new Error("No file uploaded"); + } + + if (!(file.name || "").endsWith(".db.bak")) { + throw new Error("Invalid file type. Expected .db.bak"); + } + + const tempPath = `${backupDir}/upload_${Date.now()}.db.bak`; + const fileBuffer = await file.arrayBuffer(); + + await Bun.write(tempPath, fileBuffer); + dbFunctions.restoreDatabase(tempPath); + unlinkSync(tempPath); + + return "Database restored successfully"; + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + async addHost(host: DockerHost) { + try { + dbFunctions.addDockerHost(host); + return `Added docker host (${host.name} - ${host.hostAddress})`; + } catch (error: unknown) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + async updateHost(host: DockerHost) { + try { + dbFunctions.updateDockerHost(host); + return `Updated docker host (${host.id})`; + } catch (error) { + const errMsg = error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + } + + async removeHost(id: number) { + try { + dbFunctions.deleteDockerHost(id); + return `Deleted docker host (${id})`; + } catch (error) { + const errMsg = error instanceof Error ? 
error.message : String(error); + throw new Error(errMsg); + } + } +} + +export const ApiHandler = new apiHandler(); diff --git a/src/handlers/database.ts b/src/handlers/database.ts new file mode 100644 index 00000000..d6bd0c49 --- /dev/null +++ b/src/handlers/database.ts @@ -0,0 +1,13 @@ +import { dbFunctions } from "~/core/database"; + +class databaseHandler { + async getContainers() { + return dbFunctions.getContainerStats(); + } + + async getHosts() { + return dbFunctions.getHostStats(); + } +} + +export const DatabaseHandler = new databaseHandler(); diff --git a/src/handlers/docker.ts b/src/handlers/docker.ts new file mode 100644 index 00000000..6e6a5411 --- /dev/null +++ b/src/handlers/docker.ts @@ -0,0 +1,155 @@ +import type Docker from "dockerode"; +import { dbFunctions } from "~/core/database"; +import { getDockerClient } from "~/core/docker/client"; +import { logger } from "~/core/utils/logger"; +import type { ContainerInfo, DockerHost, HostStats } from "~/typings/docker"; +import type { DockerInfo } from "~/typings/dockerode"; + +class basicDockerHandler { + async getContainers(): Promise<ContainerInfo[]> { + try { + const hosts = dbFunctions.getDockerHosts() as DockerHost[]; + const containers: ContainerInfo[] = []; + + await Promise.all( + hosts.map(async (host) => { + try { + const docker = getDockerClient(host); + try { + await docker.ping(); + } catch (pingError) { + throw new Error(pingError as string); + } + + const hostContainers = await docker.listContainers({ all: true }); + + await Promise.all( + hostContainers.map(async (containerInfo) => { + try { + const container = docker.getContainer(containerInfo.Id); + const stats = await new Promise<Docker.ContainerStats>( + (resolve, reject) => { + container.stats({ stream: false }, (error, stats) => { + if (error) { + return reject(new Error(error as string)); + } + if (!stats) { + return reject(new Error("No stats available")); + } + resolve(stats); + }); + }, + ); + + containers.push({ + id: containerInfo.Id, + hostId: host.id, + name: containerInfo.Names[0].replace(/^\//, ""), + image: containerInfo.Image, + status: containerInfo.Status, + state: containerInfo.State, + cpuUsage: stats.cpu_stats.system_cpu_usage, + memoryUsage: stats.memory_stats.usage, + stats: stats, + info: containerInfo, + }); + } catch (containerError) { + logger.error( + "Error fetching container stats,", + containerError, + ); + } + }), + ); + logger.debug(`Fetched stats for ${host.name}`); + } catch (error) { + const errMsg = + error instanceof Error ? error.message : String(error); + throw new Error(errMsg); + } + }), + ); + + logger.debug("Fetched all containers across all hosts"); + return containers; + } catch (error) { + const errMsg = error instanceof Error ?
error.message : String(error); + throw new Error(errMsg); + } + } + + async getHostStats() { + //if (true) { + try { + const hosts = dbFunctions.getDockerHosts() as DockerHost[]; + + const stats: HostStats[] = []; + + for (const host of hosts) { + const docker = getDockerClient(host); + const info: DockerInfo = await docker.info(); + + const config: HostStats = { + hostId: host.id as number, + hostName: host.name, + dockerVersion: info.ServerVersion, + apiVersion: info.Driver, + os: info.OperatingSystem, + architecture: info.Architecture, + totalMemory: info.MemTotal, + totalCPU: info.NCPU, + labels: info.Labels, + images: info.Images, + containers: info.Containers, + containersPaused: info.ContainersPaused, + containersRunning: info.ContainersRunning, + containersStopped: info.ContainersStopped, + }; + + stats.push(config); + } + + logger.debug("Fetched all hosts"); + return stats; + } catch (error) { + throw new Error(error as string); + } + //} + + //try { + // const hosts = dbFunctions.getDockerHosts() as DockerHost[]; + // + // const host = findObjectByKey(hosts, "id", Number(id)); + // if (!host) { + // throw new Error(`Host (${id}) not found`); + // } + // + // const docker = getDockerClient(host); + // const info: DockerInfo = await docker.info(); + // + // const config: HostStats = { + // hostId: host.id as number, + // hostName: host.name, + // dockerVersion: info.ServerVersion, + // apiVersion: info.Driver, + // os: info.OperatingSystem, + // architecture: info.Architecture, + // totalMemory: info.MemTotal, + // totalCPU: info.NCPU, + // labels: info.Labels, + // images: info.Images, + // containers: info.Containers, + // containersPaused: info.ContainersPaused, + // containersRunning: info.ContainersRunning, + // containersStopped: info.ContainersStopped, + // }; + // + // logger.debug(`Fetched config for ${host.name}`); + // return config; + //} catch (error) { + // throw new Error(`Failed to retrieve host config: ${error}`); + //} + } +} + +export const BasicDockerHandler = new basicDockerHandler(); diff --git a/src/handlers/index.ts b/src/handlers/index.ts new file mode 100644 index 00000000..982f142b --- /dev/null +++ b/src/handlers/index.ts @@ -0,0 +1,23 @@ +import { ApiHandler } from "./config"; +import { DatabaseHandler } from "./database"; +import { BasicDockerHandler } from "./docker"; +import { LogHandler } from "./logs"; +import { Starter } from "./modules/starter"; +import { StackHandler } from "./stacks"; +import { StoreHandler } from "./store"; +import { ThemeHandler } from "./themes"; +import { CheckHealth } from "./utils"; + +export const handlers = { + BasicDockerHandler, + ApiHandler, + DatabaseHandler, + StackHandler, + LogHandler, + CheckHealth, + Socket: "ws://localhost:4837/ws", + StoreHandler, + ThemeHandler, +}; + +Starter.startAll(); diff --git a/src/handlers/logs.ts b/src/handlers/logs.ts new file mode 100644 index 00000000..766e60d9 --- /dev/null +++ b/src/handlers/logs.ts @@ -0,0 +1,50 @@ +import { dbFunctions } from "~/core/database"; +import { logger } from "~/core/utils/logger"; + +class logHandler { + async getLogs(level?: string) { + if (!level) { + try { + const logs = dbFunctions.getAllLogs(); + logger.debug("Retrieved all logs"); + return logs; + } catch (error) { + logger.error("Failed to retrieve logs,", error); + throw new Error("Failed to retrieve logs"); + } + } + try { + const logs = dbFunctions.getLogsByLevel(level); + + logger.debug(`Retrieved logs (level: ${level})`); + return logs; + } catch (error) { + logger.error(`Failed to 
retrieve logs: ${error}`); + throw new Error(`Failed to retrieve logs: ${error}`); + } + } + + async deleteLogs(level?: string) { + if (!level) { + try { + dbFunctions.clearAllLogs(); + return { success: true }; + } catch (error) { + logger.error("Could not delete all logs,", error); + throw new Error("Could not delete all logs"); + } + } + + try { + dbFunctions.clearLogsByLevel(level); + + logger.debug(`Cleared all logs with level: ${level}`); + return { success: true }; + } catch (error) { + logger.error("Could not clear logs with level", level, ",", error); + throw new Error("Failed to clear logs"); + } + } +} + +export const LogHandler = new logHandler(); diff --git a/src/handlers/modules/docker-socket.ts b/src/handlers/modules/docker-socket.ts new file mode 100644 index 00000000..1a20c9ce --- /dev/null +++ b/src/handlers/modules/docker-socket.ts @@ -0,0 +1,175 @@ +import { serve, type ServerWebSocket } from "bun"; +import split2 from "split2"; +import { dbFunctions } from "~/core/database"; +import { getDockerClient } from "~/core/docker/client"; +import { + calculateCpuPercent, + calculateMemoryUsage, +} from "~/core/utils/calculations"; +import { logger } from "~/core/utils/logger"; +import type { log_message } from "~/typings/database"; +import type { DockerHost } from "~/typings/docker"; +import type { WSMessage } from "~/typings/websocket"; +import { createLogStream } from "./logs-socket"; + +// Unified WebSocket message with topic for client-side routing +const clients = new Set<ServerWebSocket<unknown>>(); + +/** + * Broadcasts a WSMessage to all connected clients. + */ +export function broadcast(wsMsg: WSMessage) { + const payload = JSON.stringify(wsMsg); + for (const ws of clients) { + if (ws.readyState === 1) { + ws.send(payload); + } + } +} + +/** + * Streams Docker stats for all hosts and broadcasts events. + */ +export async function startDockerStatsBroadcast() { + logger.debug("Starting Docker stats broadcast..."); + + try { + const hosts: DockerHost[] = dbFunctions.getDockerHosts(); + logger.debug(`Retrieved ${hosts.length} Docker host(s)`); + + for (const host of hosts) { + try { + const docker = getDockerClient(host); + await docker.ping(); + + const containers = await docker.listContainers({ all: true }); + logger.debug( + `Host ${host.name} contains ${containers.length} containers`, + ); + + for (const info of containers) { + (async () => { + try { + const statsStream = await docker + .getContainer(info.Id) + .stats({ stream: true }); + const splitter = split2(); + statsStream.pipe(splitter); + + for await (const line of splitter) { + if (!line) continue; + try { + const stats = JSON.parse(line); + const msg: WSMessage = { + topic: "stats", + data: { + id: info.Id, + hostId: host.id, + name: info.Names[0].replace(/^\//, ""), + image: info.Image, + status: info.Status, + state: stats.state || info.State, + cpuUsage: calculateCpuPercent(stats) ?? 0, + memoryUsage: calculateMemoryUsage(stats) ??
0, + }, + }; + broadcast(msg); + } catch (err) { + const errorMsg = (err as Error).message; + const msg: WSMessage = { + topic: "error", + data: { + hostId: host.id, + containerId: info.Id, + error: `Parse error: ${errorMsg}`, + }, + }; + broadcast(msg); + } + } + } catch (err) { + const errorMsg = (err as Error).message; + const msg: WSMessage = { + topic: "error", + data: { + hostId: host.id, + containerId: info.Id, + error: `Stats stream error: ${errorMsg}`, + }, + }; + broadcast(msg); + } + })(); + } + } catch (err) { + const errorMsg = (err as Error).message; + const msg: WSMessage = { + topic: "error", + data: { + hostId: host.id, + error: `Host connection error: ${errorMsg}`, + }, + }; + broadcast(msg); + } + } + } catch (err) { + const errorMsg = (err as Error).message; + const msg: WSMessage = { + topic: "error", + data: { + hostId: 0, + error: `Initialization error: ${errorMsg}`, + }, + }; + broadcast(msg); + } +} + +/** + * Sets up a log stream to forward application logs over WebSocket. + */ +function startLogBroadcast() { + const logStream = createLogStream(); + logStream.on("data", (chunk: log_message) => { + const msg: WSMessage = { + topic: "logs", + data: chunk, + }; + broadcast(msg); + }); +} + +/** + * WebSocket server serving multiple topics over one socket. + */ +export const WSServer = serve({ + port: 4837, + reusePort: true, + fetch(req, server) { + //if (req.url.endsWith("/ws")) { + if (server.upgrade(req)) { + logger.debug("Upgraded!"); + return; + } + //} + return new Response("Expected WebSocket upgrade", { status: 426 }); + }, + websocket: { + open(ws) { + logger.debug("Client connected via WebSocket"); + clients.add(ws); + }, + message() {}, + close(ws, code, reason) { + logger.debug(`Client disconnected (${code}): ${reason}`); + clients.delete(ws); + }, + }, +}); + +// Initialize broadcasts +startDockerStatsBroadcast().catch((err) => { + logger.error("Failed to start Docker stats broadcast:", err); +}); +startLogBroadcast(); diff --git a/src/handlers/modules/live-stacks.ts b/src/handlers/modules/live-stacks.ts new file mode 100644 index 00000000..ab26ccfd --- /dev/null +++ b/src/handlers/modules/live-stacks.ts @@ -0,0 +1,31 @@ +import { PassThrough, type Readable } from "node:stream"; +import { logger } from "~/core/utils/logger"; + +const activeStreams = new Set(); + +export function createStackStream(): Readable { + const stream = new PassThrough({ objectMode: true }); + + activeStreams.add(stream); + logger.info( + `New Stack stream created. Active streams: ${activeStreams.size}`, + ); + + const removeStream = () => { + if (activeStreams.delete(stream)) { + logger.info(`Stack stream closed. Active streams: ${activeStreams.size}`); + if (!stream.destroyed) { + stream.destroy(); + } + } + }; + + stream.on("close", removeStream); + stream.on("end", removeStream); + stream.on("error", (error) => { + logger.error(`Stream error: ${error.message}`); + removeStream(); + }); + + return stream; +} diff --git a/src/handlers/modules/logs-socket.ts b/src/handlers/modules/logs-socket.ts new file mode 100644 index 00000000..77a730e0 --- /dev/null +++ b/src/handlers/modules/logs-socket.ts @@ -0,0 +1,53 @@ +import { PassThrough, type Readable } from "node:stream"; +import { logger } from "~/core/utils/logger"; +import type { log_message } from "~/typings/database"; + +const activeStreams = new Set(); + +export function createLogStream(): Readable { + const stream = new PassThrough({ objectMode: true }); + + activeStreams.add(stream); + logger.info(`New Logs stream created. 
Active streams: ${activeStreams.size}`); + + const removeStream = () => { + if (activeStreams.delete(stream)) { + logger.info(`Logs stream closed. Active streams: ${activeStreams.size}`); + if (!stream.destroyed) { + stream.destroy(); + } + } + }; + + stream.on("close", removeStream); + stream.on("end", removeStream); + stream.on("error", (error) => { + logger.error(`Stream error: ${error.message}`); + removeStream(); + }); + + return stream; +} + +export function logToClients(data: log_message): void { + for (const stream of activeStreams) { + try { + if (stream.writable && !stream.destroyed) { + const success = stream.write(data); + if (!success) { + logger.warn("Log stream buffer full, data may be delayed"); + } + } + } catch (error) { + logger.error( + `Failed to write to log stream: ${ + error instanceof Error ? error.message : String(error) + }`, + ); + activeStreams.delete(stream); + if (!stream.destroyed) { + stream.destroy(); + } + } + } +} diff --git a/src/handlers/modules/starter.ts b/src/handlers/modules/starter.ts new file mode 100644 index 00000000..a9f47131 --- /dev/null +++ b/src/handlers/modules/starter.ts @@ -0,0 +1,35 @@ +import { setSchedules } from "~/core/docker/scheduler"; +import { pluginManager } from "~/core/plugins/plugin-manager"; +import { startDockerStatsBroadcast } from "./docker-socket"; + +function banner(msg: string) { + const fenced = `= ${msg} =`; + const lines = msg.length; + console.info("=".repeat(fenced.length)); + console.info(fenced); + console.info("=".repeat(fenced.length)); +} + +class starter { + public started = false; + async startAll() { + try { + if (!this.started) { + banner("Setting schedules"); + await setSchedules(); + banner("Starting WebSocket server"); + await startDockerStatsBroadcast(); + banner("Importing plugins"); + await pluginManager.start(); + banner("Started DockStatAPI successfully"); + this.started = true; + return; + } + console.info("Already started"); + } catch (error) { + throw new Error(`Could not start DockStatAPI: ${error}`); + } + } +} + +export const Starter = new starter(); diff --git a/src/handlers/sockets.ts b/src/handlers/sockets.ts new file mode 100644 index 00000000..ff463c6c --- /dev/null +++ b/src/handlers/sockets.ts @@ -0,0 +1,9 @@ +import { WSServer } from "./modules/docker-socket"; +import { createStackStream } from "./modules/live-stacks"; +import { createLogStream } from "./modules/logs-socket"; + +export const Sockets = { + dockerStatsStream: `ws://${WSServer.hostname}:${WSServer.port}/ws`, + createLogStream, + createStackStream, +}; diff --git a/src/handlers/stacks.ts b/src/handlers/stacks.ts new file mode 100644 index 00000000..cabf5836 --- /dev/null +++ b/src/handlers/stacks.ts @@ -0,0 +1,186 @@ +import { dbFunctions } from "~/core/database"; +import { + deployStack, + getAllStacksStatus, + getStackStatus, + pullStackImages, + removeStack, + restartStack, + startStack, + stopStack, +} from "~/core/stacks/controller"; +import { logger } from "~/core/utils/logger"; +import type { stacks_config } from "~/typings/database"; + +class stackHandler { + /** + * Deploys a Stack on the DockStatAPI + * + * @example + * ```ts + * deploy({ + * id: 0, + * name: "example", + * version: 1, + * custom: false, + * source: "https://github.com/Its4Nik/DockStacks", + * compose_spec: '{services: {web: {image: "nginx:latest", ports: ["80:80"]}}}' + * }) + * ``` + * @param config + * @returns "Stack ${config.name} deployed successfully" + */ + async deploy(config: stacks_config) { + try { + await deployStack(config); +
logger.info(`Deployed Stack (${config.name})`); + return `Stack ${config.name} deployed successfully`; + } catch (error) { + const errorMsg = error instanceof Error ? error.message : String(error); + + return `${errorMsg}, Error deploying stack, please check the server logs for more information`; + } + } + /** + * Runs `docker compose -f "./stacks/[StackID]-[StackName]" up -d` + * @param stackId + * @returns `Stack ${stackId} started successfully` + */ + async start(stackId: number) { + try { + if (!stackId) { + throw new Error("Stack ID needed"); + } + await startStack(stackId); + logger.info(`Started Stack (${stackId})`); + return `Stack ${stackId} started successfully`; + } catch (error) { + const errorMsg = error instanceof Error ? error.message : String(error); + + return `${errorMsg}, Error starting stack`; + } + } + + /** + * Runs `docker compose -f "./stacks/[StackID]-[StackName]" down` + * @param stackId + * @returns `Stack ${stackId} stopped successfully` + */ + async stop(stackId: number) { + try { + if (!stackId) { + throw new Error("Stack needed"); + } + await stopStack(stackId); + logger.info(`Stopped Stack (${stackId})`); + return `Stack ${stackId} stopped successfully`; + } catch (error) { + const errorMsg = error instanceof Error ? error.message : String(error); + + return `${errorMsg}, Error stopping stack`; + } + } + + /** + * Runs `docker compose -f "./stacks/[StackID]-[StackName]" restart` + * @param stackId + * @returns `Stack ${stackId} restarted successfully` + */ + async restart(stackId: number) { + try { + if (!stackId) { + throw new Error("StackID needed"); + } + await restartStack(stackId); + logger.info(`Restarted Stack (${stackId})`); + return `Stack ${stackId} restarted successfully`; + } catch (error) { + const errorMsg = error instanceof Error ? error.message : String(error); + + return `${errorMsg}, Error restarting stack`; + } + } + + /** + * Runs `docker compose -f "./stacks/[StackID]-[StackName]" pull` + * @param stackId + * @returns `Images for stack ${stackId} pulled successfully` + */ + async pullImages(stackId: number) { + try { + if (!stackId) { + throw new Error("StackID needed"); + } + await pullStackImages(stackId); + logger.info(`Pulled Stack images (${stackId})`); + return `Images for stack ${stackId} pulled successfully`; + } catch (error) { + const errorMsg = error instanceof Error ? error.message : String(error); + + return `${errorMsg}, Error pulling images`; + } + } + + /** + * Runs `docker compose -f "./stacks/[StackID]-[StackName]" ps` with custom formatting + * @param stackId + * @returns The status of the given stack, or of all stacks when no stackId is provided + */ + async getStatus(stackId?: number) { + if (stackId) { + const status = await getStackStatus(stackId); + logger.debug( + `Retrieved status for stackId=${stackId}: ${JSON.stringify(status)}`, + ); + return status; + } + + logger.debug("Fetching status for all stacks"); + const status = await getAllStacksStatus(); + logger.debug(`Retrieved status for all stacks: ${JSON.stringify(status)}`); + + return status; + } + + /** + * @example + * ```json + * [{ + * id: 1; + * name: "example"; + * version: 1; + * custom: false; + * source: "https://github.com/Its4Nik/DockStacks"; + * compose_spec: '{services: {web: {image: "nginx:latest", ports: ["80:80"]}}}'; + * }] + * ``` + */ + listStacks(): stacks_config[] { + try { + const stacks = dbFunctions.getStacks(); + logger.info("Fetched Stacks"); + return stacks; + } catch (error) { + const errorMsg = error instanceof Error ?
error.message : String(error); + throw new Error(`${errorMsg}, Error getting stacks`); + } + } + + /** + * Deletes a whole Stack and its local folder; this action is irreversible + * @param stackId + * @returns `Stack ${stackId} deleted successfully` + */ + async deleteStack(stackId: number) { + try { + await removeStack(stackId); + logger.info(`Deleted Stack ${stackId}`); + return `Stack ${stackId} deleted successfully`; + } catch (error) { + const errorMsg = error instanceof Error ? error.message : String(error); + return `${errorMsg}, Error deleting stack`; + } + } +} + +export const StackHandler = new stackHandler(); diff --git a/src/handlers/store.ts b/src/handlers/store.ts new file mode 100644 index 00000000..4cb83c45 --- /dev/null +++ b/src/handlers/store.ts @@ -0,0 +1,51 @@ +import { + addStoreRepo, + deleteStoreRepo, + getStoreRepos, +} from "~/core/database/stores"; + +class store { + /** + * + * @returns an Array of all Repos added to the Database + * @example + * ```json + * [ + * { + * slug: "DockStacks", + * base: "https://raw.githubusercontent.com/Its4Nik/DockStacks/refs/heads/main/Index.json" + * } + * ] + * ``` + */ + getRepos(): { + slug: string; + base: string; + }[] { + return getStoreRepos(); + } + + /** + * + * @param slug - "Nickname" for this repo + * @param base - The raw URL of where the [ROOT].json is located + * @example + * ```ts + * addRepo("DockStacks", "https://raw.githubusercontent.com/Its4Nik/DockStacks/refs/heads/main/Index.json") + * ``` + */ + addRepo(slug: string, base: string) { + return addStoreRepo(slug, base); + } + + /** + * Deletes a Repo from the Database + * @param slug + * @returns The result of deleting the repo from the Database + */ + deleteRepo(slug: string) { + return deleteStoreRepo(slug); + } +} + +export const StoreHandler = new store(); diff --git a/src/handlers/themes.ts b/src/handlers/themes.ts new file mode 100644 index 00000000..8bc1f98d --- /dev/null +++ b/src/handlers/themes.ts @@ -0,0 +1,42 @@ +import { dbFunctions } from "~/core/database"; +import type { Theme } from "~/typings/database"; + +class themeHandler { + getThemes(): Theme[] { + return dbFunctions.getThemes(); + } + addTheme(theme: Theme) { + try { + const rawVars = + typeof theme.vars === "string" ?
JSON.parse(theme.vars) : theme.vars; + + const cssVars = Object.entries(rawVars) + .map(([key, value]) => `--${key}: ${value};`) + .join(" "); + + const varsString = `.root, #root, #docs-root { ${cssVars} }`; + + return dbFunctions.addTheme({ + ...theme, + vars: varsString, + }); + } catch (error) { + throw new Error( + `Could not save theme ${JSON.stringify(theme)}, error: ${error}`, + ); + } + } + deleteTheme(name: string) { + try { + dbFunctions.deleteTheme(name); + return "Deleted theme"; + } catch (error) { + throw new Error(`Could not delete theme ${name}, error: ${error}`); + } + } + getTheme(name: string): Theme { + return dbFunctions.getSpecificTheme(name); + } +} + +export const ThemeHandler = new themeHandler(); diff --git a/src/handlers/utils.ts b/src/handlers/utils.ts new file mode 100644 index 00000000..80b32d0b --- /dev/null +++ b/src/handlers/utils.ts @@ -0,0 +1,6 @@ +import { logger } from "~/core/utils/logger"; + +export async function CheckHealth(): Promise<"healthy"> { + logger.info("Checking health"); + return "healthy"; +} diff --git a/src/index.ts b/src/index.ts index c8f62b60..ade06411 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,189 +1,3 @@ -import cors from "@elysiajs/cors"; -import { serverTiming } from "@elysiajs/server-timing"; -import staticPlugin from "@elysiajs/static"; -import { swagger } from "@elysiajs/swagger"; -import { Elysia } from "elysia"; -import { dts } from "elysia-remote-dts"; -import { Logestic } from "logestic"; -import { dbFunctions } from "~/core/database"; -import { monitorDockerEvents } from "~/core/docker/monitor"; -import { setSchedules } from "~/core/docker/scheduler"; -import { loadPlugins } from "~/core/plugins/loader"; -import { logger } from "~/core/utils/logger"; -import { - authorWebsite, - contributors, - license, -} from "~/core/utils/package-json"; -import { swaggerReadme } from "~/core/utils/swagger-readme"; -import { validateApiKey } from "~/middleware/auth"; -import { apiConfigRoutes } from "~/routes/api-config"; -import { dockerRoutes } from "~/routes/docker-manager"; -import { dockerStatsRoutes } from "~/routes/docker-stats"; -import { dockerWebsocketRoutes } from "~/routes/docker-websocket"; -import { liveLogs } from "~/routes/live-logs"; -import { backendLogs } from "~/routes/logs"; -import { stackRoutes } from "~/routes/stacks"; -import type { config } from "~/typings/database"; -import { checkStacks } from "./core/stacks/checker"; -import { databaseStats } from "./routes/database-stats"; -import { liveStacks } from "./routes/live-stacks"; +import { handlers } from "./handlers"; -console.log(""); - -logger.info("Starting DockStatAPI"); - -const DockStatAPI = new Elysia({ - normalize: true, - precompile: true, -}) - .use(cors()) - //.use(Logestic.preset("fancy")) - .use(staticPlugin()) - .use(serverTiming()) - .use( - dts("./src/index.ts", { - tsconfig: "./tsconfig.json", - compilerOptions: { - strict: true, - }, - }), - ) - .use( - swagger({ - documentation: { - info: { - title: "DockStatAPI", - version: "3.0.0", - description: swaggerReadme, - }, - components: { - securitySchemes: { - apiKeyAuth: { - type: "apiKey" as const, - name: "x-api-key", - in: "header", - description: "API key for authentication", - }, - }, - }, - security: [ - { - apiKeyAuth: [], - }, - ], - tags: [ - { - name: "Statistics", - description: - "All endpoints for fetching statistics of hosts / containers", - }, - { - name: "Management", - description: "Various endpoints for managing DockStatAPI", - }, - { - name: "Stacks", - description:
"DockStat's Stack functionality", - }, - { - name: "Utils", - description: "Various utilities which might be useful", - }, - ], - }, - }), - ) - .onBeforeHandle(async (context) => { - const { path, request, set } = context; - - if ( - path === "/health" || - path.startsWith("/swagger") || - path.startsWith("/public") - ) { - logger.info(`Requested unguarded route: ${path}`); - return; - } - - const validation = await validateApiKey(request, set); - - if (!validation) { - throw new Error("Error while checking API key"); - } - - if (!validation.success) { - set.status = 400; - - throw new Error(validation.error); - } - }) - .onError(({ code, set, path, error }) => { - if (code === "NOT_FOUND") { - logger.warn(`Unknown route (${path}), showing error page!`); - set.status = 404; - set.headers["Content-Type"] = "text/html"; - return Bun.file("public/404.html"); - } - - logger.error(`Internal server error at ${path}: ${error}`); - set.status = 500; - set.headers["Content-Type"] = "text/html"; - return { success: false, message: error }; - }) - .use(dockerRoutes) - .use(dockerStatsRoutes) - .use(backendLogs) - .use(dockerWebsocketRoutes) - .use(apiConfigRoutes) - .use(stackRoutes) - .use(liveLogs) - .use(liveStacks) - .use(databaseStats) - .get("/health", () => ({ status: "healthy" }), { - tags: ["Utils"], - response: { message: "healthy" }, - }) - .listen(process.env.DOCKSTATAPI_PORT || 3000, ({ hostname, port }) => { - console.log("----- [ ############## ]"); - logger.info(`DockStatAPI is running at http://${hostname}:${port}`); - logger.info( - `Swagger API Documentation available at http://${hostname}:${port}/swagger`, - ); - logger.info(`License: ${license}`); - logger.info(`Author: ${authorWebsite}`); - logger.info(`Contributors: ${contributors}`); - }); - -const initializeServer = async () => { - try { - await loadPlugins("./src/plugins"); - await setSchedules(); - - monitorDockerEvents().catch((error) => { - logger.error(`Monitoring Error: ${error}`); - }); - - const configData = dbFunctions.getConfig() as config[]; - const apiKey = configData[0].api_key; - - if (apiKey === "changeme") { - logger.warn( - "Default API Key of 'changeme' detected. 
Please change your API Key via the `/config/update` route!", - ); - } - - await checkStacks(); - - logger.info("Started server"); - console.log("----- [ ############## ]"); - } catch (error) { - logger.error("Error while starting server:", error); - process.exit(1); - } -}; - -await initializeServer(); - -export type App = typeof DockStatAPI; -export { DockStatAPI }; +export default handlers; diff --git a/src/middleware/auth.ts b/src/middleware/auth.ts deleted file mode 100644 index 3a730229..00000000 --- a/src/middleware/auth.ts +++ /dev/null @@ -1,89 +0,0 @@ -import { dbFunctions } from "~/core/database"; -import { logger } from "~/core/utils/logger"; - -import type { config } from "~/typings/database"; -import type { set } from "~/typings/elysiajs"; - -export async function hashApiKey(apiKey: string): Promise { - logger.debug("Hashing API key"); - try { - logger.debug("API key hashed successfully"); - return await Bun.password.hash(apiKey); - } catch (error) { - logger.error("Error hashing API key", error); - throw new Error("Failed to hash API key"); - } -} - -async function validateApiKeyHash( - providedKey: string, - storedHash: string, -): Promise { - logger.debug("Validating API key hash"); - try { - const isValid = await Bun.password.verify(providedKey, storedHash); - logger.debug(`API key validation result: ${isValid}`); - return isValid; - } catch (error) { - logger.error("Error validating API key hash", error); - return false; - } -} - -async function getApiKeyFromDb( - apiKey: string, -): Promise<{ hash: string } | null> { - const dbApiKey = (dbFunctions.getConfig() as config[])[0].api_key; - logger.debug(`Querying database for API key: ${apiKey}`); - return Promise.resolve({ - hash: dbApiKey, - }); -} - -export async function validateApiKey(request: Request, set: set) { - const apiKey = request.headers.get("x-api-key"); - - if (process.env.NODE_ENV !== "production") { - logger.warn( - "API Key validation deactivated, since running in development mode", - ); - return { success: true, apiKey }; - } - - if (!apiKey) { - logger.error(`API key missing from request ${request.url}`); - set.status = 401; - return { error: "API key required", success: false, apiKey }; - } - - logger.debug("API key validation initiated"); - - try { - const dbRecord = await getApiKeyFromDb(apiKey); - - if (!dbRecord) { - logger.error("API key not found in database"); - set.status = 401; - return { success: false, error: "Invalid API key" }; - } - - if (dbRecord.hash === "changeme") { - logger.error("Please change your API Key!"); - return { success: true, apiKey }; - } - - const isValid = await validateApiKeyHash(apiKey, dbRecord.hash); - - if (!isValid) { - logger.error("Invalid API key provided"); - set.status = 401; - return { success: false, error: "Invalid API key", apiKey }; - } - - logger.info("Valid API key used"); - } catch (error) { - logger.error("Error during API key validation", error); - set.status = 500; - return { success: false, error: "Internal server error", apiKey }; - } -} diff --git a/src/routes/api-config.ts b/src/routes/api-config.ts deleted file mode 100644 index c049a044..00000000 --- a/src/routes/api-config.ts +++ /dev/null @@ -1,586 +0,0 @@ -import { existsSync, readdirSync, unlinkSync } from "node:fs"; -import { Elysia, t } from "elysia"; -import { dbFunctions } from "~/core/database"; -import { backupDir } from "~/core/database/backup"; -import { pluginManager } from "~/core/plugins/plugin-manager"; -import { logger } from "~/core/utils/logger"; -import { - authorEmail, - 
authorName, - authorWebsite, - contributors, - dependencies, - description, - devDependencies, - license, - version, -} from "~/core/utils/package-json"; -import { responseHandler } from "~/core/utils/response-handler"; -import { hashApiKey } from "~/middleware/auth"; -import type { config } from "~/typings/database"; - -export const apiConfigRoutes = new Elysia({ prefix: "/config" }) - .get( - "", - async ({ set }) => { - try { - const data = dbFunctions.getConfig() as config[]; - const distinct = data[0]; - set.status = 200; - - logger.debug("Fetched backend config"); - return distinct; - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Returns current API configuration including data retention policies and security settings", - responses: { - "200": { - description: "Successfully retrieved configuration", - content: { - "application/json": { - schema: { - type: "object", - properties: { - fetching_interval: { - type: "number", - example: 5, - }, - keep_data_for: { - type: "number", - example: 7, - }, - api_key: { - type: "string", - example: "hashed_api_key", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error retrieving configuration", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error getting the DockStatAPI config", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .get( - "/plugins", - () => { - try { - return pluginManager.getPlugins(); - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Lists all active plugins with their registration details and status", - responses: { - "200": { - description: "Successfully retrieved plugins", - content: { - "application/json": { - schema: { - type: "array", - items: { - type: "object", - properties: { - name: { - type: "string", - example: "example-plugin", - }, - version: { - type: "string", - example: "1.0.0", - }, - status: { - type: "string", - example: "active", - }, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error retrieving plugins", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error getting all registered plugins", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .post( - "/update", - async ({ set, body }) => { - try { - const { fetching_interval, keep_data_for, api_key } = body; - - dbFunctions.updateConfig( - fetching_interval, - keep_data_for, - await hashApiKey(api_key), - ); - return responseHandler.ok(set, "Updated DockStatAPI config"); - } catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Modifies core API settings including data collection intervals, retention periods, and security credentials", - responses: { - "200": { - description: "Successfully updated configuration", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Updated DockStatAPI config", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error updating configuration", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error updating the DockStatAPI config", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - fetching_interval: t.Number(), - keep_data_for: t.Number(), - api_key: t.String(), - }), - }, - ) - .get( - "/package", - async () => { - try { - logger.debug("Fetching package.json"); - const data = { - version: version, - description: description, - license: license, - authorName: authorName, - authorEmail: authorEmail, - authorWebsite: authorWebsite, - contributors: contributors, - dependencies: dependencies, - devDependencies: devDependencies, - }; - - logger.debug( - `Received: ${JSON.stringify(data).length} chars in package.json`, - ); - - if (JSON.stringify(data).length <= 10) { - throw new Error("Failed to read package.json"); - } - - return data; - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Displays package metadata including dependencies, contributors, and licensing information", - responses: { - "200": { - description: "Successfully retrieved package information", - content: { - "application/json": { - schema: { - type: "object", - properties: { - version: { - type: "string", - example: "3.0.0", - }, - description: { - type: "string", - example: - "DockStatAPI is an API backend featuring plugins and more for DockStat", - }, - license: { - type: "string", - example: "CC BY-NC 4.0", - }, - authorName: { - type: "string", - example: "ItsNik", - }, - authorEmail: { - type: "string", - example: "info@itsnik.de", - }, - authorWebsite: { - type: "string", - example: "https://github.com/Its4Nik", - }, - contributors: { - type: "array", - items: { - type: "string", - }, - example: [], - }, - dependencies: { - type: "object", - example: { - "@elysiajs/server-timing": "^1.2.1", - "@elysiajs/static": "^1.2.0", - }, - }, - devDependencies: { - type: "object", - example: { - "@biomejs/biome": "1.9.4", - "@types/dockerode": "^3.3.38", - }, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error retrieving package information", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error while reading package.json", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .post( - "/backup", - async ({ set }) => { - try { - const backupFilename = await dbFunctions.backupDatabase(); - return responseHandler.ok(set, backupFilename); - } catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: "Backs up the internal database", - responses: { - "200": { - description: "Successfully created backup", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "backup_2024-03-20_12-00-00.db.bak", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error creating backup", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error backing up", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .get( - "/backup", - async () => { - try { - const backupFiles = readdirSync(backupDir); - - const filteredFiles = backupFiles.filter((file: string) => { - return !( - file.startsWith(".") || - file.endsWith(".db") || - file.endsWith(".db-shm") || - file.endsWith(".db-wal") - ); - }); - - return filteredFiles; - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: "Lists all available backups", - responses: { - "200": { - description: "Successfully retrieved backup list", - content: { - "application/json": { - schema: { - type: "array", - items: { - type: "string", - }, - example: [ - "backup_2024-03-20_12-00-00.db.bak", - "backup_2024-03-19_12-00-00.db.bak", - ], - }, - }, - }, - }, - "400": { - description: "Error retrieving backup list", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Reading Backup directory", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - - .get( - "/backup/download", - async ({ query, set }) => { - try { - const filename = query.filename || dbFunctions.findLatestBackup(); - const filePath = `${backupDir}/${filename}`; - - if (!existsSync(filePath)) { - throw new Error("Backup file not found"); - } - - set.headers["Content-Type"] = "application/octet-stream"; - set.headers["Content-Disposition"] = - `attachment; filename="${filename}"`; - return Bun.file(filePath); - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Download a specific backup or the latest if no filename is provided", - responses: { - "200": { - description: "Successfully downloaded backup file", - content: { - "application/octet-stream": { - schema: { - type: "string", - format: "binary", - example: "Binary backup file content", - }, - }, - }, - headers: { - "Content-Disposition": { - schema: { - type: "string", - example: - 'attachment; filename="backup_2024-03-20_12-00-00.db.bak"', - }, - }, - }, - }, - "400": { - description: "Error downloading backup", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Backup download failed", - }, - }, - }, - }, - }, - }, - }, - }, - query: t.Object({ - filename: t.Optional(t.String()), - }), - }, - ) - .post( - "/restore", - async ({ body, set }) => { - try { - const { file } = body; - - set.headers["Content-Type"] = "text/html"; - - if (!file) { - throw new Error("No file uploaded"); - } - - if (!file.name.endsWith(".db.bak")) { - throw new Error("Invalid file type. 
Expected .db.bak"); - } - - const tempPath = `${backupDir}/upload_${Date.now()}.db.bak`; - const fileBuffer = await file.arrayBuffer(); - - await Bun.write(tempPath, fileBuffer); - dbFunctions.restoreDatabase(tempPath); - unlinkSync(tempPath); - - return responseHandler.ok(set, "Database restored successfully"); - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }, - { - body: t.Object({ file: t.File() }), - detail: { - tags: ["Management"], - description: "Restore database from uploaded backup file", - responses: { - "200": { - description: "Successfully restored database", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Database restored successfully", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error restoring database", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Database restoration error", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ); diff --git a/src/routes/database-stats.ts b/src/routes/database-stats.ts deleted file mode 100644 index 8a1bd882..00000000 --- a/src/routes/database-stats.ts +++ /dev/null @@ -1,169 +0,0 @@ -import Elysia from "elysia"; -import { dbFunctions } from "~/core/database"; - -export const databaseStats = new Elysia({ prefix: "/db-stats" }) - .get( - "/containers", - async () => { - return dbFunctions.getContainerStats(); - }, - { - detail: { - tags: ["Statistics"], - description: "Shows all stored metrics of containers", - responses: { - "200": { - description: "Successfully fetched Container Stats from the DB", - content: { - "application/json": { - schema: { - type: "array", - items: { - type: "object", - properties: { - id: { - type: "string", - example: - "0c1142d825a4104f45099e8297428cc7ef820319924aa9cf46739cf1c147cdae", - }, - hostId: { - type: "string", - example: "Localhost", - }, - name: { - type: "string", - example: "heimdall", - }, - image: { - type: "string", - example: "linuxserver/heimdall:latest", - }, - status: { - type: "string", - example: "Up About a minute", - }, - state: { - type: "string", - example: "running", - }, - cpu_usage: { - type: "number", - example: 0.00628140703517588, - }, - memory_usage: { - type: "number", - example: 0.2784590652462969, - }, - timestamp: { - type: "string", - example: "2025-06-07 07:01:26", - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .get( - "/hosts", - async () => { - return dbFunctions.getHostStats(); - }, - { - detail: { - tags: ["Statistics"], - description: "Shows all stored metrics of Docker hosts", - responses: { - "200": { - description: "Successfully fetched Host Stats from the DB", - content: { - "application/json": { - schema: { - type: "array", - items: { - type: "object", - properties: { - hostId: { - type: "number", - example: 1, - description: "Unique identifier for the host", - }, - hostName: { - type: "string", - example: "Localhost", - description: "Display name of the host", - }, - dockerVersion: { - type: "string", - example: "28.2.0", - description: "Installed Docker version", - }, - apiVersion: { - type: "string", - example: "overlay2", - description: "Docker API version", - }, - os: { - type: "string", - example: "Arch Linux", - description: "Host operating system", - }, - architecture: { - type: "string", - example: "x86_64", - description: "System architecture", - }, - totalMemory: { - type: "number", - example: 33512706048, 
- description: "Total system memory in bytes", - }, - totalCPU: { - type: "number", - example: 4, - description: "Number of available CPU cores", - }, - labels: { - type: "string", - example: "[]", - description: "JSON string of host labels", - }, - containers: { - type: "number", - example: 3, - description: "Total containers on host", - }, - containersRunning: { - type: "number", - example: 3, - description: "Currently running containers", - }, - containersStopped: { - type: "number", - example: 0, - description: "Stopped containers", - }, - containersPaused: { - type: "number", - example: 0, - description: "Paused containers", - }, - images: { - type: "number", - example: 30, - description: "Available Docker images", - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - ); diff --git a/src/routes/docker-manager.ts b/src/routes/docker-manager.ts deleted file mode 100644 index fcd877e9..00000000 --- a/src/routes/docker-manager.ts +++ /dev/null @@ -1,255 +0,0 @@ -import { Elysia, t } from "elysia"; -import { dbFunctions } from "~/core/database"; -import { logger } from "~/core/utils/logger"; -import { responseHandler } from "~/core/utils/response-handler"; -import type { DockerHost } from "~/typings/docker"; - -export const dockerRoutes = new Elysia({ prefix: "/docker-config" }) - .post( - "/add-host", - async ({ set, body }) => { - try { - dbFunctions.addDockerHost(body as DockerHost); - return responseHandler.ok(set, `Added docker host (${body.name})`); - } catch (error: unknown) { - const errMsg = error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Registers a new Docker host to the monitoring system with connection details", - responses: { - "200": { - description: "Successfully added Docker host", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Added docker host (Localhost)", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error adding Docker host", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error adding docker Host", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - name: t.String(), - hostAddress: t.String(), - secure: t.Boolean(), - }), - }, - ) - - .post( - "/update-host", - async ({ set, body }) => { - try { - set.status = 200; - dbFunctions.updateDockerHost(body); - return responseHandler.ok(set, `Updated docker host (${body.id})`); - } catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Modifies existing Docker host configuration parameters (name, address, security)", - responses: { - "200": { - description: "Successfully updated Docker host", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Updated docker host (1)", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error updating Docker host", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to update host", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - id: t.Number(), - name: t.String(), - hostAddress: t.String(), - secure: t.Boolean(), - }), - }, - ) - - .get( - "/hosts", - async ({ set }) => { - try { - const dockerHosts = dbFunctions.getDockerHosts(); - - logger.debug("Retrieved docker hosts"); - return dockerHosts; - } catch (error) { - const errMsg = error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Lists all configured Docker hosts with their connection settings", - responses: { - "200": { - description: "Successfully retrieved Docker hosts", - content: { - "application/json": { - schema: { - type: "array", - items: { - type: "object", - properties: { - id: { - type: "number", - example: 1, - }, - name: { - type: "string", - example: "Localhost", - }, - hostAddress: { - type: "string", - example: "localhost:2375", - }, - secure: { - type: "boolean", - example: false, - }, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error retrieving Docker hosts", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to retrieve hosts", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - - .delete( - "/hosts/:id", - async ({ set, params }) => { - try { - set.status = 200; - dbFunctions.deleteDockerHost(params.id); - return responseHandler.ok(set, `Deleted docker host (${params.id})`); - } catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Management"], - description: - "Removes Docker host from monitoring system and clears associated data", - responses: { - "200": { - description: "Successfully deleted Docker host", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Deleted docker host (1)", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error deleting Docker host", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to delete host", - }, - }, - }, - }, - }, - }, - }, - }, - params: t.Object({ - id: t.Number(), - }), - }, - ); diff --git a/src/routes/docker-stats.ts b/src/routes/docker-stats.ts deleted file mode 100644 index aa968d2d..00000000 --- a/src/routes/docker-stats.ts +++ /dev/null @@ -1,598 +0,0 @@ -import type Docker from "dockerode"; -import { Elysia } from "elysia"; -import { dbFunctions } from "~/core/database"; -import { getDockerClient } from "~/core/docker/client"; -import { - calculateCpuPercent, - calculateMemoryUsage, -} from "~/core/utils/calculations"; -import { findObjectByKey } from "~/core/utils/helpers"; -import { logger } from "~/core/utils/logger"; -import { responseHandler } from "~/core/utils/response-handler"; -import type { ContainerInfo, DockerHost, HostStats } from "~/typings/docker"; -import type { DockerInfo } from "~/typings/dockerode"; - -export const dockerStatsRoutes = new Elysia({ prefix: "/docker" }) - .get( - "/containers", - async ({ set }) => { - try { - const hosts = dbFunctions.getDockerHosts() as DockerHost[]; - const containers: ContainerInfo[] = []; - - await Promise.all( - hosts.map(async (host) => { - try { - const docker = getDockerClient(host); - try { - await docker.ping(); - } catch (pingError) { - return responseHandler.error( - set, - pingError as string, - "Docker host connection failed", - ); - } - - const hostContainers = await docker.listContainers({ all: true }); - - await Promise.all( - hostContainers.map(async (containerInfo) => { - try { - const container = docker.getContainer(containerInfo.Id); - const stats = await new Promise( - (resolve, reject) => { - container.stats({ stream: false }, (error, stats) => { - if (error) { - return responseHandler.reject( - set, - reject, - "An error occurred", - error, - ); - } - if (!stats) { - return responseHandler.reject( - set, - reject, - "No stats available", - ); - } - resolve(stats); - }); - }, - ); - - containers.push({ - id: containerInfo.Id, - hostId: `${host.id}`, - name: containerInfo.Names[0].replace(/^\//, ""), - image: containerInfo.Image, - status: containerInfo.Status, - state: containerInfo.State, - cpuUsage: calculateCpuPercent(stats), - memoryUsage: calculateMemoryUsage(stats), - stats: stats, - info: containerInfo, - }); - } catch (containerError) { - logger.error( - "Error fetching container stats,", - containerError, - ); - } - }), - ); - logger.debug(`Fetched stats for ${host.name}`); - } catch (error) { - const errMsg = - error instanceof Error ? error.message : String(error); - throw new Error(errMsg); - } - }), - ); - - logger.debug("Fetched all containers across all hosts"); - return { containers }; - } catch (error) { - const errMsg = error instanceof Error ? 
error.message : String(error); - throw new Error(errMsg); - } - }, - { - detail: { - tags: ["Statistics"], - description: - "Collects real-time statistics for all Docker containers across monitored hosts, including CPU and memory utilization", - responses: { - "200": { - description: "Successfully retrieved container statistics", - content: { - "application/json": { - schema: { - type: "object", - properties: { - containers: { - type: "array", - items: { - type: "object", - properties: { - id: { - type: "string", - example: "abc123def456", - }, - hostId: { - type: "string", - example: "1", - }, - name: { - type: "string", - example: "example-container", - }, - image: { - type: "string", - example: "nginx:latest", - }, - status: { - type: "string", - example: "running", - }, - state: { - type: "string", - example: "running", - }, - cpuUsage: { - type: "number", - example: 0.5, - }, - memoryUsage: { - type: "number", - example: 1024, - }, - }, - }, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error retrieving container statistics", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to retrieve containers", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .get( - "/hosts", - async ({ set }) => { - try { - const hosts = dbFunctions.getDockerHosts() as DockerHost[]; - - const stats: HostStats[] = []; - - for (const host of hosts) { - const docker = getDockerClient(host); - const info: DockerInfo = await docker.info(); - - const config: HostStats = { - hostId: host.id as number, - hostName: host.name, - dockerVersion: info.ServerVersion, - apiVersion: info.Driver, - os: info.OperatingSystem, - architecture: info.Architecture, - totalMemory: info.MemTotal, - totalCPU: info.NCPU, - labels: info.Labels, - images: info.Images, - containers: info.Containers, - containersPaused: info.ContainersPaused, - containersRunning: info.ContainersRunning, - containersStopped: info.ContainersStopped, - }; - - stats.push(config); - } - - logger.debug("Fetched all hosts"); - return stats; - } catch (error) { - return responseHandler.error( - set, - error as string, - "Failed to retrieve host config", - ); - } - }, - { - detail: { - tags: ["Statistics"], - description: - "Provides detailed system metrics and Docker runtime information for specified host", - responses: { - "200": { - description: "Successfully retrieved host statistics", - content: { - "application/json": { - schema: { - type: "object", - properties: { - hostId: { - type: "number", - example: 1, - }, - hostName: { - type: "string", - example: "Localhost", - }, - dockerVersion: { - type: "string", - example: "24.0.5", - }, - apiVersion: { - type: "string", - example: "1.41", - }, - os: { - type: "string", - example: "Linux", - }, - architecture: { - type: "string", - example: "x86_64", - }, - totalMemory: { - type: "number", - example: 16777216, - }, - totalCPU: { - type: "number", - example: 4, - }, - labels: { - type: "array", - items: { - type: "string", - }, - example: ["environment=production"], - }, - images: { - type: "number", - example: 10, - }, - containers: { - type: "number", - example: 5, - }, - containersPaused: { - type: "number", - example: 0, - }, - containersRunning: { - type: "number", - example: 4, - }, - containersStopped: { - type: "number", - example: 1, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error retrieving host statistics", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: 
{ - type: "string", - example: "Failed to retrieve host config", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .get( - "/hosts", - async ({ set }) => { - try { - const hosts = dbFunctions.getDockerHosts() as DockerHost[]; - - const stats: HostStats[] = []; - - for (const host of hosts) { - const docker = getDockerClient(host); - const info: DockerInfo = await docker.info(); - - const config: HostStats = { - hostId: host.id as number, - hostName: host.name, - dockerVersion: info.ServerVersion, - apiVersion: info.Driver, - os: info.OperatingSystem, - architecture: info.Architecture, - totalMemory: info.MemTotal, - totalCPU: info.NCPU, - labels: info.Labels, - images: info.Images, - containers: info.Containers, - containersPaused: info.ContainersPaused, - containersRunning: info.ContainersRunning, - containersStopped: info.ContainersStopped, - }; - - stats.push(config); - } - - logger.debug("Fetched stats for all hosts"); - return stats; - } catch (error) { - return responseHandler.error( - set, - error as string, - "Failed to retrieve host config", - ); - } - }, - { - detail: { - tags: ["Statistics"], - description: - "Provides detailed system metrics and Docker runtime information for all hosts", - responses: { - "200": { - description: "Successfully retrieved host statistics", - content: { - "application/json": { - schema: { - type: "object", - properties: { - hostId: { - type: "number", - example: 1, - }, - hostName: { - type: "string", - example: "Localhost", - }, - dockerVersion: { - type: "string", - example: "24.0.5", - }, - apiVersion: { - type: "string", - example: "1.41", - }, - os: { - type: "string", - example: "Linux", - }, - architecture: { - type: "string", - example: "x86_64", - }, - totalMemory: { - type: "number", - example: 16777216, - }, - totalCPU: { - type: "number", - example: 4, - }, - labels: { - type: "array", - items: { - type: "string", - }, - example: ["environment=production"], - }, - images: { - type: "number", - example: 10, - }, - containers: { - type: "number", - example: 5, - }, - containersPaused: { - type: "number", - example: 0, - }, - containersRunning: { - type: "number", - example: 4, - }, - containersStopped: { - type: "number", - example: 1, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error retrieving host statistics", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to retrieve host config", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .get( - "/hosts/:id", - async ({ params, set }) => { - try { - const hosts = dbFunctions.getDockerHosts() as DockerHost[]; - - const host = findObjectByKey(hosts, "id", Number(params.id)); - if (!host) { - return responseHandler.simple_error( - set, - `Host (${params.id}) not found`, - ); - } - - const docker = getDockerClient(host); - const info: DockerInfo = await docker.info(); - - const config: HostStats = { - hostId: host.id as number, - hostName: host.name, - dockerVersion: info.ServerVersion, - apiVersion: info.Driver, - os: info.OperatingSystem, - architecture: info.Architecture, - totalMemory: info.MemTotal, - totalCPU: info.NCPU, - labels: info.Labels, - images: info.Images, - containers: info.Containers, - containersPaused: info.ContainersPaused, - containersRunning: info.ContainersRunning, - containersStopped: info.ContainersStopped, - }; - - logger.debug(`Fetched config for ${host.name}`); - return config; - } catch (error) { - return responseHandler.error( - set, - error as string, - "Failed to 
retrieve host config", - ); - } - }, - { - detail: { - tags: ["Statistics"], - description: - "Provides detailed system metrics and Docker runtime information for specified host", - responses: { - "200": { - description: "Successfully retrieved host statistics", - content: { - "application/json": { - schema: { - type: "object", - properties: { - hostId: { - type: "number", - example: 1, - }, - hostName: { - type: "string", - example: "Localhost", - }, - dockerVersion: { - type: "string", - example: "24.0.5", - }, - apiVersion: { - type: "string", - example: "1.41", - }, - os: { - type: "string", - example: "Linux", - }, - architecture: { - type: "string", - example: "x86_64", - }, - totalMemory: { - type: "number", - example: 16777216, - }, - totalCPU: { - type: "number", - example: 4, - }, - labels: { - type: "array", - items: { - type: "string", - }, - example: ["environment=production"], - }, - images: { - type: "number", - example: 10, - }, - containers: { - type: "number", - example: 5, - }, - containersPaused: { - type: "number", - example: 0, - }, - containersRunning: { - type: "number", - example: 4, - }, - containersStopped: { - type: "number", - example: 1, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error retrieving host statistics", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to retrieve host config", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ); diff --git a/src/routes/docker-websocket.ts b/src/routes/docker-websocket.ts deleted file mode 100644 index 83d31c99..00000000 --- a/src/routes/docker-websocket.ts +++ /dev/null @@ -1,136 +0,0 @@ -import type { Readable } from "node:stream"; -import { Elysia } from "elysia"; -import type { ElysiaWS } from "elysia/dist/ws"; -import split2 from "split2"; - -import { dbFunctions } from "~/core/database"; -import { getDockerClient } from "~/core/docker/client"; -import { - calculateCpuPercent, - calculateMemoryUsage, -} from "~/core/utils/calculations"; -import { logger } from "~/core/utils/logger"; -import { responseHandler } from "~/core/utils/response-handler"; - -//biome-ignore lint/suspicious/noExplicitAny: -const activeDockerConnections = new Set>(); -const connectionStreams = new Map< - //biome-ignore lint/suspicious/noExplicitAny: - ElysiaWS, - Array<{ statsStream: Readable; splitStream: ReturnType }> ->(); - -export const dockerWebsocketRoutes = new Elysia({ prefix: "/ws" }).ws( - "/docker", - { - async open(ws) { - activeDockerConnections.add(ws); - connectionStreams.set(ws, []); - - ws.send(JSON.stringify({ message: "Connection established" })); - logger.info(`New Docker WebSocket established (${ws.id})`); - - try { - const hosts = dbFunctions.getDockerHosts(); - logger.debug(`Retrieved ${hosts.length} docker host(s)`); - - for (const host of hosts) { - if (ws.readyState !== 1) { - break; - } - - const docker = getDockerClient(host); - await docker.ping(); - const containers = await docker.listContainers({ all: true }); - logger.debug( - `Found ${containers.length} containers on ${host.name} (id: ${host.id})`, - ); - - for (const containerInfo of containers) { - if (ws.readyState !== 1) { - break; - } - - const container = docker.getContainer(containerInfo.Id); - const statsStream = (await container.stats({ - stream: true, - })) as Readable; - const splitStream = split2(); - - connectionStreams.get(ws)?.push({ statsStream, splitStream }); - - statsStream - .on("close", () => splitStream.destroy()) - .pipe(splitStream) - 
.on("data", (line: string) => { - if (ws.readyState !== 1 || !line) { - return; - } - try { - const stats = JSON.parse(line); - ws.send( - JSON.stringify({ - id: containerInfo.Id, - hostId: host.id, - name: containerInfo.Names[0].replace(/^\//, ""), - image: containerInfo.Image, - status: containerInfo.Status, - state: containerInfo.State, - cpuUsage: calculateCpuPercent(stats) || 0, - memoryUsage: calculateMemoryUsage(stats) || 0, - }), - ); - } catch (error) { - logger.error(`Parse error: ${error}`); - } - }) - .on("error", (error: Error) => { - logger.error(`Stream error: ${error}`); - statsStream.destroy(); - ws.send( - JSON.stringify({ - hostId: host.name, - containerId: containerInfo.Id, - error: `Stats stream error: ${error}`, - }), - ); - }); - } - } - } catch (error) { - logger.error(`Connection error: ${error}`); - ws.send( - JSON.stringify( - responseHandler.error( - { headers: {} }, - error as string, - "Docker connection failed", - 500, - ), - ), - ); - } - }, - - message(ws, message) { - if (message === "pong") ws.pong(); - }, - - close(ws) { - logger.info(`Closing connection ${ws.id}`); - activeDockerConnections.delete(ws); - - const streams = connectionStreams.get(ws) || []; - for (const { statsStream, splitStream } of streams) { - try { - statsStream.unpipe(splitStream); - statsStream.destroy(); - splitStream.destroy(); - } catch (error) { - logger.error(`Cleanup error: ${error}`); - } - } - connectionStreams.delete(ws); - }, - }, -); diff --git a/src/routes/live-logs.ts b/src/routes/live-logs.ts deleted file mode 100644 index 1b7fbfd8..00000000 --- a/src/routes/live-logs.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { Elysia } from "elysia"; -import type { ElysiaWS } from "elysia/dist/ws"; - -import { logger } from "~/core/utils/logger"; - -import type { log_message } from "~/typings/database"; - -//biome-ignore lint/suspicious/noExplicitAny: -const activeConnections = new Set>(); - -export const liveLogs = new Elysia({ prefix: "/ws" }).ws("/logs", { - open(ws) { - activeConnections.add(ws); - ws.send({ - message: "Connection established", - level: "info", - timestamp: new Date().toISOString(), - file: "live-logs.ts", - line: 14, - }); - logger.info(`New Logs WebSocket established (${ws.id})`); - }, - close(ws) { - logger.info(`Logs WebSocket closed (${ws.id})`); - activeConnections.delete(ws); - }, -}); - -export function logToClients(data: log_message) { - for (const ws of activeConnections) { - try { - ws.send(JSON.stringify(data)); - } catch (error) { - activeConnections.delete(ws); - logger.error("Failed to send to WebSocket:", error); - } - } -} diff --git a/src/routes/live-stacks.ts b/src/routes/live-stacks.ts deleted file mode 100644 index b093fd21..00000000 --- a/src/routes/live-stacks.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { Elysia } from "elysia"; -import type { ElysiaWS } from "elysia/dist/ws"; -import { logger } from "~/core/utils/logger"; -import type { stackSocketMessage } from "~/typings/websocket"; - -//biome-ignore lint/suspicious/noExplicitAny: Any = Connections -const activeConnections = new Set>(); - -export const liveStacks = new Elysia({ prefix: "/ws" }).ws("/stacks", { - open(ws) { - activeConnections.add(ws); - ws.send({ message: "Connection established" }); - logger.info(`New Stacks WebSocket established (${ws.id})`); - }, - close(ws) { - logger.info(`Stacks WebSocket closed (${ws.id})`); - activeConnections.delete(ws); - }, -}); - -export function postToClient(data: stackSocketMessage) { - for (const ws of activeConnections) { - try { - 
ws.send(JSON.stringify(data)); - } catch (error) { - activeConnections.delete(ws); - logger.error("Failed to send to WebSocket:", error); - } - } -} diff --git a/src/routes/logs.ts b/src/routes/logs.ts deleted file mode 100644 index 17da1fb7..00000000 --- a/src/routes/logs.ts +++ /dev/null @@ -1,261 +0,0 @@ -import { Elysia } from "elysia"; - -import { dbFunctions } from "~/core/database"; -import { logger } from "~/core/utils/logger"; - -export const backendLogs = new Elysia({ prefix: "/logs" }) - .get( - "", - async ({ set }) => { - try { - const logs = dbFunctions.getAllLogs(); - // - logger.debug("Retrieved all logs"); - return logs; - } catch (error) { - set.status = 500; - logger.error("Failed to retrieve logs,", error); - return { error: "Failed to retrieve logs" }; - } - }, - { - detail: { - tags: ["Management"], - description: - "Retrieves complete application log history from persistent storage", - responses: { - "200": { - description: "Successfully retrieved logs", - content: { - "application/json": { - schema: { - type: "array", - items: { - type: "object", - properties: { - id: { - type: "number", - example: 1, - }, - level: { - type: "string", - example: "info", - }, - message: { - type: "string", - example: "Application started", - }, - timestamp: { - type: "string", - example: "2024-03-20T12:00:00Z", - }, - }, - }, - }, - }, - }, - }, - "500": { - description: "Error retrieving logs", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to retrieve logs", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - - .get( - "/:level", - async ({ params: { level }, set }) => { - try { - const logs = dbFunctions.getLogsByLevel(level); - - logger.debug(`Retrieved logs (level: ${level})`); - return logs; - } catch (error) { - set.status = 500; - logger.error("Failed to retrieve logs"); - return { error: "Failed to retrieve logs" }; - } - }, - { - detail: { - tags: ["Management"], - description: - "Filters logs by severity level (debug, info, warn, error, fatal)", - responses: { - "200": { - description: "Successfully retrieved logs by level", - content: { - "application/json": { - schema: { - type: "array", - items: { - type: "object", - properties: { - id: { - type: "number", - example: 1, - }, - level: { - type: "string", - example: "info", - }, - message: { - type: "string", - example: "Application started", - }, - timestamp: { - type: "string", - example: "2024-03-20T12:00:00Z", - }, - }, - }, - }, - }, - }, - }, - "500": { - description: "Error retrieving logs", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to retrieve logs", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - - .delete( - "/", - async ({ set }) => { - try { - set.status = 200; - - dbFunctions.clearAllLogs(); - return { success: true }; - } catch (error) { - set.status = 500; - logger.error("Could not delete all logs,", error); - return { error: "Could not delete all logs" }; - } - }, - { - detail: { - tags: ["Management"], - description: "Purges all historical log records from the database", - responses: { - "200": { - description: "Successfully cleared all logs", - content: { - "application/json": { - schema: { - type: "object", - properties: { - success: { - type: "boolean", - example: true, - }, - }, - }, - }, - }, - }, - "500": { - description: "Error clearing logs", - content: { - "application/json": { - schema: { - type: "object", - properties: { - 
error: { - type: "string", - example: "Could not delete all logs", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - - .delete( - "/:level", - async ({ params: { level }, set }) => { - try { - dbFunctions.clearLogsByLevel(level); - - logger.debug(`Cleared all logs with level: ${level}`); - return { success: true }; - } catch (error) { - set.status = 500; - logger.error("Could not clear logs with level", level, ",", error); - return { error: "Failed to retrieve logs" }; - } - }, - { - detail: { - tags: ["Management"], - description: "Clears log entries matching specified severity level", - responses: { - "200": { - description: "Successfully cleared logs by level", - content: { - "application/json": { - schema: { - type: "object", - properties: { - success: { - type: "boolean", - example: true, - }, - }, - }, - }, - }, - }, - "500": { - description: "Error clearing logs", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Failed to retrieve logs", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ); diff --git a/src/routes/stacks.ts b/src/routes/stacks.ts deleted file mode 100644 index d81d24d6..00000000 --- a/src/routes/stacks.ts +++ /dev/null @@ -1,598 +0,0 @@ -import { Elysia, t } from "elysia"; -import { dbFunctions } from "~/core/database"; -import { - deployStack, - getAllStacksStatus, - getStackStatus, - pullStackImages, - removeStack, - restartStack, - startStack, - stopStack, -} from "~/core/stacks/controller"; -import { logger } from "~/core/utils/logger"; -import { responseHandler } from "~/core/utils/response-handler"; -import type { stacks_config } from "~/typings/database"; - -export const stackRoutes = new Elysia({ prefix: "/stacks" }) - .post( - "/deploy", - async ({ set, body }) => { - try { - await deployStack(body as stacks_config); - logger.info(`Deployed Stack (${body.name})`); - return responseHandler.ok( - set, - `Stack ${body.name} deployed successfully`, - ); - } catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error); - - return responseHandler.error( - set, - errorMsg, - "Error deploying stack, please check the server logs for more information", - ); - } - }, - { - detail: { - tags: ["Stacks"], - description: - "Deploys a new Docker stack using a provided compose specification, allowing custom configurations and image updates", - responses: { - "200": { - description: "Successfully deployed stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Stack example-stack deployed successfully", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error deploying stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error deploying stack", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - name: t.String(), - version: t.Number(), - custom: t.Boolean(), - source: t.String(), - compose_spec: t.Any(), - }), - }, - ) - .post( - "/start", - async ({ set, body }) => { - try { - if (!body.stackId) { - throw new Error("Stack ID needed"); - } - await startStack(body.stackId); - logger.info(`Started Stack (${body.stackId})`); - return responseHandler.ok( - set, - `Stack ${body.stackId} started successfully`, - ); - } catch (error) { - const errorMsg = error instanceof Error ? 
error.message : String(error); - - return responseHandler.error(set, errorMsg, "Error starting stack"); - } - }, - { - detail: { - tags: ["Stacks"], - description: - "Initiates a Docker stack, starting all associated containers", - responses: { - "200": { - description: "Successfully started stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Stack 1 started successfully", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error starting stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error starting stack", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - stackId: t.Number(), - }), - }, - ) - .post( - "/stop", - async ({ set, body }) => { - try { - if (!body.stackId) { - throw new Error("Stack needed"); - } - await stopStack(body.stackId); - logger.info(`Stopped Stack (${body.stackId})`); - return responseHandler.ok( - set, - `Stack ${body.stackId} stopped successfully`, - ); - } catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error); - - return responseHandler.error(set, errorMsg, "Error stopping stack"); - } - }, - { - detail: { - tags: ["Stacks"], - description: - "Halts a running Docker stack and its containers while preserving configurations", - responses: { - "200": { - description: "Successfully stopped stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Stack 1 stopped successfully", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error stopping stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error stopping stack", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - stackId: t.Number(), - }), - }, - ) - .post( - "/restart", - async ({ set, body }) => { - try { - if (!body.stackId) { - throw new Error("Stack needed"); - } - await restartStack(body.stackId); - logger.info(`Restarted Stack (${body.stackId})`); - return responseHandler.ok( - set, - `Stack ${body.stackId} restarted successfully`, - ); - } catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error); - - return responseHandler.error(set, errorMsg, "Error restarting stack"); - } - }, - { - detail: { - tags: ["Stacks"], - description: - "Performs full stack restart - stops and restarts all stack components in sequence", - responses: { - "200": { - description: "Successfully restarted stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Stack 1 restarted successfully", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error restarting stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error restarting stack", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - stackId: t.Number(), - }), - }, - ) - .post( - "/pull-images", - async ({ set, body }) => { - try { - if (!body.stackId) { - throw new Error("Stack needed"); - } - await pullStackImages(body.stackId); - logger.info(`Pulled Stack images (${body.stackId})`); - return responseHandler.ok( - set, - `Images for stack ${body.stackId} pulled successfully`, - ); - } catch (error) { - const errorMsg = error instanceof Error ? 
error.message : String(error); - - return responseHandler.error(set, errorMsg, "Error pulling images"); - } - }, - { - detail: { - tags: ["Stacks"], - description: - "Updates container images for a stack using Docker's pull mechanism (requires stack ID)", - responses: { - "200": { - description: "Successfully pulled images", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Images for stack 1 pulled successfully", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error pulling images", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error pulling images", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - stackId: t.Number(), - }), - }, - ) - .get( - "/status", - async ({ set, query }) => { - try { - // biome-ignore lint/suspicious/noExplicitAny: - let status: Record; - let res = {}; - - logger.debug("Entering stack status handler"); - logger.debug(`Request body: ${JSON.stringify(query)}`); - - if (query.stackId !== 0) { - logger.debug(`Fetching status for stackId=${query.stackId}`); - status = await getStackStatus(query.stackId); - logger.debug( - `Retrieved status for stackId=${query.stackId}: ${JSON.stringify( - status, - )}`, - ); - - res = responseHandler.ok( - set, - `Stack ${query.stackId} status retrieved successfully`, - ); - logger.info("Fetched Stack status"); - } else { - logger.debug("Fetching status for all stacks"); - status = await getAllStacksStatus(); - logger.debug( - `Retrieved status for all stacks: ${JSON.stringify(status)}`, - ); - - res = responseHandler.ok(set, "Fetched all Stack's status"); - logger.info("Fetched all Stack status"); - } - - logger.debug("Returning response with status data"); - return { ...res, status: status }; - } catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error); - logger.debug(`Error occurred while fetching stack status: ${errorMsg}`); - - return responseHandler.error( - set, - errorMsg, - "Error getting stack status", - ); - } - }, - { - detail: { - tags: ["Stacks"], - description: - "Retrieves operational status for either a specific stack (by ID) or all managed stacks (ID: 0)", - responses: { - "200": { - description: "Successfully retrieved stack status", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Stack 1 status retrieved successfully", - }, - status: { - type: "object", - properties: { - name: { - type: "string", - example: "example-stack", - }, - status: { - type: "string", - example: "running", - }, - containers: { - type: "array", - items: { - type: "object", - properties: { - name: { - type: "string", - example: "example-stack_web_1", - }, - status: { - type: "string", - example: "running", - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error getting stack status", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error getting stack status", - }, - }, - }, - }, - }, - }, - }, - }, - query: t.Object({ - stackId: t.Number(), - }), - }, - ) - .get( - "/", - async ({ set }) => { - try { - const stacks = dbFunctions.getStacks(); - logger.info("Fetched Stacks"); - return stacks; - } catch (error) { - const errorMsg = error instanceof Error ? 
error.message : String(error); - - return responseHandler.error(set, errorMsg, "Error getting stacks"); - } - }, - { - detail: { - tags: ["Stacks"], - description: - "Lists all registered stacks with their complete configuration details", - responses: { - "200": { - description: "Successfully retrieved stacks", - content: { - "application/json": { - schema: { - type: "array", - items: { - type: "object", - properties: { - id: { - type: "number", - example: 1, - }, - name: { - type: "string", - example: "example-stack", - }, - version: { - type: "number", - example: 1, - }, - source: { - type: "string", - example: "github.com/example/repo", - }, - automatic_reboot_on_error: { - type: "boolean", - example: true, - }, - }, - }, - }, - }, - }, - }, - "400": { - description: "Error getting stacks", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error getting stacks", - }, - }, - }, - }, - }, - }, - }, - }, - }, - ) - .delete( - "/", - async ({ set, body }) => { - try { - const { stackId } = body; - await removeStack(stackId); - logger.info(`Deleted Stack ${stackId}`); - return responseHandler.ok(set, `Stack ${stackId} deleted successfully`); - } catch (error) { - const errorMsg = error instanceof Error ? error.message : String(error); - - return responseHandler.error(set, errorMsg, "Error deleting stack"); - } - }, - { - detail: { - tags: ["Stacks"], - description: - "Permanently removes a stack configuration and cleans up associated resources", - responses: { - "200": { - description: "Successfully deleted stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - message: { - type: "string", - example: "Stack 1 deleted successfully", - }, - }, - }, - }, - }, - }, - "400": { - description: "Error deleting stack", - content: { - "application/json": { - schema: { - type: "object", - properties: { - error: { - type: "string", - example: "Error deleting stack", - }, - }, - }, - }, - }, - }, - }, - }, - body: t.Object({ - stackId: t.Number(), - }), - }, - ); diff --git a/src/routes/utils.ts b/src/routes/utils.ts deleted file mode 100644 index e69de29b..00000000 diff --git a/src/tests/api-config.spec.ts b/src/tests/api-config.spec.ts deleted file mode 100644 index ba3e7b32..00000000 --- a/src/tests/api-config.spec.ts +++ /dev/null @@ -1,344 +0,0 @@ -import { afterAll, beforeEach, describe, expect, it, mock } from "bun:test"; -import { Elysia } from "elysia"; -import { logger } from "~/core/utils/logger"; -import { apiConfigRoutes } from "~/routes/api-config"; -import { generateMarkdownReport, recordTestResult } from "./markdown-exporter"; -import type { TestContext } from "./markdown-exporter"; - -const mockDb = { - updateConfig: mock(() => ({})), - backupDatabase: mock( - () => `dockstatapi-${new Date().toISOString().slice(0, 10)}.db.bak`, - ), - restoreDatabase: mock(), - findLatestBackup: mock(() => "dockstatapi-2025-05-06.db.bak"), -}; - -mock.module("node:fs", () => ({ - existsSync: mock((path) => path.includes("dockstatapi")), - readdirSync: mock(() => [ - "dockstatapi-2025-05-06.db.bak", - "dockstatapi.db", - "dockstatapi.db-shm", - ]), - unlinkSync: mock(), -})); - -const mockPlugins = [ - { - name: "docker-monitor", - version: "1.2.0", - status: "active", - }, -]; - -const createTestApp = () => - new Elysia().use(apiConfigRoutes).decorate({ - dbFunctions: mockDb, - pluginManager: { - getLoadedPlugins: mock(() => mockPlugins), - getPlugin: mock((name) => mockPlugins.find((p) => 
p.name === name)), - }, - logger: { - ...logger, - debug: mock(), - error: mock(), - info: mock(), - }, - }); - -async function captureTestContext( - req: Request, - res: Response, -): Promise { - const responseStatus = res.status; - const responseHeaders = Object.fromEntries(res.headers.entries()); - let responseBody: string; - - try { - responseBody = await res.clone().json(); - } catch (parseError) { - try { - responseBody = await res.clone().text(); - } catch (textError) { - responseBody = "Unparseable response content"; - } - } - - return { - request: { - method: req.method, - url: req.url, - headers: Object.fromEntries(req.headers.entries()), - body: req.body ? await req.clone().text() : undefined, - }, - response: { - status: responseStatus, - headers: responseHeaders, - body: responseBody, - }, - }; -} - -describe("API Configuration Endpoints", () => { - beforeEach(() => { - mockDb.updateConfig.mockClear(); - }); - - describe("Core Configuration", () => { - it("should retrieve current config with hashed API key", async () => { - const start = Date.now(); - let context: TestContext | undefined; - - try { - const app = createTestApp(); - const req = new Request("http://localhost:3000/config"); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - expect(context.response.body).toMatchObject({ - fetching_interval: expect.any(Number), - keep_data_for: expect.any(Number), - }); - - recordTestResult({ - name: "should retrieve current config with hashed API key", - suite: "API Configuration Endpoints - Core Configuration", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "should retrieve current config with hashed API key", - suite: "API Configuration Endpoints - Core Configuration", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with valid config structure", - received: context?.response, - }, - }); - throw error; - } - }); - - it("should handle config update with valid payload", async () => { - const start = Date.now(); - let context: TestContext | undefined; - - try { - const app = createTestApp(); - const requestBody = { - fetching_interval: 15, - keep_data_for: 30, - api_key: "new-valid-key", - }; - const req = new Request("http://localhost:3000/config/update", { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify(requestBody), - }); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - expect(context.response.body).toMatchObject({ - success: true, - message: expect.stringContaining("Updated"), - }); - - recordTestResult({ - name: "should handle config update with valid payload", - suite: "API Configuration Endpoints - Core Configuration", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "should handle config update with valid payload", - suite: "API Configuration Endpoints - Core Configuration", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with update confirmation", - received: context?.response, - }, - }); - throw error; - } - }); - }); - - describe("Plugin Management", () => { - it("should list active plugins with metadata", async () => { - const start = Date.now(); - let context: TestContext | undefined; - - try { - const app = createTestApp(); - const req = new Request("http://localhost:3000/config/plugins"); - 
const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - expect(context.response.body).toEqual( - [], - //expect.arrayContaining([ - // expect.objectContaining({ - // name: expect.any(String), - // version: expect.any(String), - // status: expect.any(String), - // }), - //]) - ); - - recordTestResult({ - name: "should list active plugins with metadata", - suite: "API Configuration Endpoints - Plugin Management", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "should list active plugins with metadata", - suite: "API Configuration Endpoints - Plugin Management", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with plugin list", - received: context?.response, - }, - }); - throw error; - } - }); - }); - - describe("Backup Management", () => { - it("should generate timestamped backup files", async () => { - const start = Date.now(); - let context: TestContext | undefined; - - try { - const app = createTestApp(); - const req = new Request("http://localhost:3000/config/backup", { - method: "POST", - }); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - const { message } = context.response.body as { message: string }; - expect(message).toMatch( - /^data\/dockstatapi-\d{2}-\d{2}-\d{4}-1\.db\.bak$/, - ); - - recordTestResult({ - name: "should generate timestamped backup files", - suite: "API Configuration Endpoints - Backup Management", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "should generate timestamped backup files", - suite: "API Configuration Endpoints - Backup Management", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with backup path", - received: context?.response, - }, - }); - throw error; - } - }); - - it("should list valid backup files", async () => { - const start = Date.now(); - let context: TestContext | undefined; - - try { - const app = createTestApp(); - const req = new Request("http://localhost:3000/config/backup"); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - const backups = context.response.body as string[]; - expect(backups).toEqual( - expect.arrayContaining([expect.stringMatching(/\.db\.bak$/)]), - ); - - recordTestResult({ - name: "should list valid backup files", - suite: "API Configuration Endpoints - Backup Management", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "should list valid backup files", - suite: "API Configuration Endpoints - Backup Management", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with backup list", - received: context?.response, - }, - }); - throw error; - } - }); - }); - - describe("Error Handling", () => { - it("should return proper error format", async () => { - const start = Date.now(); - let context: TestContext | undefined; - - try { - const app = createTestApp(); - const req = new Request("http://localhost:3000/random_link", { - method: "GET", - headers: { "Content-Type": "application/json" }, - }); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(404); - - recordTestResult({ - name: "should return proper error format", - suite: - "API Configuration Endpoints - Error 
Handling of unkown routes", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "should return proper error format", - suite: "API Configuration Endpoints - Error Handling", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "500 Error with structured error format", - received: context?.response, - }, - }); - throw error; - } - }); - }); -}); - -afterAll(() => { - generateMarkdownReport(); -}); diff --git a/src/tests/docker-manager.spec.ts b/src/tests/docker-manager.spec.ts deleted file mode 100644 index 865b2aa1..00000000 --- a/src/tests/docker-manager.spec.ts +++ /dev/null @@ -1,482 +0,0 @@ -import { afterAll, beforeEach, describe, expect, it, mock } from "bun:test"; -import { Elysia } from "elysia"; -import { dbFunctions } from "~/core/database"; -import { dockerRoutes } from "~/routes/docker-manager"; -import { - generateMarkdownReport, - recordTestResult, - testResults, -} from "./markdown-exporter"; -import type { TestContext } from "./markdown-exporter"; - -type DockerHost = { - id?: number; - name: string; - hostAddress: string; - secure: boolean; -}; - -const mockDb = { - addDockerHost: mock(() => ({ - changes: 1, - lastInsertRowid: 1, - })), - updateDockerHost: mock(() => ({ - changes: 1, - lastInsertRowid: 1, - })), - getDockerHosts: mock(() => []), - deleteDockerHost: mock(() => ({ - changes: 1, - lastInsertRowid: 1, - })), -}; - -mock.module("~/core/database", () => ({ - dbFunctions: mockDb, -})); - -mock.module("~/core/utils/logger", () => ({ - logger: { - debug: mock(), - info: mock(), - error: mock(), - }, -})); - -const createApp = () => new Elysia().use(dockerRoutes).decorate({}); - -async function captureTestContext( - req: Request, - res: Response, -): Promise { - const responseStatus = res.status; - const responseHeaders = Object.fromEntries(res.headers.entries()); - let responseBody: unknown; - - try { - responseBody = await res.clone().json(); - } catch (parseError) { - try { - responseBody = await res.clone().text(); - } catch { - responseBody = "Unparseable response content"; - } - } - - return { - request: { - method: req.method, - url: req.url, - headers: Object.fromEntries(req.headers.entries()), - body: req.body ? 
await req.clone().text() : undefined, - }, - response: { - status: responseStatus, - headers: responseHeaders, - body: responseBody, - }, - }; -} - -describe("Docker Configuration Endpoints", () => { - beforeEach(() => { - mockDb.addDockerHost.mockClear(); - mockDb.updateDockerHost.mockClear(); - mockDb.getDockerHosts.mockClear(); - mockDb.deleteDockerHost.mockClear(); - }); - - describe("POST /docker-config/add-host", () => { - it("should add a docker host successfully", async () => { - const start = Date.now(); - let context: TestContext | undefined; - const host: DockerHost = { - name: "Host1", - hostAddress: "127.0.0.1:2375", - secure: false, - }; - - try { - const app = createApp(); - const req = new Request( - "http://localhost:3000/docker-config/add-host", - { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify(host), - }, - ); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - expect(context.response.body).toMatchObject({ - message: `Added docker host (${host.name})`, - }); - expect(mockDb.addDockerHost).toHaveBeenCalledWith(host); - - recordTestResult({ - name: "add-host success", - suite: "Docker Config - Add Host", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "add-host success", - suite: "Docker Config - Add Host", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with success message", - received: context?.response, - }, - }); - throw error; - } - }); - - it("should handle error when adding a docker host fails", async () => { - const start = Date.now(); - let context: TestContext | undefined; - const host: DockerHost = { - name: "Host2", - hostAddress: "invalid", - secure: true, - }; - - // Set mock implementation - mockDb.addDockerHost.mockImplementationOnce(() => { - throw new Error("Mock Database Error"); - }); - - try { - const app = createApp(); - const req = new Request( - "http://localhost:3000/docker-config/add-host", - { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify(host), - }, - ); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(500); - expect(context.response).toMatchObject({ - body: expect.any(String), - }); - - recordTestResult({ - name: "add-host failure", - suite: "Docker Config - Add Host", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "add-host failure", - suite: "Docker Config - Add Host", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "400 Error with error structure", - received: context?.response, - }, - }); - throw error; - } - }); - }); - - describe("POST /docker-config/update-host", () => { - it("should update a docker host successfully", async () => { - const start = Date.now(); - let context: TestContext | undefined; - const host: DockerHost = { - id: 1, - name: "Host1-upd", - hostAddress: "127.0.0.1:2376", - secure: true, - }; - - try { - const app = createApp(); - const req = new Request( - "http://localhost:3000/docker-config/update-host", - { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify(host), - }, - ); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - expect(context.response.body).toMatchObject({ - message: `Updated docker 
host (${host.id})`, - }); - expect(mockDb.updateDockerHost).toHaveBeenCalledWith(host); - - recordTestResult({ - name: "update-host success", - suite: "Docker Config - Update Host", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "update-host success", - suite: "Docker Config - Update Host", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with update confirmation", - received: context?.response, - }, - }); - throw error; - } - }); - - it("should handle error when update fails", async () => { - const start = Date.now(); - let context: TestContext | undefined; - const host: DockerHost = { - id: 2, - name: "Host2", - hostAddress: "x", - secure: false, - }; - - mockDb.updateDockerHost.mockImplementationOnce(() => { - throw new Error("Update error"); - }); - - try { - const app = createApp(); - const req = new Request( - "http://localhost:3000/docker-config/update-host", - { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify(host), - }, - ); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(500); - expect(context.response).toMatchObject({ - body: expect.any(String), - }); - - recordTestResult({ - name: "update-host failure", - suite: "Docker Config - Update Host", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "update-host failure", - suite: "Docker Config - Update Host", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "400 Error with error details", - received: context?.response, - }, - }); - throw error; - } - }); - }); - - describe("GET /docker-config/hosts", () => { - it("should retrieve list of hosts", async () => { - const start = Date.now(); - let context: TestContext | undefined; - const hosts: DockerHost[] = [ - { id: 1, name: "H1", hostAddress: "a", secure: false }, - ]; - - mockDb.getDockerHosts.mockImplementation(() => hosts as never[]); - - try { - const app = createApp(); - const req = new Request("http://localhost:3000/docker-config/hosts"); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - expect(context.response.body).toEqual(hosts); - - recordTestResult({ - name: "get-hosts success", - suite: "Docker Config - List Hosts", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "get-hosts success", - suite: "Docker Config - List Hosts", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with hosts array", - received: context?.response, - }, - }); - throw error; - } - }); - - it("should handle error when retrieval fails", async () => { - const start = Date.now(); - let context: TestContext | undefined; - - mockDb.getDockerHosts.mockImplementationOnce(() => { - throw new Error("Fetch error"); - }); - - try { - const app = createApp(); - const req = new Request("http://localhost:3000/docker-config/hosts"); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(500); - expect(context.response).toMatchObject({ - body: expect.any(String), - }); - - recordTestResult({ - name: "get-hosts failure", - suite: "Docker Config - List Hosts", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "get-hosts failure", - suite: "Docker Config - List 
Hosts", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "400 Error with error details", - received: context?.response, - }, - }); - throw error; - } - }); - }); - - describe("DELETE /docker-config/hosts/:id", () => { - it("should delete a host successfully", async () => { - const start = Date.now(); - let context: TestContext | undefined; - const id = 5; - - try { - const app = createApp(); - const req = new Request( - `http://localhost:3000/docker-config/hosts/${id}`, - { - method: "DELETE", - }, - ); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(200); - expect(context.response.body).toMatchObject({ - message: `Deleted docker host (${id})`, - }); - expect(mockDb.deleteDockerHost).toHaveBeenCalledWith(id); - - recordTestResult({ - name: "delete-host success", - suite: "Docker Config - Delete Host", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "delete-host success", - suite: "Docker Config - Delete Host", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "200 OK with deletion confirmation", - received: context?.response, - }, - }); - throw error; - } - }); - - it("should handle error when delete fails", async () => { - const start = Date.now(); - let context: TestContext | undefined; - const id = 6; - - mockDb.deleteDockerHost.mockImplementationOnce(() => { - throw new Error("Delete error"); - }); - - try { - const app = createApp(); - const req = new Request( - `http://localhost:3000/docker-config/hosts/${id}`, - { - method: "DELETE", - }, - ); - const res = await app.handle(req); - context = await captureTestContext(req, res); - - expect(res.status).toBe(500); - expect(context.response).toMatchObject({ - body: expect.any(String), - }); - - recordTestResult({ - name: "delete-host failure", - suite: "Docker Config - Delete Host", - time: Date.now() - start, - context, - }); - } catch (error) { - recordTestResult({ - name: "delete-host failure", - suite: "Docker Config - Delete Host", - time: Date.now() - start, - error: error as Error, - context, - errorDetails: { - expected: "400 Error with error details", - received: context?.response, - }, - }); - throw error; - } - }); - }); -}); - -afterAll(() => { - generateMarkdownReport(); -}); diff --git a/src/tests/markdown-exporter.ts b/src/tests/markdown-exporter.ts deleted file mode 100644 index 2d55b48e..00000000 --- a/src/tests/markdown-exporter.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { mkdirSync, writeFileSync } from "node:fs"; -import { format } from "date-fns"; -import { logger } from "~/core/utils/logger"; - -export type TestContext = { - request: { - method: string; - url: string; - headers: Record; - query?: Record; - body?: unknown; - }; - response: { - status: number; - headers: Record; - body?: unknown; - }; -}; - -type ErrorDetails = { - expected?: unknown; - received?: unknown; -}; - -type TestResult = { - name: string; - suite: string; - time: number; - error?: Error; - context?: TestContext; - errorDetails?: ErrorDetails; -}; - -export function recordTestResult(result: TestResult) { - logger.debug(`__UT__ Recording test result: ${JSON.stringify(result)}`); - testResults.push(result); -} - -export const testResults: TestResult[] = []; - -function formatContextMarkdown( - context?: TestContext, - errorDetails?: ErrorDetails, -): string { - if (!context) return ""; - - let md = "```\n"; - md += "=== REQUEST ===\n"; - md += 
`Method: ${context.request.method}\n`; - md += `URL: ${context.request.url}\n`; - if (context.request.query) { - md += `Query Params: ${JSON.stringify(context.request.query, null, 2)}\n`; - } - md += `Headers: ${JSON.stringify(context.request.headers, null, 2)}\n`; - if (context.request.body) { - md += `Body: ${JSON.stringify(context.request.body, null, 2)}\n`; - } - md += "\n=== RESPONSE ===\n"; - md += `Status: ${context.response.status}\n`; - md += `Headers: ${JSON.stringify(context.response.headers, null, 2)}\n`; - if (context.response.body) { - md += `Body: ${JSON.stringify(context.response.body, null, 2)}\n`; - } - if (errorDetails) { - md += "\n=== ERROR DETAILS ===\n"; - md += `Expected: ${JSON.stringify(errorDetails.expected, null, 2)}\n`; - md += `Received: ${JSON.stringify(errorDetails.received, null, 2)}\n`; - } - md += "```\n"; - return md; -} - -export function generateMarkdownReport() { - if (testResults.length === 0) { - logger.warn("No test results to generate markdown report."); - return; - } - - const totalTests = testResults.length; - const totalErrors = testResults.filter((r) => r.error).length; - - const testSuites = testResults.reduce( - (suites, result) => { - if (!suites[result.suite]) { - suites[result.suite] = []; - } - suites[result.suite].push(result); - return suites; - }, - {} as Record, - ); - - let md = `# Test Report - ${format(new Date(), "yyyy-MM-dd")}\n`; - md += `\n**Total Tests:** ${totalTests} -`; - md += `**Total Failures:** ${totalErrors}\n`; - - for (const [suiteName, cases] of Object.entries(testSuites)) { - const suiteErrors = cases.filter((c) => c.error).length; - md += `\n## Suite: ${suiteName} -`; - md += `- Tests: ${cases.length} -`; - md += `- Failures: ${suiteErrors}\n`; - - for (const test of cases) { - const status = test.error ? "❌ Failed" : "✅ Passed"; - md += `\n### ${test.name} (${(test.time / 1000).toFixed(2)}s) -`; - md += `- Status: **${status}** \n`; - - if (test.error) { - const msg = test.error.message - .replace(//g, ">"); - const stack = test.error.stack - ?.replace(//g, ">"); - md += "\n
<details>\n<summary>Error Details</summary>\n\n"; - md += `**Message:** ${msg} \n`; - if (stack) { - md += `\n\`\`\`\n${stack}\n\`\`\`\n`; - } - md += "
</details>\n"; - } - - if (test.context) { - md += "\n
<details>\n<summary>Request/Response Context</summary>\n\n"; - md += formatContextMarkdown(test.context, test.errorDetails); - md += "
</details>\n"; - } - } - } - - // Ensure directory exists - mkdirSync("reports/markdown", { recursive: true }); - const filename = `reports/markdown/test-report-${format( - new Date(), - "yyyy-MM-dd", - )}.md`; - writeFileSync(filename, md, "utf8"); - - logger.debug(`__UT__ Markdown report written to ${filename}`); -} diff --git a/src/typings b/src/typings deleted file mode 160000 index 9cae829b..00000000 --- a/src/typings +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 9cae829bead60cd13351b757340f3225649cb11d diff --git a/tsconfig.json b/tsconfig.json index 85c0ed8c..b0ce6926 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -29,10 +29,11 @@ /* Modules */ "module": "ES2022" /* Specify what module code is generated. */, // "rootDir": "./", /* Specify the root folder within your source files. */ - "moduleResolution": "node" /* Specify how TypeScript looks up a file from a given module specifier. */, - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + "moduleResolution": "bundler" /* Specify how TypeScript looks up a file from a given module specifier. */, + "baseUrl": "./" /* Specify the base directory to resolve non-relative module names. */, "paths": { - "~/*": ["./src/*"] + "~/*": ["./src/*"], + "~/typings/*": ["./typings/*"] } /* Specify a set of entries that re-map imports to additional lookup locations. */, // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ diff --git a/typings b/typings new file mode 160000 index 00000000..9d5500fc --- /dev/null +++ b/typings @@ -0,0 +1 @@ +Subproject commit 9d5500fcbcb1d217b898ba85a929ebb26c42f898
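Note on the tsconfig.json change above: below is a minimal sketch of how imports are expected to resolve once `moduleResolution` is `"bundler"`, `baseUrl` is `"./"`, and the added `"~/typings/*": ["./typings/*"]` mapping points at the new root-level `typings` submodule instead of the removed `src/typings` one. The file name `src/example.ts`, the `describeLog` helper, and the specific `log_message` fields used are illustrative assumptions, not part of this diff; only the import specifiers and path mappings come from the change itself.

```ts
// src/example.ts — hypothetical consumer module, not part of this diff.

// Resolves through the added "~/typings/*": ["./typings/*"] mapping to the
// root-level `typings` submodule (previously reached via ./src/typings).
import type { log_message } from "~/typings/database";

// Still resolves through the unchanged "~/*": ["./src/*"] mapping.
import { logger } from "~/core/utils/logger";

// Formats a log entry; the level/message/timestamp fields mirror the payload
// sent by the removed live-logs WebSocket route and are assumed to exist on
// the log_message type.
export function describeLog(entry: log_message): string {
  logger.debug(`Formatting log entry from ${entry.timestamp}`);
  return `[${entry.level}] ${entry.message}`;
}
```

Since the tests import from `bun:test`, the project runs on Bun, which also reads `paths` from tsconfig.json at runtime, so the alias change should apply to both type checking and module resolution without extra bundler configuration.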