From 1e80dea0d7f22d250fc9e044c24f4378f8d08f91 Mon Sep 17 00:00:00 2001 From: Brian Botha Date: Mon, 1 Nov 2021 14:38:15 +1100 Subject: [PATCH 01/28] Adding quality stage to sanity check release builds and parallelising test jobs for CI/CD --- .gitignore | 1 + .gitlab-ci.yml | 305 +++++++++++++++++++++-- .npmignore | 1 + default.nix | 4 +- package-lock.json | 400 ++++++++++-------------------- package.json | 9 +- src/PolykeyClient.ts | 4 +- src/agent/GRPCClientAgent.ts | 35 ++- src/client/GRPCClientClient.ts | 52 +++- src/client/clientService.ts | 3 +- tests/bin/keys.test.ts | 13 - tests/bin/secret.test.ts | 13 - tests/bootstrap/bootstrap.test.ts | 3 +- tests/client/rpcSessions.test.ts | 1 - 14 files changed, 511 insertions(+), 333 deletions(-) diff --git a/.gitignore b/.gitignore index cf000140c..cd3216165 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ .env* !.env.example /result* +/builds # Logs logs diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a9cbf4640..fb3674a00 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,14 +1,17 @@ +default: + interruptible: true + variables: GIT_SUBMODULE_STRATEGY: recursive stages: - check - build + - quality - release -image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - -lint: +.lint: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner stage: check interruptible: true script: @@ -18,19 +21,185 @@ lint: npm run lint; ' -test: +.test 1/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner stage: check interruptible: true script: - > nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' npm install; - npm run test; + npm run test -- tests/client; ' -nix-dry: +.test 2/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/agent; + ' + +.test 3/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/bin; + ' + +.test 4/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/bootstrap; + ' + +.test 5/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/network; + ' + +.test 6/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/vaults; + ' + +.test 7/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/workers; + ' + +.test 8/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/grpc; + ' + +.test 9/16: + image: 
registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner stage: check interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/http; + ' + +.test 10/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/GenericIdTypes.test.ts tests/index.test.ts tests/PolykeyAgent.test.ts tests/utils.test.ts; + ' + +.test 11/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/claims; + ' + +.test 12/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/gestalts; + ' + +.test 13/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/nodes; + ' + +.test 14/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/git; + ' + +.test 15/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/keys; + ' + +.test 16/16: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm run test -- tests/acl tests/discovery tests/lockfile tests/notifications tests/session tests/sigchain tests/identities; + ' + +.nix-dry: + stage: check + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner script: - nix-build -v -v --dry-run ./release.nix --attr application - nix-build -v -v --dry-run ./release.nix --attr docker @@ -40,31 +209,127 @@ nix-dry: nix: stage: build - interruptible: true + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner script: + - mkdir -p ./builds + # nix-specific application target - > - nix-build ./release.nix - --max-jobs $(nproc) - --attr application - --attr docker + build_application="$(nix-build \ + --max-jobs "$(nproc)" --cores "$(nproc)" \ + ./release.nix \ + --attr application \ + )" + - > + nix-store --export $( \ + nix-store --query --requisites "$build_application" \ + ) | gzip > ./builds/js-polykey.closure.gz + # non-nix targets + - > + builds="$(nix-build \ + --max-jobs "$(nproc)" --cores "$(nproc)" \ + ./release.nix \ + --attr docker \ + --attr package.linux.x64.elf \ + --attr package.windows.x64.exe \ + --attr package.macos.x64.macho)" + - cp -r $builds ./builds/ + only: + - qa-testing + - master + artifacts: + paths: + - ./builds/ + +application run: + stage: quality + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + dependencies: + - nix + script: + - > + build_application="$( \ + gunzip -c ./builds/js-polykey.closure.gz | \ + nix-store 
--import | \ + tail -1 \ + )" + - echo password > testPassword + - $build_application/bin/polykey + only: + - master + - qa-testing + +docker run: + stage: quality + image: docker:20.10.11 + dependencies: + - nix + services: + - docker:20.10.11-dind + variables: + DOCKER_TLS_CERTDIR: "/certs" + before_script: + - docker info + script: + - image="$(docker load --input ./builds/*docker* | cut -d' ' -f3)" + - docker run "$image" + only: + - qa-testing + - master + +linux run: + stage: quality + image: ubuntu:latest + dependencies: + - nix + script: + - echo password > testPassword + - for f in ./builds/*-linux-*; do "$f"; done + only: + - qa-testing + - master + +windows run: + stage: quality + dependencies: + - nix + script: + - echo password > testPassword + - Get-ChildItem -File ./builds/*-win32-* | ForEach {& $_.FullName} + tags: + - windows only: + - qa-testing - master -packages: +macos run: + stage: quality + image: macos-11-xcode-12 + dependencies: + - nix + script: + - echo password > testPassword + - for f in ./builds/*-macos-*; do "$f"; done + only: + - qa-testing + - master + tags: + - shared-macos-amd64 + +.packages: stage: release - interruptible: true + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + dependencies: + - nix script: - > nix-shell -I nixpkgs=./pkgs.nix --packages git gitAndTools.gh --run ' - builds="$(nix-build \ - --max-jobs $(nproc) --cores $(nproc) \ - ./release.nix \ - --attr package.linux.x64.elf \ - --attr package.windows.x64.exe \ - --attr package.macos.x64.macho)"; commit="$(git rev-parse --short HEAD)"; gh release \ - create "$commit" $builds \ + create "$commit" \ + builds/*.closure.gz \ + builds/*-linux-* \ + builds/*-win32-* \ + builds/*-macos-* \ --title "Build-$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ --prerelease \ --notes "" \ diff --git a/.npmignore b/.npmignore index afe09caa5..4a1857eea 100644 --- a/.npmignore +++ b/.npmignore @@ -13,3 +13,4 @@ /tmp /docs /benches +/builds diff --git a/default.nix b/default.nix index 478ceca03..79e7317a4 100644 --- a/default.nix +++ b/default.nix @@ -12,7 +12,9 @@ let } '' mkdir -p $out/lib/node_modules/${utils.node2nixDev.packageName} - # copy only the dist + # copy the package.json + cp ${utils.node2nixDev}/lib/node_modules/${utils.node2nixDev.packageName}/package.json $out/lib/node_modules/${utils.node2nixDev.packageName}/ + # copy the dist cp -r ${utils.node2nixDev}/lib/node_modules/${utils.node2nixDev.packageName}/dist $out/lib/node_modules/${utils.node2nixDev.packageName}/ # copy over the production dependencies if [ -d "${utils.node2nixProd}/lib/node_modules" ]; then diff --git a/package-lock.json b/package-lock.json index 758918abc..9bb5f7d93 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1582,15 +1582,15 @@ } }, "@matrixai/db": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@matrixai/db/-/db-1.1.0.tgz", - "integrity": "sha512-qoNbSbGzhZwjq+vXQcJ3Lk/z9NMNDlvSKd6wWe80i5ZMuVM+rbE8PenlWC4EXX77i+pfnV8rV51kz+V5FPt3Vg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@matrixai/db/-/db-1.1.2.tgz", + "integrity": "sha512-wkVEEAJZaWS5Kbg6T/LcI6lS8AdWqszp8L1Dxmk7vwr1ihIkoIVQNSQ+FQryaFpor2eqh/wJaOKjDUpcHo+hEg==", "requires": { "@matrixai/logger": "^2.0.1", "@matrixai/workers": "^1.2.3", "abstract-leveldown": "^7.0.0", "async-mutex": "^0.3.1", - "level": "^6.0.1", + "level": "7.0.1", "levelup": "^5.0.1", "sublevel-prefixer": "^1.0.0", "subleveldown": "^5.0.1", @@ -1605,16 +1605,6 @@ "requires": { "tslib": "^2.3.1" } - }, - "level": { - "version": 
"6.0.1", - "resolved": "https://registry.npmjs.org/level/-/level-6.0.1.tgz", - "integrity": "sha512-psRSqJZCsC/irNhfHzrVZbmPYXDcEYhA5TVNwr+V92jF44rbf86hqGp8fiT702FyiArScYIlPSBTDUASCVNSpw==", - "requires": { - "level-js": "^5.0.0", - "level-packager": "^5.1.0", - "leveldown": "^5.4.0" - } } } }, @@ -3136,48 +3126,12 @@ "dev": true }, "deferred-leveldown": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz", - "integrity": "sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-7.0.0.tgz", + "integrity": "sha512-QKN8NtuS3BC6m0B8vAnBls44tX1WXAFATUsJlruyAYbZpysWV3siH6o/i3g9DCHauzodksO60bdj5NazNbjCmg==", "requires": { - "abstract-leveldown": "~6.2.1", + "abstract-leveldown": "^7.2.0", "inherits": "^2.0.3" - }, - "dependencies": { - "abstract-leveldown": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", - "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", - "requires": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" - }, - "level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } - } } }, "define-properties": { @@ -3362,50 +3316,14 @@ "dev": true }, "encoding-down": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-6.3.0.tgz", - "integrity": "sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-7.1.0.tgz", + "integrity": "sha512-ky47X5jP84ryk5EQmvedQzELwVJPjCgXDQZGeb9F6r4PdChByCGHTBrVcF3h8ynKVJ1wVbkxTsDC8zBROPypgQ==", "requires": { - "abstract-leveldown": "^6.2.1", + "abstract-leveldown": "^7.2.0", "inherits": "^2.0.3", - "level-codec": "^9.0.0", - "level-errors": "^2.0.0" - }, - "dependencies": { - "abstract-leveldown": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.3.0.tgz", - "integrity": "sha512-TU5nlYgta8YrBMNpc9FwQzRbiXsj49gsALsXadbGHt9CROPzX5fB0rWDR5mtdpOOKa5XqRFpbj1QroPAoPzVjQ==", - "requires": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": 
"sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" - }, - "level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } - } + "level-codec": "^10.0.0", + "level-errors": "^3.0.0" } }, "encryptedfs": { @@ -6504,93 +6422,56 @@ "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", "dev": true }, + "level": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/level/-/level-7.0.1.tgz", + "integrity": "sha512-w3E64+ALx2eZf8RV5JL4kIcE0BFAvQscRYd1yU4YVqZN9RGTQxXSvH202xvK15yZwFFxRXe60f13LJjcJ//I4Q==", + "requires": { + "level-js": "^6.1.0", + "level-packager": "^6.0.1", + "leveldown": "^6.1.0" + } + }, "level-codec": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-9.0.2.tgz", - "integrity": "sha512-UyIwNb1lJBChJnGfjmO0OR+ezh2iVu1Kas3nvBS/BzGnx79dv6g7unpKIDNPMhfdTEGoc7mC8uAu51XEtX+FHQ==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-10.0.0.tgz", + "integrity": "sha512-QW3VteVNAp6c/LuV6nDjg7XDXx9XHK4abmQarxZmlRSDyXYk20UdaJTSX6yzVvQ4i0JyWSB7jert0DsyD/kk6g==", "requires": { - "buffer": "^5.6.0" - }, - "dependencies": { - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - } + "buffer": "^6.0.3" } }, "level-concat-iterator": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-3.0.0.tgz", - "integrity": "sha512-UHGiIdj+uiFQorOrURRvJF3Ei0uHc89ciM/aRi0qsWDV2f0HXypeXUPhJKL6DsONgSR76Pc0AI4sKYEYYRn2Dg==" - }, - "level-errors": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", - "integrity": "sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-3.1.0.tgz", + "integrity": "sha512-BWRCMHBxbIqPxJ8vHOvKUsaO0v1sLYZtjN3K2iZJsRBYtp+ONsY6Jfi6hy9K3+zolgQRryhIn2NRZjZnWJ9NmQ==", "requires": { - "errno": "~0.1.1" + "catering": "^2.1.0" } }, + "level-errors": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-3.0.1.tgz", + "integrity": "sha512-tqTL2DxzPDzpwl0iV5+rBCv65HWbHp6eutluHNcVIftKZlQN//b6GEnZDM2CvGZvzGYMwyPtYppYnydBQd2SMQ==" + }, "level-iterator-stream": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz", - "integrity": "sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-5.0.0.tgz", + "integrity": 
"sha512-wnb1+o+CVFUDdiSMR/ZymE2prPs3cjVLlXuDeSq9Zb8o032XrabGEXcTCsBxprAtseO3qvFeGzh6406z9sOTRA==", "requires": { "inherits": "^2.0.4", - "readable-stream": "^3.4.0", - "xtend": "^4.0.2" + "readable-stream": "^3.4.0" } }, "level-js": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/level-js/-/level-js-5.0.2.tgz", - "integrity": "sha512-SnBIDo2pdO5VXh02ZmtAyPP6/+6YTJg2ibLtl9C34pWvmtMEmRTWpra+qO/hifkUtBTOtfx6S9vLDjBsBK4gRg==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/level-js/-/level-js-6.1.0.tgz", + "integrity": "sha512-i7mPtkZm68aewfv0FnIUWvFUFfoyzIvVKnUmuQGrelEkP72vSPTaA1SGneWWoCV5KZJG4wlzbJLp1WxVNGuc6A==", "requires": { - "abstract-leveldown": "~6.2.3", - "buffer": "^5.5.0", + "abstract-leveldown": "^7.2.0", + "buffer": "^6.0.3", "inherits": "^2.0.3", - "ltgt": "^2.1.2" - }, - "dependencies": { - "abstract-leveldown": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", - "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", - "requires": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" - }, - "level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } - } + "ltgt": "^2.1.2", + "run-parallel-limit": "^1.1.0" } }, "level-option-wrap": { @@ -6602,84 +6483,33 @@ } }, "level-packager": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-5.1.1.tgz", - "integrity": "sha512-HMwMaQPlTC1IlcwT3+swhqf/NUO+ZhXVz6TY1zZIIZlIR0YSn8GtAAWmIvKjNY16ZkEg/JcpAuQskxsXqC0yOQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-6.0.1.tgz", + "integrity": "sha512-8Ezr0XM6hmAwqX9uu8IGzGNkWz/9doyPA8Oo9/D7qcMI6meJC+XhIbNYHukJhIn8OGdlzQs/JPcL9B8lA2F6EQ==", "requires": { - "encoding-down": "^6.3.0", - "levelup": "^4.3.2" - }, - "dependencies": { - "level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } - }, - "levelup": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/levelup/-/levelup-4.4.0.tgz", - "integrity": "sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ==", - "requires": { - "deferred-leveldown": "~5.3.0", - "level-errors": "~2.0.0", - "level-iterator-stream": "~4.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - } + "encoding-down": "^7.1.0", + "levelup": "^5.1.1" } }, "level-supports": { - "version": 
"2.0.2", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-2.0.2.tgz", - "integrity": "sha512-dU1W7OnntoCXeNfy9c93K7KEoGNsuP+zZLbUQrIbBzhdZ75U0h8GEcioqmJc1QpYVORyFfeR+kyFeNx2N4t7lg==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-2.1.0.tgz", + "integrity": "sha512-E486g1NCjW5cF78KGPrMDRBYzPuueMZ6VBXHT6gC7A8UYWGiM14fGgp+s/L1oFfDWSPV/+SFkYCmZ0SiESkRKA==" }, "leveldown": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/leveldown/-/leveldown-5.6.0.tgz", - "integrity": "sha512-iB8O/7Db9lPaITU1aA2txU/cBEXAt4vWwKQRrrWuS6XDgbP4QZGj9BL2aNbwb002atoQ/lIotJkfyzz+ygQnUQ==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/leveldown/-/leveldown-6.1.0.tgz", + "integrity": "sha512-8C7oJDT44JXxh04aSSsfcMI8YiaGRhOFI9/pMEL7nWJLVsWajDPTRxsSHTM2WcTVY5nXM+SuRHzPPi0GbnDX+w==", "requires": { - "abstract-leveldown": "~6.2.1", + "abstract-leveldown": "^7.2.0", "napi-macros": "~2.0.0", - "node-gyp-build": "~4.1.0" + "node-gyp-build": "^4.3.0" }, "dependencies": { - "abstract-leveldown": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", - "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", - "requires": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" - }, - "level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==" } } }, @@ -6694,31 +6524,6 @@ "level-iterator-stream": "^5.0.0", "level-supports": "^2.0.1", "queue-microtask": "^1.2.3" - }, - "dependencies": { - "deferred-leveldown": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-7.0.0.tgz", - "integrity": "sha512-QKN8NtuS3BC6m0B8vAnBls44tX1WXAFATUsJlruyAYbZpysWV3siH6o/i3g9DCHauzodksO60bdj5NazNbjCmg==", - "requires": { - "abstract-leveldown": "^7.2.0", - "inherits": "^2.0.3" - } - }, - "level-errors": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-3.0.1.tgz", - "integrity": "sha512-tqTL2DxzPDzpwl0iV5+rBCv65HWbHp6eutluHNcVIftKZlQN//b6GEnZDM2CvGZvzGYMwyPtYppYnydBQd2SMQ==" - }, - "level-iterator-stream": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-5.0.0.tgz", - "integrity": "sha512-wnb1+o+CVFUDdiSMR/ZymE2prPs3cjVLlXuDeSq9Zb8o032XrabGEXcTCsBxprAtseO3qvFeGzh6406z9sOTRA==", - 
"requires": { - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - } } }, "leven": { @@ -7133,9 +6938,10 @@ "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" }, "node-gyp-build": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.1.1.tgz", - "integrity": "sha512-dSq1xmcPDKPZ2EED2S6zw/b9NKsqzXRE6dVr8TVQnI3FJOTteUMuqF3Qqs6LZg+mLGYJWqQzMbIjMtJqTv87nQ==" + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz", + "integrity": "sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg==", + "dev": true }, "node-int64": { "version": "0.4.0", @@ -8135,6 +7941,14 @@ "queue-microtask": "^1.2.2" } }, + "run-parallel-limit": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz", + "integrity": "sha512-jJA7irRNM91jaKc3Hcl1npHsFLOXOoTkPCUL1JEa1R82O2miplXXRaGdjW/KM/98YQWDhJLiSs793CnXfblJUw==", + "requires": { + "queue-microtask": "^1.2.2" + } + }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -8855,11 +8669,71 @@ "ieee754": "^1.1.13" } }, + "deferred-leveldown": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz", + "integrity": "sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw==", + "requires": { + "abstract-leveldown": "~6.2.1", + "inherits": "^2.0.3" + }, + "dependencies": { + "abstract-leveldown": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", + "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", + "requires": { + "buffer": "^5.5.0", + "immediate": "^3.2.3", + "level-concat-iterator": "~2.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + } + } + } + }, + "encoding-down": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-6.3.0.tgz", + "integrity": "sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw==", + "requires": { + "abstract-leveldown": "^6.2.1", + "inherits": "^2.0.3", + "level-codec": "^9.0.0", + "level-errors": "^2.0.0" + } + }, + "level-codec": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-9.0.2.tgz", + "integrity": "sha512-UyIwNb1lJBChJnGfjmO0OR+ezh2iVu1Kas3nvBS/BzGnx79dv6g7unpKIDNPMhfdTEGoc7mC8uAu51XEtX+FHQ==", + "requires": { + "buffer": "^5.6.0" + } + }, "level-concat-iterator": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" }, + "level-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", + "integrity": "sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz", + "integrity": "sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q==", + "requires": { + "inherits": 
"^2.0.4", + "readable-stream": "^3.4.0", + "xtend": "^4.0.2" + } + }, "level-supports": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", diff --git a/package.json b/package.json index f17d64668..e47fd1c84 100644 --- a/package.json +++ b/package.json @@ -52,13 +52,15 @@ "assets": [ "node_modules/jose/**/*", "node_modules/utp-native/**/*", - "node_modules/leveldown/**/*" + "node_modules/leveldown/**/*", + "node_modules/fd-lock/**/*", + "dist/**/*.json" ], "scripts": "dist/workers/polykeyWorker.js" }, "scripts": { "build": "rm -r ./dist || true; tsc -p ./tsconfig.build.json", - "postbuild": "cp -fR src/proto dist", + "postbuild": "cp -fR src/proto dist; cp src/notifications/*.json dist/notifications/; cp src/claims/*.json dist/claims/; cp src/status/*.json dist/status/;", "ts-node": "ts-node --require tsconfig-paths/register", "test": "jest", "lint": "eslint '{src,tests}/**/*.{js,ts}'", @@ -70,7 +72,7 @@ "dependencies": { "@grpc/grpc-js": "1.3.7", "@matrixai/async-init": "^1.2.0", - "@matrixai/db": "^1.1.0", + "@matrixai/db": "^1.1.2", "@matrixai/id": "^2.1.0", "@matrixai/logger": "^2.1.0", "@matrixai/workers": "^1.2.3", @@ -102,6 +104,7 @@ "uuid": "^8.3.0" }, "devDependencies": { + "node-gyp-build": "4.2.3", "@babel/preset-env": "^7.13.10", "@types/cross-spawn": "^6.0.2", "@types/google-protobuf": "^3.7.4", diff --git a/src/PolykeyClient.ts b/src/PolykeyClient.ts index d39259bfe..b124feefa 100644 --- a/src/PolykeyClient.ts +++ b/src/PolykeyClient.ts @@ -62,8 +62,8 @@ class PolykeyClient { grpcClient ?? (await GRPCClientClient.createGRPCClientClient({ nodeId, - host: host, - port: port, + host, + port, tlsConfig: { keyPrivatePem: undefined, certChainPem: undefined }, session, timeout, diff --git a/src/agent/GRPCClientAgent.ts b/src/agent/GRPCClientAgent.ts index ce9a22c9a..6af1e3534 100644 --- a/src/agent/GRPCClientAgent.ts +++ b/src/agent/GRPCClientAgent.ts @@ -1,6 +1,11 @@ -import type { Host, Port, ProxyConfig } from '../network/types'; +import type { ClientDuplexStream } from '@grpc/grpc-js'; +import type { ClientReadableStream } from '@grpc/grpc-js/build/src/call'; +import type { + AsyncGeneratorReadableStreamClient, + AsyncGeneratorDuplexStreamClient, +} from '../grpc/types'; import type { NodeId } from '../nodes/types'; -import type { TLSConfig } from '../network/types'; +import type { Host, Port, ProxyConfig, TLSConfig } from '../network/types'; import type * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; import type * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; import type * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; @@ -79,7 +84,12 @@ class GRPCClientAgent extends GRPCClient { } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsGitInfoGet(...args) { + public vaultsGitInfoGet( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.PackChunk, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsGitInfoGet, @@ -87,12 +97,19 @@ class GRPCClientAgent extends GRPCClient { } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsGitPackGet(...args) { + public vaultsGitPackGet( + ...args + ): ClientDuplexStream { return this.client.vaultsGitPackGet(...args); } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsScan(...args) { + public vaultsScan( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.Vault, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( 
this.client, this.client.vaultsScan, @@ -148,7 +165,13 @@ class GRPCClientAgent extends GRPCClient { } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public nodesCrossSignClaim(...args) { + public nodesCrossSignClaim( + ...args + ): AsyncGeneratorDuplexStreamClient< + nodesPB.CrossSign, + nodesPB.CrossSign, + ClientDuplexStream + > { return grpcUtils.promisifyDuplexStreamCall< nodesPB.CrossSign, nodesPB.CrossSign diff --git a/src/client/GRPCClientClient.ts b/src/client/GRPCClientClient.ts index 19437c188..28f8577cb 100644 --- a/src/client/GRPCClientClient.ts +++ b/src/client/GRPCClientClient.ts @@ -1,8 +1,9 @@ import type { Interceptor } from '@grpc/grpc-js'; +import type { ClientReadableStream } from '@grpc/grpc-js/build/src/call'; +import type { AsyncGeneratorReadableStreamClient } from '../grpc/types'; import type { Session } from '../sessions'; import type { NodeId } from '../nodes/types'; import type { Host, Port, TLSConfig, ProxyConfig } from '../network/types'; - import type * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; import type * as agentPB from '../proto/js/polykey/v1/agent/agent_pb'; import type * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; @@ -119,7 +120,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsList(...args) { + public vaultsList( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.List, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsList, @@ -167,7 +173,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsScan(...args) { + public vaultsScan( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.List, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsScan, @@ -191,7 +202,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultPermissions(...args) { + public vaultPermissions( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.Permission, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsPermissions, @@ -199,7 +215,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsSecretsList(...args) { + public vaultsSecretsList( + ...args + ): AsyncGeneratorReadableStreamClient< + secretsPB.Secret, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsSecretsList, @@ -279,7 +300,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsLog(...args) { + public vaultsLog( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.LogEntry, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsLog, @@ -359,7 +385,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public keysCertsChainGet(...args) { + public keysCertsChainGet( + ...args + ): AsyncGeneratorReadableStreamClient< + keysPB.Certificate, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.keysCertsChainGet, @@ -367,7 +398,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public 
gestaltsGestaltList(...args) { + public gestaltsGestaltList( + ...args + ): AsyncGeneratorReadableStreamClient< + gestaltsPB.Gestalt, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.gestaltsGestaltList, diff --git a/src/client/clientService.ts b/src/client/clientService.ts index db7d1a549..a46f1fa59 100644 --- a/src/client/clientService.ts +++ b/src/client/clientService.ts @@ -1,3 +1,4 @@ +import type * as grpc from '@grpc/grpc-js'; import type PolykeyAgent from '../PolykeyAgent'; import type { KeyManager } from '../keys'; import type { VaultManager } from '../vaults'; @@ -11,8 +12,6 @@ import type { Sigchain } from '../sigchain'; import type { GRPCServer } from '../grpc'; import type { ForwardProxy, ReverseProxy } from '../network'; import type { FileSystem } from '../types'; - -import type * as grpc from '@grpc/grpc-js'; import type { IClientServiceServer } from '../proto/js/polykey/v1/client_service_grpc_pb'; import createStatusRPC from './rpcStatus'; import createSessionsRPC from './rpcSessions'; diff --git a/tests/bin/keys.test.ts b/tests/bin/keys.test.ts index 970eb9348..17ab838e5 100644 --- a/tests/bin/keys.test.ts +++ b/tests/bin/keys.test.ts @@ -11,19 +11,6 @@ jest.mock('@/keys/utils', () => ({ jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ describe('CLI keys', () => { const logger = new Logger('CLI Test', LogLevel.WARN, [new StreamHandler()]); let dataDir: string; diff --git a/tests/bin/secret.test.ts b/tests/bin/secret.test.ts index a9092d94c..0616d2f07 100644 --- a/tests/bin/secret.test.ts +++ b/tests/bin/secret.test.ts @@ -13,19 +13,6 @@ jest.mock('@/keys/utils', () => ({ jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ describe('CLI secrets', () => { const password = 'password'; const logger = new Logger('CLI Test', LogLevel.WARN, [new StreamHandler()]); diff --git a/tests/bootstrap/bootstrap.test.ts b/tests/bootstrap/bootstrap.test.ts index 7bad74e73..ec7dd5a5e 100644 --- a/tests/bootstrap/bootstrap.test.ts +++ b/tests/bootstrap/bootstrap.test.ts @@ -5,6 +5,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import * as bootstrapUtils from '@/bootstrap/utils'; import { Status } from '@/status'; +import config from '@/config'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -64,7 +65,7 @@ describe('Bootstrap', () => { nodePath, logger, }); - const statusPath = path.join(nodePath, 'status.json'); + const statusPath = path.join(nodePath, config.defaults.statusBase); const status = new Status({ statusPath, fs, diff --git a/tests/client/rpcSessions.test.ts b/tests/client/rpcSessions.test.ts index 581e93725..947d87fd9 100644 --- a/tests/client/rpcSessions.test.ts +++ b/tests/client/rpcSessions.test.ts @@ -12,7 +12,6 @@ import * as grpcUtils from '@/grpc/utils'; import * as clientUtils from '@/client/utils'; import * as testUtils from './utils'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: From 4ae3b01415d5e3b0b7bb8fe2364a8b9a3b826b48 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Mon, 13 Dec 2021 20:30:55 +1100 Subject: [PATCH 02/28] Allow STARTING or LIVE for potentially slow command in `tests/bin/agent/status.test.ts` --- tests/bin/agent/status.test.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/bin/agent/status.test.ts b/tests/bin/agent/status.test.ts index 4c31502e8..84782cdb7 100644 --- a/tests/bin/agent/status.test.ts +++ b/tests/bin/agent/status.test.ts @@ -53,8 +53,9 @@ describe('status', () => { dataDir, )); expect(exitCode).toBe(0); + // If the command was slow, it may have become LIVE already expect(JSON.parse(stdout)).toMatchObject({ - status: 'STARTING', + status: expect.stringMatching(/STARTING|LIVE/), pid: agentProcess.pid, }); await status.waitFor('LIVE'); From fea7b6cedcc90a469e989d19b4948fa367908b8c Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Mon, 13 Dec 2021 22:38:11 +1100 Subject: [PATCH 03/28] Make `tests/keys/utils.test.ts` use 1024 size keys --- tests/keys/utils.test.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/keys/utils.test.ts b/tests/keys/utils.test.ts index 9d3e30a6e..11b2a1563 100644 --- a/tests/keys/utils.test.ts +++ b/tests/keys/utils.test.ts @@ -3,14 +3,14 @@ import * as keysUtils from '@/keys/utils'; describe('utils', () => { test('key pair copy', async () => { - const keyPair = await keysUtils.generateKeyPair(4096); + const keyPair = await keysUtils.generateKeyPair(1024); const keyPairPem = keysUtils.keyPairToPem(keyPair); const keyPair2 = keysUtils.keyPairCopy(keyPair); const keyPairPem2 = keysUtils.keyPairToPem(keyPair2); expect(keyPairPem).toStrictEqual(keyPairPem2); }); test('to and from der encoding', async () => { - const keyPair = await keysUtils.generateKeyPair(4096); + const keyPair = await keysUtils.generateKeyPair(1024); const cert = keysUtils.generateCertificate( keyPair.publicKey, keyPair.privateKey, @@ -24,7 +24,7 @@ describe('utils', () => { expect(certPem).toBe(certPem_); }); test('certificate copy', async () => { - const keyPair = await keysUtils.generateKeyPair(4096); + const keyPair = await keysUtils.generateKeyPair(1024); 
const cert = keysUtils.generateCertificate( keyPair.publicKey, keyPair.privateKey, @@ -37,7 +37,7 @@ describe('utils', () => { expect(certPem).toBe(certPem2); }); test('encryption and decryption of private key', async () => { - const keyPair = await keysUtils.generateKeyPair(4096); + const keyPair = await keysUtils.generateKeyPair(1024); // Try first password const password = (await keysUtils.getRandomBytes(10)).toString('base64'); const privateKeyPemEncrypted = keysUtils.encryptPrivateKey( From 3a915ebc2de6b19dd19d6bf02645970402a69d62 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Mon, 13 Dec 2021 22:39:04 +1100 Subject: [PATCH 04/28] KeyManager tests can use 1024 size keys, and removed asynchronous bug in change password test --- tests/keys/KeyManager.test.ts | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/tests/keys/KeyManager.test.ts b/tests/keys/KeyManager.test.ts index a1771bcbb..32a343ab9 100644 --- a/tests/keys/KeyManager.test.ts +++ b/tests/keys/KeyManager.test.ts @@ -24,7 +24,7 @@ describe('KeyManager', () => { let mockedGenerateDeterministicKeyPair; beforeAll(async () => { // Key pair generated once for mocking - keyPair = await keysUtils.generateKeyPair(4096); + keyPair = await keysUtils.generateKeyPair(1024); workerManager = await workersUtils.createWorkerManager({ cores: 1, logger, @@ -54,8 +54,8 @@ describe('KeyManager', () => { test('KeyManager readiness', async () => { const keysPath = `${dataDir}/keys`; const keyManager = await KeyManager.createKeyManager({ - password, keysPath, + password, logger, }); await expect(async () => { @@ -78,8 +78,8 @@ describe('KeyManager', () => { test('constructs root key pair, root cert, root certs and db key', async () => { const keysPath = `${dataDir}/keys`; const keyManager = await KeyManager.createKeyManager({ - password, keysPath, + password, logger, }); const keysPathContents = await fs.promises.readdir(keysPath); @@ -210,8 +210,9 @@ describe('KeyManager', () => { }); // No way we can encrypt 1000 bytes without a ridiculous key size const plainText = Buffer.from(new Array(1000 + 1).join('A')); + const maxSize = keysUtils.maxEncryptSize(keysUtils.publicKeyBitSize(keyPair.publicKey) / 8, 32); await expect(keyManager.encryptWithRootKeyPair(plainText)).rejects.toThrow( - 'Maximum plain text byte size is 446', + `Maximum plain text byte size is ${maxSize}`, ); await keyManager.stop(); }); @@ -257,16 +258,12 @@ describe('KeyManager', () => { logger, }); }).rejects.toThrow(keysErrors.ErrorRootKeysParse); - await expect( - (async () => { - await KeyManager.createKeyManager({ - password: 'newpassword', - keysPath, - logger, - }); - await keyManager.stop(); - })(), - ).resolves.toBeUndefined(); + await KeyManager.createKeyManager({ + password: 'newpassword', + keysPath, + logger, + }); + await keyManager.stop(); }); test('can reset root certificate', async () => { const keysPath = `${dataDir}/keys`; From 7fda00b9937e2b1c3b016b0f09a0c5788145e64c Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Mon, 13 Dec 2021 22:40:08 +1100 Subject: [PATCH 05/28] The `tests/agent` now uses the global shared key --- tests/agent/GRPCClientAgent.test.ts | 40 +++++++++++++++++------------ tests/globalSetup.ts | 2 +- 2 files changed, 24 insertions(+), 18 deletions(-) diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index 22295ec4d..5fb406665 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -9,7 +9,6 @@ import path from 'path'; import Logger, 
{ LogLevel, StreamHandler } from '@matrixai/logger'; import { Mutex } from 'async-mutex'; import { DB } from '@matrixai/db'; - import { KeyManager } from '@/keys'; import { NodeManager } from '@/nodes'; import { VaultManager } from '@/vaults'; @@ -17,23 +16,24 @@ import { Sigchain } from '@/sigchain'; import { ACL } from '@/acl'; import { GestaltGraph } from '@/gestalts'; import { errors as agentErrors } from '@/agent'; -import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '@/proto/js/polykey/v1/nodes/nodes_pb'; import { ForwardProxy, ReverseProxy } from '@/network'; import { NotificationsManager } from '@/notifications'; import { utils as claimsUtils, errors as claimsErrors } from '@/claims'; import { makeNodeId } from '@/nodes/utils'; -import * as testUtils from './utils'; +import * as keysUtils from '@/keys/utils'; +import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; +import * as nodesPB from '@/proto/js/polykey/v1/nodes/nodes_pb'; +import * as testAgentUtils from './utils'; +import * as testUtils from '../utils'; import TestNodeConnection from '../nodes/TestNodeConnection'; -import { makeCrypto } from '../utils'; -// Mocks. -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +jest + .spyOn(keysUtils, 'generateKeyPair') + .mockImplementation(testUtils.getGlobalKeyPair); +jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockImplementation(testUtils.getGlobalKeyPair); describe('GRPC agent', () => { const password = 'password'; @@ -101,7 +101,13 @@ describe('GRPC agent', () => { dbPath: dbPath, fs: fs, logger: logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + } + }, }); acl = await ACL.createACL({ @@ -153,18 +159,18 @@ describe('GRPC agent', () => { }); await nodeManager.start(); - [server, port] = await testUtils.openTestAgentServer({ + [server, port] = await testAgentUtils.openTestAgentServer({ keyManager, vaultManager, nodeManager, sigchain, notificationsManager, }); - client = await testUtils.openTestAgentClient(port); + client = await testAgentUtils.openTestAgentClient(port); }, global.polykeyStartupTimeout); afterEach(async () => { - await testUtils.closeTestAgentClient(client); - await testUtils.closeTestAgentServer(server); + await testAgentUtils.closeTestAgentClient(client); + await testAgentUtils.closeTestAgentServer(server); await vaultManager.stop(); await notificationsManager.stop(); diff --git a/tests/globalSetup.ts b/tests/globalSetup.ts index 8a976ac68..d2918915c 100644 --- a/tests/globalSetup.ts +++ b/tests/globalSetup.ts @@ -21,7 +21,7 @@ async function setup() { console.log(`Creating global.keyPairDir: ${keyPairDir}`); await fs.promises.rm(keyPairDir, { force: true, recursive: true }); await fs.promises.mkdir(keyPairDir); - const rootKeyPair = await keysUtils.generateKeyPair(1024); + const rootKeyPair = await keysUtils.generateKeyPair(4096); const rootKeyPairPem = keysUtils.keyPairToPem(rootKeyPair); await Promise.all([ fs.promises.writeFile( From 8ad709f93344a33ca8350d28e461010d78f5ec4c Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Tue, 14 Dec 2021 00:33:25 +1100 Subject: [PATCH 06/28] Fixing tests, acl, agent, bin, bootstrap tests passing 
--- jest.config.js | 41 +- src/PolykeyAgent.ts | 512 ++--- src/client/rpcStatus.ts | 12 +- src/config.ts | 2 + src/errors.ts | 1 + src/network/Connection.ts | 6 - src/network/ConnectionForward.ts | 161 +- src/network/ConnectionReverse.ts | 169 +- src/network/ForwardProxy.ts | 84 +- src/network/ReverseProxy.ts | 88 +- src/network/errors.ts | 169 +- src/nodes/NodeConnection.ts | 10 +- src/nodes/NodeManager.ts | 34 +- src/schema/Schema.ts | 18 +- src/vaults/VaultManager.ts | 50 +- tests/PolykeyAgent.test.ts | 306 ++- tests/PolykeyClient.test.ts | 20 +- tests/acl/ACL.test.ts | 22 +- tests/acl/utils.test.ts | 2 +- tests/agent/GRPCClientAgent.test.ts | 49 +- tests/bin/agent/lock.test.ts | 46 +- tests/bin/agent/lockall.test.ts | 61 +- tests/bin/agent/start.test.ts | 4 +- tests/bin/agent/status.test.ts | 30 +- tests/bin/agent/unlock.test.ts | 33 +- tests/bin/bootstrap.test.ts | 2 +- tests/bin/{ => identities}/identities.test.ts | 102 +- tests/bin/{ => keys}/keys.test.ts | 26 +- tests/bin/{ => nodes}/nodes.test.ts | 70 +- .../{ => notifications}/notifications.test.ts | 73 +- tests/bin/polykey.test.ts | 4 +- .../secrets.test.ts} | 20 +- tests/bin/sessions.test.ts | 66 +- ...t.ts => utils.retryAuthentication.test.ts} | 3 - tests/bin/utils.ts | 106 - tests/bin/{ => vaults}/vaults.test.ts | 70 +- tests/bootstrap/bootstrap.test.ts | 82 - tests/bootstrap/utils.test.ts | 172 ++ tests/claims/utils.test.ts | 51 +- tests/client/rpcAgent.test.ts | 3 +- tests/client/rpcNodes.test.ts | 11 +- tests/discovery/Discovery.test.ts | 7 +- tests/global.d.ts | 12 + tests/globalSetup.ts | 42 +- tests/globalTeardown.ts | 23 +- tests/keys/KeyManager.test.ts | 5 +- tests/network/ForwardProxy.test.ts | 1738 +++++++++-------- tests/network/index.test.ts | 22 +- tests/nodes/TestNodeConnection.ts | 4 +- tests/setup.ts | 59 - tests/status/Status.test.ts | 10 +- tests/utils.ts | 203 +- tests/vaults/utils.test.ts | 6 +- 53 files changed, 2537 insertions(+), 2385 deletions(-) rename tests/bin/{ => identities}/identities.test.ts (89%) rename tests/bin/{ => keys}/keys.test.ts (87%) rename tests/bin/{ => nodes}/nodes.test.ts (83%) rename tests/bin/{ => notifications}/notifications.test.ts (84%) rename tests/bin/{secret.test.ts => secrets/secrets.test.ts} (91%) rename tests/bin/{utils.retryAuth.test.ts => utils.retryAuthentication.test.ts} (98%) rename tests/bin/{ => vaults}/vaults.test.ts (89%) delete mode 100644 tests/bootstrap/bootstrap.test.ts create mode 100644 tests/bootstrap/utils.test.ts create mode 100644 tests/global.d.ts diff --git a/jest.config.js b/jest.config.js index e372fda36..9febd9bab 100644 --- a/jest.config.js +++ b/jest.config.js @@ -1,3 +1,7 @@ +const os = require('os'); +const path = require('path'); +const fs = require('fs'); +const process = require('process'); const { pathsToModuleNameMapper } = require('ts-jest/utils'); const { compilerOptions } = require('./tsconfig'); @@ -10,6 +14,31 @@ const moduleNameMapper = pathsToModuleNameMapper( // https://github.com/panva/jose/discussions/105 moduleNameMapper['^jose/(.*)$'] = "/node_modules/jose/dist/node/cjs/$1"; +// Global variables that are shared across the jest worker pool +// These variables must be static and serialisable +const globals = { + // Absolute directory to the project root + projectDir: __dirname, + // Absolute directory to the test root + testDir: path.join(__dirname, 'tests'), + dataDir: fs.mkdtempSync( + path.join(os.tmpdir(), 'polykey-test-global-'), + ), + // Default global password for global agent or global key + password: 'password', + // 
Default asynchronous test timeout + defaultTimeout: 20000, + polykeyStartupTimeout: 30000, + failedConnectionTimeout: 50000, + // Timeouts rely on setTimeout which takes 32 bit numbers + maxTimeout: Math.pow(2, 31) - 1, +}; + +// The `globalSetup` and `globalTeardown` cannot access the `globals` +// They run in their own process context +// They can receive process environment +process.env['GLOBAL_DATA_DIR'] = globals.dataDir; + module.exports = { testEnvironment: "node", verbose: true, @@ -23,13 +52,21 @@ module.exports = { "^.+\\.tsx?$": "ts-jest", "^.+\\.jsx?$": "babel-jest" }, + globals, + // Global setup script executed once before all test files + globalSetup: "/tests/globalSetup.ts", + // Global teardown script executed once after all test files + globalTeardown: "/tests/globalTeardown.ts", + // Setup files are executed before each test file + // Can access globals setupFiles: [ "/tests/setup.ts" ], + // Setup files after env are executed before each test file + // after the jest test environment is installed + // Can access globals setupFilesAfterEnv: [ "/tests/setupAfterEnv.ts" ], - globalSetup: "/tests/globalSetup.ts", - globalTeardown: "/tests/globalTeardown.ts", moduleNameMapper: moduleNameMapper }; diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index d32229ff6..23dd2eba9 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -150,158 +150,180 @@ class PolykeyAgent { const dbPath = path.join(statePath, config.defaults.dbBase); const keysPath = path.join(statePath, config.defaults.keysBase); const vaultsPath = path.join(statePath, config.defaults.vaultsBase); - status = - status ?? - new Status({ - statusPath, - fs: fs, - logger: logger.getChild(Status.name), - }); - // Start locking the status - await status.start({ pid: process.pid }); - schema = - schema ?? - (await Schema.createSchema({ - statePath, - fs, - logger: logger.getChild(Schema.name), - fresh, - })); - keyManager = - keyManager ?? - (await KeyManager.createKeyManager({ - ...keysConfig_, - keysPath, - password, - fs, - logger: logger.getChild(KeyManager.name), - fresh, - })); - db = - db ?? - (await DB.createDB({ - dbPath, - crypto: { - key: keyManager.dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, + try { + status = + status ?? + new Status({ + statusPath, + fs: fs, + logger: logger.getChild(Status.name), + }); + // Start locking the status + await status.start({ pid: process.pid }); + schema = + schema ?? + (await Schema.createSchema({ + statePath, + fs, + logger: logger.getChild(Schema.name), + fresh, + })); + keyManager = + keyManager ?? + (await KeyManager.createKeyManager({ + ...keysConfig_, + keysPath, + password, + fs, + logger: logger.getChild(KeyManager.name), + fresh, + })); + db = + db ?? + (await DB.createDB({ + dbPath, + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, }, - }, - fs, - logger: logger.getChild(DB.name), - fresh, - })); - identitiesManager = - identitiesManager ?? - (await IdentitiesManager.createIdentitiesManager({ - db, - logger: logger.getChild(IdentitiesManager.name), - fresh, - })); - // Registering providers - const githubProvider = new providers.GithubProvider({ - clientId: config.providers['github.com'].clientId, - logger: logger.getChild(providers.GithubProvider.name), - }); - identitiesManager.registerProvider(githubProvider); - sigchain = - sigchain ?? 
- (await Sigchain.createSigchain({ - keyManager, - db, - logger: logger.getChild(Sigchain.name), - fresh, - })); - acl = - acl ?? - (await ACL.createACL({ - db, - logger: logger.getChild(ACL.name), - fresh, - })); - gestaltGraph = - gestaltGraph ?? - (await GestaltGraph.createGestaltGraph({ - db, - acl, - logger: logger.getChild(GestaltGraph.name), - fresh, - })); - fwdProxy = - fwdProxy ?? - new ForwardProxy({ - ...forwardProxyConfig_, - logger: logger.getChild(ForwardProxy.name), - }); - revProxy = - revProxy ?? - new ReverseProxy({ - ...reverseProxyConfig_, - logger: logger.getChild(ReverseProxy.name), - }); - nodeManager = - nodeManager ?? - (await NodeManager.createNodeManager({ - db, - seedNodes, - sigchain, - keyManager, - fwdProxy, - revProxy, - logger: logger.getChild(NodeManager.name), - fresh, - })); - discovery = - discovery ?? - (await Discovery.createDiscovery({ - gestaltGraph, - identitiesManager, - nodeManager, - logger: logger.getChild(Discovery.name), - })); - vaultManager = - vaultManager ?? - (await VaultManager.createVaultManager({ - vaultsKey: keyManager.vaultKey, - vaultsPath, - keyManager, - nodeManager, - gestaltGraph, - acl, - db, - fs, - logger: logger.getChild(VaultManager.name), - fresh, - })); - notificationsManager = - notificationsManager ?? - (await NotificationsManager.createNotificationsManager({ - acl, - db, - nodeManager, - keyManager, - logger: logger.getChild(NotificationsManager.name), - fresh, - })); - sessionManager = - sessionManager ?? - (await SessionManager.createSessionManager({ - db, - keyManager, - logger: logger.getChild(SessionManager.name), - fresh, - })); - grpcServerClient = - grpcServerClient ?? - new GRPCServer({ - logger: logger.getChild(GRPCServer.name + 'Client'), - }); - grpcServerAgent = - grpcServerAgent ?? - new GRPCServer({ - logger: logger.getChild(GRPCServer.name + 'Agent'), + fs, + logger: logger.getChild(DB.name), + fresh, + })); + identitiesManager = + identitiesManager ?? + (await IdentitiesManager.createIdentitiesManager({ + db, + logger: logger.getChild(IdentitiesManager.name), + fresh, + })); + // Registering providers + const githubProvider = new providers.GithubProvider({ + clientId: config.providers['github.com'].clientId, + logger: logger.getChild(providers.GithubProvider.name), }); + identitiesManager.registerProvider(githubProvider); + sigchain = + sigchain ?? + (await Sigchain.createSigchain({ + keyManager, + db, + logger: logger.getChild(Sigchain.name), + fresh, + })); + acl = + acl ?? + (await ACL.createACL({ + db, + logger: logger.getChild(ACL.name), + fresh, + })); + gestaltGraph = + gestaltGraph ?? + (await GestaltGraph.createGestaltGraph({ + db, + acl, + logger: logger.getChild(GestaltGraph.name), + fresh, + })); + fwdProxy = + fwdProxy ?? + new ForwardProxy({ + ...forwardProxyConfig_, + logger: logger.getChild(ForwardProxy.name), + }); + revProxy = + revProxy ?? + new ReverseProxy({ + ...reverseProxyConfig_, + logger: logger.getChild(ReverseProxy.name), + }); + nodeManager = + nodeManager ?? + (await NodeManager.createNodeManager({ + db, + seedNodes, + sigchain, + keyManager, + fwdProxy, + revProxy, + logger: logger.getChild(NodeManager.name), + fresh, + })); + // Discovery uses in-memory CreateDestroy pattern + // Therefore it should be destroyed during stop + discovery = + discovery ?? + (await Discovery.createDiscovery({ + gestaltGraph, + identitiesManager, + nodeManager, + logger: logger.getChild(Discovery.name), + })); + vaultManager = + vaultManager ?? 
+ (await VaultManager.createVaultManager({ + vaultsKey: keyManager.vaultKey, + vaultsPath, + keyManager, + nodeManager, + gestaltGraph, + acl, + db, + fs, + logger: logger.getChild(VaultManager.name), + fresh, + })); + notificationsManager = + notificationsManager ?? + (await NotificationsManager.createNotificationsManager({ + acl, + db, + nodeManager, + keyManager, + logger: logger.getChild(NotificationsManager.name), + fresh, + })); + sessionManager = + sessionManager ?? + (await SessionManager.createSessionManager({ + db, + keyManager, + logger: logger.getChild(SessionManager.name), + fresh, + })); + grpcServerClient = + grpcServerClient ?? + new GRPCServer({ + logger: logger.getChild(GRPCServer.name + 'Client'), + }); + grpcServerAgent = + grpcServerAgent ?? + new GRPCServer({ + logger: logger.getChild(GRPCServer.name + 'Agent'), + }); + } catch (e) { + logger.warn(`Failed Creating ${this.name}`); + await sessionManager?.stop(); + await notificationsManager?.stop(); + await vaultManager?.stop(); + await discovery?.destroy(); + await nodeManager?.stop(); + await revProxy?.stop(); + await fwdProxy?.stop(); + await gestaltGraph?.stop(); + await acl?.stop(); + await sigchain?.stop(); + await identitiesManager?.stop(); + await db?.stop(); + await keyManager?.stop(); + await schema?.stop(); + await status?.stop({}); + throw e; + } const polykeyAgent = new PolykeyAgent({ nodePath, status, @@ -429,100 +451,100 @@ class PolykeyAgent { networkConfig?: NetworkConfig; fresh?: boolean; }) { - this.logger.info(`Starting ${this.constructor.name}`); - const networkConfig_ = { - ...config.defaults.networkConfig, - ...utils.filterEmptyObject(networkConfig), - }; - await this.status.start({ pid: process.pid }); - await this.schema.start({ fresh }); - const agentService = createAgentService({ - keyManager: this.keyManager, - vaultManager: this.vaultManager, - nodeManager: this.nodeManager, - sigchain: this.sigchain, - notificationsManager: this.notificationsManager, - }); - const clientService = createClientService({ - polykeyAgent: this, - discovery: this.discovery, - gestaltGraph: this.gestaltGraph, - identitiesManager: this.identitiesManager, - keyManager: this.keyManager, - nodeManager: this.nodeManager, - notificationsManager: this.notificationsManager, - sessionManager: this.sessionManager, - vaultManager: this.vaultManager, - sigchain: this.sigchain, - grpcServerClient: this.grpcServerClient, - grpcServerAgent: this.grpcServerAgent, - fwdProxy: this.fwdProxy, - revProxy: this.revProxy, - fs: this.fs, - }); - - // Starting modules - await this.keyManager.start({ - password, - fresh, - }); - await this.db.start({ fresh }); - await this.identitiesManager.start({ fresh }); - await this.sigchain.start({ fresh }); - await this.acl.start({ fresh }); - await this.gestaltGraph.start({ fresh }); - - // GRPC Server - const tlsConfig = { - keyPrivatePem: this.keyManager.getRootKeyPairPem().privateKey, - certChainPem: await this.keyManager.getRootCertChainPem(), - }; - - // Client server - await this.grpcServerClient.start({ - services: [[ClientServiceService, clientService]], - host: networkConfig_.clientHost, - port: networkConfig_.clientPort, - tlsConfig, - }); - // Agent server - await this.grpcServerAgent.start({ - services: [[AgentServiceService, agentService]], - host: networkConfig_.agentHost, - port: networkConfig_.agentPort, - }); - await this.fwdProxy.start({ - proxyHost: networkConfig_.proxyHost, - proxyPort: networkConfig_.proxyPort, - egressHost: networkConfig_.egressHost, - egressPort: 
networkConfig_.egressPort, - tlsConfig, - }); - await this.revProxy.start({ - serverHost: this.grpcServerAgent.host, - serverPort: this.grpcServerAgent.port, - ingressHost: networkConfig_.ingressHost, - ingressPort: networkConfig_.ingressPort, - tlsConfig, - }); - - await this.nodeManager.start({ fresh }); - await this.nodeManager.getConnectionsToSeedNodes(); - await this.nodeManager.syncNodeGraph(); - await this.vaultManager.start({ fresh }); - await this.notificationsManager.start({ fresh }); - await this.sessionManager.start({ fresh }); - - await this.status.finishStart({ - pid: process.pid, - nodeId: this.keyManager.getNodeId(), - clientHost: this.grpcServerClient.host, - clientPort: this.grpcServerClient.port, - ingressHost: this.revProxy.ingressHost, - ingressPort: this.revProxy.ingressPort, - }); - - this.logger.info(`Started ${this.constructor.name}`); + try { + this.logger.info(`Starting ${this.constructor.name}`); + const networkConfig_ = { + ...config.defaults.networkConfig, + ...utils.filterEmptyObject(networkConfig), + }; + await this.status.start({ pid: process.pid }); + await this.schema.start({ fresh }); + const agentService = createAgentService({ + keyManager: this.keyManager, + vaultManager: this.vaultManager, + nodeManager: this.nodeManager, + sigchain: this.sigchain, + notificationsManager: this.notificationsManager, + }); + const clientService = createClientService({ + polykeyAgent: this, + discovery: this.discovery, + gestaltGraph: this.gestaltGraph, + identitiesManager: this.identitiesManager, + keyManager: this.keyManager, + nodeManager: this.nodeManager, + notificationsManager: this.notificationsManager, + sessionManager: this.sessionManager, + vaultManager: this.vaultManager, + sigchain: this.sigchain, + grpcServerClient: this.grpcServerClient, + grpcServerAgent: this.grpcServerAgent, + fwdProxy: this.fwdProxy, + revProxy: this.revProxy, + fs: this.fs, + }); + // Starting modules + await this.keyManager.start({ + password, + fresh, + }); + await this.db.start({ fresh }); + await this.identitiesManager.start({ fresh }); + await this.sigchain.start({ fresh }); + await this.acl.start({ fresh }); + await this.gestaltGraph.start({ fresh }); + // GRPC Server + const tlsConfig = { + keyPrivatePem: this.keyManager.getRootKeyPairPem().privateKey, + certChainPem: await this.keyManager.getRootCertChainPem(), + }; + // Client server + await this.grpcServerClient.start({ + services: [[ClientServiceService, clientService]], + host: networkConfig_.clientHost, + port: networkConfig_.clientPort, + tlsConfig, + }); + // Agent server + await this.grpcServerAgent.start({ + services: [[AgentServiceService, agentService]], + host: networkConfig_.agentHost, + port: networkConfig_.agentPort, + }); + await this.fwdProxy.start({ + proxyHost: networkConfig_.proxyHost, + proxyPort: networkConfig_.proxyPort, + egressHost: networkConfig_.egressHost, + egressPort: networkConfig_.egressPort, + tlsConfig, + }); + await this.revProxy.start({ + serverHost: this.grpcServerAgent.host, + serverPort: this.grpcServerAgent.port, + ingressHost: networkConfig_.ingressHost, + ingressPort: networkConfig_.ingressPort, + tlsConfig, + }); + await this.nodeManager.start({ fresh }); + await this.nodeManager.getConnectionsToSeedNodes(); + await this.nodeManager.syncNodeGraph(); + await this.vaultManager.start({ fresh }); + await this.notificationsManager.start({ fresh }); + await this.sessionManager.start({ fresh }); + await this.status.finishStart({ + pid: process.pid, + nodeId: this.keyManager.getNodeId(), + 
clientHost: this.grpcServerClient.host, + clientPort: this.grpcServerClient.port, + ingressHost: this.revProxy.getIngressHost(), + ingressPort: this.revProxy.getIngressPort(), + }); + this.logger.info(`Started ${this.constructor.name}`); + } catch (e) { + this.logger.warn(`Failed Starting ${this.constructor.name}`); + await this.stop(); + throw e; + } } /** @@ -534,6 +556,7 @@ class PolykeyAgent { await this.sessionManager.stop(); await this.notificationsManager.stop(); await this.vaultManager.stop(); + await this.discovery.destroy(); await this.nodeManager.stop(); await this.revProxy.stop(); await this.fwdProxy.stop(); @@ -556,7 +579,6 @@ class PolykeyAgent { await this.db.start(); await this.sessionManager.destroy(); await this.notificationsManager.destroy(); - await this.discovery.destroy(); await this.vaultManager.destroy(); await this.nodeManager.destroy(); await this.gestaltGraph.destroy(); diff --git a/src/client/rpcStatus.ts b/src/client/rpcStatus.ts index d0d1e5480..0f7b6e449 100644 --- a/src/client/rpcStatus.ts +++ b/src/client/rpcStatus.ts @@ -36,14 +36,14 @@ const createStatusRPC = ({ response.setNodeId(keyManager.getNodeId()); response.setClientHost(grpcServerClient.host); response.setClientPort(grpcServerClient.port); - response.setIngressHost(revProxy.ingressHost); - response.setIngressPort(revProxy.ingressPort); - response.setEgressHost(fwdProxy.egressHost); - response.setEgressPort(fwdProxy.egressPort); + response.setIngressHost(revProxy.getIngressHost()); + response.setIngressPort(revProxy.getIngressPort()); + response.setEgressHost(fwdProxy.getEgressHost()); + response.setEgressPort(fwdProxy.getEgressPort()); response.setAgentHost(grpcServerAgent.host); response.setAgentPort(grpcServerAgent.port); - response.setProxyHost(fwdProxy.proxyHost); - response.setProxyPort(fwdProxy.proxyPort); + response.setProxyHost(fwdProxy.getProxyHost()); + response.setProxyPort(fwdProxy.getProxyPort()); response.setRootPublicKeyPem(keyManager.getRootKeyPairPem().publicKey); response.setRootCertPem(keyManager.getRootCertPem()); response.setRootCertChainPem(await keyManager.getRootCertChainPem()); diff --git a/src/config.ts b/src/config.ts index 3f07c0ab7..1f48d623c 100644 --- a/src/config.ts +++ b/src/config.ts @@ -55,8 +55,10 @@ const config = { */ defaults: { nodePath: getDefaultNodePath(), + statusBase: 'status.json', stateBase: 'state', + stateVersionBase: 'version', dbBase: 'db', keysBase: 'keys', vaultsBase: 'vaults', diff --git a/src/errors.ts b/src/errors.ts index d29fe973a..dfa44bb1a 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -58,5 +58,6 @@ export * from './claims/errors'; export * from './sigchain/errors'; export * from './bootstrap/errors'; export * from './notifications/errors'; +export * from './schema/errors'; export * from './status/errors'; export * from './utils/errors'; diff --git a/src/network/Connection.ts b/src/network/Connection.ts index e901f7980..34e31b290 100644 --- a/src/network/Connection.ts +++ b/src/network/Connection.ts @@ -1,6 +1,5 @@ import type UTP from 'utp-native'; import type { Host, Port, Address, TLSConfig } from './types'; - import Logger from '@matrixai/logger'; import * as networkUtils from './utils'; import { promisify } from '../utils'; @@ -15,7 +14,6 @@ abstract class Connection { protected logger: Logger; protected timeout: ReturnType; - protected _started: boolean = false; protected _composed: boolean = false; constructor({ @@ -46,10 +44,6 @@ abstract class Connection { this.timeoutTime = timeoutTime; } - get started(): boolean { - return 
this._started; - } - get composed(): boolean { return this._composed; } diff --git a/src/network/ConnectionForward.ts b/src/network/ConnectionForward.ts index 2ea6f8f62..0d7c2c7f6 100644 --- a/src/network/ConnectionForward.ts +++ b/src/network/ConnectionForward.ts @@ -7,6 +7,7 @@ import type { NodeId } from '../nodes/types'; import type { AbstractConstructorParameters, Timer } from '../types'; import tls from 'tls'; +import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import Connection from './Connection'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; @@ -18,6 +19,8 @@ type ConnectionsForward = { client: Map; }; +interface ConnectionForward extends StartStop {} +@StartStop() class ConnectionForward extends Connection { public readonly nodeId: NodeId; public readonly pingIntervalTime: number; @@ -53,91 +56,78 @@ class ConnectionForward extends Connection { }: { timer?: Timer; } = {}): Promise { + this.logger.info('Starting Connection Forward'); + // Promise for ready + const { p: readyP, resolveP: resolveReadyP } = promise(); + // Promise for start errors + const { p: errorP, rejectP: rejectErrorP } = promise(); + // Promise for secure connection + const { p: secureConnectP, resolveP: resolveSecureConnectP } = + promise(); + this.resolveReadyP = resolveReadyP; + this.utpSocket.on('message', this.handleMessage); + const handleStartError = (e) => { + rejectErrorP(e); + }; + this.utpConn = this.utpSocket.connect(this.port, this.host); + this.tlsSocket = tls.connect( + { + key: Buffer.from(this.tlsConfig.keyPrivatePem, 'ascii'), + cert: Buffer.from(this.tlsConfig.certChainPem, 'ascii'), + socket: this.utpConn, + rejectUnauthorized: false, + }, + () => { + resolveSecureConnectP(); + }, + ); + this.tlsSocket.once('error', handleStartError); + this.tlsSocket.on('end', this.handleEnd); + this.tlsSocket.on('close', this.handleClose); + let punchInterval; try { - if (this._started) { - return; - } - this.logger.info('Starting Connection Forward'); - this._started = true; - // Promise for ready - const { p: readyP, resolveP: resolveReadyP } = promise(); - // Promise for start errors - const { p: errorP, rejectP: rejectErrorP } = promise(); - // Promise for secure connection - const { p: secureConnectP, resolveP: resolveSecureConnectP } = - promise(); - this.resolveReadyP = resolveReadyP; - this.utpSocket.on('message', this.handleMessage); - const handleStartError = (e) => { - rejectErrorP(e); - }; - this.utpConn = this.utpSocket.connect(this.port, this.host); - this.tlsSocket = tls.connect( - { - key: Buffer.from(this.tlsConfig.keyPrivatePem, 'ascii'), - cert: Buffer.from(this.tlsConfig.certChainPem, 'ascii'), - socket: this.utpConn, - rejectUnauthorized: false, - }, - () => { - resolveSecureConnectP(); - }, - ); - this.tlsSocket.once('error', handleStartError); - this.tlsSocket.on('end', this.handleEnd); - this.tlsSocket.on('close', this.handleClose); - let punchInterval; - try { - // Send punch signal + // Send punch signal + await this.send(networkUtils.pingBuffer); + punchInterval = setInterval(async () => { await this.send(networkUtils.pingBuffer); - punchInterval = setInterval(async () => { - await this.send(networkUtils.pingBuffer); - }, 1000); - await Promise.race([ - Promise.all([readyP, secureConnectP]).then(() => {}), - errorP, - ...(timer != null ? 
[timer.timerP] : []), - ]); - } catch (e) { - await this.stop(); - throw new networkErrors.ErrorConnectionStart(e.message, { - code: e.code, - errno: e.errno, - syscall: e.syscall, - }); - } finally { - clearInterval(punchInterval); - } - if (timer?.timedOut) { - await this.stop(); - throw new networkErrors.ErrorConnectionStartTimeout(); - } - const serverCertChain = networkUtils.getCertificateChain(this.tlsSocket); - try { - networkUtils.verifyServerCertificateChain(this.nodeId, serverCertChain); - } catch (e) { - await this.stop(); - throw e; - } - this.tlsSocket.off('error', handleStartError); - this.tlsSocket.on('error', this.handleError); - await this.startPingInterval(); - this.serverCertChain = serverCertChain; - this.connections.ingress.set(this.address, this); - this.startTimeout(); - this.logger.info('Started Connection Forward'); + }, 1000); + await Promise.race([ + Promise.all([readyP, secureConnectP]).then(() => {}), + errorP, + ...(timer != null ? [timer.timerP] : []), + ]); + } catch (e) { + await this.stop(); + throw new networkErrors.ErrorConnectionStart(e.message, { + code: e.code, + errno: e.errno, + syscall: e.syscall, + }); + } finally { + clearInterval(punchInterval); + } + if (timer?.timedOut) { + await this.stop(); + throw new networkErrors.ErrorConnectionStartTimeout(); + } + const serverCertChain = networkUtils.getCertificateChain(this.tlsSocket); + try { + networkUtils.verifyServerCertificateChain(this.nodeId, serverCertChain); } catch (e) { - this._started = false; + await this.stop(); throw e; } + this.tlsSocket.off('error', handleStartError); + this.tlsSocket.on('error', this.handleError); + await this.startPingInterval(); + this.serverCertChain = serverCertChain; + this.connections.ingress.set(this.address, this); + this.startTimeout(); + this.logger.info('Started Connection Forward'); } public async stop(): Promise { - if (!this._started) { - return; - } this.logger.info('Stopping Connection Forward'); - this._started = false; this._composed = false; this.stopTimeout(); this.stopPingInterval(); @@ -151,14 +141,13 @@ class ConnectionForward extends Connection { this.logger.info('Stopped Connection Forward'); } + @ready(new networkErrors.ErrorConnectionNotRunning()) public compose(clientSocket: Socket): void { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } try { if (this._composed) { throw new networkErrors.ErrorConnectionComposed(); } + this._composed = true; this.logger.info('Composing Connection Forward'); this.tlsSocket.on('error', (e) => { if (!clientSocket.destroyed) { @@ -190,7 +179,6 @@ class ConnectionForward extends Connection { this.clientPort, ); this.connections.client.set(this.clientAddress, this); - this._composed = true; this.logger.info('Composed Connection Forward'); } catch (e) { this._composed = false; @@ -212,17 +200,13 @@ class ConnectionForward extends Connection { return this.clientPort; } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getServerCertificates(): Array { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } return this.serverCertChain.map((crt) => keysUtils.certCopy(crt)); } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getServerNodeIds(): Array { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } return this.serverCertChain.map((c) => networkUtils.certNodeId(c)); } @@ -239,7 +223,10 @@ class ConnectionForward extends Connection { protected startTimeout() { this.timeout = setTimeout(() => { - 
this.tlsSocket.emit('error', new networkErrors.ErrorConnectionTimeout()); + this.tlsSocket.emit( + 'error', + new networkErrors.ErrorConnectionTimeout() + ); }, this.timeoutTime); } diff --git a/src/network/ConnectionReverse.ts b/src/network/ConnectionReverse.ts index 7e23f387f..698dda4d5 100644 --- a/src/network/ConnectionReverse.ts +++ b/src/network/ConnectionReverse.ts @@ -7,6 +7,7 @@ import type { AbstractConstructorParameters, Timer } from '../types'; import net from 'net'; import tls from 'tls'; +import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import Connection from './Connection'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; @@ -18,6 +19,8 @@ type ConnectionsReverse = { proxy: Map; }; +interface ConnectionReverse extends StartStop {} +@StartStop() class ConnectionReverse extends Connection { public readonly serverHost: Host; public readonly serverPort: Port; @@ -51,77 +54,68 @@ class ConnectionReverse extends Connection { }: { timer?: Timer; } = {}): Promise { + this.logger.info('Starting Connection Reverse'); + // Promise for ready + const { p: readyP, resolveP: resolveReadyP } = promise(); + // Promise for server connection + const { p: socketP, resolveP: resolveSocketP } = promise(); + // Promise for start errors + const { p: errorP, rejectP: rejectErrorP } = promise(); + this.resolveReadyP = resolveReadyP; + this.utpSocket.on('message', this.handleMessage); + this.serverSocket = net.connect(this.serverPort, this.serverHost, () => { + const proxyAddressInfo = this.serverSocket.address() as AddressInfo; + this.proxyHost = proxyAddressInfo.address as Host; + this.proxyPort = proxyAddressInfo.port as Port; + this.proxyAddress = networkUtils.buildAddress( + this.proxyHost, + this.proxyPort, + ); + resolveSocketP(); + }); + const handleStartError = (e) => { + rejectErrorP(e); + }; + this.serverSocket.once('error', handleStartError); + this.serverSocket.on('end', this.handleEnd); + this.serverSocket.on('close', this.handleClose); + let punchInterval; try { - if (this._started) { - return; - } - this.logger.info('Starting Connection Reverse'); - this._started = true; - // Promise for ready - const { p: readyP, resolveP: resolveReadyP } = promise(); - // Promise for server connection - const { p: socketP, resolveP: resolveSocketP } = promise(); - // Promise for start errors - const { p: errorP, rejectP: rejectErrorP } = promise(); - this.resolveReadyP = resolveReadyP; - this.utpSocket.on('message', this.handleMessage); - this.serverSocket = net.connect(this.serverPort, this.serverHost, () => { - const proxyAddressInfo = this.serverSocket.address() as AddressInfo; - this.proxyHost = proxyAddressInfo.address as Host; - this.proxyPort = proxyAddressInfo.port as Port; - this.proxyAddress = networkUtils.buildAddress( - this.proxyHost, - this.proxyPort, - ); - resolveSocketP(); - }); - const handleStartError = (e) => { - rejectErrorP(e); - }; - this.serverSocket.once('error', handleStartError); - this.serverSocket.on('end', this.handleEnd); - this.serverSocket.on('close', this.handleClose); - let punchInterval; - try { - await Promise.race([ - socketP, - errorP, - ...(timer != null ? [timer.timerP] : []), - ]); - // Send punch & ready signal + await Promise.race([ + socketP, + errorP, + ...(timer != null ? 
[timer.timerP] : []), + ]); + // Send punch & ready signal + await this.send(networkUtils.pingBuffer); + punchInterval = setInterval(async () => { await this.send(networkUtils.pingBuffer); - punchInterval = setInterval(async () => { - await this.send(networkUtils.pingBuffer); - }, 1000); - await Promise.race([ - readyP, - errorP, - ...(timer != null ? [timer.timerP] : []), - ]); - } catch (e) { - await this.stop(); - throw new networkErrors.ErrorConnectionStart(e.message, { - code: e.code, - errno: e.errno, - syscall: e.syscall, - }); - } finally { - clearInterval(punchInterval); - } - if (timer?.timedOut) { - await this.stop(); - throw new networkErrors.ErrorConnectionStartTimeout(); - } - this.serverSocket.off('error', handleStartError); - this.serverSocket.on('error', this.handleError); - this.connections.egress.set(this.address, this); - this.connections.proxy.set(this.proxyAddress, this); - this.startTimeout(); - this.logger.info('Started Connection Reverse'); + }, 1000); + await Promise.race([ + readyP, + errorP, + ...(timer != null ? [timer.timerP] : []), + ]); } catch (e) { - this._started = false; - throw e; + await this.stop(); + throw new networkErrors.ErrorConnectionStart(e.message, { + code: e.code, + errno: e.errno, + syscall: e.syscall, + }); + } finally { + clearInterval(punchInterval); + } + if (timer?.timedOut) { + await this.stop(); + throw new networkErrors.ErrorConnectionStartTimeout(); } + this.serverSocket.off('error', handleStartError); + this.serverSocket.on('error', this.handleError); + this.connections.egress.set(this.address, this); + this.connections.proxy.set(this.proxyAddress, this); + this.startTimeout(); + this.logger.info('Started Connection Reverse'); } /** @@ -129,15 +123,12 @@ class ConnectionReverse extends Connection { * Repeated invocations are noops */ public async stop() { - if (!this._started) { - return; - } this.logger.info('Stopping Connection Reverse'); - this._started = false; this._composed = false; this.stopTimeout(); this.utpSocket.off('message', this.handleMessage); if (!this.serverSocket.destroyed) { + // console.log('SENDING END TO serverSocket'); this.serverSocket.end(); this.serverSocket.destroy(); } @@ -149,14 +140,13 @@ class ConnectionReverse extends Connection { /** * Repeated invocations are noops */ + @ready(new networkErrors.ErrorConnectionNotRunning()) public async compose(utpConn: UTPConnection, timer?: Timer): Promise { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } try { if (this._composed) { throw new networkErrors.ErrorConnectionComposed(); } + this._composed = true; this.logger.info('Composing Connection Reverse'); // Promise for secure establishment const { p: secureP, resolveP: resolveSecureP } = promise(); @@ -201,11 +191,33 @@ class ConnectionReverse extends Connection { // The utp connection may already be destroyed tlsSocket.destroy(); } else { + + // console.log('DESTROYED', tlsSocket.destroyed); + // // @ts-ignore + // console.log('PENDING', tlsSocket.pending); + // // @ts-ignore + // console.log('READYSTATE', tlsSocket.readyState); + // console.log('CONNECTING', tlsSocket.connecting); + // console.log('ALLOW HALF OPEN', tlsSocket.allowHalfOpen); + // console.log('ALLOW HALF OPEN utpConn', utpConn.allowHalfOpen); + // console.log('ENDED?', tlsSocket.writableEnded); + // console.log('FINISHED?', tlsSocket.writableFinished); + // console.log('ENDED?', utpConn.writableEnded); + // console.log('FINISHED?', utpConn.writableFinished); + // console.log(utpConn); + // Prevent half open 
connections tlsSocket.end(); + + // utpConn.end(); + // console.log("ENDED TLS SOCKET AGAIN?"); + } }); tlsSocket.on('error', (e) => { + + // console.log('EMITTING ERROR ON TLS SOCKET', e); + if (!this.serverSocket.destroyed) { this.serverSocket.emit('error', e); } @@ -225,7 +237,6 @@ class ConnectionReverse extends Connection { tlsSocket.pipe(this.serverSocket); this.serverSocket.pipe(tlsSocket); this.clientCertChain = clientCertChain; - this._composed = true; this.logger.info('Composed Connection Reverse'); } catch (e) { this._composed = false; @@ -233,17 +244,13 @@ class ConnectionReverse extends Connection { } } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getProxyHost(): Host { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } return this.proxyHost; } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getProxyPort(): Port { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } return this.proxyPort; } diff --git a/src/network/ForwardProxy.ts b/src/network/ForwardProxy.ts index 58dac9518..35e9587ee 100644 --- a/src/network/ForwardProxy.ts +++ b/src/network/ForwardProxy.ts @@ -23,10 +23,10 @@ class ForwardProxy { public readonly connPingIntervalTime: number; protected logger: Logger; - protected _proxyHost: Host; - protected _proxyPort: Port; - protected _egressHost: Host; - protected _egressPort: Port; + protected proxyHost: Host; + protected proxyPort: Port; + protected egressHost: Host; + protected egressPort: Port; protected server: http.Server; protected utpSocket: UTP; protected tlsConfig: TLSConfig; @@ -78,28 +78,26 @@ class ForwardProxy { egressPort?: Port; tlsConfig: TLSConfig; }): Promise { - this._proxyHost = proxyHost; - this._egressHost = egressHost; - this.tlsConfig = tlsConfig; - - let proxyAddress = networkUtils.buildAddress(this._proxyHost, proxyPort); - let egressAddress = networkUtils.buildAddress(this._egressHost, egressPort); + let proxyAddress = networkUtils.buildAddress(proxyHost, proxyPort); + let egressAddress = networkUtils.buildAddress(egressHost, egressPort); this.logger.info( `Starting Forward Proxy from ${proxyAddress} to ${egressAddress}`, ); const utpSocket = UTP({ allowHalfOpen: false }); const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); await utpSocketBind(egressPort, egressHost); - this._egressPort = utpSocket.address().port; + egressPort = utpSocket.address().port; const serverListen = promisify(this.server.listen).bind(this.server); - await serverListen(proxyPort, this._proxyHost); - this._proxyPort = (this.server.address() as AddressInfo).port as Port; - proxyAddress = networkUtils.buildAddress(this._proxyHost, this._proxyPort); - egressAddress = networkUtils.buildAddress( - this._egressHost, - this._egressPort, - ); + await serverListen(proxyPort, proxyHost); + proxyPort = (this.server.address() as AddressInfo).port as Port; + this.proxyHost = proxyHost; + this.proxyPort = proxyPort; + this.egressHost = egressHost; + this.egressPort = egressPort; this.utpSocket = utpSocket; + this.tlsConfig = tlsConfig; + proxyAddress = networkUtils.buildAddress(proxyHost, proxyPort); + egressAddress = networkUtils.buildAddress(egressHost, egressPort); this.logger.info( `Started Forward Proxy from ${proxyAddress} to ${egressAddress}`, ); @@ -125,30 +123,30 @@ class ForwardProxy { this.logger.info('Stopped Forward Proxy Server'); } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) - get proxyHost(): Host { - return this._proxyHost; + @ready(new 
networkErrors.ErrorForwardProxyNotRunning()) + public getProxyHost(): Host { + return this.proxyHost; } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) - get proxyPort(): Port { - return this._proxyPort; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public getProxyPort(): Port { + return this.proxyPort; } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) - get egressHost(): Host { - return this._egressHost; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public getEgressHost(): Host { + return this.egressHost; } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) - get egressPort(): Port { - return this._egressPort; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public getEgressPort(): Port { + return this.egressPort; } - - public setTLSConfig(tlsConfig: TLSConfig): void { - this.tlsConfig = tlsConfig; + public getConnectionCount(): number { + return this.connections.ingress.size; } + @ready(new networkErrors.ErrorForwardProxyNotRunning()) public getConnectionInfoByClient( clientHost: Host, clientPort: Port, @@ -163,13 +161,14 @@ class ForwardProxy { return { nodeId: serverNodeIds[0], certificates: serverCertificates, - egressHost: this._egressHost, - egressPort: this._egressPort, + egressHost: this.egressHost, + egressPort: this.egressPort, ingressHost: conn.host, ingressPort: conn.port, }; } + @ready(new networkErrors.ErrorForwardProxyNotRunning()) public getConnectionInfoByIngress( ingressHost: Host, ingressPort: Port, @@ -184,18 +183,19 @@ class ForwardProxy { return { nodeId: serverNodeIds[0], certificates: serverCertificates, - egressHost: this._egressHost, - egressPort: this._egressPort, + egressHost: this.egressHost, + egressPort: this.egressPort, ingressHost: conn.host, ingressPort: conn.port, }; } - get connectionCount(): number { - return this.connections.ingress.size; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public setTLSConfig(tlsConfig: TLSConfig): void { + this.tlsConfig = tlsConfig; } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) + @ready(new networkErrors.ErrorForwardProxyNotRunning()) public async openConnection( nodeId: NodeId, ingressHost: Host, @@ -217,7 +217,7 @@ class ForwardProxy { } } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) + @ready(new networkErrors.ErrorForwardProxyNotRunning()) public async closeConnection( ingressHost: Host, ingressPort: Port, @@ -401,7 +401,7 @@ class ForwardProxy { * Regular HTTP requests are not allowed */ protected handleRequest = ( - request: http.IncomingMessage, + _request: http.IncomingMessage, response: http.ServerResponse, ): void => { response.writeHead(405); diff --git a/src/network/ReverseProxy.ts b/src/network/ReverseProxy.ts index 7244e1483..af443da57 100644 --- a/src/network/ReverseProxy.ts +++ b/src/network/ReverseProxy.ts @@ -19,10 +19,10 @@ class ReverseProxy { public readonly connTimeoutTime: number; protected logger: Logger; - protected _ingressHost: Host; - protected _ingressPort: Port; - protected _serverHost: Host; - protected _serverPort: Port; + protected ingressHost: Host; + protected ingressPort: Port; + protected serverHost: Host; + protected serverPort: Port; protected utpSocket: UTP; protected tlsConfig: TLSConfig; protected connectionLocks: Map = new Map(); @@ -63,26 +63,27 @@ class ReverseProxy { ingressPort?: Port; tlsConfig: TLSConfig; }): Promise { - this._ingressHost = ingressHost; - this.tlsConfig = tlsConfig; let ingressAddress = networkUtils.buildAddress(ingressHost, ingressPort); let 
serverAddress = networkUtils.buildAddress(serverHost, serverPort); this.logger.info( `Starting Reverse Proxy from ${ingressAddress} to ${serverAddress}`, ); - const utpSocket = UTP.createServer(this.handleConnection, { - allowHalfOpen: false, - }); + const utpSocket = UTP.createServer( + { + allowHalfOpen: true, + }, + this.handleConnection + ); const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - await utpSocketListen(ingressPort, this._ingressHost); - this._ingressPort = utpSocket.address().port; - this._serverHost = serverHost; - this._serverPort = serverPort; + await utpSocketListen(ingressPort, ingressHost); + ingressPort = utpSocket.address().port; + this.serverHost = serverHost; + this.serverPort = serverPort; + this.ingressHost = ingressHost; + this.ingressPort = ingressPort; this.utpSocket = utpSocket; - ingressAddress = networkUtils.buildAddress( - this._ingressHost, - this._ingressPort, - ); + this.tlsConfig = tlsConfig; + ingressAddress = networkUtils.buildAddress(ingressHost, ingressPort); serverAddress = networkUtils.buildAddress(serverHost, serverPort); this.logger.info( `Started Reverse Proxy from ${ingressAddress} to ${serverAddress}`, @@ -107,31 +108,31 @@ class ReverseProxy { this.logger.info('Stopped Reverse Proxy'); } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) - get ingressHost(): Host { - return this._ingressHost; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + public getIngressHost(): Host { + return this.ingressHost; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) - get ingressPort(): Port { - return this._ingressPort; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + public getIngressPort(): Port { + return this.ingressPort; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) - get serverHost(): Host { - return this._serverHost; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + public getServerHost(): Host { + return this.serverHost; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) - get serverPort(): Port { - return this._serverPort; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + public getServerPort(): Port { + return this.serverPort; } - public setTLSConfig(tlsConfig: TLSConfig): void { - this.tlsConfig = tlsConfig; + public getConnectionCount(): number { + return this.connections.egress.size; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) + @ready(new networkErrors.ErrorReverseProxyNotRunning()) public getConnectionInfoByProxy( proxyHost: Host, proxyPort: Port, @@ -148,12 +149,12 @@ class ReverseProxy { certificates: clientCertificates, egressHost: conn.host, egressPort: conn.port, - ingressHost: this._ingressHost, - ingressPort: this._ingressPort, + ingressHost: this.ingressHost, + ingressPort: this.ingressPort, }; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) + @ready(new networkErrors.ErrorReverseProxyNotRunning()) public getConnectionInfoByEgress( egressHost: Host, egressPort: Port, @@ -170,16 +171,17 @@ class ReverseProxy { certificates: clientCertificates, egressHost: conn.host, egressPort: conn.port, - ingressHost: this._ingressHost, - ingressPort: this._ingressPort, + ingressHost: this.ingressHost, + ingressPort: this.ingressPort, }; } - get connectionCount(): number { - return this.connections.egress.size; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + public setTLSConfig(tlsConfig: TLSConfig): void { + this.tlsConfig = tlsConfig; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) + @ready(new 
networkErrors.ErrorReverseProxyNotRunning()) public async openConnection( egressHost: Host, egressPort: Port, @@ -200,7 +202,7 @@ class ReverseProxy { } } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) + @ready(new networkErrors.ErrorReverseProxyNotRunning()) public async closeConnection( egressHost: Host, egressPort: Port, @@ -297,8 +299,8 @@ class ReverseProxy { return conn; } conn = new ConnectionReverse({ - serverHost: this._serverHost, - serverPort: this._serverPort, + serverHost: this.serverHost, + serverPort: this.serverPort, connections: this.connections, utpSocket: this.utpSocket, host: egressHost, diff --git a/src/network/errors.ts b/src/network/errors.ts index 9b03420b4..d017704d2 100644 --- a/src/network/errors.ts +++ b/src/network/errors.ts @@ -1,109 +1,150 @@ -import { ErrorPolykey } from '../errors'; +import { ErrorPolykey, sysexits } from '../errors'; class ErrorNetwork extends ErrorPolykey {} -class ErrorForwardProxyNotStarted extends ErrorNetwork {} +class ErrorForwardProxy extends ErrorNetwork {} -class ErrorForwardProxyDestroyed extends ErrorNetwork {} +class ErrorForwardProxyNotRunning extends ErrorForwardProxy { + description = 'ForwardProxy is not running'; + exitCode = sysexits.USAGE; +} -class ErrorForwardProxyInvalidUrl extends ErrorNetwork {} +class ErrorForwardProxyInvalidUrl extends ErrorForwardProxy { + description = 'Invalid target host used for HTTP connect proxy'; + exitCode = sysexits.PROTOCOL; +} -class ErrorForwardProxyMissingNodeId extends ErrorNetwork {} +class ErrorForwardProxyMissingNodeId extends ErrorForwardProxy { + description = 'Node ID query parameter is required for HTTP connect proxy'; + exitCode = sysexits.PROTOCOL; +} -class ErrorForwardProxyAuth extends ErrorNetwork {} +class ErrorForwardProxyAuth extends ErrorForwardProxy { + description = 'Incorrect HTTP connect proxy password'; + exitCode = sysexits.NOPERM; +} -class ErrorReverseProxyNotStarted extends ErrorNetwork {} +class ErrorReverseProxy extends ErrorNetwork {} -class ErrorReverseProxyDestroyed extends ErrorNetwork {} +class ErrorReverseProxyNotRunning extends ErrorReverseProxy { + description = 'ReverseProxy is not running'; + exitCode = sysexits.USAGE; +} class ErrorConnection extends ErrorNetwork {} -class ErrorConnectionMessageParse extends ErrorConnection {} +class ErrorConnectionNotRunning extends ErrorConnection { + description = 'Connection is not running'; + exitCode = sysexits.USAGE; +} -class ErrorConnectionNotStarted extends ErrorConnection {} +class ErrorConnectionComposed extends ErrorConnection { + description = 'Connection is composed'; + exitCode = sysexits.USAGE; +} -// During start error -class ErrorConnectionStart extends ErrorConnection {} +class ErrorConnectionNotComposed extends ErrorConnection { + description = 'Connection is not composed'; + exitCode = sysexits.USAGE; +} -// Start timeout error -class ErrorConnectionStartTimeout extends ErrorConnectionStart {} +class ErrorConnectionMessageParse extends ErrorConnection { + description = 'Network message received is invalid'; + exitCode = sysexits.TEMPFAIL; +} -// During compose error -class ErrorConnectionCompose extends ErrorConnection {} - -// Compose timeout error -class ErrorConnectionComposeTimeout extends ErrorConnectionCompose {} - -// Connection is already composed -class ErrorConnectionComposed extends ErrorConnection {} - -// Not yet composed, cannot answer certain things -class ErrorConnectionNotComposed extends ErrorConnection {} - -// Was not able to keep alive -class 
ErrorConnectionTimeout extends ErrorConnection {} - -/** - * Certificate verification errors - */ -class ErrorCertChain extends ErrorNetwork {} - -/** - * When the certificate chain is empty - */ -class ErrorCertChainEmpty extends ErrorCertChain {} +class ErrorConnectionTimeout extends ErrorConnection { + description = 'Connection keep-alive timed out'; + exitCode = sysexits.UNAVAILABLE; +} /** - * The target node id is not claimed by any certificate + * Used by ConnectionForward */ -class ErrorCertChainUnclaimed extends ErrorCertChain {} +class ErrorConnectionStart extends ErrorConnection { + description = 'Connection start failed'; + exitCode = sysexits.PROTOCOL; +} -/** - * If the signature chain is broken - */ -class ErrorCertChainBroken extends ErrorCertChain {} +class ErrorConnectionStartTimeout extends ErrorConnectionStart { + description = 'Connection start timed out'; + exitCode = sysexits.NOHOST; +} /** - * Certificate in the chain was expired + * Used by ConnectionReverse */ -class ErrorCertChainDateInvalid extends ErrorCertChain {} +class ErrorConnectionCompose extends ErrorConnection { + description = 'Connection compose failed'; + exitCode = sysexits.PROTOCOL; +} -/** - * Certificate is missing the common name - */ -class ErrorCertChainNameInvalid extends ErrorCertChain {} +class ErrorConnectionComposeTimeout extends ErrorConnectionCompose { + description = 'Connection compose timed out'; + exitCode = sysexits.NOHOST; +} /** - * Certificate public key doesn't generate the node id + * Used for certificate verification */ -class ErrorCertChainKeyInvalid extends ErrorCertChain {} +class ErrorCertChain extends ErrorNetwork {} -/** - * Certificate self-signed signature is invalid - */ -class ErrorCertChainSignatureInvalid extends ErrorCertChain {} +class ErrorCertChainEmpty extends ErrorCertChain { + description = 'Certificate chain is empty'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainUnclaimed extends ErrorCertChain { + description = 'The target node id is not claimed by any certificate'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainBroken extends ErrorCertChain { + description = 'The signature chain is broken'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainDateInvalid extends ErrorCertChain { + description = 'Certificate in the chain is expired'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainNameInvalid extends ErrorCertChain { + description = 'Certificate is missing the common name'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainKeyInvalid extends ErrorCertChain { + description = 'Certificate public key does not generate the Node ID'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainSignatureInvalid extends ErrorCertChain { + description = 'Certificate self-signed signature is invalid'; + exitCode = sysexits.PROTOCOL; +} class ErrorHostnameResolutionFailed extends ErrorNetwork {} export { ErrorNetwork, - ErrorForwardProxyNotStarted, - ErrorForwardProxyDestroyed, + ErrorForwardProxy, + ErrorForwardProxyNotRunning, ErrorForwardProxyInvalidUrl, ErrorForwardProxyMissingNodeId, ErrorForwardProxyAuth, - ErrorReverseProxyNotStarted, - ErrorReverseProxyDestroyed, + ErrorReverseProxy, + ErrorReverseProxyNotRunning, ErrorConnection, + ErrorConnectionNotRunning, + ErrorConnectionComposed, + ErrorConnectionNotComposed, ErrorConnectionMessageParse, - ErrorConnectionNotStarted, + ErrorConnectionTimeout, ErrorConnectionStart, ErrorConnectionStartTimeout, ErrorConnectionCompose, ErrorConnectionComposeTimeout, - 
ErrorConnectionComposed, - ErrorConnectionNotComposed, - ErrorConnectionTimeout, ErrorCertChain, ErrorCertChainEmpty, ErrorCertChainUnclaimed, diff --git a/src/nodes/NodeConnection.ts b/src/nodes/NodeConnection.ts index d4c9b61b8..fd5ac92b5 100644 --- a/src/nodes/NodeConnection.ts +++ b/src/nodes/NodeConnection.ts @@ -79,9 +79,9 @@ class NodeConnection { seedConnections?: Map; }): Promise { logger.info(`Creating ${this.name}`); - const proxyConfig: ProxyConfig = { - host: forwardProxy.proxyHost, - port: forwardProxy.proxyPort, + const proxyConfig = { + host: forwardProxy.getProxyHost(), + port: forwardProxy.getProxyPort(), authToken: forwardProxy.authToken, }; const nodeConnection = new NodeConnection({ @@ -146,8 +146,8 @@ class NodeConnection { this.logger.info(`Starting ${this.constructor.name}`); // 1. Get the egress port of the fwdProxy (used for hole punching) const egressAddress = networkUtils.buildAddress( - this.fwdProxy.egressHost, - this.fwdProxy.egressPort, + this.fwdProxy.getEgressHost(), + this.fwdProxy.getEgressPort(), ); // Also need to sign this for authentication (i.e. from expected source) const signature = await this.keyManager.signWithRootKeyPair( diff --git a/src/nodes/NodeManager.ts b/src/nodes/NodeManager.ts index f00994697..55f56f6e8 100644 --- a/src/nodes/NodeManager.ts +++ b/src/nodes/NodeManager.ts @@ -42,13 +42,11 @@ class NodeManager { protected db: DB; protected logger: Logger; protected lock: Mutex = new Mutex(); - protected nodeGraph: NodeGraph; protected sigchain: Sigchain; protected keyManager: KeyManager; protected fwdProxy: ForwardProxy; protected revProxy: ReverseProxy; - // Active connections to other nodes protected connections: NodeConnectionMap = new Map(); // Node ID -> node address mappings for the seed nodes @@ -125,20 +123,26 @@ class NodeManager { }: { fresh?: boolean; } = {}) { - this.logger.info(`Starting ${this.constructor.name}`); - // Instantiate the node graph (containing Kademlia implementation) - this.nodeGraph = await NodeGraph.createNodeGraph({ - db: this.db, - nodeManager: this, - logger: this.logger, - fresh, - }); - // Add the seed nodes to the NodeGraph - for (const id in this.seedNodes) { - const seedNodeId = id as NodeId; - await this.nodeGraph.setNode(seedNodeId, this.seedNodes[seedNodeId]); + try { + this.logger.info(`Starting ${this.constructor.name}`); + // Instantiate the node graph (containing Kademlia implementation) + this.nodeGraph = await NodeGraph.createNodeGraph({ + db: this.db, + nodeManager: this, + logger: this.logger, + fresh, + }); + // Add the seed nodes to the NodeGraph + for (const id in this.seedNodes) { + const seedNodeId = id as NodeId; + await this.nodeGraph.setNode(seedNodeId, this.seedNodes[seedNodeId]); + } + this.logger.info(`Started ${this.constructor.name}`); + } catch (e) { + this.logger.warn(`Failed Starting ${this.constructor.name}`); + await this.stop(); + throw e; } - this.logger.info(`Started ${this.constructor.name}`); } public async stop() { diff --git a/src/schema/Schema.ts b/src/schema/Schema.ts index 82b8da2c5..dbb2ba217 100644 --- a/src/schema/Schema.ts +++ b/src/schema/Schema.ts @@ -51,20 +51,20 @@ class Schema { public constructor({ statePath, - stateVersion, - lock, - fs, + stateVersion = config.stateVersion as StateVersion, + lock = new utils.RWLock(), + fs = require('fs'), logger, }: { statePath: string; - stateVersion: StateVersion; - lock: utils.RWLock; - fs: FileSystem; - logger: Logger; + stateVersion?: StateVersion; + lock?: utils.RWLock; + fs?: FileSystem; + logger?: Logger; 
}) { - this.logger = logger; + this.logger = logger ?? new Logger(this.constructor.name); this.statePath = statePath; - this.stateVersionPath = path.join(statePath, 'version'); + this.stateVersionPath = path.join(statePath, config.defaults.stateVersionBase); this.stateVersion = stateVersion; this.lock = lock; this.fs = fs; diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 0c86446ec..9ed2f3869 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -140,30 +140,36 @@ class VaultManager { public async start({ fresh = false, }: { fresh?: boolean } = {}): Promise { - this.logger.info(`Starting ${this.constructor.name}`); - this.vaultsDbDomain = 'VaultManager'; - this.vaultsDb = await this.db.level(this.vaultsDbDomain); - this.vaultsNamesDbDomain = [this.vaultsDbDomain, 'names']; - this.vaultsNamesDb = await this.db.level( - this.vaultsNamesDbDomain[1], - this.vaultsDb, - ); - if (fresh) { - await this.vaultsDb.clear(); - await this.fs.promises.rm(this.vaultsPath, { - force: true, - recursive: true, + try { + this.logger.info(`Starting ${this.constructor.name}`); + this.vaultsDbDomain = 'VaultManager'; + this.vaultsDb = await this.db.level(this.vaultsDbDomain); + this.vaultsNamesDbDomain = [this.vaultsDbDomain, 'names']; + this.vaultsNamesDb = await this.db.level( + this.vaultsNamesDbDomain[1], + this.vaultsDb, + ); + if (fresh) { + await this.vaultsDb.clear(); + await this.fs.promises.rm(this.vaultsPath, { + force: true, + recursive: true, + }); + this.logger.info(`Removing vaults directory at '${this.vaultsPath}'`); + } + await utils.mkdirExists(this.fs, this.vaultsPath); + this.efs = await EncryptedFS.createEncryptedFS({ + dbPath: this.vaultsPath, + dbKey: this.vaultsKey, + logger: this.logger, }); - this.logger.info(`Removing vaults directory at '${this.vaultsPath}'`); + await this.efs.start(); + this.logger.info(`Started ${this.constructor.name}`); + } catch (e) { + this.logger.warn(`Failed Starting ${this.constructor.name}`); + await this.stop(); + throw e; } - await utils.mkdirExists(this.fs, this.vaultsPath); - this.efs = await EncryptedFS.createEncryptedFS({ - dbPath: this.vaultsPath, - dbKey: this.vaultsKey, - logger: this.logger, - }); - await this.efs.start(); - this.logger.info(`Started ${this.constructor.name}`); } public async stop(): Promise { diff --git a/tests/PolykeyAgent.test.ts b/tests/PolykeyAgent.test.ts index 5fd643a08..4693faec7 100644 --- a/tests/PolykeyAgent.test.ts +++ b/tests/PolykeyAgent.test.ts @@ -1,190 +1,178 @@ +import type { StateVersion } from '@/schema/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; -import config from '@/config'; +import { utils as keysUtils } from '@/keys'; import { Status } from '@/status'; -import * as schemaErrors from '@/schema/errors'; - -// Mocks. 
-jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import { Schema } from '@/schema'; +import * as errors from '@/errors'; +import config from '@/config'; +import * as testUtils from './utils'; -describe('Polykey', () => { +describe('PolykeyAgent', () => { const password = 'password'; const logger = new Logger('PolykeyAgent Test', LogLevel.WARN, [ new StreamHandler(), ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + }); let dataDir: string; - let pk: PolykeyAgent; beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); }); afterEach(async () => { - if (pk != null) { - await pk.stop(); - await pk.destroy(); - } await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - test( - 'Able to construct', - async () => { - const nodePath = path.join(dataDir, 'polykey'); - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - expect(pk).toBeInstanceOf(PolykeyAgent); - }, - global.polykeyStartupTimeout, - ); - test( - 'async start constructs node path', - async () => { - const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - const nodePathContents = await fs.promises.readdir( - path.join(nodePath, 'state'), - ); - expect(nodePathContents).toContain('keys'); - expect(nodePathContents).toContain('vaults'); - expect(nodePathContents).toContain('db'); - await pk.stop(); - }, - global.polykeyStartupTimeout, - ); - test( - 'async stop leaves the node path', - async () => { - const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - await pk.stop(); - const nodePathContents = await fs.promises.readdir( - path.join(nodePath, 'state'), - ); - expect(nodePathContents).toContain('keys'); - expect(nodePathContents).toContain('db'); - expect(nodePathContents).toContain('vaults'); - }, - global.polykeyStartupTimeout, - ); - test( - 'able to async start after async stop', - async () => { - const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - await pk.stop(); - await expect(pk.start({ password })).resolves.not.toThrowError(); - }, - global.polykeyStartupTimeout * 2, - ); - test('GithubProvider is registered', async () => { - const providerId = 'github.com'; + test('PolykeyAgent readiness', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const pkAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }); + await expect(pkAgent.destroy()).rejects.toThrow( + errors.ErrorPolykeyAgentRunning, + ); + // Should be a noop + await pkAgent.start({ password }); + await pkAgent.stop(); + await pkAgent.destroy(); + await expect(pkAgent.start({ password 
})).rejects.toThrow( + errors.ErrorPolykeyAgentDestroyed, + ); + }); + test('start creates, stop leaves, and destroy destroys the node path', async () => { const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ + const pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath, logger, }); - const providers = pk.identitiesManager.getProviders(); - // Exists - expect(providers[providerId]).toBeTruthy(); - // Matches clientID in config. - expect(providers[providerId].clientId).toEqual( - config.providers[providerId].clientId, + let nodePathContents = await fs.promises.readdir(nodePath); + expect(nodePathContents).toContain(config.defaults.statusBase); + expect(nodePathContents).toContain(config.defaults.stateBase); + let stateContents = await fs.promises.readdir( + path.join(nodePath, config.defaults.stateBase) + ); + expect(stateContents).toContain(config.defaults.keysBase); + expect(stateContents).toContain(config.defaults.dbBase); + expect(stateContents).toContain(config.defaults.vaultsBase); + await pkAgent.stop(); + nodePathContents = await fs.promises.readdir(nodePath); + expect(nodePathContents).toContain(config.defaults.statusBase); + expect(nodePathContents).toContain(config.defaults.stateBase); + stateContents = await fs.promises.readdir( + path.join(nodePath, config.defaults.stateBase) ); + expect(stateContents).toContain(config.defaults.keysBase); + expect(stateContents).toContain(config.defaults.dbBase); + expect(stateContents).toContain(config.defaults.vaultsBase); + await pkAgent.destroy(); + nodePathContents = await fs.promises.readdir(nodePath); + // The status will be the only file left over + expect(nodePathContents).toHaveLength(1); + expect(nodePathContents).toContain(config.defaults.statusBase); + }); + test('start after stop', async () => { + const nodePath = `${dataDir}/polykey`; + const statusPath = path.join(nodePath, config.defaults.statusBase); + const pkAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }); + const status = new Status({ + statusPath, + fs, + logger, + }); + await pkAgent.stop(); + expect(await status.readStatus()).toMatchObject({status: 'DEAD'}); + await expect(pkAgent.start({ password })).resolves.not.toThrowError(); + expect(await status.readStatus()).toMatchObject({status: 'LIVE'}); + await pkAgent.stop(); + expect(await status.readStatus()).toMatchObject({status: 'DEAD'}); + await expect(pkAgent.start({ password: 'wrong password' })).rejects.toThrowError( + errors.ErrorRootKeysParse + ); + expect(await status.readStatus()).toMatchObject({status: 'DEAD'}); + await pkAgent.destroy(); + expect(await status.readStatus()).toMatchObject({status: 'DEAD'}); + }); + test('schema state version is maintained after start and stop', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const statePath = path.join( + nodePath, + config.defaults.stateBase, + ); + const schema = new Schema({ + statePath + }); + const pkAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }); + expect(await schema.readVersion()).toBe(config.stateVersion); + await pkAgent.stop(); + // Still exists after being stopped + expect(await schema.readVersion()).toBe(config.stateVersion); + }); + test('cannot start during state version mismatch', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const statePath = path.join( + nodePath, + config.defaults.stateBase, + ); + await fs.promises.mkdir(nodePath); + let schema = await Schema.createSchema({ + 
statePath, + stateVersion: config.stateVersion + 1 as StateVersion, + logger, + fresh: true + }); + schema.stop(); + await expect(PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + })).rejects.toThrow(errors.ErrorSchemaVersionTooNew); + // The 0 version will always be too old + // Because we started our PK's state version as 1 + schema = await Schema.createSchema({ + statePath, + stateVersion: 0 as StateVersion, + logger, + fresh: true + }); + schema.stop(); + await expect(PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + })).rejects.toThrow(errors.ErrorSchemaVersionTooOld); }); - test( - 'throw error if state version does not match config', - async () => { - // Creating an old version file. - const nodePath = path.join(dataDir, 'polykey'); - const versionFilePath = path.join(nodePath, 'state', 'version'); - const versionInfo = { ...config }; // Cheeky clone - versionInfo.stateVersion = config.stateVersion + 1; - const versionInfoString = JSON.stringify(versionInfo); - await fs.promises.mkdir(path.join(nodePath, 'state'), { - recursive: true, - }); - await fs.promises.writeFile(versionFilePath, versionInfoString); - - // Attempt to start a polykeyAgent. - await expect(async () => { - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - }).rejects.toThrow(schemaErrors.ErrorSchemaVersionParse); - }, - global.polykeyStartupTimeout, - ); - test( - 'Creates the version file when starting Polykey', - async () => { - // Creating an old version file. - const nodePath = path.join(dataDir, 'polykey'); - const versionFilePath = path.join(nodePath, 'state', 'version'); - - // Attempt to start a polykeyAgent. - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - await pk.stop(); - - const versionFileContents = await fs.promises.readFile(versionFilePath); - const versionInfo = JSON.parse(versionFileContents.toString()); - expect(versionInfo).toStrictEqual(config.stateVersion); - }, - global.polykeyStartupTimeout, - ); - test( - 'Stopping and destroying properly stops Polykey', - async () => { - // Starting. 
- const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - const statusPath = path.join(nodePath, 'status.json'); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('LIVE', 2000); - await pk.stop(); - await status.waitFor('DEAD', 2000); - await pk.destroy(); - await status.waitFor('DEAD', 2000); - }, - global.polykeyStartupTimeout * 2, - ); }); diff --git a/tests/PolykeyClient.test.ts b/tests/PolykeyClient.test.ts index 8f97b80aa..c4c3a8ad3 100644 --- a/tests/PolykeyClient.test.ts +++ b/tests/PolykeyClient.test.ts @@ -5,26 +5,28 @@ import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { PolykeyClient, PolykeyAgent } from '@'; import { Session } from '@/sessions'; -import * as keysUtils from '@/keys/utils'; +import { utils as keysUtils } from '@/keys'; import config from '@/config'; import * as testUtils from './utils'; -jest - .spyOn(keysUtils, 'generateKeyPair') - .mockImplementation(testUtils.getGlobalKeyPair); -jest - .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockImplementation(testUtils.getGlobalKeyPair); - describe('PolykeyClient', () => { const password = 'password'; const logger = new Logger('PolykeyClient Test', LogLevel.WARN, [ new StreamHandler(), ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; let dataDir: string; let nodePath: string; let pkAgent: PolykeyAgent; beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -41,6 +43,8 @@ describe('PolykeyClient', () => { force: true, recursive: true, }); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); }); test('create PolykeyClient and connect to PolykeyAgent', async () => { const pkClient = await PolykeyClient.createPolykeyClient({ diff --git a/tests/acl/ACL.test.ts b/tests/acl/ACL.test.ts index 117babd30..6e1324ced 100644 --- a/tests/acl/ACL.test.ts +++ b/tests/acl/ACL.test.ts @@ -9,9 +9,8 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; import { utils as idUtils } from '@matrixai/id'; import { ACL, errors as aclErrors } from '@/acl'; -import { makeVaultId } from '@/vaults/utils'; -import * as keysUtils from '@/keys/utils'; -import { makeCrypto } from '../utils'; +import { utils as keysUtils } from '@/keys'; +import { utils as vaultsUtils } from '@/vaults'; describe('ACL', () => { const logger = new Logger(`${ACL.name} Test`, LogLevel.WARN, [ @@ -23,7 +22,6 @@ describe('ACL', () => { let vaultId2: VaultId; let vaultId3: VaultId; let vaultId4: VaultId; - beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), @@ -33,12 +31,18 @@ describe('ACL', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(dbKey), + crypto: { + key: dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + } }); - vaultId1 = makeVaultId(idUtils.fromString('vault1xxxxxxxxxx')); - vaultId2 = makeVaultId(idUtils.fromString('vault2xxxxxxxxxx')); - vaultId3 = 
makeVaultId(idUtils.fromString('vault3xxxxxxxxxx')); - vaultId4 = makeVaultId(idUtils.fromString('vault4xxxxxxxxxx')); + vaultId1 = vaultsUtils.makeVaultId(idUtils.fromString('vault1xxxxxxxxxx')); + vaultId2 = vaultsUtils.makeVaultId(idUtils.fromString('vault2xxxxxxxxxx')); + vaultId3 = vaultsUtils.makeVaultId(idUtils.fromString('vault3xxxxxxxxxx')); + vaultId4 = vaultsUtils.makeVaultId(idUtils.fromString('vault4xxxxxxxxxx')); }); afterEach(async () => { await db.stop(); diff --git a/tests/acl/utils.test.ts b/tests/acl/utils.test.ts index 928ab2a72..c001ac695 100644 --- a/tests/acl/utils.test.ts +++ b/tests/acl/utils.test.ts @@ -1,6 +1,6 @@ import * as aclUtils from '@/acl/utils'; -describe('utils', () => { +describe('acl/utils', () => { test('merging permissions', async () => { const perm1 = { gestalt: { diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index 5fb406665..ca2572ca3 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -2,12 +2,12 @@ import type * as grpc from '@grpc/grpc-js'; import type { NodeAddress, NodeInfo } from '@/nodes/types'; import type { ClaimIdString, ClaimIntermediary } from '@/claims/types'; import type { Host, Port, TLSConfig } from '@/network/types'; -import type { GRPCClientAgent } from '@/agent'; import fs from 'fs'; import os from 'os'; import path from 'path'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { Mutex } from 'async-mutex'; +import { GRPCClientAgent } from '@/agent'; import { DB } from '@matrixai/db'; import { KeyManager } from '@/keys'; import { NodeManager } from '@/nodes'; @@ -28,32 +28,37 @@ import * as testAgentUtils from './utils'; import * as testUtils from '../utils'; import TestNodeConnection from '../nodes/TestNodeConnection'; -jest - .spyOn(keysUtils, 'generateKeyPair') - .mockImplementation(testUtils.getGlobalKeyPair); -jest - .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockImplementation(testUtils.getGlobalKeyPair); - -describe('GRPC agent', () => { +describe('GRPCClientAgent', () => { const password = 'password'; - const logger = new Logger('AgentServerTest', LogLevel.WARN, [ + const logger = new Logger(`${GRPCClientAgent.name} Test`, LogLevel.WARN, [ new StreamHandler(), ]); const node1: NodeInfo = { id: makeNodeId('v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug'), chain: {}, }; - + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + }); let client: GRPCClientAgent; let server: grpc.Server; let port: number; - let dataDir: string; let keysPath: string; let vaultsPath: string; let dbPath: string; - let keyManager: KeyManager; let vaultManager: VaultManager; let nodeManager: NodeManager; @@ -62,10 +67,8 @@ describe('GRPC agent', () => { let gestaltGraph: GestaltGraph; let db: DB; let notificationsManager: NotificationsManager; - let fwdProxy: ForwardProxy; let revProxy: ReverseProxy; - beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), @@ -73,14 +76,12 @@ 
describe('GRPC agent', () => { keysPath = path.join(dataDir, 'keys'); vaultsPath = path.join(dataDir, 'vaults'); dbPath = path.join(dataDir, 'db'); - keyManager = await KeyManager.createKeyManager({ password, keysPath, fs: fs, logger: logger, }); - const tlsConfig: TLSConfig = { keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, certChainPem: await keyManager.getRootCertChainPem(), @@ -92,11 +93,9 @@ describe('GRPC agent', () => { await fwdProxy.start({ tlsConfig, }); - revProxy = new ReverseProxy({ logger: logger, }); - db = await DB.createDB({ dbPath: dbPath, fs: fs, @@ -106,27 +105,23 @@ describe('GRPC agent', () => { ops: { encrypt: keysUtils.encryptWithKey, decrypt: keysUtils.decryptWithKey, - } + }, }, }); - acl = await ACL.createACL({ db: db, logger: logger, }); - gestaltGraph = await GestaltGraph.createGestaltGraph({ db: db, acl: acl, logger: logger, }); - sigchain = await Sigchain.createSigchain({ keyManager: keyManager, db: db, logger: logger, }); - nodeManager = await NodeManager.createNodeManager({ db: db, sigchain: sigchain, @@ -135,7 +130,6 @@ describe('GRPC agent', () => { revProxy: revProxy, logger: logger, }); - notificationsManager = await NotificationsManager.createNotificationsManager({ acl: acl, @@ -145,7 +139,6 @@ describe('GRPC agent', () => { messageCap: 5, logger: logger, }); - vaultManager = await VaultManager.createVaultManager({ keyManager: keyManager, vaultsPath: vaultsPath, @@ -157,7 +150,6 @@ describe('GRPC agent', () => { fs: fs, logger: logger, }); - await nodeManager.start(); [server, port] = await testAgentUtils.openTestAgentServer({ keyManager, @@ -171,7 +163,6 @@ describe('GRPC agent', () => { afterEach(async () => { await testAgentUtils.closeTestAgentClient(client); await testAgentUtils.closeTestAgentServer(server); - await vaultManager.stop(); await notificationsManager.stop(); await sigchain.stop(); @@ -181,13 +172,11 @@ describe('GRPC agent', () => { await fwdProxy.stop(); await db.stop(); await keyManager.stop(); - await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - test('GRPCClientAgent readiness', async () => { await client.destroy(); await expect(async () => { diff --git a/tests/bin/agent/lock.test.ts b/tests/bin/agent/lock.test.ts index 74475b138..d1cd9a1cb 100644 --- a/tests/bin/agent/lock.test.ts +++ b/tests/bin/agent/lock.test.ts @@ -7,21 +7,25 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { Session } from '@/sessions'; import config from '@/config'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; -/** - * Mock prompts module which is used prompt for password - */ jest.mock('prompts'); const mockedPrompts = mocked(prompts); describe('lock', () => { const logger = new Logger('lock test', LogLevel.WARN, [new StreamHandler()]); - let pkAgentClose; + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ + globalAgentDir, + globalAgentPassword, + globalAgentClose + } = await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); let dataDir: string; beforeEach(async () => { @@ -39,22 +43,22 @@ describe('lock', () => { await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + 
globalAgentDir, ); const { exitCode } = await testBinUtils.pkStdio( ['agent', 'lock'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); const session = await Session.createSession({ sessionTokenPath: path.join( - global.binAgentDir, + globalAgentDir, config.defaults.tokenBase, ), fs, @@ -64,7 +68,7 @@ describe('lock', () => { await session.stop(); }); test('lock ensures reauthentication is required', async () => { - const password = global.binAgentPassword; + const password = globalAgentPassword; mockedPrompts.mockClear(); mockedPrompts.mockImplementation(async (_opts: any) => { return { password }; @@ -72,26 +76,26 @@ describe('lock', () => { await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); // Session token is deleted await testBinUtils.pkStdio( ['agent', 'lock'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); // Will prompt to reauthenticate await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); // Prompted for password 1 time expect(mockedPrompts.mock.calls.length).toBe(1); diff --git a/tests/bin/agent/lockall.test.ts b/tests/bin/agent/lockall.test.ts index e6a5a3814..8c307ba2b 100644 --- a/tests/bin/agent/lockall.test.ts +++ b/tests/bin/agent/lockall.test.ts @@ -8,6 +8,7 @@ import { Session } from '@/sessions'; import config from '@/config'; import * as clientErrors from '@/client/errors'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; /** * Mock prompts module which is used prompt for password @@ -19,12 +20,18 @@ describe('lockall', () => { const logger = new Logger('lockall test', LogLevel.WARN, [ new StreamHandler(), ]); - let pkAgentClose; + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ + globalAgentDir, + globalAgentPassword, + globalAgentClose + } = await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); let dataDir: string; beforeEach(async () => { @@ -42,22 +49,22 @@ describe('lockall', () => { await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); const { exitCode } = await testBinUtils.pkStdio( ['agent', 'lockall'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); const session = await Session.createSession({ sessionTokenPath: path.join( - global.binAgentDir, + globalAgentDir, config.defaults.tokenBase, ), fs, @@ -67,21 +74,21 @@ describe('lockall', () => { await session.stop(); }); test('lockall ensures reauthentication is required', async () => { - const password = global.binAgentPassword; + const password = globalAgentPassword; await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + 
PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); await testBinUtils.pkStdio( ['agent', 'lockall'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); // Token is deleted, reauthentication is required mockedPrompts.mockClear(); @@ -91,9 +98,9 @@ describe('lockall', () => { await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); // Prompted for password 1 time expect(mockedPrompts.mock.calls.length).toBe(1); @@ -103,14 +110,14 @@ describe('lockall', () => { await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); const session = await Session.createSession({ sessionTokenPath: path.join( - global.binAgentDir, + globalAgentDir, config.defaults.tokenBase, ), fs, @@ -121,19 +128,19 @@ describe('lockall', () => { await testBinUtils.pkStdio( ['agent', 'lockall'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); // Old token is invalid const { exitCode, stderr } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_TOKEN: token, }, - global.binAgentDir, + globalAgentDir, ); testBinUtils.expectProcessError( exitCode, diff --git a/tests/bin/agent/start.test.ts b/tests/bin/agent/start.test.ts index 9a4ffd272..2950d0f34 100644 --- a/tests/bin/agent/start.test.ts +++ b/tests/bin/agent/start.test.ts @@ -146,7 +146,7 @@ describe('start', () => { global.defaultTimeout * 2, ); test( - 'concurrent starts are coalesced', + 'concurrent starts results in 1 success', async () => { const password = 'abc123'; // One of these processes is blocked @@ -220,7 +220,7 @@ describe('start', () => { global.defaultTimeout * 2, ); test( - 'concurrent bootstrap is coalesced', + 'concurrent with bootstrap results in 1 success', async () => { const password = 'abc123'; // One of these processes is blocked diff --git a/tests/bin/agent/status.test.ts b/tests/bin/agent/status.test.ts index 84782cdb7..8cf706ed5 100644 --- a/tests/bin/agent/status.test.ts +++ b/tests/bin/agent/status.test.ts @@ -6,6 +6,7 @@ import { Status } from '@/status'; import * as binErrors from '@/bin/errors'; import config from '@/config'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; describe('status', () => { const logger = new Logger('status test', LogLevel.WARN, [ @@ -70,8 +71,9 @@ describe('status', () => { dataDir, )); expect(exitCode).toBe(0); + // If the command was slow, it may have become DEAD already expect(JSON.parse(stdout)).toMatchObject({ - status: 'STOPPING', + status: expect.stringMatching(/STOPPING|DEAD/), pid: agentProcess.pid, }); await testBinUtils.processExit(agentProcess); @@ -104,16 +106,22 @@ describe('status', () => { ); }); describe('status with global agent', () => { - let pkAgentClose; + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ + globalAgentDir, + globalAgentPassword, + globalAgentClose + } = await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); 
afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); test('status on LIVE agent', async () => { const status = new Status({ - statusPath: path.join(global.binAgentDir, config.defaults.statusBase), + statusPath: path.join(globalAgentDir, config.defaults.statusBase), fs, logger, }); @@ -121,10 +129,10 @@ describe('status', () => { const { exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json', '--verbose'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); expect(JSON.parse(stdout)).toMatchObject({ @@ -148,9 +156,9 @@ describe('status', () => { }); test('status on remote LIVE agent', async () => { const passwordPath = path.join(dataDir, 'password'); - await fs.promises.writeFile(passwordPath, global.binAgentPassword); + await fs.promises.writeFile(passwordPath, globalAgentPassword); const status = new Status({ - statusPath: path.join(global.binAgentDir, config.defaults.statusBase), + statusPath: path.join(globalAgentDir, config.defaults.statusBase), fs, logger, }); diff --git a/tests/bin/agent/unlock.test.ts b/tests/bin/agent/unlock.test.ts index 3cabdcd3e..31bf58617 100644 --- a/tests/bin/agent/unlock.test.ts +++ b/tests/bin/agent/unlock.test.ts @@ -5,15 +5,22 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { Session } from '@/sessions'; import config from '@/config'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; describe('unlock', () => { - const logger = new Logger('lock test', LogLevel.WARN, [new StreamHandler()]); - let pkAgentClose; + const logger = new Logger('unlock test', LogLevel.WARN, [new StreamHandler()]); + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ + globalAgentDir, + globalAgentPassword, + globalAgentClose + } = await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); let dataDir: string; beforeEach(async () => { @@ -31,7 +38,7 @@ describe('unlock', () => { // Fresh session, to delete the token const session = await Session.createSession({ sessionTokenPath: path.join( - global.binAgentDir, + globalAgentDir, config.defaults.tokenBase, ), fs, @@ -42,19 +49,19 @@ describe('unlock', () => { ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); // Run command without password ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); expect(JSON.parse(stdout)).toMatchObject({ status: 'LIVE' }); @@ -62,10 +69,10 @@ describe('unlock', () => { ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_TOKEN: await session.readToken(), }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); expect(JSON.parse(stdout)).toMatchObject({ status: 'LIVE' 
}); diff --git a/tests/bin/bootstrap.test.ts b/tests/bin/bootstrap.test.ts index 9b557f794..c108d0345 100644 --- a/tests/bin/bootstrap.test.ts +++ b/tests/bin/bootstrap.test.ts @@ -112,7 +112,7 @@ describe('bootstrap', () => { global.defaultTimeout * 2, ); test( - 'concurrent bootstrapping are coalesced', + 'concurrent bootstrapping results in 1 success', async () => { const password = 'password'; const [bootstrapProcess1, bootstrapProcess2] = await Promise.all([ diff --git a/tests/bin/identities.test.ts b/tests/bin/identities/identities.test.ts similarity index 89% rename from tests/bin/identities.test.ts rename to tests/bin/identities/identities.test.ts index 042c7039f..d6d47c2e7 100644 --- a/tests/bin/identities.test.ts +++ b/tests/bin/identities/identities.test.ts @@ -2,7 +2,7 @@ import type { IdentityId, IdentityInfo, ProviderId, -} from '../../src/identities/types'; +} from '@/identities/types'; import type { NodeInfo } from '@/nodes/types'; import type { ClaimLinkIdentity, ClaimLinkNode } from '@/claims/types'; import os from 'os'; @@ -13,14 +13,9 @@ import { PolykeyAgent } from '@'; import { makeNodeId } from '@/nodes/utils'; import * as claimsUtils from '@/claims/utils'; import * as identitiesUtils from '@/identities/utils'; -import * as testUtils from './utils'; -import * as utils from './utils'; -import { - addRemoteDetails, - cleanupRemoteKeynode, - setupRemoteKeynode, -} from '../utils'; -import TestProvider from '../identities/TestProvider'; +import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; +import TestProvider from '../../identities/TestProvider'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -132,7 +127,7 @@ describe('CLI Identities', () => { .mockImplementation(() => {}); // Authorize session - await utils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], {}, dataDir, @@ -163,7 +158,7 @@ describe('CLI Identities', () => { describe('commandAllowGestalts', () => { test('Should allow permissions on node.', async () => { const commands = genCommands(['allow', node1.id, 'notify']); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( node1.id, @@ -172,7 +167,7 @@ describe('CLI Identities', () => { expect(actionKeys).toContain('notify'); const command2 = genCommands(['allow', node1.id, 'scan']); - const result2 = await testUtils.pkStdio(command2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(command2, {}, dataDir); expect(result2.exitCode).toBe(0); // Succeeds. const actions2 = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( @@ -184,7 +179,7 @@ describe('CLI Identities', () => { // Should fail for invalid action. const command3 = genCommands(['allow', node1.id, 'invalid']); - const result3 = await testUtils.pkStdio(command3, {}, dataDir); + const result3 = await testBinUtils.pkStdio(command3, {}, dataDir); expect(result3.exitCode).toBe(1); // Should fail. }); test('Should allow permissions on Identity.', async () => { @@ -193,7 +188,7 @@ describe('CLI Identities', () => { identityString(identity1.providerId, identity1.identityId), 'notify', ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. 
const actions = @@ -209,7 +204,7 @@ describe('CLI Identities', () => { identityString(identity1.providerId, identity1.identityId), 'scan', ]); - const result2 = await testUtils.pkStdio(command2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(command2, {}, dataDir); expect(result2.exitCode).toBe(0); // Succeeds. const actions2 = @@ -227,13 +222,13 @@ describe('CLI Identities', () => { identityString(identity1.providerId, identity1.identityId), 'invalid', ]); - const result3 = await testUtils.pkStdio(command3, {}, dataDir); + const result3 = await testBinUtils.pkStdio(command3, {}, dataDir); expect(result3.exitCode).toBe(1); // Should fail. }); test('Should fail on invalid inputs.', async () => { let result; // Invalid node. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['allow', invaldNode.id, 'scan']), {}, dataDir, ); expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid identity - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands([ 'allow', identityString( @@ -256,7 +251,7 @@ expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid permission. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['allow', invaldNode.id, 'invalidPermission']), {}, dataDir, @@ -274,7 +269,7 @@ await polykeyAgent.gestaltGraph.setGestaltActionByNode(node1.id, 'scan'); const commands = genCommands(['disallow', node1.id, 'notify']); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( @@ -302,7 +297,7 @@ identityString(identity1.providerId, identity1.identityId), 'scan', ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = @@ -317,7 +312,7 @@ test('Should fail on invalid inputs.', async () => { let result; // Invalid node. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['disallow', invaldNode.id, 'scan']), {}, dataDir, ); expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid identity - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands([ 'disallow', identityString( @@ -340,7 +335,7 @@ expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid permission. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['disallow', node1.id, 'invalidPermission']), {}, dataDir, @@ -358,7 +353,7 @@ await polykeyAgent.gestaltGraph.setGestaltActionByNode(node1.id, 'scan'); const commands = genCommands(['permissions', node1.id]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. // Print result.
expect(result.stdout).toContain('notify'); @@ -381,7 +376,7 @@ describe('CLI Identities', () => { 'permissions', identityString(identity1.providerId, identity1.identityId), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. // Print result. expect(result.stdout).toContain('scan'); @@ -391,7 +386,7 @@ describe('CLI Identities', () => { describe('commandTrustGestalts', () => { test('Should set trust on Node.', async () => { const commands = genCommands(['trust', node1.id]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( @@ -405,7 +400,7 @@ describe('CLI Identities', () => { 'trust', identityString(identity1.providerId, identity1.identityId), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = @@ -419,7 +414,7 @@ describe('CLI Identities', () => { test('Should fail on invalid inputs.', async () => { let result; // Invalid node. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['trust', invaldNode.id]), {}, dataDir, @@ -427,7 +422,7 @@ describe('CLI Identities', () => { expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid identity - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands([ 'trust', identityString( @@ -451,7 +446,7 @@ describe('CLI Identities', () => { await polykeyAgent.gestaltGraph.setGestaltActionByNode(node1.id, 'scan'); const commands = genCommands(['untrust', node1.id]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( @@ -478,7 +473,7 @@ describe('CLI Identities', () => { 'untrust', identityString(identity1.providerId, identity1.identityId), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = @@ -493,7 +488,7 @@ describe('CLI Identities', () => { test('Should fail on invalid inputs.', async () => { let result; // Invalid node. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['trust', invaldNode.id]), {}, dataDir, @@ -501,7 +496,7 @@ describe('CLI Identities', () => { expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid identity - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands([ 'untrust', identityString( @@ -531,7 +526,7 @@ describe('CLI Identities', () => { testToken.providerId, testToken.identityId, ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. 
// Unauthenticate identity await polykeyAgent.identitiesManager.delToken( @@ -551,7 +546,7 @@ describe('CLI Identities', () => { testToken.providerId, testToken.identityId, ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode === 0).toBeFalsy(); // Fails.. }); }); @@ -566,7 +561,7 @@ describe('CLI Identities', () => { testToken.providerId, testToken.identityId, ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. expect(result.stdout).toContain('randomtestcode'); // Unauthenticate identity @@ -579,7 +574,7 @@ describe('CLI Identities', () => { describe('commandGetGestalts', () => { test('Should list gestalt by Node', async () => { const commands = ['identities', 'get', '-np', nodePath, node1.id]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain(node1.id); expect(result.stdout).toContain(identity1.providerId); @@ -593,7 +588,7 @@ describe('CLI Identities', () => { nodePath, identityString(identity1.providerId, identity1.identityId), ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain(node1.id); expect(result.stdout).toContain(identity1.providerId); @@ -610,7 +605,7 @@ describe('CLI Identities', () => { await polykeyAgent.gestaltGraph.setGestaltActionByNode(node2.id, 'scan'); const commands = ['identities', 'list', '-np', nodePath]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. expect(result.stdout).toContain('notify'); expect(result.stdout).toContain('scan'); @@ -628,7 +623,7 @@ describe('CLI Identities', () => { '--format', 'json', ]; - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).toBe(0); // Succeeds. expect(result2.stdout).toContain('notify'); expect(result2.stdout).toContain('scan'); @@ -654,7 +649,7 @@ describe('CLI Identities', () => { nodePath, testToken.providerId, ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. expect(result.stdout).toContain(testToken.providerId); expect(result.stdout).toContain(testToken.identityId); @@ -670,17 +665,16 @@ describe('CLI Identities', () => { beforeAll(async () => { // Setup the remote gestalt state here // Setting up remote nodes. - nodeB = await setupRemoteKeynode({ logger }); - nodeC = await setupRemoteKeynode({ logger }); + nodeB = await testUtils.setupRemoteKeynode({ logger }); + nodeC = await testUtils.setupRemoteKeynode({ logger }); // Forming links // B->C // Adding connection details. 
- await addRemoteDetails(polykeyAgent, nodeB); - await addRemoteDetails(nodeB, polykeyAgent); - await addRemoteDetails(nodeB, nodeC); - await addRemoteDetails(nodeC, nodeB); - + await testUtils.addRemoteDetails(polykeyAgent, nodeB); + await testUtils.addRemoteDetails(nodeB, polykeyAgent); + await testUtils.addRemoteDetails(nodeB, nodeC); + await testUtils.addRemoteDetails(nodeC, nodeB); // Adding sigchain details. const claimBtoC: ClaimLinkNode = { type: 'node', @@ -714,8 +708,8 @@ describe('CLI Identities', () => { }, global.polykeyStartupTimeout * 2); afterAll(async () => { // Clean up the remote gestalt state here. - await cleanupRemoteKeynode(nodeB); - await cleanupRemoteKeynode(nodeC); + await testUtils.cleanupRemoteKeynode(nodeB); + await testUtils.cleanupRemoteKeynode(nodeC); // Unclaim identity testProvider.links = {}; testProvider.linkIdCounter = 0; @@ -741,7 +735,7 @@ describe('CLI Identities', () => { nodePath, nodeB.nodeManager.getNodeId(), ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // We expect to find a gestalt now. @@ -764,7 +758,6 @@ describe('CLI Identities', () => { identityId, testToken.tokenData, ); - const commands = [ 'identities', 'discover', @@ -772,9 +765,8 @@ describe('CLI Identities', () => { nodePath, identityString(testProvider.id, identityId), ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); - // We expect to find a gestalt now. const gestalt = await polykeyAgent.gestaltGraph.getGestalts(); expect(gestalt.length).not.toBe(0); diff --git a/tests/bin/keys.test.ts b/tests/bin/keys/keys.test.ts similarity index 87% rename from tests/bin/keys.test.ts rename to tests/bin/keys/keys.test.ts index 17ab838e5..e65cd75aa 100644 --- a/tests/bin/keys.test.ts +++ b/tests/bin/keys/keys.test.ts @@ -3,7 +3,7 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; -import * as utils from './utils'; +import * as testBinUtils from '../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -46,7 +46,7 @@ describe('CLI keys', () => { }); beforeEach(async () => { // Authorize session - await utils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], {}, dataDir, @@ -56,24 +56,24 @@ describe('CLI keys', () => { describe('commandCertChain', () => { test('should get the certificate chain', async () => { command = ['keys', 'certchain', '-np', nodePath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); describe('commandGetCert', () => { test('should get the certificate', async () => { command = ['keys', 'cert', '-np', nodePath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); describe('commandGetRootKeypair', () => { test('should get the root keypair', async () => { command = ['keys', 'root', '-np', nodePath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const result2 = await 
utils.pkStdio([...command, '-pk'], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command, '-pk'], {}, dataDir); expect(result2.exitCode).toBe(0); }); }); @@ -84,7 +84,7 @@ describe('CLI keys', () => { encoding: 'binary', }); command = ['keys', 'encrypt', '-np', nodePath, dataPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -97,7 +97,7 @@ describe('CLI keys', () => { ); await fs.promises.writeFile(dataPath, encrypted, { encoding: 'binary' }); command = ['keys', 'decrypt', '-np', nodePath, dataPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -108,7 +108,7 @@ describe('CLI keys', () => { command = ['keys', 'sign', '-np', nodePath, dataPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -126,7 +126,7 @@ describe('CLI keys', () => { command = ['keys', 'verify', '-np', nodePath, dataPath, signatureTrue]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -143,7 +143,7 @@ describe('CLI keys', () => { command = ['keys', 'renew', '-np', nodePath, '-pnf', passPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const rootKeypairNew = polykeyAgent.keyManager.getRootKeyPair(); @@ -172,7 +172,7 @@ describe('CLI keys', () => { command = ['keys', 'reset', '-np', nodePath, '-pnf', passPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const rootKeypairNew = polykeyAgent.keyManager.getRootKeyPair(); @@ -200,7 +200,7 @@ describe('CLI keys', () => { command = ['keys', 'password', '-np', nodePath, '-pnf', passPath]; - const result2 = await utils.pkStdio([...command], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); await polykeyAgent.stop(); diff --git a/tests/bin/nodes.test.ts b/tests/bin/nodes/nodes.test.ts similarity index 83% rename from tests/bin/nodes.test.ts rename to tests/bin/nodes/nodes.test.ts index 5b42f9b64..d345d5528 100644 --- a/tests/bin/nodes.test.ts +++ b/tests/bin/nodes/nodes.test.ts @@ -6,8 +6,8 @@ import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import { makeNodeId } from '@/nodes/utils'; -import * as testUtils from './utils'; -import * as testKeynodeUtils from '../utils'; +import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -66,26 +66,26 @@ describe('CLI Nodes', () => { keynodeId = polykeyAgent.nodeManager.getNodeId(); // Setting up a remote keynode - remoteOnline = await testKeynodeUtils.setupRemoteKeynode({ + remoteOnline = await testUtils.setupRemoteKeynode({ logger, }); remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); - remoteOnlineHost = remoteOnline.revProxy.ingressHost; - remoteOnlinePort = 
remoteOnline.revProxy.ingressPort; - await testKeynodeUtils.addRemoteDetails(polykeyAgent, remoteOnline); + remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); + remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); + await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); // Setting up an offline remote keynode - remoteOffline = await testKeynodeUtils.setupRemoteKeynode({ + remoteOffline = await testUtils.setupRemoteKeynode({ logger, }); remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); - remoteOfflineHost = remoteOffline.revProxy.ingressHost; - remoteOfflinePort = remoteOffline.revProxy.ingressPort; - await testKeynodeUtils.addRemoteDetails(polykeyAgent, remoteOffline); + remoteOfflineHost = remoteOffline.revProxy.getIngressHost(); + remoteOfflinePort = remoteOffline.revProxy.getIngressPort(); + await testUtils.addRemoteDetails(polykeyAgent, remoteOffline); await remoteOffline.stop(); // Authorize session - await testUtils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], {}, nodePath, @@ -94,8 +94,8 @@ describe('CLI Nodes', () => { afterAll(async () => { await polykeyAgent.stop(); await polykeyAgent.destroy(); - await testKeynodeUtils.cleanupRemoteKeynode(remoteOnline); - await testKeynodeUtils.cleanupRemoteKeynode(remoteOffline); + await testUtils.cleanupRemoteKeynode(remoteOnline); + await testUtils.cleanupRemoteKeynode(remoteOffline); await fs.promises.rm(dataDir, { force: true, recursive: true, @@ -104,8 +104,8 @@ describe('CLI Nodes', () => { describe('commandClaimNode', () => { beforeAll(async () => { await remoteOnline.nodeManager.setNode(keynodeId, { - host: polykeyAgent.revProxy.ingressHost, - port: polykeyAgent.revProxy.ingressPort, + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), } as NodeAddress); await polykeyAgent.acl.setNodePerm(remoteOnlineNodeId, { gestalt: { @@ -141,7 +141,7 @@ describe('CLI Nodes', () => { 'Should send a gestalt invite', async () => { const commands = genCommands(['claim', remoteOnlineNodeId]); - const result = await testUtils.pkStdio(commands); + const result = await testBinUtils.pkStdio(commands); expect(result.exitCode).toBe(0); // Succeeds. expect(result.stdout).toContain('Gestalt Invite'); expect(result.stdout).toContain(remoteOnlineNodeId); @@ -157,7 +157,7 @@ describe('CLI Nodes', () => { remoteOnlineNodeId, '--force-invite', ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. expect(result.stdout).toContain('Gestalt Invite'); expect(result.stdout).toContain(remoteOnlineNodeId); @@ -167,7 +167,7 @@ describe('CLI Nodes', () => { type: 'GestaltInvite', }); const commands = genCommands(['claim', remoteOnlineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. expect(result.stdout).toContain('cryptolink claim'); expect(result.stdout).toContain(remoteOnlineNodeId); @@ -178,7 +178,7 @@ describe('CLI Nodes', () => { 'Should return failure when pinging an offline node', async () => { const commands = genCommands(['ping', remoteOfflineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(1); // Should fail with no response. 
for automation purposes. expect(result.stdout).toContain('No response received'); @@ -189,7 +189,7 @@ describe('CLI Nodes', () => { '--format', 'json', ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).toBe(1); // Should fail with no response. for automation purposes. expect(result2.stdout).toContain('No response received'); }, @@ -200,13 +200,13 @@ describe('CLI Nodes', () => { async () => { const fakeNodeId = nodeId1; const commands = genCommands(['ping', fakeNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).not.toBe(0); // Should fail if node doesn't exist. expect(result.stdout).toContain('Failed to resolve node ID'); // Json format. const commands2 = genCommands(['ping', fakeNodeId, '--format', 'json']); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).not.toBe(0); // Should fail if node doesn't exist. expect(result2.stdout).toContain('success'); expect(result2.stdout).toContain('false'); @@ -217,7 +217,7 @@ describe('CLI Nodes', () => { ); test('Should return success when pinging a live node', async () => { const commands = genCommands(['ping', remoteOnlineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain('Node is Active.'); @@ -228,7 +228,7 @@ describe('CLI Nodes', () => { '--format', 'json', ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).toBe(0); expect(result2.stdout).toContain('success'); expect(result2.stdout).toContain('true'); @@ -239,7 +239,7 @@ describe('CLI Nodes', () => { describe('commandFindNode', () => { test('Should find an online node', async () => { const commands = genCommands(['find', remoteOnlineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain('Found node at'); expect(result.stdout).toContain(remoteOnlineHost); @@ -252,7 +252,7 @@ describe('CLI Nodes', () => { '--format', 'json', ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).toBe(0); expect(result2.stdout).toContain('success'); expect(result2.stdout).toContain('true'); @@ -267,7 +267,7 @@ describe('CLI Nodes', () => { }); test('Should find an offline node', async () => { const commands = genCommands(['find', remoteOfflineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain('Found node at'); expect(result.stdout).toContain(remoteOfflineHost); @@ -280,7 +280,7 @@ describe('CLI Nodes', () => { '--format', 'json', ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).toBe(0); expect(result2.stdout).toContain('success'); expect(result2.stdout).toContain('true'); @@ -298,7 
+298,7 @@ describe('CLI Nodes', () => { async () => { const unknownNodeId = nodeId2; const commands = genCommands(['find', unknownNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(1); expect(result.stdout).toContain(`Failed to find node ${unknownNodeId}`); @@ -309,7 +309,7 @@ describe('CLI Nodes', () => { '--format', 'json', ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).toBe(1); expect(result2.stdout).toContain(`message`); expect(result2.stdout).toContain( @@ -337,8 +337,8 @@ describe('CLI Nodes', () => { }); afterAll(async () => { // Restore removed nodes - await testKeynodeUtils.addRemoteDetails(polykeyAgent, remoteOnline); - await testKeynodeUtils.addRemoteDetails(polykeyAgent, remoteOffline); + await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); + await testUtils.addRemoteDetails(polykeyAgent, remoteOffline); }); test('Should add the node', async () => { const commands = genCommands([ @@ -347,7 +347,7 @@ describe('CLI Nodes', () => { validHost, port.toString(), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Checking if node was added. @@ -365,7 +365,7 @@ describe('CLI Nodes', () => { validHost, port.toString(), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).not.toBe(0); expect(result.stderr).toContain('Invalid node ID.'); }, @@ -380,7 +380,7 @@ describe('CLI Nodes', () => { invalidHost, port.toString(), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).not.toBe(0); expect(result.stderr).toContain('Invalid IP address.'); diff --git a/tests/bin/notifications.test.ts b/tests/bin/notifications/notifications.test.ts similarity index 84% rename from tests/bin/notifications.test.ts rename to tests/bin/notifications/notifications.test.ts index a866b79d6..387767d08 100644 --- a/tests/bin/notifications.test.ts +++ b/tests/bin/notifications/notifications.test.ts @@ -8,8 +8,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { utils as idUtils } from '@matrixai/id'; import PolykeyAgent from '@/PolykeyAgent'; import { makeVaultId } from '@/vaults/utils'; -import * as utils from './utils'; -import * as testUtils from './utils'; +import * as testBinUtils from '../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -64,12 +63,12 @@ describe('CLI Notifications', () => { senderNodeId = senderPolykeyAgent.nodeManager.getNodeId(); receiverNodeId = receiverPolykeyAgent.nodeManager.getNodeId(); await senderPolykeyAgent.nodeManager.setNode(receiverNodeId, { - host: receiverPolykeyAgent.revProxy.ingressHost, - port: receiverPolykeyAgent.revProxy.ingressPort, + host: receiverPolykeyAgent.revProxy.getIngressHost(), + port: receiverPolykeyAgent.revProxy.getIngressPort(), } as NodeAddress); // Authorize session - await utils.pkStdio([ + await testBinUtils.pkStdio([ 'agent', 'unlock', '-np', @@ -77,7 +76,7 @@ describe('CLI Notifications', () => { '--password-file', senderPasswordFile, ]); - await utils.pkStdio([ + await testBinUtils.pkStdio([ 'agent', 'unlock', 
'-np', @@ -108,7 +107,7 @@ describe('CLI Notifications', () => { vaults: {}, }); const commands = genCommandsSender(['send', receiverNodeId, 'msg']); - const result = await testUtils.pkStdio(commands, {}, senderDataDir); + const result = await testBinUtils.pkStdio(commands, {}, senderDataDir); expect(result.exitCode).toBe(0); // Succeeds const notifications = await receiverPolykeyAgent.notificationsManager.readNotifications(); @@ -123,7 +122,7 @@ describe('CLI Notifications', () => { vaults: {}, }); const commands = genCommandsSender(['send', receiverNodeId, 'msg']); - const result = await testUtils.pkStdio(commands, {}, senderDataDir); + const result = await testBinUtils.pkStdio(commands, {}, senderDataDir); expect(result.exitCode).toBe(0); // Succeeds const notifications = await receiverPolykeyAgent.notificationsManager.readNotifications(); @@ -153,11 +152,11 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommands = genCommandsReceiver(['read']); - const result1 = await testUtils.pkStdio( + const result1 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -171,8 +170,8 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg4', ]); - await testUtils.pkStdio(senderCommands4, {}, senderDataDir); - const result2 = await testUtils.pkStdio( + await testBinUtils.pkStdio(senderCommands4, {}, senderDataDir); + const result2 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -205,19 +204,19 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommands1 = genCommandsReceiver(['read']); - await testUtils.pkStdio(receiverCommands1, {}, receiverDataDir); + await testBinUtils.pkStdio(receiverCommands1, {}, receiverDataDir); const senderCommands4 = genCommandsSender([ 'send', receiverNodeId, 'msg4', ]); - await testUtils.pkStdio(senderCommands4, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands4, {}, senderDataDir); const receiverCommands2 = genCommandsReceiver(['read', '--unread']); - const result = await testUtils.pkStdio( + const result = await testBinUtils.pkStdio( receiverCommands2, {}, receiverDataDir, @@ -250,11 +249,11 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommands = genCommandsReceiver(['read', '--number', '2']); - const result = await testUtils.pkStdio( + const result = await 
testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -286,9 +285,9 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommands = genCommandsReceiver([ 'read', '--unread', @@ -297,17 +296,17 @@ describe('CLI Notifications', () => { '--order', 'oldest', ]); - const result1 = await testUtils.pkStdio( + const result1 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, ); - const result2 = await testUtils.pkStdio( + const result2 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, ); - const result3 = await testUtils.pkStdio( + const result3 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -321,7 +320,7 @@ describe('CLI Notifications', () => { }); test('Should read no notifications.', async () => { const receiverCommands = genCommandsReceiver(['read']); - const result = await testUtils.pkStdio( + const result = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -365,7 +364,7 @@ describe('CLI Notifications', () => { notificationData3, ); const commands = genCommandsReceiver(['read']); - const result = await testUtils.pkStdio(commands, {}, receiverDataDir); + const result = await testBinUtils.pkStdio(commands, {}, receiverDataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain('Message from Keynode'); expect(result.stdout).toContain('invited you to join their Gestalt'); @@ -395,13 +394,13 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommandsClear = genCommandsReceiver(['clear']); const receiverCommandsRead = genCommandsReceiver(['read']); - await testUtils.pkStdio(receiverCommandsClear); - const result = await testUtils.pkStdio( + await testBinUtils.pkStdio(receiverCommandsClear); + const result = await testBinUtils.pkStdio( receiverCommandsRead, {}, receiverDataDir, diff --git a/tests/bin/polykey.test.ts b/tests/bin/polykey.test.ts index 688ec66d9..28bb328f6 100644 --- a/tests/bin/polykey.test.ts +++ b/tests/bin/polykey.test.ts @@ -1,8 +1,8 @@ -import { pkStdio } from './utils'; +import * as testBinUtils from './utils'; describe('polykey', () => { test('default help display', async () => { - const result = await pkStdio([]); + const result = await testBinUtils.pkStdio([]); expect(result.exitCode).toBe(0); expect(result.stdout).toBe(''); expect(result.stderr.length > 0).toBe(true); diff --git a/tests/bin/secret.test.ts b/tests/bin/secrets/secrets.test.ts similarity index 91% rename from tests/bin/secret.test.ts rename to tests/bin/secrets/secrets.test.ts index 0616d2f07..292dacdc9 100644 --- a/tests/bin/secret.test.ts +++ b/tests/bin/secrets/secrets.test.ts @@ -5,7 +5,7 @@ import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; 
import PolykeyAgent from '@/PolykeyAgent'; import { vaultOps } from '@/vaults'; -import * as utils from './utils'; +import * as testBinUtils from '../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -33,7 +33,7 @@ describe('CLI secrets', () => { logger: logger, }); // Authorize session - await utils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', dataDir, '--password-file', passwordFile], {}, dataDir, @@ -66,7 +66,7 @@ describe('CLI secrets', () => { `${vaultName}:MySecret`, ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const list = await vaultOps.listSecrets(vault); @@ -90,7 +90,7 @@ describe('CLI secrets', () => { command = ['secrets', 'delete', '-np', dataDir, `${vaultName}:MySecret`]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); list = await vaultOps.listSecrets(vault); @@ -106,7 +106,7 @@ describe('CLI secrets', () => { command = ['secrets', 'get', '-np', dataDir, `${vaultName}:MySecret`]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -121,7 +121,7 @@ describe('CLI secrets', () => { command = ['secrets', 'list', '-np', dataDir, vaultName]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -139,7 +139,7 @@ describe('CLI secrets', () => { '-r', ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); await vaultOps.addSecret(vault, 'dir1/MySecret1', 'this is the secret 1'); @@ -171,7 +171,7 @@ describe('CLI secrets', () => { 'MyRenamedSecret', ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const list = await vaultOps.listSecrets(vault); @@ -201,7 +201,7 @@ describe('CLI secrets', () => { `${vaultName}:MySecret`, ]; - const result2 = await utils.pkStdio([...command], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); const list = await vaultOps.listSecrets(vault); @@ -236,7 +236,7 @@ describe('CLI secrets', () => { command = ['secrets', 'dir', '-np', dataDir, secretDir, vaultName]; - const result2 = await utils.pkStdio([...command], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); list = await vaultOps.listSecrets(vault); diff --git a/tests/bin/sessions.test.ts b/tests/bin/sessions.test.ts index 6cb95c393..c870254c0 100644 --- a/tests/bin/sessions.test.ts +++ b/tests/bin/sessions.test.ts @@ -14,23 +14,27 @@ import { sleep } from '@/utils'; import config from '@/config'; import * as clientErrors from '@/client/errors'; import * as testBinUtils from './utils'; +import * as testUtils from '../utils'; -/** - * Mock prompts module which is used prompt for password - */ jest.mock('prompts'); const mockedPrompts = mocked(prompts); -describe('CLI Sessions', () => { +describe('sessions', () => { const logger = new Logger('sessions test', LogLevel.WARN, [ new 
StreamHandler(), ]); - let pkAgentClose; + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ + globalAgentDir, + globalAgentPassword, + globalAgentClose + } = await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); let dataDir: string; beforeEach(async () => { @@ -47,7 +51,7 @@ describe('CLI Sessions', () => { test('serial commands refresh the session token', async () => { const session = await Session.createSession({ sessionTokenPath: path.join( - global.binAgentDir, + globalAgentDir, config.defaults.tokenBase, ), fs, @@ -57,10 +61,10 @@ describe('CLI Sessions', () => { ({ exitCode } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); const token1 = await session.readToken(); @@ -71,10 +75,10 @@ describe('CLI Sessions', () => { ({ exitCode } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); const token2 = await session.readToken(); @@ -87,11 +91,11 @@ describe('CLI Sessions', () => { ({ exitCode, stderr } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_PASSWORD: 'invalid', PK_TOKEN: 'token', }, - global.binAgentDir, + globalAgentDir, )); testBinUtils.expectProcessError( exitCode, @@ -102,11 +106,11 @@ describe('CLI Sessions', () => { ({ exitCode, stderr } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_PASSWORD: 'invalid', PK_TOKEN: undefined, }, - global.binAgentDir, + globalAgentDir, )); testBinUtils.expectProcessError( exitCode, @@ -117,11 +121,11 @@ describe('CLI Sessions', () => { ({ exitCode, stderr } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_PASSWORD: undefined, PK_TOKEN: 'token', }, - global.binAgentDir, + globalAgentDir, )); testBinUtils.expectProcessError( exitCode, @@ -130,13 +134,13 @@ describe('CLI Sessions', () => { ); }); test('prompt for password to authenticate attended commands', async () => { - const password = global.binAgentPassword; + const password = globalAgentPassword; await testBinUtils.pkStdio( ['agent', 'lock'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); mockedPrompts.mockClear(); mockedPrompts.mockImplementation(async (_opts: any) => { @@ -145,9 +149,9 @@ describe('CLI Sessions', () => { const { exitCode } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); // Prompted for password 1 time @@ -158,11 +162,11 @@ describe('CLI Sessions', () => { await testBinUtils.pkStdio( ['agent', 'lock'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); - const validPassword = global.binAgentPassword; + const validPassword = 
globalAgentPassword; const invalidPassword = 'invalid'; mockedPrompts.mockClear(); mockedPrompts @@ -171,9 +175,9 @@ describe('CLI Sessions', () => { const { exitCode } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); // Prompted for password 2 times diff --git a/tests/bin/utils.retryAuth.test.ts b/tests/bin/utils.retryAuthentication.test.ts similarity index 98% rename from tests/bin/utils.retryAuth.test.ts rename to tests/bin/utils.retryAuthentication.test.ts index 79f3fce08..9a97f050f 100644 --- a/tests/bin/utils.retryAuth.test.ts +++ b/tests/bin/utils.retryAuthentication.test.ts @@ -4,9 +4,6 @@ import mockedEnv from 'mocked-env'; import { utils as clientUtils, errors as clientErrors } from '@/client'; import * as binUtils from '@/bin/utils'; -/** - * Mock prompts module which is used prompt for password - */ jest.mock('prompts'); const mockedPrompts = mocked(prompts); diff --git a/tests/bin/utils.ts b/tests/bin/utils.ts index 8b8bf115b..24905b260 100644 --- a/tests/bin/utils.ts +++ b/tests/bin/utils.ts @@ -309,111 +309,6 @@ async function pkExpect({ }); } -/** - * Creates a PK agent running in the global path - * Use this in beforeAll, and use the result in afterAll - * Uses a references directory as a reference count - * Uses fd-lock to serialise access to the pkAgent - * This means all test modules using this will be serialised - * Any beforeAll must use global.maxTimeout - * Tips for usage: - * * Do not restart this global agent - * * Ensure client-side side-effects are removed at the end of each test - * * Ensure server-side side-effects are removed at the end of each test - */ -async function pkAgent( - args: Array = [], - env: Record = {}, -): Promise<() => Promise> { - // The references directory will act like our reference count - await fs.promises.mkdir(path.join(global.binAgentDir, 'references'), { - recursive: true, - }); - const reference = Math.floor(Math.random() * 1000).toString(); - // Plus 1 to the reference count - await fs.promises.writeFile( - path.join(global.binAgentDir, 'references', reference), - reference, - ); - // This lock ensures serialised usage of global pkAgent - // It is placed after reference counting - // Because multiple test processes will queue up references - const testLockPath = path.join(global.binAgentDir, 'test.lock'); - const testLockFile = await fs.promises.open( - testLockPath, - fs.constants.O_WRONLY | fs.constants.O_CREAT, - ); - while (!lock(testLockFile.fd)) { - await sleep(1000); - } - // Here the agent server is part of the jest process - const { exitCode, stderr } = await pkStdio( - [ - 'agent', - 'start', - // 1024 is the smallest size and is faster to start - '--root-key-pair-bits', - '1024', - ...args, - ], - { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, - ...env, - }, - global.binAgentDir, - ); - // If the status is locked, we can ignore the start call - if (exitCode !== 0) { - // Last line of STDERR - const stdErrLine = stderr.trim().split('\n').pop(); - const e = new statusErrors.ErrorStatusLocked(); - // Expected output for ErrorStatusLocked - const eOutput = binUtils - .outputFormatter({ - type: 'error', - name: e.name, - description: e.description, - message: e.message, - }) - .trim(); - if (exitCode !== e.exitCode || stdErrLine !== eOutput) { - never(); - } - } - return async () => { - await fs.promises.rm( - path.join(global.binAgentDir, 'references', 
reference), - ); - lock.unlock(testLockFile.fd); - await testLockFile.close(); - // If the pids directory is not empty, there are other processes still running - try { - await fs.promises.rmdir(path.join(global.binAgentDir, 'references')); - } catch (e) { - if (e.code === 'ENOTEMPTY') { - return; - } - throw e; - } - const status = new Status({ - statusPath: path.join(global.binAgentDir, config.defaults.statusBase), - fs, - }); - await pkStdio( - ['agent', 'stop'], - { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, - }, - global.binAgentDir, - ); - // `pk agent stop` is asynchronous, need to wait for it to be DEAD - // This also means STDERR from the stopping agent may appear on the test logs - await status.waitFor('DEAD'); - }; -} - /** * Waits for child process to exit * When process is terminated with signal @@ -457,7 +352,6 @@ export { pkExec, pkSpawn, pkExpect, - pkAgent, processExit, expectProcessError, }; diff --git a/tests/bin/vaults.test.ts b/tests/bin/vaults/vaults.test.ts similarity index 89% rename from tests/bin/vaults.test.ts rename to tests/bin/vaults/vaults.test.ts index 818e919a8..86cadb604 100644 --- a/tests/bin/vaults.test.ts +++ b/tests/bin/vaults/vaults.test.ts @@ -7,7 +7,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import { makeNodeId } from '@/nodes/utils'; import { makeVaultIdPretty } from '@/vaults/utils'; -import * as utils from './utils'; +import * as testBinUtils from '../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -86,7 +86,7 @@ describe('CLI vaults', () => { vaultNumber = 0; // Authorize session - await utils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', dataDir, '--password-file', passwordFile], {}, dataDir, @@ -111,16 +111,16 @@ describe('CLI vaults', () => { await polykeyAgent.vaultManager.createVault('Vault1' as VaultName); await polykeyAgent.vaultManager.createVault('Vault2' as VaultName); - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); }); }); describe('commandCreateVaults', () => { test('should create vaults', async () => { command = ['vaults', 'create', '-np', dataDir, 'MyTestVault']; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const result2 = await utils.pkStdio( + const result2 = await testBinUtils.pkStdio( ['vaults', 'touch', '-np', dataDir, 'MyTestVault2'], {}, dataDir, @@ -143,7 +143,7 @@ describe('CLI vaults', () => { const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const list = (await polykeyAgent.vaultManager.listVaults()).keys(); @@ -166,7 +166,7 @@ describe('CLI vaults', () => { const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); // Exit code of the exception expect(result.exitCode).toBe(10); @@ -188,7 +188,7 @@ describe('CLI vaults', () => { id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result2 = await utils.pkStdio([...command], {}, dataDir); + const 
result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); const list = (await polykeyAgent.vaultManager.listVaults()).keys(); @@ -206,7 +206,7 @@ describe('CLI vaults', () => { const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -217,7 +217,7 @@ describe('CLI vaults', () => { const id = await polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); fail(); // FIXME methods not implemented. @@ -245,7 +245,7 @@ describe('CLI vaults', () => { // await polykeyAgent.vaults.setVaultPermissions(node2.id, id!); // await polykeyAgent.vaults.setVaultPermissions(node3.id, id!); - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); // Const sharedNodes = await polykeyAgent.vaults.getVaultPermissions( // id!, @@ -272,7 +272,7 @@ describe('CLI vaults', () => { // await polykeyAgent.vaults.unsetVaultPermissions(node2.id, vault.vaultId); - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); }); }); @@ -306,16 +306,16 @@ describe('CLI vaults', () => { // ); const targetNodeId = targetPolykeyAgent.nodeManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.ingressHost; - const targetPort = targetPolykeyAgent.revProxy.ingressPort; + const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); + const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); await polykeyAgent.nodeManager.setNode(targetNodeId, { host: targetHost, port: targetPort, }); // Client agent: Start sending hole-punching packets to the target await polykeyAgent.nodeManager.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.egressHost; - const clientEgressPort = polykeyAgent.fwdProxy.egressPort; + const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); + const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); // Server agent: start sending hole-punching packets back to the 'client' // agent (in order to establish a connection) await targetPolykeyAgent.nodeManager.openConnection( @@ -337,7 +337,7 @@ describe('CLI vaults', () => { // Vault does not exist on the source PolykeyAgent so the pull command throws an error which // caught, the error is checked and if it is ErrorVaultUndefined, then the Agent attempts a // clone instead - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); // Const list = (await polykeyAgent.vaults.listVaults()).map( @@ -382,16 +382,16 @@ describe('CLI vaults', () => { // ); const targetNodeId = targetPolykeyAgent.nodeManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.ingressHost; - const targetPort = targetPolykeyAgent.revProxy.ingressPort; + const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); + const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); await polykeyAgent.nodeManager.setNode(targetNodeId, { host: targetHost, port: targetPort, }); // Client agent: Start sending hole-punching packets to the 
target await polykeyAgent.nodeManager.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.egressHost; - const clientEgressPort = polykeyAgent.fwdProxy.egressPort; + const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); + const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); // Server agent: start sending hole-punching packets back to the 'client' // agent (in order to establish a connection) await targetPolykeyAgent.nodeManager.openConnection( @@ -425,7 +425,7 @@ describe('CLI vaults', () => { targetNodeId, ]; - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); // Await expect(clonedVault.listSecrets()).resolves.toStrictEqual([ @@ -454,16 +454,16 @@ describe('CLI vaults', () => { }); const targetNodeId = targetPolykeyAgent.nodeManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.ingressHost; - const targetPort = targetPolykeyAgent.revProxy.ingressPort; + const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); + const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); await polykeyAgent.nodeManager.setNode(targetNodeId, { host: targetHost, port: targetPort, }); // Client agent: Start sending hole-punching packets to the target await polykeyAgent.nodeManager.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.egressHost; - const clientEgressPort = polykeyAgent.fwdProxy.egressPort; + const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); + const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); // Server agent: start sending hole-punching packets back to the 'client' // agent (in order to establish a connection) await targetPolykeyAgent.nodeManager.openConnection( @@ -498,7 +498,7 @@ describe('CLI vaults', () => { '-ni', targetNodeId as string, ]; - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); await targetPolykeyAgent.stop(); @@ -526,7 +526,7 @@ describe('CLI vaults', () => { const command = ['vaults', 'version', '-np', dataDir, vaultName, ver1Oid]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const fileContents = await vault.access(async (efs) => { @@ -553,12 +553,12 @@ describe('CLI vaults', () => { const command = ['vaults', 'version', '-np', dataDir, vaultName, ver1Oid]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const command2 = ['vaults', 'version', '-np', dataDir, vaultName, 'last']; - const result2 = await utils.pkStdio([...command2], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command2], {}, dataDir); expect(result2.exitCode).toBe(0); }); test('should handle invalid version IDs', async () => { @@ -575,7 +575,7 @@ describe('CLI vaults', () => { 'NOT_A_VALID_CHECKOUT_ID', ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(10); expect(result.stderr).toContain('ErrorVaultCommitUndefined'); @@ -590,7 +590,7 @@ describe('CLI vaults', () => { 'NOT_A_VALID_CHECKOUT_ID', ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await 
testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(10); expect(result.stderr).toContain('ErrorVaultUndefined'); }); @@ -629,7 +629,7 @@ describe('CLI vaults', () => { test('Should get all commits', async () => { const command = ['vaults', 'log', '-np', dataDir, vaultName]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); expect(result.stdout).toContain(commit1Oid); expect(result.stdout).toContain(commit2Oid); @@ -638,7 +638,7 @@ describe('CLI vaults', () => { test('should get a part of the log', async () => { const command = ['vaults', 'log', '-np', dataDir, '-d', '2', vaultName]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); expect(result.stdout).not.toContain(commit1Oid); expect(result.stdout).toContain(commit2Oid); @@ -657,7 +657,7 @@ describe('CLI vaults', () => { commit2Oid, ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); expect(result.stdout).not.toContain(commit1Oid); expect(result.stdout).toContain(commit2Oid); diff --git a/tests/bootstrap/bootstrap.test.ts b/tests/bootstrap/bootstrap.test.ts deleted file mode 100644 index ec7dd5a5e..000000000 --- a/tests/bootstrap/bootstrap.test.ts +++ /dev/null @@ -1,82 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import os from 'os'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import PolykeyAgent from '@/PolykeyAgent'; -import * as bootstrapUtils from '@/bootstrap/utils'; -import { Status } from '@/status'; -import config from '@/config'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); - -describe('Bootstrap', () => { - const logger = new Logger('AgentServerTest', LogLevel.WARN, [ - new StreamHandler(), - ]); - let dataDir: string; - let nodePath: string; - - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'status-test-')); - nodePath = path.join(dataDir, 'Node'); - await fs.promises.mkdir(nodePath); - }); - afterEach(async () => { - await fs.promises.rm(nodePath, { - force: true, - recursive: true, - }); - }); - - describe('BootstrapPolykeyState', () => { - const password = 'password123'; - test( - 'should create state if no directory', - async () => { - // Await fs.promises.rmdir(nodePath); - await bootstrapUtils.bootstrapState({ nodePath, password, logger }); - // Should have keynode state; - }, - global.polykeyStartupTimeout * 4, - ); - - test('should create state if empty directory', async () => { - await bootstrapUtils.bootstrapState({ - nodePath, - password, - logger, - }); - }); - - test( - 'should be able to start agent on created state.', - async () => { - await bootstrapUtils.bootstrapState({ - nodePath, - password, - logger, - }); - const polykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - const statusPath = path.join(nodePath, config.defaults.statusBase); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('LIVE', 10000); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); - await status.waitFor('DEAD', 10000); - }, - global.polykeyStartupTimeout * 
2, - ); - }); -}); diff --git a/tests/bootstrap/utils.test.ts b/tests/bootstrap/utils.test.ts new file mode 100644 index 000000000..b0fbb60e9 --- /dev/null +++ b/tests/bootstrap/utils.test.ts @@ -0,0 +1,172 @@ +import fs from 'fs'; +import path from 'path'; +import os from 'os'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import { utils as bootstrapUtils, errors as bootstrapErrors } from '@/bootstrap'; +import { utils as keysUtils } from '@/keys'; +import { errors as statusErrors } from '@/status'; +import config from '@/config'; +import * as testUtils from '../utils'; + +describe('bootstrap/utils', () => { + const logger = new Logger('bootstrap/utils test', LogLevel.WARN, [ + new StreamHandler(), + ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + }); + let dataDir: string; + // let nodePath: string; + beforeEach(async () => { + dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'polykey-test-')); + // nodePath = path.join(dataDir, 'Node'); + // await fs.promises.mkdir(nodePath); + }); + afterEach(async () => { + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + test('bootstraps new node path', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const password = 'password'; + const recoveryCode = await bootstrapUtils.bootstrapState({ + password, + nodePath, + fs, + logger + }); + expect(typeof recoveryCode).toBe('string'); + expect( + recoveryCode.split(' ').length === 12 || + recoveryCode.split(' ').length === 24, + ).toBe(true); + const nodePathContents = await fs.promises.readdir(nodePath); + expect(nodePathContents.length > 0).toBe(true); + expect(nodePathContents).toContain(config.defaults.statusBase); + expect(nodePathContents).toContain(config.defaults.stateBase); + const stateContents = await fs.promises.readdir( + path.join(nodePath, config.defaults.stateBase) + ); + expect(stateContents).toContain(config.defaults.keysBase); + expect(stateContents).toContain(config.defaults.dbBase); + expect(stateContents).toContain(config.defaults.vaultsBase); + }); + test('bootstraps existing but empty node path', async () => { + const nodePath = path.join(dataDir, 'polykey'); + await fs.promises.mkdir(nodePath); + const password = 'password'; + const recoveryCode = await bootstrapUtils.bootstrapState({ + password, + nodePath, + fs, + logger + }); + expect(typeof recoveryCode).toBe('string'); + expect( + recoveryCode.split(' ').length === 12 || + recoveryCode.split(' ').length === 24, + ).toBe(true); + const nodePathContents = await fs.promises.readdir(nodePath); + expect(nodePathContents.length > 0).toBe(true); + expect(nodePathContents).toContain(config.defaults.statusBase); + expect(nodePathContents).toContain(config.defaults.stateBase); + const stateContents = await fs.promises.readdir( + path.join(nodePath, config.defaults.stateBase) + ); + expect(stateContents).toContain(config.defaults.keysBase); + expect(stateContents).toContain(config.defaults.dbBase); + 
expect(stateContents).toContain(config.defaults.vaultsBase); + }); + test('bootstrap fails if non-empty node path', async () => { + // Normal file + const nodePath1 = path.join(dataDir, 'polykey1'); + await fs.promises.mkdir(nodePath1); + await fs.promises.writeFile( + path.join(nodePath1, 'random'), + 'normal file', + 'utf-8' + ); + const password = 'password'; + await expect( + bootstrapUtils.bootstrapState({ + password, + nodePath: nodePath1, + fs, + logger + }) + ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); + // Hidden file + const nodePath2 = path.join(dataDir, 'polykey2'); + await fs.promises.mkdir(nodePath2); + await fs.promises.writeFile( + path.join(nodePath2, '.random'), + 'hidden file', + 'utf-8' + ); + await expect( + bootstrapUtils.bootstrapState({ + password, + nodePath: nodePath2, + fs, + logger + }) + ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); + // Directory + const nodePath3 = path.join(dataDir, 'polykey3'); + await fs.promises.mkdir(nodePath3); + await fs.promises.mkdir(path.join(nodePath3, 'random')); + await expect( + bootstrapUtils.bootstrapState({ + password, + nodePath: nodePath3, + fs, + logger + }) + ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); + }); + test('concurrent bootstrapping results in 1 success', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const password = 'password'; + const [result1, result2] = await Promise.allSettled([ + bootstrapUtils.bootstrapState({ + password, + nodePath, + fs, + logger + }), + bootstrapUtils.bootstrapState({ + password, + nodePath, + fs, + logger + }), + ]); + expect( + (result1.status === 'rejected' && + result1.reason instanceof statusErrors.ErrorStatusLocked) + || + (result2.status === 'rejected' && + result2.reason instanceof statusErrors.ErrorStatusLocked) + ).toBe(true); + expect( + (result1.status === 'fulfilled' && typeof result1.value === 'string') + || + (result2.status === 'fulfilled' && typeof result2.value === 'string') + ).toBe(true); + }); +}); diff --git a/tests/claims/utils.test.ts b/tests/claims/utils.test.ts index fccd00136..49bcfaba7 100644 --- a/tests/claims/utils.test.ts +++ b/tests/claims/utils.test.ts @@ -3,59 +3,18 @@ import type { PrivateKeyPem, PublicKeyPem } from '@/keys/types'; import type { IdentityId, ProviderId } from '@/identities/types'; import type { NodeId } from '@/nodes/types'; import type { Claim } from '@/claims/types'; -import os from 'os'; -import path from 'path'; -import fs from 'fs'; import { createPublicKey, createPrivateKey } from 'crypto'; import { generalVerify, GeneralSign } from 'jose'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import canonicalize from 'canonicalize'; -import { KeyManager } from '@/keys'; import { sleep } from '@/utils'; - import * as claimsUtils from '@/claims/utils'; import * as claimsErrors from '@/claims/errors'; import * as keysUtils from '@/keys/utils'; +import * as testUtils from '../utils'; -// Mocks. 
-jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); - -describe('Claims utils', () => { - const password = 'password'; - const logger = new Logger('Claims Test', LogLevel.WARN, [ - new StreamHandler(), - ]); - let dataDir: string; - let keyManager: KeyManager; - let publicKey: PublicKeyPem; - let privateKey: PrivateKeyPem; - - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const keysPath = `${dataDir}/keys`; - keyManager = await KeyManager.createKeyManager({ - password, - keysPath, - logger, - }); - publicKey = keyManager.getRootKeyPairPem().publicKey; - privateKey = keyManager.getRootKeyPairPem().privateKey; - }); - afterEach(async () => { - await keyManager.stop(); - await keyManager.destroy(); - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - +describe('claims/utils', () => { + const publicKey: PublicKeyPem = testUtils.globalKeyPairPem.publicKey; + const privateKey: PrivateKeyPem = testUtils.globalKeyPairPem.privateKey; test('creates a claim (both node and identity)', async () => { const nodeClaim = await claimsUtils.createClaim({ privateKey, @@ -348,7 +307,7 @@ describe('Claims utils', () => { expect(await claimsUtils.verifyClaimSignature(claim, publicKey)).toBe(true); // Create some dummy public key, and check that this does not verify - const dummyKeyPair = await keysUtils.generateKeyPair(4096); + const dummyKeyPair = await keysUtils.generateKeyPair(2048); const dummyPublicKey = await keysUtils.publicKeyToPem( dummyKeyPair.publicKey, ); diff --git a/tests/client/rpcAgent.test.ts b/tests/client/rpcAgent.test.ts index 68a623f5f..ff33db8cb 100644 --- a/tests/client/rpcAgent.test.ts +++ b/tests/client/rpcAgent.test.ts @@ -9,6 +9,7 @@ import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import { KeyManager } from '@/keys'; import { ForwardProxy } from '@/network'; import * as grpcUtils from '@/grpc/utils'; +import config from '@/config'; import { Status } from '@/status'; import * as testUtils from './utils'; @@ -110,7 +111,7 @@ describe('Agent client service', () => { const emptyMessage = new utilsPB.EmptyMessage(); await agentStop(emptyMessage, callCredentials); - const statusPath = path.join(polykeyAgent.nodePath, 'status'); + const statusPath = path.join(polykeyAgent.nodePath, config.defaults.statusBase); const status = new Status({ statusPath, fs, diff --git a/tests/client/rpcNodes.test.ts b/tests/client/rpcNodes.test.ts index 13769ec6b..913c8467e 100644 --- a/tests/client/rpcNodes.test.ts +++ b/tests/client/rpcNodes.test.ts @@ -17,9 +17,11 @@ import { ForwardProxy } from '@/network'; import * as grpcUtils from '@/grpc/utils'; import * as nodesErrors from '@/nodes/errors'; import { makeNodeId } from '@/nodes/utils'; +import config from '@/config'; import { Status } from '@/status'; import * as testUtils from './utils'; import * as testKeynodeUtils from '../utils'; +import { sleep } from '@/utils'; // Mocks. 
jest.mock('@/keys/utils', () => ({ @@ -43,7 +45,7 @@ jest.mock('@/keys/utils', () => ({ */ describe('Client service', () => { const password = 'password'; - const logger = new Logger('rpcNodes Test', LogLevel.WARN, [ + const logger = new Logger('rpcNodes Test', LogLevel.DEBUG, [ new StreamHandler(), ]); let client: ClientServiceClient; @@ -95,6 +97,9 @@ describe('Client service', () => { logger, fwdProxy, keyManager, + forwardProxyConfig: { + connTimeoutTime: 2000, + }, }); nodeManager = polykeyAgent.nodeManager; @@ -180,7 +185,7 @@ describe('Client service', () => { const serverNodeId = polykeyServer.nodeManager.getNodeId(); await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); await polykeyServer.stop(); - const statusPath = path.join(polykeyServer.nodePath, 'status.json'); + const statusPath = path.join(polykeyServer.nodePath, config.defaults.statusBase); const status = new Status({ statusPath, fs, @@ -208,6 +213,8 @@ describe('Client service', () => { // Case 3: pre-existing connection no longer active, so offline await polykeyServer.stop(); await status.waitFor('DEAD', 10000); + // Currently need this timeout - also set COnnectionForward setTImeout to 1000 + await sleep(3000); const res3 = await nodesPing(nodeMessage, callCredentials); expect(res3.getSuccess()).toEqual(false); }, diff --git a/tests/discovery/Discovery.test.ts b/tests/discovery/Discovery.test.ts index f912635e3..c4ea85b48 100644 --- a/tests/discovery/Discovery.test.ts +++ b/tests/discovery/Discovery.test.ts @@ -3,7 +3,7 @@ import type { IdentityId, ProviderId } from '@/identities/types'; import fs from 'fs'; import path from 'path'; import os from 'os'; -import Logger, { LogLevel } from '@matrixai/logger'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { Discovery } from '@/discovery'; import PolykeyAgent from '@/PolykeyAgent'; import * as discoveryErrors from '@/discovery/errors'; @@ -23,9 +23,10 @@ jest.mock('@/keys/utils', () => ({ })); describe('Discovery', () => { - // Constants. 
const password = 'password'; - const logger = new Logger('Discovery Tests', LogLevel.WARN); + const logger = new Logger('Discovery Tests', LogLevel.WARN, [ + new StreamHandler(), + ]); const testToken = { providerId: 'test-provider' as ProviderId, identityId: 'test_user' as IdentityId, diff --git a/tests/global.d.ts b/tests/global.d.ts new file mode 100644 index 000000000..228cfb6fc --- /dev/null +++ b/tests/global.d.ts @@ -0,0 +1,12 @@ +/** + * Follows the globals in jest.config.ts + * @module + */ +declare var projectDir: string; +declare var testDir: string; +declare var dataDir: string; +declare var password: string; +declare var defaultTimeout: number; +declare var polykeyStartupTimeout: number; +declare var failedConnectionTimeout: number; +declare var maxTimeout: number; diff --git a/tests/globalSetup.ts b/tests/globalSetup.ts index d2918915c..cfea609d8 100644 --- a/tests/globalSetup.ts +++ b/tests/globalSetup.ts @@ -1,45 +1,17 @@ +/* eslint-disable no-console */ +import fs from 'fs'; +import process from 'process'; + /** * Global setup for all jest tests * Side-effects are performed here - * No variable context is passed to the test modules * Jest does not support `@/` imports here - * @module */ -import os from 'os'; -import fs from 'fs'; -import path from 'path'; -import * as keysUtils from '../src/keys/utils'; - async function setup() { - // eslint-disable-next-line no-console console.log('\nGLOBAL SETUP'); - // Globals defined in setup.ts must be copied here - const keyPairDir = path.join(os.tmpdir(), 'polykey-test-keypair'); - const binAgentDir = path.join(os.tmpdir(), 'polykey-test-bin'); - // Setup global root key pair - // eslint-disable-next-line no-console - console.log(`Creating global.keyPairDir: ${keyPairDir}`); - await fs.promises.rm(keyPairDir, { force: true, recursive: true }); - await fs.promises.mkdir(keyPairDir); - const rootKeyPair = await keysUtils.generateKeyPair(4096); - const rootKeyPairPem = keysUtils.keyPairToPem(rootKeyPair); - await Promise.all([ - fs.promises.writeFile( - path.join(keyPairDir, 'root.pub'), - rootKeyPairPem.publicKey, - 'utf-8', - ), - fs.promises.writeFile( - path.join(keyPairDir, 'root.key'), - rootKeyPairPem.privateKey, - 'utf-8', - ), - ]); - // Setup global agent directory - // eslint-disable-next-line no-console - console.log(`Creating global.binAgentDir: ${binAgentDir}`); - await fs.promises.rm(binAgentDir, { force: true, recursive: true }); - await fs.promises.mkdir(binAgentDir); + // The globalDataDir is already created + const globalDataDir = process.env['GLOBAL_DATA_DIR']!; + console.log(`Global Data Dir: ${globalDataDir}`); } export default setup; diff --git a/tests/globalTeardown.ts b/tests/globalTeardown.ts index 0959608ad..c199c4d5b 100644 --- a/tests/globalTeardown.ts +++ b/tests/globalTeardown.ts @@ -1,27 +1,16 @@ +/* eslint-disable no-console */ +import fs from 'fs'; + /** * Global teardown for all jest tests * Side-effects are performed here - * No variable context is inherited from test modules * Jest does not support `@/` imports here - * @module */ -import os from 'os'; -import fs from 'fs'; -import path from 'path'; - async function teardown() { - // eslint-disable-next-line no-console console.log('GLOBAL TEARDOWN'); - // Globals defined in setup.ts must be copied here - const keyPairDir = path.join(os.tmpdir(), 'polykey-test-keypair'); - const binAgentDir = path.join(os.tmpdir(), 'polykey-test-bin'); - // eslint-disable-next-line no-console - console.log(`Destroying global.keyPairDir: ${keyPairDir}`); - await 
fs.promises.rm(keyPairDir, { force: true, recursive: true }); - // The global agent directory must be fresh - // eslint-disable-next-line no-console - console.log(`Destroying global.binAgentDir: ${binAgentDir}`); - await fs.promises.rm(binAgentDir, { force: true, recursive: true }); + const globalDataDir = process.env['GLOBAL_DATA_DIR']!; + console.log(`Destroying Global Data Dir: ${globalDataDir}`); + await fs.promises.rm(globalDataDir, { recursive: true }); } export default teardown; diff --git a/tests/keys/KeyManager.test.ts b/tests/keys/KeyManager.test.ts index 32a343ab9..248682135 100644 --- a/tests/keys/KeyManager.test.ts +++ b/tests/keys/KeyManager.test.ts @@ -210,7 +210,10 @@ describe('KeyManager', () => { }); // No way we can encrypt 1000 bytes without a ridiculous key size const plainText = Buffer.from(new Array(1000 + 1).join('A')); - const maxSize = keysUtils.maxEncryptSize(keysUtils.publicKeyBitSize(keyPair.publicKey) / 8, 32); + const maxSize = keysUtils.maxEncryptSize( + keysUtils.publicKeyBitSize(keyPair.publicKey) / 8, + 32, + ); await expect(keyManager.encryptWithRootKeyPair(plainText)).rejects.toThrow( `Maximum plain text byte size is ${maxSize}`, ); diff --git a/tests/network/ForwardProxy.test.ts b/tests/network/ForwardProxy.test.ts index 67c354c28..bfa787330 100644 --- a/tests/network/ForwardProxy.test.ts +++ b/tests/network/ForwardProxy.test.ts @@ -14,84 +14,89 @@ import { } from '@/network'; import * as keysUtils from '@/keys/utils'; import { promisify, promise, timerStart, timerStop } from '@/utils'; +import * as testUtils from '../utils'; describe('ForwardProxy', () => { + + // @ts-ignore + console.log(global.globalDataDir); + const logger = new Logger('ForwardProxy Test', LogLevel.WARN, [ new StreamHandler(), ]); - let keyPairPem, certPem; - let fwdProxy: ForwardProxy; - let authToken: string; + const keyPairPem = testUtils.globalKeyPairPem; + const cert = keysUtils.generateCertificate( + testUtils.globalKeyPair.publicKey, + testUtils.globalKeyPair.privateKey, + testUtils.globalKeyPair.privateKey, + 86400 + ); + const certPem = keysUtils.certToPem(cert); // Helper functions - async function connect( - host: string, - port: number, - token: string, - path: string, - ): Promise { - const socket = await new Promise((resolve, reject) => { - const req = http.request({ - method: 'CONNECT', - path: path, - host: host, - port: port, - headers: { - 'Proxy-Authorization': `Basic ${token}`, - }, - }); - req.end(); - req.once('connect', (res, clientSocket) => { - if (res.statusCode === 200) { - resolve(clientSocket); - } else { - reject(new Error(res.statusCode!.toString())); - } - }); - req.once('error', (e) => { - reject(e); - }); - }); - return socket; - } + // async function connect( + // host: string, + // port: number, + // token: string, + // path: string, + // ): Promise { + // const socket = await new Promise((resolve, reject) => { + // const req = http.request({ + // method: 'CONNECT', + // path: path, + // host: host, + // port: port, + // headers: { + // 'Proxy-Authorization': `Basic ${token}`, + // }, + // }); + // req.end(); + // req.once('connect', (res, clientSocket) => { + // if (res.statusCode === 200) { + // resolve(clientSocket); + // } else { + // reject(new Error(res.statusCode!.toString())); + // } + // }); + // req.once('error', (e) => { + // reject(e); + // }); + // }); + // return socket; + // } - beforeAll(async () => { - const keyPair = await keysUtils.generateKeyPair(4096); - keyPairPem = keysUtils.keyPairToPem(keyPair); - const cert = 
keysUtils.generateCertificate( - keyPair.publicKey, - keyPair.privateKey, - keyPair.privateKey, - 86400, - ); - certPem = keysUtils.certToPem(cert); - }); - beforeEach(async () => { - authToken = 'sdafjs8'; - fwdProxy = new ForwardProxy({ - authToken, - logger, + // beforeEach(async () => { + // authToken = 'sdafjs8'; + // fwdProxy = new ForwardProxy({ + // authToken, + // logger, + // }); + // await fwdProxy.start({ + // proxyHost: '::1' as Host, + // tlsConfig: { + // keyPrivatePem: keyPairPem.privateKey, + // certChainPem: certPem, + // }, + // }); + // }); + // afterEach(async () => { + // await fwdProxy.stop(); + // }); + + test('forward proxy readiness', async () => { + const fwdProxy = new ForwardProxy({ + authToken: '', + logger: logger, }); + // Should be a noop (already stopped) + await fwdProxy.stop(); await fwdProxy.start({ - proxyHost: '::1' as Host, tlsConfig: { keyPrivatePem: keyPairPem.privateKey, certChainPem: certPem, }, }); - }); - afterEach(async () => { - await fwdProxy.stop(); - }); - - test('reverseProxy readiness', async () => { - const fwdProxy = new ForwardProxy({ - authToken: '', - logger: logger, - }); - - // Should be a noop - await fwdProxy.stop(); + // Should be a noop (already started) await fwdProxy.start({ tlsConfig: { keyPrivatePem: keyPairPem.privateKey, @@ -100,815 +105,830 @@ describe('ForwardProxy', () => { }); await fwdProxy.stop(); expect(() => { - fwdProxy.proxyHost; - }).toThrow(networkErrors.ErrorForwardProxyNotStarted); + fwdProxy.getProxyHost(); + }).toThrow(networkErrors.ErrorForwardProxyNotRunning); await expect(async () => { await fwdProxy.closeConnection('::1' as Host, 1 as Port); - }).rejects.toThrow(networkErrors.ErrorForwardProxyNotStarted); + }).rejects.toThrow(networkErrors.ErrorForwardProxyNotRunning); }); + test('starting and stopping the forward proxy', async () => { - expect(typeof fwdProxy.proxyHost).toBe('string'); - expect(typeof fwdProxy.proxyPort).toBe('number'); - expect(fwdProxy.proxyPort).toBeGreaterThan(0); - expect(typeof fwdProxy.egressHost).toBe('string'); - expect(typeof fwdProxy.egressPort).toBe('number'); - expect(fwdProxy.egressPort).toBeGreaterThan(0); - expect(fwdProxy.connectionCount).toBe(0); - await fwdProxy.stop(); + const fwdProxy = new ForwardProxy({ + authToken: 'abc', + logger, + }); await fwdProxy.start({ - proxyHost: '::1' as Host, tlsConfig: { keyPrivatePem: keyPairPem.privateKey, certChainPem: certPem, }, }); - expect(fwdProxy.proxyHost).toBe('::1'); - await fwdProxy.stop(); - }); - test('connect failures to the forward proxy', async () => { - const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); - // Incorrect auth token - await expect(() => - connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - 'sdfisojfo', - `127.0.0.1:80?nodeId=${encodeURIComponent('SOMENODEID')}`, - ), - ).rejects.toThrow('407'); - // No node id - await expect(() => - connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, - '127.0.0.1:80', - ), - ).rejects.toThrow('400'); - // Missing target - await expect(() => - connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, - `?nodeId=${encodeURIComponent('123')}`, - ), - ).rejects.toThrow('400'); - // Targetting an un-used port - await expect(() => - connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, - `127.0.0.1:0?nodeId=${encodeURIComponent('123')}`, - ), - ).rejects.toThrow('400'); - await fwdProxy.stop(); - }); - test('open connection to port 0 fails', async () => { - // Cannot open connection to 
port 0 - await expect(() => - fwdProxy.openConnection('abc' as NodeId, '127.0.0.1' as Host, 0 as Port), - ).rejects.toThrow(networkErrors.ErrorConnectionStart); - await fwdProxy.stop(); - }); - test('open connection timeout due to hanging remote', async () => { - // This UTP server will just hang and not respond - let receivedConnection = false; - const utpSocketHang = UTP.createServer(() => { - receivedConnection = true; - }); - const utpSocketHangListen = promisify(utpSocketHang.listen).bind( - utpSocketHang, - ); - await utpSocketHangListen(0, '127.0.0.1'); - const utpSocketHangPort = utpSocketHang.address().port; - const timer = timerStart(3000); - await expect(() => - fwdProxy.openConnection( - 'abc' as NodeId, - '127.0.0.1' as Host, - utpSocketHangPort as Port, - timer, - ), - ).rejects.toThrow(networkErrors.ErrorConnectionStartTimeout); - timerStop(timer); - expect(receivedConnection).toBe(true); - utpSocketHang.close(); - utpSocketHang.unref(); - await fwdProxy.stop(); - }); - test('open connection reset due to ending remote', async () => { - // This UTP Server will immediately end and destroy - // the connection upon receiving a connection - let receivedConnection = false; - const utpSocketEnd = UTP.createServer((utpConn) => { - receivedConnection = true; - utpConn.end(); - utpConn.destroy(); - }); - const utpSocketEndListen = promisify(utpSocketEnd.listen).bind( - utpSocketEnd, - ); - await utpSocketEndListen(0, '127.0.0.1'); - const utpSocketEndPort = utpSocketEnd.address().port; - await expect(() => - fwdProxy.openConnection( - 'abc' as NodeId, - '127.0.0.1' as Host, - utpSocketEndPort as Port, - ), - ).rejects.toThrow(networkErrors.ErrorConnectionStart); - expect(receivedConnection).toBe(true); - // The actual error is UTP_ECONNRESET to be precise - await expect(() => - fwdProxy.openConnection( - 'abc' as NodeId, - '127.0.0.1' as Host, - utpSocketEndPort as Port, - ), - ).rejects.toThrow(/UTP_ECONNRESET/); - utpSocketEnd.close(); - utpSocketEnd.unref(); + expect(typeof fwdProxy.getProxyHost()).toBe('string'); + expect(typeof fwdProxy.getProxyPort()).toBe('number'); + expect(fwdProxy.getProxyPort()).toBeGreaterThan(0); + expect(typeof fwdProxy.getEgressHost()).toBe('string'); + expect(typeof fwdProxy.getEgressPort()).toBe('number'); + expect(fwdProxy.getEgressPort()).toBeGreaterThan(0); + expect(fwdProxy.getConnectionCount()).toBe(0); await fwdProxy.stop(); - }); - test('open connection fails due to missing certificates', async () => { - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; - const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - promise(); - // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { - tlsSocket.end(); - }); - await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); - const handleMessage = async (data: Buffer) => { - const msg = networkUtils.unserializeNetworkMessage(data); - if (msg.type === 'ping') { - await 
send(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - resolveRemoteReadyP(); - } - }; - utpSocket.on('message', handleMessage); - const send = async (data: Buffer) => { - const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - }; - const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - await utpSocketListen(0, '127.0.0.1'); - const utpSocketHost = utpSocket.address().address; - const utpSocketPort = utpSocket.address().port; - expect(fwdProxy.connectionCount).toBe(0); - // This is an SSL handshake failure - await expect(() => - fwdProxy.openConnection( - 'somerandomnodeid' as NodeId, - utpSocketHost as Host, - utpSocketPort as Port, - ), - ).rejects.toThrow(networkErrors.ErrorConnectionStart); - await expect(remoteClosedP).resolves.toBeUndefined(); - utpSocket.off('message', handleMessage); - utpSocket.close(); - utpSocket.unref(); - await fwdProxy.stop(); - }); - test('open connection fails due to invalid node id', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); - const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - const serverCert = keysUtils.generateCertificate( - serverKeyPair.publicKey, - serverKeyPair.privateKey, - serverKeyPair.privateKey, - 86400, - ); - const serverCertPem = keysUtils.certToPem(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; - const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - promise(); - // This UTP server will hold the connection - let secured = false; - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - secured = true; - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { - tlsSocket.end(); - }); - await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); - const handleMessage = async (data: Buffer) => { - const msg = networkUtils.unserializeNetworkMessage(data); - if (msg.type === 'ping') { - await send(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - resolveRemoteReadyP(); - } - }; - utpSocket.on('message', handleMessage); - const send = async (data: Buffer) => { - const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - }; - const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - await utpSocketListen(0, '127.0.0.1'); - const utpSocketHost = utpSocket.address().address; - const utpSocketPort = utpSocket.address().port; - expect(fwdProxy.connectionCount).toBe(0); - await expect(() => - fwdProxy.openConnection( - 'somerandomnodeid' as NodeId, - utpSocketHost as Host, - utpSocketPort as Port, - ), - ).rejects.toThrow(networkErrors.ErrorCertChainUnclaimed); - await expect(remoteReadyP).resolves.toBeUndefined(); - // The secure event won't be fired - // because the connection will be ended before that 
happens - expect(secured).toBe(false); - expect(fwdProxy.connectionCount).toBe(0); - await expect(remoteClosedP).resolves.toBeUndefined(); - utpSocket.off('message', handleMessage); - utpSocket.close(); - utpSocket.unref(); - await fwdProxy.stop(); - }); - test('open connection success', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); - const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - const serverCert = keysUtils.generateCertificate( - serverKeyPair.publicKey, - serverKeyPair.privateKey, - serverKeyPair.privateKey, - 86400, - ); - const serverCertPem = keysUtils.certToPem(serverCert); - const serverNodeId = networkUtils.certNodeId(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; - const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = - promise(); - const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - promise(); - // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { - tlsSocket.end(); - }); - await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); - const handleMessage = async (data: Buffer) => { - const msg = networkUtils.unserializeNetworkMessage(data); - if (msg.type === 'ping') { - await send(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - resolveRemoteReadyP(); - } - }; - utpSocket.on('message', handleMessage); - const send = async (data: Buffer) => { - const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - }; - const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - await utpSocketListen(0, '127.0.0.1'); - const utpSocketHost = utpSocket.address().address; - const utpSocketPort = utpSocket.address().port; - expect(fwdProxy.connectionCount).toBe(0); - await fwdProxy.openConnection( - serverNodeId, - utpSocketHost as Host, - utpSocketPort as Port, - ); - await expect(remoteReadyP).resolves.toBeUndefined(); - await expect(remoteSecureP).resolves.toBeUndefined(); - // Opening a duplicate connection is noop - await fwdProxy.openConnection( - serverNodeId, - utpSocketHost as Host, - utpSocketPort as Port, - ); - expect(fwdProxy.connectionCount).toBe(1); - await fwdProxy.closeConnection( - utpSocketHost as Host, - utpSocketPort as Port, - ); - expect(fwdProxy.connectionCount).toBe(0); - await expect(remoteClosedP).resolves.toBeUndefined(); - utpSocket.off('message', handleMessage); - utpSocket.close(); - utpSocket.unref(); - await fwdProxy.stop(); - }); - test('connect success by opening connection first', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); - const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - const serverCert = keysUtils.generateCertificate( - 
serverKeyPair.publicKey, - serverKeyPair.privateKey, - serverKeyPair.privateKey, - 86400, - ); - const serverCertPem = keysUtils.certToPem(serverCert); - const serverNodeId = networkUtils.certNodeId(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; - const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = - promise(); - const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - promise(); - // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { - tlsSocket.end(); - }); - await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); - const handleMessage = async (data: Buffer) => { - const msg = networkUtils.unserializeNetworkMessage(data); - if (msg.type === 'ping') { - await send(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - resolveRemoteReadyP(); - } - }; - utpSocket.on('message', handleMessage); - const send = async (data: Buffer) => { - const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - }; - const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - await utpSocketListen(0, '127.0.0.1'); - const utpSocketHost = utpSocket.address().address; - const utpSocketPort = utpSocket.address().port; - await fwdProxy.openConnection( - serverNodeId, - utpSocketHost as Host, - utpSocketPort as Port, - ); - await expect(remoteReadyP).resolves.toBeUndefined(); - await expect(remoteSecureP).resolves.toBeUndefined(); - const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); - const clientSocket = await connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, - `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( - serverNodeId, - )}`, - ); - expect(clientSocket).toBeInstanceOf(net.Socket); - expect(clientSocket.remoteAddress).toBe(fwdProxy.proxyHost); - expect(clientSocket.remotePort).toBe(fwdProxy.proxyPort); - const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); - clientSocket.on('close', () => { - resolveLocalClosedP(); - }); - await fwdProxy.closeConnection( - utpSocketHost as Host, - utpSocketPort as Port, - ); - await expect(localClosedP).resolves.toBeUndefined(); - await expect(remoteClosedP).resolves.toBeUndefined(); - utpSocket.off('message', handleMessage); - utpSocket.close(); - utpSocket.unref(); - await fwdProxy.stop(); - }); - test('connect success by direct connection', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); - const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - const serverCert = keysUtils.generateCertificate( - serverKeyPair.publicKey, - serverKeyPair.privateKey, - serverKeyPair.privateKey, - 86400, - ); - const serverCertPem = 
keysUtils.certToPem(serverCert); - const serverNodeId = networkUtils.certNodeId(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; - const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = - promise(); - const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - promise(); - // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { - tlsSocket.end(); - }); - await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, + await fwdProxy.start({ + proxyHost: '::1' as Host, + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, }, - ); - const handleMessage = async (data: Buffer) => { - const msg = networkUtils.unserializeNetworkMessage(data); - if (msg.type === 'ping') { - await send(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - resolveRemoteReadyP(); - } - }; - utpSocket.on('message', handleMessage); - const send = async (data: Buffer) => { - const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - }; - const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - await utpSocketListen(0, '127.0.0.1'); - const utpSocketHost = utpSocket.address().address; - const utpSocketPort = utpSocket.address().port; - const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); - const clientSocket = await connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, - `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( - serverNodeId, - )}`, - ); - await expect(remoteReadyP).resolves.toBeUndefined(); - await expect(remoteSecureP).resolves.toBeUndefined(); - expect(clientSocket).toBeInstanceOf(net.Socket); - expect(clientSocket.remoteAddress).toBe(fwdProxy.proxyHost); - expect(clientSocket.remotePort).toBe(fwdProxy.proxyPort); - const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); - clientSocket.on('close', () => { - resolveLocalClosedP(); }); - await fwdProxy.closeConnection( - utpSocketHost as Host, - utpSocketPort as Port, - ); - await expect(localClosedP).resolves.toBeUndefined(); - await expect(remoteClosedP).resolves.toBeUndefined(); - utpSocket.off('message', handleMessage); - utpSocket.close(); - utpSocket.unref(); - await fwdProxy.stop(); - }); - test('stopping the proxy with open connections', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); - const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - const serverCert = keysUtils.generateCertificate( - serverKeyPair.publicKey, - serverKeyPair.privateKey, - serverKeyPair.privateKey, - 86400, - ); - const serverCertPem = keysUtils.certToPem(serverCert); - const serverNodeId = networkUtils.certNodeId(serverCert); - const egressHost = 
fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; - const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = - promise(); - const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - promise(); - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { - tlsSocket.end(); - }); - await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); - const handleMessage = async (data: Buffer) => { - const msg = networkUtils.unserializeNetworkMessage(data); - if (msg.type === 'ping') { - await send(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - resolveRemoteReadyP(); - } - }; - utpSocket.on('message', handleMessage); - const send = async (data: Buffer) => { - const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - }; - const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - await utpSocketListen(0, '127.0.0.1'); - const utpSocketHost = utpSocket.address().address; - const utpSocketPort = utpSocket.address().port; - expect(fwdProxy.connectionCount).toBe(0); - await fwdProxy.openConnection( - serverNodeId, - utpSocketHost as Host, - utpSocketPort as Port, - ); - await expect(remoteReadyP).resolves.toBeUndefined(); - await expect(remoteSecureP).resolves.toBeUndefined(); - expect(fwdProxy.connectionCount).toBe(1); - await fwdProxy.stop(); - expect(fwdProxy.connectionCount).toBe(0); - utpSocket.off('message', handleMessage); - utpSocket.close(); - utpSocket.unref(); - await expect(remoteClosedP).resolves.toBeUndefined(); - }); - test('open connection to multiple servers', async () => { - // First server keys - const serverKeyPair1 = await keysUtils.generateKeyPair(4096); - const serverKeyPairPem1 = keysUtils.keyPairToPem(serverKeyPair1); - const serverCert1 = keysUtils.generateCertificate( - serverKeyPair1.publicKey, - serverKeyPair1.privateKey, - serverKeyPair1.privateKey, - 86400, - ); - const serverCertPem1 = keysUtils.certToPem(serverCert1); - const serverNodeId1 = networkUtils.certNodeId(serverCert1); - // Second server keys - const serverKeyPair2 = await keysUtils.generateKeyPair(4096); - const serverKeyPairPem2 = keysUtils.keyPairToPem(serverKeyPair2); - const serverCert2 = keysUtils.generateCertificate( - serverKeyPair2.publicKey, - serverKeyPair2.privateKey, - serverKeyPair2.privateKey, - 86400, - ); - const serverCertPem2 = keysUtils.certToPem(serverCert2); - const serverNodeId2 = networkUtils.certNodeId(serverCert2); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; - // First signals - const { p: remoteReadyP1, resolveP: resolveRemoteReadyP1 } = - promise(); - const { p: remoteClosedP1, resolveP: resolveRemoteClosedP1 } = - promise(); - // Second signals - const { p: remoteReadyP2, resolveP: resolveRemoteReadyP2 } = - 
promise(); - const { p: remoteClosedP2, resolveP: resolveRemoteClosedP2 } = - promise(); - const utpSocket1 = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem1.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem1, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP1(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { - tlsSocket.end(); - }); - await send1(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send1(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP1; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); - const handleMessage1 = async (data: Buffer) => { - const msg = networkUtils.unserializeNetworkMessage(data); - if (msg.type === 'ping') { - await send1(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - resolveRemoteReadyP1(); - } - }; - utpSocket1.on('message', handleMessage1); - const send1 = async (data: Buffer) => { - const utpSocketSend = promisify(utpSocket1.send).bind(utpSocket1); - await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - }; - const utpSocketListen1 = promisify(utpSocket1.listen).bind(utpSocket1); - await utpSocketListen1(0, '127.0.0.1'); - const utpSocketHost1 = utpSocket1.address().address; - const utpSocketPort1 = utpSocket1.address().port; - const utpSocket2 = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem2.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem2, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP2(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { - tlsSocket.end(); - }); - await send2(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send2(networkUtils.pingBuffer); - }, 2000); - await remoteReadyP2; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); - const handleMessage2 = async (data: Buffer) => { - const msg = networkUtils.unserializeNetworkMessage(data); - if (msg.type === 'ping') { - await send2(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - resolveRemoteReadyP2(); - } - }; - utpSocket2.on('message', handleMessage2); - const send2 = async (data: Buffer) => { - const utpSocketSend = promisify(utpSocket2.send).bind(utpSocket2); - await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - }; - const utpSocketListen2 = promisify(utpSocket2.listen).bind(utpSocket2); - await utpSocketListen2(0, '127.0.0.1'); - const utpSocketHost2 = utpSocket2.address().address; - const utpSocketPort2 = utpSocket2.address().port; - expect(fwdProxy.connectionCount).toBe(0); - await fwdProxy.openConnection( - serverNodeId1, - utpSocketHost1 as Host, - utpSocketPort1 as Port, - ); - await fwdProxy.openConnection( - serverNodeId2, - utpSocketHost2 as Host, - utpSocketPort2 as Port, - ); - expect(fwdProxy.connectionCount).toBe(2); - await expect(remoteReadyP1).resolves.toBeUndefined(); - await expect(remoteReadyP2).resolves.toBeUndefined(); - await fwdProxy.closeConnection( - utpSocketHost1 as Host, - utpSocketPort1 as Port, - ); - await fwdProxy.closeConnection( - utpSocketHost2 as Host, - utpSocketPort2 as 
Port, - ); - expect(fwdProxy.connectionCount).toBe(0); - await expect(remoteClosedP1).resolves.toBeUndefined(); - await expect(remoteClosedP2).resolves.toBeUndefined(); - utpSocket1.off('message', handleMessage1); - utpSocket1.close(); - utpSocket1.unref(); - utpSocket2.off('message', handleMessage2); - utpSocket2.close(); - utpSocket2.unref(); + expect(fwdProxy.getProxyHost()).toBe('::1'); await fwdProxy.stop(); }); + + + // test('connect failures to the forward proxy', async () => { + // const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); + // // Incorrect auth token + // await expect(() => + // connect( + // fwdProxy.proxyHost, + // fwdProxy.proxyPort, + // 'sdfisojfo', + // `127.0.0.1:80?nodeId=${encodeURIComponent('SOMENODEID')}`, + // ), + // ).rejects.toThrow('407'); + // // No node id + // await expect(() => + // connect( + // fwdProxy.proxyHost, + // fwdProxy.proxyPort, + // authTokenEncoded, + // '127.0.0.1:80', + // ), + // ).rejects.toThrow('400'); + // // Missing target + // await expect(() => + // connect( + // fwdProxy.proxyHost, + // fwdProxy.proxyPort, + // authTokenEncoded, + // `?nodeId=${encodeURIComponent('123')}`, + // ), + // ).rejects.toThrow('400'); + // // Targetting an un-used port + // await expect(() => + // connect( + // fwdProxy.proxyHost, + // fwdProxy.proxyPort, + // authTokenEncoded, + // `127.0.0.1:0?nodeId=${encodeURIComponent('123')}`, + // ), + // ).rejects.toThrow('400'); + // await fwdProxy.stop(); + // }); + // test('open connection to port 0 fails', async () => { + // // Cannot open connection to port 0 + // await expect(() => + // fwdProxy.openConnection('abc' as NodeId, '127.0.0.1' as Host, 0 as Port), + // ).rejects.toThrow(networkErrors.ErrorConnectionStart); + // await fwdProxy.stop(); + // }); + // test('open connection timeout due to hanging remote', async () => { + // // This UTP server will just hang and not respond + // let receivedConnection = false; + // const utpSocketHang = UTP.createServer(() => { + // receivedConnection = true; + // }); + // const utpSocketHangListen = promisify(utpSocketHang.listen).bind( + // utpSocketHang, + // ); + // await utpSocketHangListen(0, '127.0.0.1'); + // const utpSocketHangPort = utpSocketHang.address().port; + // const timer = timerStart(3000); + // await expect(() => + // fwdProxy.openConnection( + // 'abc' as NodeId, + // '127.0.0.1' as Host, + // utpSocketHangPort as Port, + // timer, + // ), + // ).rejects.toThrow(networkErrors.ErrorConnectionStartTimeout); + // timerStop(timer); + // expect(receivedConnection).toBe(true); + // utpSocketHang.close(); + // utpSocketHang.unref(); + // await fwdProxy.stop(); + // }); + // test('open connection reset due to ending remote', async () => { + // // This UTP Server will immediately end and destroy + // // the connection upon receiving a connection + // let receivedConnection = false; + // const utpSocketEnd = UTP.createServer((utpConn) => { + // receivedConnection = true; + // utpConn.end(); + // utpConn.destroy(); + // }); + // const utpSocketEndListen = promisify(utpSocketEnd.listen).bind( + // utpSocketEnd, + // ); + // await utpSocketEndListen(0, '127.0.0.1'); + // const utpSocketEndPort = utpSocketEnd.address().port; + // await expect(() => + // fwdProxy.openConnection( + // 'abc' as NodeId, + // '127.0.0.1' as Host, + // utpSocketEndPort as Port, + // ), + // ).rejects.toThrow(networkErrors.ErrorConnectionStart); + // expect(receivedConnection).toBe(true); + // // The actual error is UTP_ECONNRESET to be precise + // await 
expect(() => + // fwdProxy.openConnection( + // 'abc' as NodeId, + // '127.0.0.1' as Host, + // utpSocketEndPort as Port, + // ), + // ).rejects.toThrow(/UTP_ECONNRESET/); + // utpSocketEnd.close(); + // utpSocketEnd.unref(); + // await fwdProxy.stop(); + // }); + // test.only('open connection fails due to missing certificates', async () => { + // const egressHost = fwdProxy.egressHost; + // const egressPort = fwdProxy.egressPort; + // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + // promise(); + // // This UTP server will hold the connection + // const utpSocket = UTP.createServer( + // { + // allowHalfOpen: false, + // }, + // async (utpConn) => { + // const tlsSocket = new tls.TLSSocket(utpConn, { + // isServer: true, + // requestCert: true, + // rejectUnauthorized: false, + // }); + // tlsSocket.on('close', () => { + // console.log('CLOSE EVENT'); + // resolveRemoteClosedP(); + // }); + // // AllowHalfOpen is buggy + // // this ends the connection in case it doesn't work + // tlsSocket.on('end', () => { + // console.log('END EVENT'); + // tlsSocket.end(); + // }); + // await send(networkUtils.pingBuffer); + // const punchInterval = setInterval(async () => { + // await send(networkUtils.pingBuffer); + // }, 1000); + // await remoteReadyP; + // clearInterval(punchInterval); + // } + // ); + // const handleMessage = async (data: Buffer) => { + // const msg = networkUtils.unserializeNetworkMessage(data); + // if (msg.type === 'ping') { + // await send(networkUtils.pongBuffer); + // } else if (msg.type === 'pong') { + // resolveRemoteReadyP(); + // } + // }; + // utpSocket.on('message', handleMessage); + // const send = async (data: Buffer) => { + // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + // }; + // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + // await utpSocketListen(0, '127.0.0.1'); + // const utpSocketHost = utpSocket.address().address; + // const utpSocketPort = utpSocket.address().port; + // expect(fwdProxy.connectionCount).toBe(0); + // // This is an SSL handshake failure + // await expect(() => + // fwdProxy.openConnection( + // 'somerandomnodeid' as NodeId, + // utpSocketHost as Host, + // utpSocketPort as Port, + // ), + // ).rejects.toThrow(networkErrors.ErrorConnectionStart); + // await expect(remoteClosedP).resolves.toBeUndefined(); + // utpSocket.off('message', handleMessage); + // utpSocket.close(); + // utpSocket.unref(); + // await fwdProxy.stop(); + // }); + // test('open connection fails due to invalid node id', async () => { + // const serverKeyPair = await keysUtils.generateKeyPair(4096); + // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + // const serverCert = keysUtils.generateCertificate( + // serverKeyPair.publicKey, + // serverKeyPair.privateKey, + // serverKeyPair.privateKey, + // 86400, + // ); + // const serverCertPem = keysUtils.certToPem(serverCert); + // const egressHost = fwdProxy.egressHost; + // const egressPort = fwdProxy.egressPort; + // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + // promise(); + // // This UTP server will hold the connection + // let secured = false; + // const utpSocket = UTP.createServer( + // { + // allowHalfOpen: false, + // }, + // async (utpConn) => { + // const tlsSocket = new tls.TLSSocket(utpConn, { 
+ // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + // cert: Buffer.from(serverCertPem, 'ascii'), + // isServer: true, + // requestCert: true, + // rejectUnauthorized: false, + // }); + // tlsSocket.on('secure', () => { + // secured = true; + // }); + // tlsSocket.on('close', () => { + // resolveRemoteClosedP(); + // }); + // // AllowHalfOpen is buggy + // // this ends the connection in case it doesn't work + // tlsSocket.on('end', () => { + // tlsSocket.end(); + // }); + // await send(networkUtils.pingBuffer); + // const punchInterval = setInterval(async () => { + // await send(networkUtils.pingBuffer); + // }, 1000); + // await remoteReadyP; + // clearInterval(punchInterval); + // } + // ); + // const handleMessage = async (data: Buffer) => { + // const msg = networkUtils.unserializeNetworkMessage(data); + // if (msg.type === 'ping') { + // await send(networkUtils.pongBuffer); + // } else if (msg.type === 'pong') { + // resolveRemoteReadyP(); + // } + // }; + // utpSocket.on('message', handleMessage); + // const send = async (data: Buffer) => { + // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + // }; + // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + // await utpSocketListen(0, '127.0.0.1'); + // const utpSocketHost = utpSocket.address().address; + // const utpSocketPort = utpSocket.address().port; + // expect(fwdProxy.connectionCount).toBe(0); + // await expect(() => + // fwdProxy.openConnection( + // 'somerandomnodeid' as NodeId, + // utpSocketHost as Host, + // utpSocketPort as Port, + // ), + // ).rejects.toThrow(networkErrors.ErrorCertChainUnclaimed); + // await expect(remoteReadyP).resolves.toBeUndefined(); + // // The secure event won't be fired + // // because the connection will be ended before that happens + // expect(secured).toBe(false); + // expect(fwdProxy.connectionCount).toBe(0); + // await expect(remoteClosedP).resolves.toBeUndefined(); + // utpSocket.off('message', handleMessage); + // utpSocket.close(); + // utpSocket.unref(); + // await fwdProxy.stop(); + // }); + // test('open connection success', async () => { + // const serverKeyPair = await keysUtils.generateKeyPair(4096); + // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + // const serverCert = keysUtils.generateCertificate( + // serverKeyPair.publicKey, + // serverKeyPair.privateKey, + // serverKeyPair.privateKey, + // 86400, + // ); + // const serverCertPem = keysUtils.certToPem(serverCert); + // const serverNodeId = networkUtils.certNodeId(serverCert); + // const egressHost = fwdProxy.egressHost; + // const egressPort = fwdProxy.egressPort; + // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + // const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + // promise(); + // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + // promise(); + // // This UTP server will hold the connection + // const utpSocket = UTP.createServer( + // { + // allowHalfOpen: false, + // }, + // async (utpConn) => { + // const tlsSocket = new tls.TLSSocket(utpConn, { + // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + // cert: Buffer.from(serverCertPem, 'ascii'), + // isServer: true, + // requestCert: true, + // rejectUnauthorized: false, + // }); + // tlsSocket.on('secure', () => { + // resolveRemoteSecureP(); + // }); + // tlsSocket.on('close', () => { + // resolveRemoteClosedP(); + // }); + // // AllowHalfOpen is buggy + // // this 
ends the connection in case it doesn't work + // tlsSocket.on('end', () => { + // tlsSocket.end(); + // }); + // await send(networkUtils.pingBuffer); + // const punchInterval = setInterval(async () => { + // await send(networkUtils.pingBuffer); + // }, 1000); + // await remoteReadyP; + // clearInterval(punchInterval); + // } + // ); + // const handleMessage = async (data: Buffer) => { + // const msg = networkUtils.unserializeNetworkMessage(data); + // if (msg.type === 'ping') { + // await send(networkUtils.pongBuffer); + // } else if (msg.type === 'pong') { + // resolveRemoteReadyP(); + // } + // }; + // utpSocket.on('message', handleMessage); + // const send = async (data: Buffer) => { + // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + // }; + // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + // await utpSocketListen(0, '127.0.0.1'); + // const utpSocketHost = utpSocket.address().address; + // const utpSocketPort = utpSocket.address().port; + // expect(fwdProxy.connectionCount).toBe(0); + // await fwdProxy.openConnection( + // serverNodeId, + // utpSocketHost as Host, + // utpSocketPort as Port, + // ); + // await expect(remoteReadyP).resolves.toBeUndefined(); + // await expect(remoteSecureP).resolves.toBeUndefined(); + // // Opening a duplicate connection is noop + // await fwdProxy.openConnection( + // serverNodeId, + // utpSocketHost as Host, + // utpSocketPort as Port, + // ); + // expect(fwdProxy.connectionCount).toBe(1); + // await fwdProxy.closeConnection( + // utpSocketHost as Host, + // utpSocketPort as Port, + // ); + // expect(fwdProxy.connectionCount).toBe(0); + // await expect(remoteClosedP).resolves.toBeUndefined(); + // utpSocket.off('message', handleMessage); + // utpSocket.close(); + // utpSocket.unref(); + // await fwdProxy.stop(); + // }); + // test('connect success by opening connection first', async () => { + // const serverKeyPair = await keysUtils.generateKeyPair(4096); + // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + // const serverCert = keysUtils.generateCertificate( + // serverKeyPair.publicKey, + // serverKeyPair.privateKey, + // serverKeyPair.privateKey, + // 86400, + // ); + // const serverCertPem = keysUtils.certToPem(serverCert); + // const serverNodeId = networkUtils.certNodeId(serverCert); + // const egressHost = fwdProxy.egressHost; + // const egressPort = fwdProxy.egressPort; + // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + // const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + // promise(); + // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + // promise(); + // // This UTP server will hold the connection + // const utpSocket = UTP.createServer( + // { + // allowHalfOpen: false, + // }, + // async (utpConn) => { + // const tlsSocket = new tls.TLSSocket(utpConn, { + // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + // cert: Buffer.from(serverCertPem, 'ascii'), + // isServer: true, + // requestCert: true, + // rejectUnauthorized: false, + // }); + // tlsSocket.on('secure', () => { + // resolveRemoteSecureP(); + // }); + // tlsSocket.on('close', () => { + // resolveRemoteClosedP(); + // }); + // // AllowHalfOpen is buggy + // // this ends the connection in case it doesn't work + // tlsSocket.on('end', () => { + // tlsSocket.end(); + // }); + // await send(networkUtils.pingBuffer); + // const punchInterval = setInterval(async () => { + // await 
send(networkUtils.pingBuffer); + // }, 1000); + // await remoteReadyP; + // clearInterval(punchInterval); + // } + // ); + // const handleMessage = async (data: Buffer) => { + // const msg = networkUtils.unserializeNetworkMessage(data); + // if (msg.type === 'ping') { + // await send(networkUtils.pongBuffer); + // } else if (msg.type === 'pong') { + // resolveRemoteReadyP(); + // } + // }; + // utpSocket.on('message', handleMessage); + // const send = async (data: Buffer) => { + // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + // }; + // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + // await utpSocketListen(0, '127.0.0.1'); + // const utpSocketHost = utpSocket.address().address; + // const utpSocketPort = utpSocket.address().port; + // await fwdProxy.openConnection( + // serverNodeId, + // utpSocketHost as Host, + // utpSocketPort as Port, + // ); + // await expect(remoteReadyP).resolves.toBeUndefined(); + // await expect(remoteSecureP).resolves.toBeUndefined(); + // const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); + // const clientSocket = await connect( + // fwdProxy.proxyHost, + // fwdProxy.proxyPort, + // authTokenEncoded, + // `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + // serverNodeId, + // )}`, + // ); + // expect(clientSocket).toBeInstanceOf(net.Socket); + // expect(clientSocket.remoteAddress).toBe(fwdProxy.proxyHost); + // expect(clientSocket.remotePort).toBe(fwdProxy.proxyPort); + // const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + // clientSocket.on('close', () => { + // resolveLocalClosedP(); + // }); + // await fwdProxy.closeConnection( + // utpSocketHost as Host, + // utpSocketPort as Port, + // ); + // await expect(localClosedP).resolves.toBeUndefined(); + // await expect(remoteClosedP).resolves.toBeUndefined(); + // utpSocket.off('message', handleMessage); + // utpSocket.close(); + // utpSocket.unref(); + // await fwdProxy.stop(); + // }); + // test('connect success by direct connection', async () => { + // const serverKeyPair = await keysUtils.generateKeyPair(4096); + // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + // const serverCert = keysUtils.generateCertificate( + // serverKeyPair.publicKey, + // serverKeyPair.privateKey, + // serverKeyPair.privateKey, + // 86400, + // ); + // const serverCertPem = keysUtils.certToPem(serverCert); + // const serverNodeId = networkUtils.certNodeId(serverCert); + // const egressHost = fwdProxy.egressHost; + // const egressPort = fwdProxy.egressPort; + // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + // const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + // promise(); + // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + // promise(); + // // This UTP server will hold the connection + // const utpSocket = UTP.createServer( + // { + // allowHalfOpen: false, + // }, + // async (utpConn) => { + // const tlsSocket = new tls.TLSSocket(utpConn, { + // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + // cert: Buffer.from(serverCertPem, 'ascii'), + // isServer: true, + // requestCert: true, + // rejectUnauthorized: false, + // }); + // tlsSocket.on('secure', () => { + // resolveRemoteSecureP(); + // }); + // tlsSocket.on('close', () => { + // resolveRemoteClosedP(); + // }); + // // AllowHalfOpen is buggy + // // this ends the connection in case it doesn't work + // 
tlsSocket.on('end', () => { + // tlsSocket.end(); + // }); + // await send(networkUtils.pingBuffer); + // const punchInterval = setInterval(async () => { + // await send(networkUtils.pingBuffer); + // }, 1000); + // await remoteReadyP; + // clearInterval(punchInterval); + // } + // ); + // const handleMessage = async (data: Buffer) => { + // const msg = networkUtils.unserializeNetworkMessage(data); + // if (msg.type === 'ping') { + // await send(networkUtils.pongBuffer); + // } else if (msg.type === 'pong') { + // resolveRemoteReadyP(); + // } + // }; + // utpSocket.on('message', handleMessage); + // const send = async (data: Buffer) => { + // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + // }; + // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + // await utpSocketListen(0, '127.0.0.1'); + // const utpSocketHost = utpSocket.address().address; + // const utpSocketPort = utpSocket.address().port; + // const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); + // const clientSocket = await connect( + // fwdProxy.proxyHost, + // fwdProxy.proxyPort, + // authTokenEncoded, + // `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + // serverNodeId, + // )}`, + // ); + // await expect(remoteReadyP).resolves.toBeUndefined(); + // await expect(remoteSecureP).resolves.toBeUndefined(); + // expect(clientSocket).toBeInstanceOf(net.Socket); + // expect(clientSocket.remoteAddress).toBe(fwdProxy.proxyHost); + // expect(clientSocket.remotePort).toBe(fwdProxy.proxyPort); + // const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + // clientSocket.on('close', () => { + // resolveLocalClosedP(); + // }); + // await fwdProxy.closeConnection( + // utpSocketHost as Host, + // utpSocketPort as Port, + // ); + // await expect(localClosedP).resolves.toBeUndefined(); + // await expect(remoteClosedP).resolves.toBeUndefined(); + // utpSocket.off('message', handleMessage); + // utpSocket.close(); + // utpSocket.unref(); + // await fwdProxy.stop(); + // }); + // test('stopping the proxy with open connections', async () => { + // const serverKeyPair = await keysUtils.generateKeyPair(4096); + // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + // const serverCert = keysUtils.generateCertificate( + // serverKeyPair.publicKey, + // serverKeyPair.privateKey, + // serverKeyPair.privateKey, + // 86400, + // ); + // const serverCertPem = keysUtils.certToPem(serverCert); + // const serverNodeId = networkUtils.certNodeId(serverCert); + // const egressHost = fwdProxy.egressHost; + // const egressPort = fwdProxy.egressPort; + // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + // const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + // promise(); + // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + // promise(); + // const utpSocket = UTP.createServer( + // { + // allowHalfOpen: false, + // }, + // async (utpConn) => { + // const tlsSocket = new tls.TLSSocket(utpConn, { + // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + // cert: Buffer.from(serverCertPem, 'ascii'), + // isServer: true, + // requestCert: true, + // rejectUnauthorized: false, + // }); + // tlsSocket.on('secure', () => { + // resolveRemoteSecureP(); + // }); + // tlsSocket.on('close', () => { + // resolveRemoteClosedP(); + // }); + // // AllowHalfOpen is buggy + // // this ends the connection in case it doesn't work + // 
tlsSocket.on('end', () => { + // tlsSocket.end(); + // }); + // await send(networkUtils.pingBuffer); + // const punchInterval = setInterval(async () => { + // await send(networkUtils.pingBuffer); + // }, 1000); + // await remoteReadyP; + // clearInterval(punchInterval); + // } + // ); + // const handleMessage = async (data: Buffer) => { + // const msg = networkUtils.unserializeNetworkMessage(data); + // if (msg.type === 'ping') { + // await send(networkUtils.pongBuffer); + // } else if (msg.type === 'pong') { + // resolveRemoteReadyP(); + // } + // }; + // utpSocket.on('message', handleMessage); + // const send = async (data: Buffer) => { + // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + // }; + // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + // await utpSocketListen(0, '127.0.0.1'); + // const utpSocketHost = utpSocket.address().address; + // const utpSocketPort = utpSocket.address().port; + // expect(fwdProxy.connectionCount).toBe(0); + // await fwdProxy.openConnection( + // serverNodeId, + // utpSocketHost as Host, + // utpSocketPort as Port, + // ); + // await expect(remoteReadyP).resolves.toBeUndefined(); + // await expect(remoteSecureP).resolves.toBeUndefined(); + // expect(fwdProxy.connectionCount).toBe(1); + // await fwdProxy.stop(); + // expect(fwdProxy.connectionCount).toBe(0); + // utpSocket.off('message', handleMessage); + // utpSocket.close(); + // utpSocket.unref(); + // await expect(remoteClosedP).resolves.toBeUndefined(); + // }); + // test('open connection to multiple servers', async () => { + // // First server keys + // const serverKeyPair1 = await keysUtils.generateKeyPair(4096); + // const serverKeyPairPem1 = keysUtils.keyPairToPem(serverKeyPair1); + // const serverCert1 = keysUtils.generateCertificate( + // serverKeyPair1.publicKey, + // serverKeyPair1.privateKey, + // serverKeyPair1.privateKey, + // 86400, + // ); + // const serverCertPem1 = keysUtils.certToPem(serverCert1); + // const serverNodeId1 = networkUtils.certNodeId(serverCert1); + // // Second server keys + // const serverKeyPair2 = await keysUtils.generateKeyPair(4096); + // const serverKeyPairPem2 = keysUtils.keyPairToPem(serverKeyPair2); + // const serverCert2 = keysUtils.generateCertificate( + // serverKeyPair2.publicKey, + // serverKeyPair2.privateKey, + // serverKeyPair2.privateKey, + // 86400, + // ); + // const serverCertPem2 = keysUtils.certToPem(serverCert2); + // const serverNodeId2 = networkUtils.certNodeId(serverCert2); + // const egressHost = fwdProxy.egressHost; + // const egressPort = fwdProxy.egressPort; + // // First signals + // const { p: remoteReadyP1, resolveP: resolveRemoteReadyP1 } = + // promise(); + // const { p: remoteClosedP1, resolveP: resolveRemoteClosedP1 } = + // promise(); + // // Second signals + // const { p: remoteReadyP2, resolveP: resolveRemoteReadyP2 } = + // promise(); + // const { p: remoteClosedP2, resolveP: resolveRemoteClosedP2 } = + // promise(); + // const utpSocket1 = UTP.createServer( + // { + // allowHalfOpen: false, + // }, + // async (utpConn) => { + // const tlsSocket = new tls.TLSSocket(utpConn, { + // key: Buffer.from(serverKeyPairPem1.privateKey, 'ascii'), + // cert: Buffer.from(serverCertPem1, 'ascii'), + // isServer: true, + // requestCert: true, + // rejectUnauthorized: false, + // }); + // tlsSocket.on('close', () => { + // resolveRemoteClosedP1(); + // }); + // // AllowHalfOpen is buggy + // // this ends the connection in case 
it doesn't work + // tlsSocket.on('end', () => { + // tlsSocket.end(); + // }); + // await send1(networkUtils.pingBuffer); + // const punchInterval = setInterval(async () => { + // await send1(networkUtils.pingBuffer); + // }, 1000); + // await remoteReadyP1; + // clearInterval(punchInterval); + // } + // ); + // const handleMessage1 = async (data: Buffer) => { + // const msg = networkUtils.unserializeNetworkMessage(data); + // if (msg.type === 'ping') { + // await send1(networkUtils.pongBuffer); + // } else if (msg.type === 'pong') { + // resolveRemoteReadyP1(); + // } + // }; + // utpSocket1.on('message', handleMessage1); + // const send1 = async (data: Buffer) => { + // const utpSocketSend = promisify(utpSocket1.send).bind(utpSocket1); + // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + // }; + // const utpSocketListen1 = promisify(utpSocket1.listen).bind(utpSocket1); + // await utpSocketListen1(0, '127.0.0.1'); + // const utpSocketHost1 = utpSocket1.address().address; + // const utpSocketPort1 = utpSocket1.address().port; + // const utpSocket2 = UTP.createServer( + // { + // allowHalfOpen: false, + // }, + // async (utpConn) => { + // const tlsSocket = new tls.TLSSocket(utpConn, { + // key: Buffer.from(serverKeyPairPem2.privateKey, 'ascii'), + // cert: Buffer.from(serverCertPem2, 'ascii'), + // isServer: true, + // requestCert: true, + // rejectUnauthorized: false, + // }); + // tlsSocket.on('close', () => { + // resolveRemoteClosedP2(); + // }); + // // AllowHalfOpen is buggy + // // this ends the connection in case it doesn't work + // tlsSocket.on('end', () => { + // tlsSocket.end(); + // }); + // await send2(networkUtils.pingBuffer); + // const punchInterval = setInterval(async () => { + // await send2(networkUtils.pingBuffer); + // }, 2000); + // await remoteReadyP2; + // clearInterval(punchInterval); + // } + // ); + // const handleMessage2 = async (data: Buffer) => { + // const msg = networkUtils.unserializeNetworkMessage(data); + // if (msg.type === 'ping') { + // await send2(networkUtils.pongBuffer); + // } else if (msg.type === 'pong') { + // resolveRemoteReadyP2(); + // } + // }; + // utpSocket2.on('message', handleMessage2); + // const send2 = async (data: Buffer) => { + // const utpSocketSend = promisify(utpSocket2.send).bind(utpSocket2); + // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + // }; + // const utpSocketListen2 = promisify(utpSocket2.listen).bind(utpSocket2); + // await utpSocketListen2(0, '127.0.0.1'); + // const utpSocketHost2 = utpSocket2.address().address; + // const utpSocketPort2 = utpSocket2.address().port; + // expect(fwdProxy.connectionCount).toBe(0); + // await fwdProxy.openConnection( + // serverNodeId1, + // utpSocketHost1 as Host, + // utpSocketPort1 as Port, + // ); + // await fwdProxy.openConnection( + // serverNodeId2, + // utpSocketHost2 as Host, + // utpSocketPort2 as Port, + // ); + // expect(fwdProxy.connectionCount).toBe(2); + // await expect(remoteReadyP1).resolves.toBeUndefined(); + // await expect(remoteReadyP2).resolves.toBeUndefined(); + // await fwdProxy.closeConnection( + // utpSocketHost1 as Host, + // utpSocketPort1 as Port, + // ); + // await fwdProxy.closeConnection( + // utpSocketHost2 as Host, + // utpSocketPort2 as Port, + // ); + // expect(fwdProxy.connectionCount).toBe(0); + // await expect(remoteClosedP1).resolves.toBeUndefined(); + // await expect(remoteClosedP2).resolves.toBeUndefined(); + // utpSocket1.off('message', handleMessage1); + // utpSocket1.close(); + // 
utpSocket1.unref(); + // utpSocket2.off('message', handleMessage2); + // utpSocket2.close(); + // utpSocket2.unref(); + // await fwdProxy.stop(); + // }); }); diff --git a/tests/network/index.test.ts b/tests/network/index.test.ts index 128aad672..9d94542cb 100644 --- a/tests/network/index.test.ts +++ b/tests/network/index.test.ts @@ -12,7 +12,7 @@ describe('network index', () => { ]); test('integration of forward and reverse proxy', async () => { // Client keys - const clientKeyPair = await keysUtils.generateKeyPair(4096); + const clientKeyPair = await keysUtils.generateKeyPair(1024); const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); const clientCert = keysUtils.generateCertificate( clientKeyPair.publicKey, @@ -23,7 +23,7 @@ describe('network index', () => { const clientCertPem = keysUtils.certToPem(clientCert); const clientNodeId = networkUtils.certNodeId(clientCert); // Server keys - const serverKeyPair = await keysUtils.generateKeyPair(4096); + const serverKeyPair = await keysUtils.generateKeyPair(1024); const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); const serverCert = keysUtils.generateCertificate( serverKeyPair.publicKey, @@ -68,8 +68,8 @@ describe('network index', () => { host: revProxy.ingressHost, port: revProxy.ingressPort, proxyConfig: { - host: fwdProxy.proxyHost, - port: fwdProxy.proxyPort, + host: fwdProxy.getProxyHost(), + port: fwdProxy.getProxyPort(), authToken: fwdProxy.authToken, }, logger, @@ -105,29 +105,29 @@ describe('network index', () => { expect(duplexStreamResponse.value.getChallenge()).toBe(m.getChallenge()); } // Ensure that the connection count is the same - expect(fwdProxy.connectionCount).toBe(1); + expect(fwdProxy.getConnectionCount()).toBe(1); expect(revProxy.connectionCount).toBe(1); expect( fwdProxy.getConnectionInfoByIngress(client.host, client.port), ).toEqual( expect.objectContaining({ nodeId: serverNodeId, - egressHost: fwdProxy.egressHost, - egressPort: fwdProxy.egressPort, + egressHost: fwdProxy.getEgressHost(), + egressPort: fwdProxy.getEgressPort(), ingressHost: revProxy.ingressHost, ingressPort: revProxy.ingressPort, }), ); expect( revProxy.getConnectionInfoByEgress( - fwdProxy.egressHost, - fwdProxy.egressPort, + fwdProxy.getEgressHost(), + fwdProxy.getEgressPort(), ), ).toEqual( expect.objectContaining({ nodeId: clientNodeId, - egressHost: fwdProxy.egressHost, - egressPort: fwdProxy.egressPort, + egressHost: fwdProxy.getEgressHost(), + egressPort: fwdProxy.getEgressPort(), ingressHost: revProxy.ingressHost, ingressPort: revProxy.ingressPort, }), diff --git a/tests/nodes/TestNodeConnection.ts b/tests/nodes/TestNodeConnection.ts index 56c1eb4a4..953c0d106 100644 --- a/tests/nodes/TestNodeConnection.ts +++ b/tests/nodes/TestNodeConnection.ts @@ -35,8 +35,8 @@ class TestNodeConnection extends NodeConnection { }): Promise { const logger_ = logger ?? 
new Logger('NodeConnection'); const proxyConfig_ = { - host: forwardProxy.proxyHost, - port: forwardProxy.proxyPort, + host: forwardProxy.getProxyHost(), + port: forwardProxy.getProxyPort(), authToken: forwardProxy.authToken, } as ProxyConfig; return new TestNodeConnection({ diff --git a/tests/setup.ts b/tests/setup.ts index 5cac9b6aa..e69de29bb 100644 --- a/tests/setup.ts +++ b/tests/setup.ts @@ -1,59 +0,0 @@ -import os from 'os'; -import path from 'path'; - -declare global { - namespace NodeJS { - interface Global { - projectDir: string; - testDir: string; - keyPairDir: string; - binAgentDir: string; - binAgentPassword: string; - defaultTimeout: number; - polykeyStartupTimeout: number; - failedConnectionTimeout: number; - maxTimeout: number; - } - } -} - -/** - * Absolute directory to the project root - */ -global.projectDir = path.join(__dirname, '../'); - -/** - * Absolute directory to the test root - */ -global.testDir = __dirname; - -/** - * Absolute directory to shared keypair directory - * Generating the root key pair takes time - * This global key pair can be used by mocks - */ -global.keyPairDir = path.join(os.tmpdir(), 'polykey-test-keypair'); - -/** - * Absolute directory to a shared data directory used by bin tests - * This has to be a static path - * The setup.ts is copied into each test module - */ -global.binAgentDir = path.join(os.tmpdir(), 'polykey-test-bin'); - -/** - * Shared password for agent used by for bin tests - */ -global.binAgentPassword = 'hello world'; - -/** - * Default asynchronous test timeout - */ -global.defaultTimeout = 20000; -global.polykeyStartupTimeout = 30000; -global.failedConnectionTimeout = 50000; - -/** - * Timeouts rely on setTimeout which takes 32 bit numbers - */ -global.maxTimeout = Math.pow(2, 31) - 1; diff --git a/tests/status/Status.test.ts b/tests/status/Status.test.ts index ce8f9248c..7b9e75c82 100644 --- a/tests/status/Status.test.ts +++ b/tests/status/Status.test.ts @@ -4,21 +4,21 @@ import fs from 'fs'; import os from 'os'; import path from 'path'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import config from '@/config'; import { sleep, errors as utilsErrors } from '@/utils'; import { Status, errors as statusErrors } from '@/status'; -describe('Lockfile is', () => { - const logger = new Logger('Lockfile Test', LogLevel.WARN, [ +describe('Status', () => { + const logger = new Logger(`${Status.name} Test`, LogLevel.WARN, [ new StreamHandler(), ]); const waitForTimeout = 1000; let dataDir: string; let status: Status; let statusPath: string; - beforeEach(async () => { dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'status-test-')); - statusPath = path.join(dataDir, 'status'); + statusPath = path.join(dataDir, config.defaults.statusBase); status = new Status({ statusPath, fs: fs, @@ -125,7 +125,7 @@ describe('Lockfile is', () => { // Try to start a new status. // Creation should succeed. 
const status2 = new Status({ - statusPath: path.join(dataDir, 'status'), + statusPath: path.join(dataDir, config.defaults.statusBase), fs: fs, logger: logger, }); diff --git a/tests/utils.ts b/tests/utils.ts index 65b6854e5..f1d55e41b 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -1,20 +1,184 @@ -import type Logger from '@matrixai/logger'; import type { NodeAddress } from '@/nodes/types'; +import type { StatusLive } from '@/status/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; +import lock from 'fd-lock'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { PolykeyAgent } from '@'; -import * as keysUtils from '@/keys/utils'; +import { Status } from '@/status'; +import { utils as keysUtils } from '@/keys'; +import { GRPCClientClient, Metadata, utils as clientUtils } from '@/client'; +import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; +import { sleep } from '@/utils'; +import config from '@/config'; -async function getGlobalKeyPair() { - const [publicKeyPem, privateKeyPem] = await Promise.all([ - fs.promises.readFile(path.join(global.keyPairDir, 'root.pub'), 'utf-8'), - fs.promises.readFile(path.join(global.keyPairDir, 'root.key'), 'utf-8'), - ]); - return keysUtils.keyPairFromPem({ - publicKey: publicKeyPem, - privateKey: privateKeyPem, +/** + * Setup the global keypair + * This is expected to be executed by multiple worker processes + */ +async function setupGlobalKeypair() { + const globalKeyPairDir = path.join(globalThis.dataDir, 'keypair'); + const globalKeyPairLock = await fs.promises.open( + path.join(global.dataDir, 'keypair.lock'), + fs.constants.O_WRONLY | fs.constants.O_CREAT, + ); + while (!lock(globalKeyPairLock.fd)) { + await sleep(1000); + } + try { + try { + await fs.promises.mkdir(globalKeyPairDir); + } catch (e) { + // Return key pair if the directory exists + if (e.code === 'EEXIST') { + const globalKeyPairPem = { + publicKey: fs.readFileSync(path.join(globalKeyPairDir, 'root.pub'), 'utf-8'), + privateKey: fs.readFileSync( + path.join(globalKeyPairDir, 'root.key'), + 'utf-8', + ), + }; + const globalKeyPair = keysUtils.keyPairFromPem(globalKeyPairPem); + return globalKeyPair; + } + } + const globalKeyPair = await keysUtils.generateKeyPair(4096); + const globalKeyPairPem = keysUtils.keyPairToPem(globalKeyPair); + await Promise.all([ + fs.promises.writeFile( + path.join(globalKeyPairDir, 'root.pub'), + globalKeyPairPem.publicKey, + 'utf-8', + ), + fs.promises.writeFile( + path.join(globalKeyPairDir, 'root.key'), + globalKeyPairPem.privateKey, + 'utf-8', + ), + ]); + return globalKeyPair; + } finally { + // Unlock when we have returned the keypair + lock.unlock(globalKeyPairLock.fd); + await globalKeyPairLock.close(); + } +} + +/** + * Setup the global agent + * Use this in beforeAll, and use the closeGlobalAgent in afterAll + * This is expected to be executed by multiple worker processes + * Uses a references directory as a reference count + * Uses fd-lock to serialise access + * This means all test modules using this will be serialised + * Any beforeAll must use global.maxTimeout + * Tips for usage: + * * Do not restart this global agent + * * Ensure client-side side-effects are removed at the end of each test + * * Ensure server-side side-effects are removed at the end of each test + */ +async function setupGlobalAgent( + logger: Logger = new Logger(setupGlobalAgent.name, LogLevel.WARN, [ + new StreamHandler(), + ]) +) { + const globalAgentPassword = 'password'; + const globalAgentDir = 
path.join(globalThis.dataDir, 'agent'); + // The references directory will act like our reference count + await fs.promises.mkdir(path.join(globalAgentDir, 'references'), { + recursive: true, }); + const pid = process.pid.toString(); + // Plus 1 to the reference count + await fs.promises.writeFile(path.join(globalAgentDir, 'references', pid), ''); + const globalAgentLock = await fs.promises.open( + path.join(global.dataDir, 'agent.lock'), + fs.constants.O_WRONLY | fs.constants.O_CREAT, + ); + while (!lock(globalAgentLock.fd)) { + await sleep(1000); + } + const status = new Status({ + statusPath: path.join( + globalAgentDir, + config.defaults.statusBase + ), + fs, + }); + let statusInfo = await status.readStatus(); + if (statusInfo == null || statusInfo.status === 'DEAD') { + await PolykeyAgent.createPolykeyAgent({ + password: globalAgentPassword, + nodePath: globalAgentDir, + keysConfig: { + rootKeyPairBits: 1024 + }, + logger, + }); + statusInfo = await status.readStatus(); + } + return { + globalAgentDir, + globalAgentPassword, + globalAgentStatus: statusInfo as StatusLive, + globalAgentClose: async () => { + // Closing the global agent cannot be done in the globalTeardown + // This is due to a sequence of reasons: + // 1. The global agent is not started as a separate process + // 2. Because we need to be able to mock dependencies + // 3. This means it is part of a jest worker process + // 4. Which will block termination of the jest worker process + // 5. Therefore globalTeardown will never get to execute + // 6. The global agent is not part of globalSetup + // 7. Because not all tests need the global agent + // 8. Therefore setupGlobalAgent is lazy and executed by jest worker processes + try { + await fs.promises.rm( + path.join(globalAgentDir, 'references', pid), + ); + // If the references directory is not empty + // there are other processes still using the global agent + try { + await fs.promises.rmdir(path.join(globalAgentDir, 'references')); + } catch (e) { + if (e.code === 'ENOTEMPTY') { + return; + } + throw e; + } + // Stopping may occur in a different jest worker process + // therefore we cannot rely on pkAgent, but instead use GRPC + const statusInfo = await status.readStatus() as StatusLive; + const grpcClient = await GRPCClientClient.createGRPCClientClient({ + nodeId: statusInfo.data.nodeId, + host: statusInfo.data.clientHost, + port: statusInfo.data.clientPort, + tlsConfig: { keyPrivatePem: undefined, certChainPem: undefined }, + logger + }); + const emptyMessage = new utilsPB.EmptyMessage(); + const meta = clientUtils.encodeAuthFromPassword(globalAgentPassword); + // This is asynchronous + await grpcClient.agentStop(emptyMessage, meta); + await grpcClient.destroy(); + await status.waitFor('DEAD'); + } finally { + lock.unlock(globalAgentLock.fd); + await globalAgentLock.close(); + } + } + }; +} + +function makeCrypto(dbKey: Buffer) { + return { + key: dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }; } /** @@ -67,25 +231,16 @@ async function addRemoteDetails( ) { // Add remote node's details to local node await localNode.nodeManager.setNode(remoteNode.nodeManager.getNodeId(), { - host: remoteNode.revProxy.ingressHost, - port: remoteNode.revProxy.ingressPort, + host: remoteNode.revProxy.getIngressHost(), + port: remoteNode.revProxy.getIngressPort(), } as NodeAddress); } -function makeCrypto(dbKey: Buffer) { - return { - key: dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }; 
-}
-
 export {
-  getGlobalKeyPair,
+  setupGlobalKeypair,
+  setupGlobalAgent,
+  makeCrypto,
   setupRemoteKeynode,
   cleanupRemoteKeynode,
   addRemoteDetails,
-  makeCrypto,
 };
diff --git a/tests/vaults/utils.test.ts b/tests/vaults/utils.test.ts
index 242431e18..6cb6d6280 100644
--- a/tests/vaults/utils.test.ts
+++ b/tests/vaults/utils.test.ts
@@ -3,14 +3,16 @@ import os from 'os';
 import path from 'path';
 import { EncryptedFS } from 'encryptedfs';
-import Logger, { LogLevel } from '@matrixai/logger';
+import Logger, { LogLevel, StreamHandler } from '@matrixai/logger';
 import { IdRandom } from '@matrixai/id';
 import * as utils from '@/utils';
 import * as vaultsUtils from '@/vaults/utils';
 import { isVaultId } from '@/vaults/utils';
 
 describe('Vaults utils', () => {
-  const logger = new Logger('Vaults utils tests', LogLevel.WARN);
+  const logger = new Logger('Vaults utils tests', LogLevel.WARN, [
+    new StreamHandler(),
+  ]);
   let dataDir: string;
   beforeEach(async () => {

From 06cb29c0e72a3cbf833d7c1b217967ca80a38843 Mon Sep 17 00:00:00 2001
From: Roger Qiu
Date: Fri, 17 Dec 2021 21:08:19 +1100
Subject: [PATCH 07/28] Splitting the client and agent service RPC handlers

---
 src/PolykeyAgent.ts | 8 +-
 src/agent/GRPCClientAgent.ts | 8 +-
 src/agent/agentService.ts | 471 ------------
 src/agent/index.ts | 5 +-
 src/agent/service/echo.ts | 15 +
 src/agent/service/index.ts | 50 ++
 src/agent/service/nodesChainDataGet.ts | 48 ++
 src/agent/service/nodesClaimsGet.ts | 22 +
 .../service/nodesClosestLocalNodesGet.ts | 41 ++
 src/agent/service/nodesCrossSignClaim.ts | 156 ++++
 .../service/nodesHolePunchMessageSend.ts | 44 ++
 src/agent/service/notificationsSend.ts | 39 +
 src/agent/service/vaultsGitInfoGet.ts | 53 ++
 src/agent/service/vaultsGitPackGet.ts | 73 ++
 src/agent/service/vaultsPermissionsCheck.ts | 35 +
 src/agent/service/vaultsScan.ts | 33 +
 src/bin/agent/CommandLockAll.ts | 2 +-
 src/bin/agent/CommandUnlock.ts | 2 +-
 src/client/GRPCClientClient.ts | 12 +-
 src/client/clientService.ts | 143 ----
 src/client/index.ts | 11 +-
 src/client/rpcGestalts.ts | 295 --------
 src/client/rpcIdentities.ts | 269 -------
 src/client/rpcKeys.ts | 249 -------
 src/client/rpcNodes.ts | 155 ----
 src/client/rpcNotifications.ts | 123 ----
 src/client/rpcSessions.ts | 49 --
 src/client/rpcStatus.ts | 60 --
 src/client/rpcVaults.ts | 690 ------------------
 src/client/service/agentLockAll.ts | 32 +
 src/client/service/agentStatus.ts | 58 ++
 src/client/service/agentStop.ts | 38 +
 src/client/service/agentUnlock.ts | 28 +
 .../service/gestaltsActionsGetByIdentity.ts | 48 ++
 .../service/gestaltsActionsGetByNode.ts | 45 ++
 .../service/gestaltsActionsSetByIdentity.ts | 44 ++
 .../service/gestaltsActionsSetByNode.ts | 39 +
 .../service/gestaltsActionsUnsetByIdentity.ts | 44 ++
 .../service/gestaltsActionsUnsetByNode.ts | 39 +
 .../service/gestaltsDiscoveryByIdentity.ts | 42 ++
 src/client/service/gestaltsDiscoveryByNode.ts | 41 ++
 .../service/gestaltsGestaltGetByIdentity.ts | 40 +
 .../service/gestaltsGestaltGetByNode.ts | 39 +
 src/client/service/gestaltsGestaltList.ts | 39 +
 src/client/service/identitiesAuthenticate.ts | 64 ++
 src/client/service/identitiesClaim.ts | 65 ++
 src/client/service/identitiesInfoGet.ts | 49 ++
 .../service/identitiesInfoGetConnected.ts | 61 ++
 src/client/service/identitiesProvidersList.ts | 36 +
 src/client/service/identitiesTokenDelete.ts | 37 +
 src/client/service/identitiesTokenGet.ts | 37 +
 src/client/service/identitiesTokenPut.ts | 43 ++
 src/client/service/index.ts | 186 +++++
 src/client/service/keysCertsChainGet.ts | 39 +
src/client/service/keysCertsGet.ts | 34 + src/client/service/keysDecrypt.ts | 35 + src/client/service/keysEncrypt.ts | 35 + src/client/service/keysKeyPairRenew.ts | 58 ++ src/client/service/keysKeyPairReset.ts | 58 ++ src/client/service/keysKeyPairRoot.ts | 35 + src/client/service/keysPasswordChange.ts | 34 + src/client/service/keysSign.ts | 36 + src/client/service/keysVerify.ts | 37 + src/client/service/nodesAdd.ts | 58 ++ src/client/service/nodesClaim.ts | 59 ++ src/client/service/nodesFind.ts | 45 ++ src/client/service/nodesPing.ts | 40 + src/client/service/notificationsClear.ts | 33 + src/client/service/notificationsRead.ts | 73 ++ src/client/service/notificationsSend.ts | 42 ++ src/client/service/vaultsClone.ts | 49 ++ src/client/service/vaultsCreate.ts | 42 ++ src/client/service/vaultsDelete.ts | 51 ++ src/client/service/vaultsList.ts | 46 ++ src/client/service/vaultsLog.ts | 69 ++ src/client/service/vaultsPermissions.ts | 57 ++ src/client/service/vaultsPermissionsSet.ts | 45 ++ src/client/service/vaultsPermissionsUnset.ts | 45 ++ src/client/service/vaultsPull.ts | 47 ++ src/client/service/vaultsRename.ts | 55 ++ src/client/service/vaultsScan.ts | 42 ++ src/client/service/vaultsSecretsDelete.ts | 58 ++ src/client/service/vaultsSecretsEdit.ts | 64 ++ src/client/service/vaultsSecretsGet.ts | 58 ++ src/client/service/vaultsSecretsList.ts | 59 ++ src/client/service/vaultsSecretsMkdir.ts | 60 ++ src/client/service/vaultsSecretsNew.ts | 59 ++ src/client/service/vaultsSecretsNewDir.ts | 61 ++ src/client/service/vaultsSecretsRename.ts | 63 ++ src/client/service/vaultsSecretsStat.ts | 37 + src/client/service/vaultsVersion.ts | 70 ++ src/client/types.ts | 10 + src/client/utils/utils.ts | 8 +- src/config.ts | 1 - src/grpc/GRPCServer.ts | 2 +- src/grpc/index.ts | 6 + .../js/polykey/v1/agent_service_grpc_pb.d.ts | 20 +- .../js/polykey/v1/agent_service_grpc_pb.js | 4 +- .../js/polykey/v1/client_service_grpc_pb.d.ts | 54 +- .../js/polykey/v1/client_service_grpc_pb.js | 29 +- .../schemas/polykey/v1/agent_service.proto | 2 +- .../schemas/polykey/v1/client_service.proto | 6 +- tests/acl/ACL.test.ts | 4 +- tests/agent/GRPCClientAgent.test.ts | 8 +- tests/bootstrap/utils.test.ts | 7 +- tests/claims/utils.test.ts | 10 +- tests/client/GRPCClientClient.test.ts | 28 +- tests/client/rpcAgent.test.ts | 124 ---- tests/client/rpcGestalts.test.ts | 32 +- tests/client/rpcIdentities.test.ts | 36 +- tests/client/rpcKeys.test.ts | 52 +- tests/client/rpcNodes.test.ts | 56 +- tests/client/rpcNotifications.test.ts | 47 +- tests/client/rpcSessions.test.ts | 20 +- tests/client/rpcVaults.test.ts | 28 +- tests/client/service/agentStop.test.ts | 167 +++++ tests/client/utils.ts | 38 +- tests/grpc/GRPCServer.test.ts | 58 +- tests/utils.ts | 2 +- 119 files changed, 4124 insertions(+), 2978 deletions(-) delete mode 100644 src/agent/agentService.ts create mode 100644 src/agent/service/echo.ts create mode 100644 src/agent/service/index.ts create mode 100644 src/agent/service/nodesChainDataGet.ts create mode 100644 src/agent/service/nodesClaimsGet.ts create mode 100644 src/agent/service/nodesClosestLocalNodesGet.ts create mode 100644 src/agent/service/nodesCrossSignClaim.ts create mode 100644 src/agent/service/nodesHolePunchMessageSend.ts create mode 100644 src/agent/service/notificationsSend.ts create mode 100644 src/agent/service/vaultsGitInfoGet.ts create mode 100644 src/agent/service/vaultsGitPackGet.ts create mode 100644 src/agent/service/vaultsPermissionsCheck.ts create mode 100644 src/agent/service/vaultsScan.ts delete mode 
100644 src/client/clientService.ts delete mode 100644 src/client/rpcGestalts.ts delete mode 100644 src/client/rpcIdentities.ts delete mode 100644 src/client/rpcKeys.ts delete mode 100644 src/client/rpcNodes.ts delete mode 100644 src/client/rpcNotifications.ts delete mode 100644 src/client/rpcSessions.ts delete mode 100644 src/client/rpcStatus.ts delete mode 100644 src/client/rpcVaults.ts create mode 100644 src/client/service/agentLockAll.ts create mode 100644 src/client/service/agentStatus.ts create mode 100644 src/client/service/agentStop.ts create mode 100644 src/client/service/agentUnlock.ts create mode 100644 src/client/service/gestaltsActionsGetByIdentity.ts create mode 100644 src/client/service/gestaltsActionsGetByNode.ts create mode 100644 src/client/service/gestaltsActionsSetByIdentity.ts create mode 100644 src/client/service/gestaltsActionsSetByNode.ts create mode 100644 src/client/service/gestaltsActionsUnsetByIdentity.ts create mode 100644 src/client/service/gestaltsActionsUnsetByNode.ts create mode 100644 src/client/service/gestaltsDiscoveryByIdentity.ts create mode 100644 src/client/service/gestaltsDiscoveryByNode.ts create mode 100644 src/client/service/gestaltsGestaltGetByIdentity.ts create mode 100644 src/client/service/gestaltsGestaltGetByNode.ts create mode 100644 src/client/service/gestaltsGestaltList.ts create mode 100644 src/client/service/identitiesAuthenticate.ts create mode 100644 src/client/service/identitiesClaim.ts create mode 100644 src/client/service/identitiesInfoGet.ts create mode 100644 src/client/service/identitiesInfoGetConnected.ts create mode 100644 src/client/service/identitiesProvidersList.ts create mode 100644 src/client/service/identitiesTokenDelete.ts create mode 100644 src/client/service/identitiesTokenGet.ts create mode 100644 src/client/service/identitiesTokenPut.ts create mode 100644 src/client/service/index.ts create mode 100644 src/client/service/keysCertsChainGet.ts create mode 100644 src/client/service/keysCertsGet.ts create mode 100644 src/client/service/keysDecrypt.ts create mode 100644 src/client/service/keysEncrypt.ts create mode 100644 src/client/service/keysKeyPairRenew.ts create mode 100644 src/client/service/keysKeyPairReset.ts create mode 100644 src/client/service/keysKeyPairRoot.ts create mode 100644 src/client/service/keysPasswordChange.ts create mode 100644 src/client/service/keysSign.ts create mode 100644 src/client/service/keysVerify.ts create mode 100644 src/client/service/nodesAdd.ts create mode 100644 src/client/service/nodesClaim.ts create mode 100644 src/client/service/nodesFind.ts create mode 100644 src/client/service/nodesPing.ts create mode 100644 src/client/service/notificationsClear.ts create mode 100644 src/client/service/notificationsRead.ts create mode 100644 src/client/service/notificationsSend.ts create mode 100644 src/client/service/vaultsClone.ts create mode 100644 src/client/service/vaultsCreate.ts create mode 100644 src/client/service/vaultsDelete.ts create mode 100644 src/client/service/vaultsList.ts create mode 100644 src/client/service/vaultsLog.ts create mode 100644 src/client/service/vaultsPermissions.ts create mode 100644 src/client/service/vaultsPermissionsSet.ts create mode 100644 src/client/service/vaultsPermissionsUnset.ts create mode 100644 src/client/service/vaultsPull.ts create mode 100644 src/client/service/vaultsRename.ts create mode 100644 src/client/service/vaultsScan.ts create mode 100644 src/client/service/vaultsSecretsDelete.ts create mode 100644 src/client/service/vaultsSecretsEdit.ts 
create mode 100644 src/client/service/vaultsSecretsGet.ts create mode 100644 src/client/service/vaultsSecretsList.ts create mode 100644 src/client/service/vaultsSecretsMkdir.ts create mode 100644 src/client/service/vaultsSecretsNew.ts create mode 100644 src/client/service/vaultsSecretsNewDir.ts create mode 100644 src/client/service/vaultsSecretsRename.ts create mode 100644 src/client/service/vaultsSecretsStat.ts create mode 100644 src/client/service/vaultsVersion.ts create mode 100644 src/client/types.ts delete mode 100644 tests/client/rpcAgent.test.ts create mode 100644 tests/client/service/agentStop.test.ts diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index 23dd2eba9..f841f28cc 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -324,7 +324,7 @@ class PolykeyAgent { await status?.stop({}); throw e; } - const polykeyAgent = new PolykeyAgent({ + const pkAgent = new PolykeyAgent({ nodePath, status, schema, @@ -346,13 +346,13 @@ class PolykeyAgent { fs, logger, }); - await polykeyAgent.start({ + await pkAgent.start({ password, networkConfig, fresh, }); logger.info(`Created ${this.name}`); - return polykeyAgent; + return pkAgent; } public readonly nodePath: string; @@ -467,7 +467,7 @@ class PolykeyAgent { notificationsManager: this.notificationsManager, }); const clientService = createClientService({ - polykeyAgent: this, + pkAgent: this, discovery: this.discovery, gestaltGraph: this.gestaltGraph, identitiesManager: this.identitiesManager, diff --git a/src/agent/GRPCClientAgent.ts b/src/agent/GRPCClientAgent.ts index 6af1e3534..19f63f3cb 100644 --- a/src/agent/GRPCClientAgent.ts +++ b/src/agent/GRPCClientAgent.ts @@ -42,7 +42,6 @@ class GRPCClientAgent extends GRPCClient { timeout?: number; logger?: Logger; }): Promise { - logger.info(`Creating ${this.name}`); const { client, serverCertChain, flowCountInterceptor } = await super.createClient({ clientConstructor: AgentServiceClient, @@ -65,14 +64,11 @@ class GRPCClientAgent extends GRPCClient { flowCountInterceptor, logger, }); - logger.info(`Created ${this.name}`); return grpcClientAgent; } public async destroy() { - this.logger.info(`Destroying ${this.constructor.name}`); await super.destroy(); - this.logger.info(`Destroyed ${this.constructor.name}`); } @ready(new agentErrors.ErrorAgentClientDestroyed()) @@ -157,10 +153,10 @@ class GRPCClientAgent extends GRPCClient { } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsPermisssionsCheck(...args) { + public vaultsPermissionsCheck(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsPermisssionsCheck, + this.client.vaultsPermissionsCheck, )(...args); } diff --git a/src/agent/agentService.ts b/src/agent/agentService.ts deleted file mode 100644 index 97811084f..000000000 --- a/src/agent/agentService.ts +++ /dev/null @@ -1,471 +0,0 @@ -import type { - ClaimEncoded, - ClaimIntermediary, - ClaimIdString, -} from '../claims/types'; -import type { VaultName } from '../vaults/types'; - -import type { NodeManager } from '../nodes'; -import type { VaultManager } from '../vaults'; -import type { Sigchain } from '../sigchain'; -import type { KeyManager } from '../keys'; -import type { NotificationsManager } from '../notifications'; -import type { IAgentServiceServer } from '../proto/js/polykey/v1/agent_service_grpc_pb'; -import type * as notificationsPB from '../proto/js/polykey/v1/notifications/notifications_pb'; -import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { promisify } from '../utils'; 
-import * as networkUtils from '../network/utils'; -import { ErrorGRPC } from '../grpc/errors'; -import { AgentServiceService } from '../proto/js/polykey/v1/agent_service_grpc_pb'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import * as grpcUtils from '../grpc/utils'; -import { - utils as notificationsUtils, - errors as notificationsErrors, -} from '../notifications'; -import { errors as vaultsErrors } from '../vaults'; -import { utils as claimsUtils, errors as claimsErrors } from '../claims'; -import { makeVaultId, makeVaultIdPretty } from '../vaults/utils'; -import { makeNodeId } from '../nodes/utils'; - -/** - * Creates the client service for use with a GRPCServer - * @param domains An object representing all the domains / managers the agent server uses. - * @returns an IAgentServer object - */ -function createAgentService({ - keyManager, - vaultManager, - nodeManager, - notificationsManager, - sigchain, -}: { - keyManager: KeyManager; - vaultManager: VaultManager; - nodeManager: NodeManager; - sigchain: Sigchain; - notificationsManager: NotificationsManager; -}): IAgentServiceServer { - const agentService: IAgentServiceServer = { - echo: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EchoMessage(); - response.setChallenge(call.request.getChallenge()); - callback(null, response); - }, - vaultsGitInfoGet: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - const request = call.request; - const vaultNameOrId = request.getNameOrId(); - let vaultId, vaultName; - try { - vaultId = makeVaultId(idUtils.fromString(vaultNameOrId)); - await vaultManager.openVault(vaultId); - vaultName = await vaultManager.getVaultName(vaultId); - } catch (err) { - if (err instanceof vaultsErrors.ErrorVaultUndefined) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await vaultManager.openVault(vaultId); - vaultName = vaultNameOrId; - } else { - throw err; - } - } - // TODO: Check the permissions here - const meta = new grpc.Metadata(); - meta.set('vaultName', vaultName); - meta.set('vaultId', makeVaultIdPretty(vaultId)); - genWritable.stream.sendMetadata(meta); - const response = new vaultsPB.PackChunk(); - const responseGen = vaultManager.handleInfoRequest(vaultId); - for await (const byte of responseGen) { - if (byte !== null) { - response.setChunk(byte); - await genWritable.next(response); - } else { - await genWritable.next(null); - } - } - await genWritable.next(null); - }, - vaultsGitPackGet: async ( - call: grpc.ServerDuplexStream, - ) => { - const write = promisify(call.write).bind(call); - const clientBodyBuffers: Buffer[] = []; - call.on('data', (d) => { - clientBodyBuffers.push(d.getChunk_asU8()); - }); - - call.on('end', async () => { - const body = Buffer.concat(clientBodyBuffers); - const meta = call.metadata; - const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); - if (vaultNameOrId == null) - throw new ErrorGRPC('vault-name not in metadata.'); - let vaultId; - try { - vaultId = makeVaultId(vaultNameOrId); - await vaultManager.openVault(vaultId); - } catch (err) { - if ( - err instanceof vaultsErrors.ErrorVaultUndefined || - err instanceof SyntaxError - ) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await 
vaultManager.openVault(vaultId); - } else { - throw err; - } - } - // TODO: Check the permissions here - const response = new vaultsPB.PackChunk(); - const [sideBand, progressStream] = await vaultManager.handlePackRequest( - vaultId, - Buffer.from(body), - ); - response.setChunk(Buffer.from('0008NAK\n')); - await write(response); - const responseBuffers: Buffer[] = []; - await new Promise((resolve, reject) => { - sideBand.on('data', async (data: Buffer) => { - responseBuffers.push(data); - }); - sideBand.on('end', async () => { - response.setChunk(Buffer.concat(responseBuffers)); - await write(response); - resolve(); - }); - sideBand.on('error', (err) => { - reject(err); - }); - progressStream.write(Buffer.from('0014progress is at 50%\n')); - progressStream.end(); - }); - call.end(); - }); - }, - vaultsScan: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - // Const response = new vaultsPB.Vault(); - // const id = makeNodeId(call.request.getNodeId()); - try { - throw Error('Not implemented'); - // FIXME: handleVaultNamesRequest doesn't exist. - // const listResponse = vaultManager.handleVaultNamesRequest(id); - // let listResponse; - // for await (const vault of listResponse) { - // if (vault !== null) { - // response.setNameOrId(vault); - // await genWritable.next(response); - // } else { - // await genWritable.next(null); - // } - // } - // await genWritable.next(null); - } catch (err) { - await genWritable.throw(err); - } - }, - /** - * Retrieves the local nodes (i.e. from the current node) that are closest - * to some provided node ID. - * @param call call that encodes a nodeId representing the target search node. - * @param callback - */ - nodesClosestLocalNodesGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new nodesPB.NodeTable(); - try { - const targetNodeId = makeNodeId(call.request.getNodeId()); - // Get all local nodes that are closest to the target node from the request - const closestNodes = await nodeManager.getClosestLocalNodes( - targetNodeId, - ); - for (const node of closestNodes) { - const addressMessage = new nodesPB.Address(); - addressMessage.setHost(node.address.host); - addressMessage.setPort(node.address.port); - // Add the node to the response's map (mapping of node ID -> node address) - response.getNodeTableMap().set(node.id, addressMessage); - } - } catch (err) { - callback(grpcUtils.fromError(err), response); - } - callback(null, response); - }, - /** - * Retrieves all claims (of a specific type) of this node (within its sigchain). - * TODO: Currently not required. Will need to refactor once we filter on what - * claims we desire from the sigchain (e.g. in discoverGestalt). - */ - nodesClaimsGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new nodesPB.Claims(); - // Response.setClaimsList( - // await sigchain.getClaims(call.request.getClaimtype() as ClaimType) - // ); - callback(null, response); - }, - /** - * Retrieves the ChainDataEncoded of this node. 
- */ - nodesChainDataGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new nodesPB.ChainData(); - try { - const chainData = await nodeManager.getChainData(); - // Iterate through each claim in the chain, and serialize for transport - for (const c in chainData) { - const claimId = c as ClaimIdString; - const claim = chainData[claimId]; - const claimMessage = new nodesPB.AgentClaim(); - // Will always have a payload (never undefined) so cast as string - claimMessage.setPayload(claim.payload as string); - // Add the signatures - for (const signatureData of claim.signatures) { - const signature = new nodesPB.Signature(); - // Will always have a protected header (never undefined) so cast as string - signature.setProtected(signatureData.protected as string); - signature.setSignature(signatureData.signature); - claimMessage.getSignaturesList().push(signature); - } - // Add the serialized claim - response.getChainDataMap().set(claimId, claimMessage); - } - } catch (err) { - callback(grpcUtils.fromError(err), response); - } - callback(null, response); - }, - nodesHolePunchMessageSend: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - // Firstly, check if this node is the desired node - // If so, then we want to make this node start sending hole punching packets - // back to the source node. - if ( - nodeManager.getNodeId() === makeNodeId(call.request.getTargetId()) - ) { - const [host, port] = networkUtils.parseAddress( - call.request.getEgressAddress(), - ); - await nodeManager.openConnection(host, port); - // Otherwise, find if node in table - // If so, ask the nodeManager to relay to the node - } else if ( - await nodeManager.knowsNode(makeNodeId(call.request.getSrcId())) - ) { - await nodeManager.relayHolePunchMessage(call.request); - } - } catch (err) { - callback(grpcUtils.fromError(err), response); - } - callback(null, response); - }, - notificationsSend: async ( - call: grpc.ServerUnaryCall< - notificationsPB.AgentNotification, - utilsPB.EmptyMessage - >, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const jwt = call.request.getContent(); - const notification = await notificationsUtils.verifyAndDecodeNotif(jwt); - await notificationsManager.receiveNotification(notification); - } catch (err) { - if (err instanceof notificationsErrors.ErrorNotifications) { - callback(grpcUtils.fromError(err), response); - } else { - throw err; - } - } - callback(null, response); - }, - vaultsPermisssionsCheck: async ( - call: grpc.ServerUnaryCall< - vaultsPB.NodePermission, - vaultsPB.NodePermissionAllowed - >, - callback: grpc.sendUnaryData, - ): Promise => { - // Const response = new vaultsPB.NodePermissionAllowed(); - try { - // Const nodeId = makeNodeId(call.request.getNodeId()); - // const vaultId = makeVaultId(call.request.getVaultId()); - throw Error('Not Implemented'); - // FIXME: getVaultPermissions not implemented. 
- // const result = await vaultManager.getVaultPermissions(vaultId, nodeId); - // let result; - // if (result[nodeId] === undefined) { - // response.setPermission(false); - // } else if (result[nodeId]['pull'] === undefined) { - // response.setPermission(false); - // } else { - // response.setPermission(true); - // } - // callback(null, response); - } catch (err) { - callback(grpcUtils.fromError(err), null); - } - }, - nodesCrossSignClaim: async ( - call: grpc.ServerDuplexStream, - ) => { - // TODO: Move all "await genClaims.throw" to a final catch(). Wrap this - // entire thing in a try block. And re-throw whatever error is caught - const genClaims = grpcUtils.generatorDuplex(call); - try { - await sigchain.transaction(async (sigchain) => { - const readStatus = await genClaims.read(); - // If nothing to read, end and destroy - if (readStatus.done) { - throw new claimsErrors.ErrorEmptyStream(); - } - const receivedMessage = readStatus.value; - const intermediaryClaimMessage = - receivedMessage.getSinglySignedClaim(); - if (!intermediaryClaimMessage) { - throw new claimsErrors.ErrorUndefinedSinglySignedClaim(); - } - const intermediarySignature = intermediaryClaimMessage.getSignature(); - if (!intermediarySignature) { - throw new claimsErrors.ErrorUndefinedSignature(); - } - - // 3. X --> responds with double signing the Y signed claim, and also --> Y - // bundles it with its own signed claim (intermediate) - // Reconstruct the claim to verify its signature - const constructedIntermediaryClaim: ClaimIntermediary = { - payload: intermediaryClaimMessage.getPayload(), - signature: { - protected: intermediarySignature.getProtected(), - signature: intermediarySignature.getSignature(), - }, - }; - // Get the sender's node ID from the claim - const constructedEncodedClaim: ClaimEncoded = { - payload: intermediaryClaimMessage.getPayload(), - signatures: [ - { - protected: intermediarySignature.getProtected(), - signature: intermediarySignature.getSignature(), - }, - ], - }; - const decodedClaim = claimsUtils.decodeClaim(constructedEncodedClaim); - const payloadData = decodedClaim.payload.data; - if (payloadData.type !== 'node') { - throw new claimsErrors.ErrorNodesClaimType(); - } - // Verify the claim - const senderPublicKey = await nodeManager.getPublicKey( - payloadData.node1, - ); - const verified = await claimsUtils.verifyClaimSignature( - constructedEncodedClaim, - senderPublicKey, - ); - if (!verified) { - throw new claimsErrors.ErrorSinglySignedClaimVerificationFailed(); - } - // If verified, add your own signature to the received claim - const doublySignedClaim = await claimsUtils.signIntermediaryClaim({ - claim: constructedIntermediaryClaim, - privateKey: keyManager.getRootKeyPairPem().privateKey, - signeeNodeId: nodeManager.getNodeId(), - }); - // Then create your own intermediary node claim (from X -> Y) - const singlySignedClaim = await sigchain.createIntermediaryClaim({ - type: 'node', - node1: nodeManager.getNodeId(), - node2: payloadData.node1, - }); - // Should never be reached, but just for type safety - if (!doublySignedClaim.payload || !singlySignedClaim.payload) { - throw new claimsErrors.ErrorClaimsUndefinedClaimPayload(); - } - // Write both these claims to a message to send - const crossSignMessage = claimsUtils.createCrossSignMessage({ - singlySignedClaim, - doublySignedClaim, - }); - await genClaims.write(crossSignMessage); - // 4. We expect to receive our singly signed claim we sent to now be a - // doubly signed claim (signed by the other node). 
- const responseStatus = await genClaims.read(); - if (responseStatus.done) { - throw new claimsErrors.ErrorEmptyStream(); - } - const receivedResponse = responseStatus.value; - const receivedDoublySignedClaimMessage = - receivedResponse.getDoublySignedClaim(); - if (!receivedDoublySignedClaimMessage) { - throw new claimsErrors.ErrorUndefinedDoublySignedClaim(); - } - // Reconstruct the expected object from message - const constructedDoublySignedClaim: ClaimEncoded = { - payload: receivedDoublySignedClaimMessage.getPayload(), - signatures: receivedDoublySignedClaimMessage - .getSignaturesList() - .map((sMsg) => { - return { - protected: sMsg.getProtected(), - signature: sMsg.getSignature(), - }; - }), - }; - // Verify the doubly signed claim with both our public key, and the sender's - const verifiedDoubly = - (await claimsUtils.verifyClaimSignature( - constructedDoublySignedClaim, - keyManager.getRootKeyPairPem().publicKey, - )) && - (await claimsUtils.verifyClaimSignature( - constructedDoublySignedClaim, - senderPublicKey, - )); - if (!verifiedDoubly) { - await genClaims.throw( - new claimsErrors.ErrorDoublySignedClaimVerificationFailed(), - ); - } - // If verified, then we can safely add to our sigchain - await sigchain.addExistingClaim(constructedDoublySignedClaim); - // Close the stream - await genClaims.next(null); - }); - } catch (e) { - await genClaims.throw(e); - // TODO: Handle the exception on this server - throw e? - // throw e; - } - }, - }; - - return agentService; -} - -export default createAgentService; - -export { AgentServiceService }; diff --git a/src/agent/index.ts b/src/agent/index.ts index 8a9bebd20..f45d230fe 100644 --- a/src/agent/index.ts +++ b/src/agent/index.ts @@ -1,6 +1,3 @@ -export { - default as createAgentService, - AgentServiceService, -} from './agentService'; +export { default as createAgentService, AgentServiceService } from './service'; export { default as GRPCClientAgent } from './GRPCClientAgent'; export * as errors from './errors'; diff --git a/src/agent/service/echo.ts b/src/agent/service/echo.ts new file mode 100644 index 000000000..45b8f0279 --- /dev/null +++ b/src/agent/service/echo.ts @@ -0,0 +1,15 @@ +import type * as grpc from '@grpc/grpc-js'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function echo(_) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EchoMessage(); + response.setChallenge(call.request.getChallenge()); + callback(null, response); + }; +} + +export default echo; diff --git a/src/agent/service/index.ts b/src/agent/service/index.ts new file mode 100644 index 000000000..dbe5b3826 --- /dev/null +++ b/src/agent/service/index.ts @@ -0,0 +1,50 @@ +import type { KeyManager } from '../../keys'; +import type { VaultManager } from '../../vaults'; +import type { NodeManager } from '../../nodes'; +import type { NotificationsManager } from '../../notifications'; +import type { Sigchain } from '../../sigchain'; +import type { IAgentServiceServer } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; +import echo from './echo'; +import nodesChainDataGet from './nodesChainDataGet'; +import nodesClaimsGet from './nodesClaimsGet'; +import nodesClosestLocalNodesGet from './nodesClosestLocalNodesGet'; +import nodesCrossSignClaim from './nodesCrossSignClaim'; +import nodesHolePunchMessageSend from './nodesHolePunchMessageSend'; +import notificationsSend from './notificationsSend'; +import vaultsGitInfoGet from './vaultsGitInfoGet'; 
+import vaultsGitPackGet from './vaultsGitPackGet'; +import vaultsPermissionsCheck from './vaultsPermissionsCheck'; +import vaultsScan from './vaultsScan'; +import { AgentServiceService } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; + +function createService ( + container: { + keyManager: KeyManager; + vaultManager: VaultManager; + nodeManager: NodeManager; + notificationsManager: NotificationsManager; + sigchain: Sigchain; + } +) { + const container_ = { + ...container + }; + const service: IAgentServiceServer = { + echo: echo(container_), + nodesChainDataGet: nodesChainDataGet(container_), + nodesClaimsGet: nodesClaimsGet(container_), + nodesClosestLocalNodesGet: nodesClosestLocalNodesGet(container_), + nodesCrossSignClaim: nodesCrossSignClaim(container_), + nodesHolePunchMessageSend: nodesHolePunchMessageSend(container_), + notificationsSend: notificationsSend(container_), + vaultsGitInfoGet: vaultsGitInfoGet(container_), + vaultsGitPackGet: vaultsGitPackGet(container_), + vaultsPermissionsCheck: vaultsPermissionsCheck(container_), + vaultsScan: vaultsScan(container_), + }; + return service; +} + +export default createService; + +export { AgentServiceService }; diff --git a/src/agent/service/nodesChainDataGet.ts b/src/agent/service/nodesChainDataGet.ts new file mode 100644 index 000000000..d161f9a01 --- /dev/null +++ b/src/agent/service/nodesChainDataGet.ts @@ -0,0 +1,48 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { ClaimIdString, } from '../../claims/types'; +import type { NodeManager } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +/** + * Retrieves the ChainDataEncoded of this node. + */ +function nodesChainDataGet({ + nodeManager, +}: { + nodeManager: NodeManager; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new nodesPB.ChainData(); + try { + const chainData = await nodeManager.getChainData(); + // Iterate through each claim in the chain, and serialize for transport + for (const c in chainData) { + const claimId = c as ClaimIdString; + const claim = chainData[claimId]; + const claimMessage = new nodesPB.AgentClaim(); + // Will always have a payload (never undefined) so cast as string + claimMessage.setPayload(claim.payload as string); + // Add the signatures + for (const signatureData of claim.signatures) { + const signature = new nodesPB.Signature(); + // Will always have a protected header (never undefined) so cast as string + signature.setProtected(signatureData.protected as string); + signature.setSignature(signatureData.signature); + claimMessage.getSignaturesList().push(signature); + } + // Add the serialized claim + response.getChainDataMap().set(claimId, claimMessage); + } + } catch (err) { + callback(grpcUtils.fromError(err), response); + } + callback(null, response); + }; +} + +export default nodesChainDataGet; diff --git a/src/agent/service/nodesClaimsGet.ts b/src/agent/service/nodesClaimsGet.ts new file mode 100644 index 000000000..920555388 --- /dev/null +++ b/src/agent/service/nodesClaimsGet.ts @@ -0,0 +1,22 @@ +import type * as grpc from '@grpc/grpc-js'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Retrieves all claims (of a specific type) of this node (within its sigchain). + * TODO: Currently not required. 
Will need to refactor once we filter on what + * claims we desire from the sigchain (e.g. in discoverGestalt). + */ +function nodesClaimsGet(_) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new nodesPB.Claims(); + // Response.setClaimsList( + // await sigchain.getClaims(call.request.getClaimtype() as ClaimType) + // ); + callback(null, response); + }; +} + +export default nodesClaimsGet; diff --git a/src/agent/service/nodesClosestLocalNodesGet.ts b/src/agent/service/nodesClosestLocalNodesGet.ts new file mode 100644 index 000000000..06bbde0d1 --- /dev/null +++ b/src/agent/service/nodesClosestLocalNodesGet.ts @@ -0,0 +1,41 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { NodeManager } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Retrieves the local nodes (i.e. from the current node) that are closest + * to some provided node ID. + */ +function nodesClosestLocalNodesGet({ + nodeManager, +}: { + nodeManager: NodeManager; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new nodesPB.NodeTable(); + try { + const targetNodeId = nodesUtils.makeNodeId(call.request.getNodeId()); + // Get all local nodes that are closest to the target node from the request + const closestNodes = await nodeManager.getClosestLocalNodes( + targetNodeId, + ); + for (const node of closestNodes) { + const addressMessage = new nodesPB.Address(); + addressMessage.setHost(node.address.host); + addressMessage.setPort(node.address.port); + // Add the node to the response's map (mapping of node ID -> node address) + response.getNodeTableMap().set(node.id, addressMessage); + } + } catch (err) { + callback(grpcUtils.fromError(err), response); + } + callback(null, response); + }; +} + +export default nodesClosestLocalNodesGet; diff --git a/src/agent/service/nodesCrossSignClaim.ts b/src/agent/service/nodesCrossSignClaim.ts new file mode 100644 index 000000000..81ade4628 --- /dev/null +++ b/src/agent/service/nodesCrossSignClaim.ts @@ -0,0 +1,156 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { + ClaimEncoded, + ClaimIntermediary, +} from '../../claims/types'; +import type { NodeManager } from '../../nodes'; +import type { Sigchain } from '../../sigchain'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as claimsUtils, errors as claimsErrors } from '../../claims'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +function nodesCrossSignClaim({ + keyManager, + nodeManager, + sigchain, +}: { + keyManager: KeyManager; + nodeManager: NodeManager; + sigchain: Sigchain; +}) { + return async ( + call: grpc.ServerDuplexStream, + ) => { + // TODO: Move all "await genClaims.throw" to a final catch(). Wrap this + // entire thing in a try block. 
And re-throw whatever error is caught + const genClaims = grpcUtils.generatorDuplex(call); + try { + await sigchain.transaction(async (sigchain) => { + const readStatus = await genClaims.read(); + // If nothing to read, end and destroy + if (readStatus.done) { + throw new claimsErrors.ErrorEmptyStream(); + } + const receivedMessage = readStatus.value; + const intermediaryClaimMessage = + receivedMessage.getSinglySignedClaim(); + if (!intermediaryClaimMessage) { + throw new claimsErrors.ErrorUndefinedSinglySignedClaim(); + } + const intermediarySignature = intermediaryClaimMessage.getSignature(); + if (!intermediarySignature) { + throw new claimsErrors.ErrorUndefinedSignature(); + } + + // 3. X --> responds with double signing the Y signed claim, and also --> Y + // bundles it with its own signed claim (intermediate) + // Reconstruct the claim to verify its signature + const constructedIntermediaryClaim: ClaimIntermediary = { + payload: intermediaryClaimMessage.getPayload(), + signature: { + protected: intermediarySignature.getProtected(), + signature: intermediarySignature.getSignature(), + }, + }; + // Get the sender's node ID from the claim + const constructedEncodedClaim: ClaimEncoded = { + payload: intermediaryClaimMessage.getPayload(), + signatures: [ + { + protected: intermediarySignature.getProtected(), + signature: intermediarySignature.getSignature(), + }, + ], + }; + const decodedClaim = claimsUtils.decodeClaim(constructedEncodedClaim); + const payloadData = decodedClaim.payload.data; + if (payloadData.type !== 'node') { + throw new claimsErrors.ErrorNodesClaimType(); + } + // Verify the claim + const senderPublicKey = await nodeManager.getPublicKey( + payloadData.node1, + ); + const verified = await claimsUtils.verifyClaimSignature( + constructedEncodedClaim, + senderPublicKey, + ); + if (!verified) { + throw new claimsErrors.ErrorSinglySignedClaimVerificationFailed(); + } + // If verified, add your own signature to the received claim + const doublySignedClaim = await claimsUtils.signIntermediaryClaim({ + claim: constructedIntermediaryClaim, + privateKey: keyManager.getRootKeyPairPem().privateKey, + signeeNodeId: nodeManager.getNodeId(), + }); + // Then create your own intermediary node claim (from X -> Y) + const singlySignedClaim = await sigchain.createIntermediaryClaim({ + type: 'node', + node1: nodeManager.getNodeId(), + node2: payloadData.node1, + }); + // Should never be reached, but just for type safety + if (!doublySignedClaim.payload || !singlySignedClaim.payload) { + throw new claimsErrors.ErrorClaimsUndefinedClaimPayload(); + } + // Write both these claims to a message to send + const crossSignMessage = claimsUtils.createCrossSignMessage({ + singlySignedClaim, + doublySignedClaim, + }); + await genClaims.write(crossSignMessage); + // 4. We expect to receive our singly signed claim we sent to now be a + // doubly signed claim (signed by the other node). 
+ const responseStatus = await genClaims.read(); + if (responseStatus.done) { + throw new claimsErrors.ErrorEmptyStream(); + } + const receivedResponse = responseStatus.value; + const receivedDoublySignedClaimMessage = + receivedResponse.getDoublySignedClaim(); + if (!receivedDoublySignedClaimMessage) { + throw new claimsErrors.ErrorUndefinedDoublySignedClaim(); + } + // Reconstruct the expected object from message + const constructedDoublySignedClaim: ClaimEncoded = { + payload: receivedDoublySignedClaimMessage.getPayload(), + signatures: receivedDoublySignedClaimMessage + .getSignaturesList() + .map((sMsg) => { + return { + protected: sMsg.getProtected(), + signature: sMsg.getSignature(), + }; + }), + }; + // Verify the doubly signed claim with both our public key, and the sender's + const verifiedDoubly = + (await claimsUtils.verifyClaimSignature( + constructedDoublySignedClaim, + keyManager.getRootKeyPairPem().publicKey, + )) && + (await claimsUtils.verifyClaimSignature( + constructedDoublySignedClaim, + senderPublicKey, + )); + if (!verifiedDoubly) { + await genClaims.throw( + new claimsErrors.ErrorDoublySignedClaimVerificationFailed(), + ); + } + // If verified, then we can safely add to our sigchain + await sigchain.addExistingClaim(constructedDoublySignedClaim); + // Close the stream + await genClaims.next(null); + }); + } catch (e) { + await genClaims.throw(e); + // TODO: Handle the exception on this server - throw e? + // throw e; + } + }; +} + +export default nodesCrossSignClaim; diff --git a/src/agent/service/nodesHolePunchMessageSend.ts b/src/agent/service/nodesHolePunchMessageSend.ts new file mode 100644 index 000000000..3aea995ad --- /dev/null +++ b/src/agent/service/nodesHolePunchMessageSend.ts @@ -0,0 +1,44 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { NodeManager } from '../../nodes'; +import { utils as networkUtils } from '../../network'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function nodesHolePunchMessageSend({ + nodeManager, +}: { + nodeManager: NodeManager; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + // Firstly, check if this node is the desired node + // If so, then we want to make this node start sending hole punching packets + // back to the source node. 
+ if ( + nodeManager.getNodeId() === nodesUtils.makeNodeId(call.request.getTargetId()) + ) { + const [host, port] = networkUtils.parseAddress( + call.request.getEgressAddress(), + ); + await nodeManager.openConnection(host, port); + // Otherwise, find if node in table + // If so, ask the nodeManager to relay to the node + } else if ( + await nodeManager.knowsNode(nodesUtils.makeNodeId(call.request.getSrcId())) + ) { + await nodeManager.relayHolePunchMessage(call.request); + } + } catch (err) { + callback(grpcUtils.fromError(err), response); + } + callback(null, response); + }; +} + +export default nodesHolePunchMessageSend; diff --git a/src/agent/service/notificationsSend.ts b/src/agent/service/notificationsSend.ts new file mode 100644 index 000000000..3631d8af9 --- /dev/null +++ b/src/agent/service/notificationsSend.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { NotificationsManager } from '../../notifications'; +import type * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { + utils as notificationsUtils, + errors as notificationsErrors, +} from '../../notifications'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function notificationsSend({ + notificationsManager, +}: { + notificationsManager: NotificationsManager; +}) { + return async ( + call: grpc.ServerUnaryCall< + notificationsPB.AgentNotification, + utilsPB.EmptyMessage + >, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const jwt = call.request.getContent(); + const notification = await notificationsUtils.verifyAndDecodeNotif(jwt); + await notificationsManager.receiveNotification(notification); + } catch (err) { + if (err instanceof notificationsErrors.ErrorNotifications) { + callback(grpcUtils.fromError(err), response); + } else { + throw err; + } + } + callback(null, response); + }; +} + +export default notificationsSend; diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts new file mode 100644 index 000000000..cca0e063d --- /dev/null +++ b/src/agent/service/vaultsGitInfoGet.ts @@ -0,0 +1,53 @@ +import type { VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils, errors as grpcErrors } from '../../grpc'; +import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsGitInfoGet({ + vaultManager, +}: { + vaultManager: VaultManager; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + const request = call.request; + const vaultNameOrId = request.getNameOrId(); + let vaultId, vaultName; + try { + vaultId = vaultsUtils.makeVaultId(idUtils.fromString(vaultNameOrId)); + await vaultManager.openVault(vaultId); + vaultName = await vaultManager.getVaultName(vaultId); + } catch (err) { + if (err instanceof vaultsErrors.ErrorVaultUndefined) { + vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + await vaultManager.openVault(vaultId); + vaultName = vaultNameOrId; + } else { + throw err; + } + } + // TODO: Check the permissions here + const meta = new grpc.Metadata(); + meta.set('vaultName', vaultName); + meta.set('vaultId', 
vaultsUtils.makeVaultIdPretty(vaultId)); + genWritable.stream.sendMetadata(meta); + const response = new vaultsPB.PackChunk(); + const responseGen = vaultManager.handleInfoRequest(vaultId); + for await (const byte of responseGen) { + if (byte !== null) { + response.setChunk(byte); + await genWritable.next(response); + } else { + await genWritable.next(null); + } + } + await genWritable.next(null); + }; +} + +export default vaultsGitInfoGet; diff --git a/src/agent/service/vaultsGitPackGet.ts b/src/agent/service/vaultsGitPackGet.ts new file mode 100644 index 000000000..6da05bccb --- /dev/null +++ b/src/agent/service/vaultsGitPackGet.ts @@ -0,0 +1,73 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import { promisify } from '../../utils'; +import { errors as grpcErrors } from '../../grpc'; +import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsGitPackGet({ + vaultManager, +}: { + vaultManager: VaultManager; +}) { + return async ( + call: grpc.ServerDuplexStream, + ) => { + const write = promisify(call.write).bind(call); + const clientBodyBuffers: Buffer[] = []; + call.on('data', (d) => { + clientBodyBuffers.push(d.getChunk_asU8()); + }); + + call.on('end', async () => { + const body = Buffer.concat(clientBodyBuffers); + const meta = call.metadata; + const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); + if (vaultNameOrId == null) + throw new grpcErrors.ErrorGRPC('vault-name not in metadata.'); + let vaultId; + try { + vaultId = vaultsUtils.makeVaultId(vaultNameOrId); + await vaultManager.openVault(vaultId); + } catch (err) { + if ( + err instanceof vaultsErrors.ErrorVaultUndefined || + err instanceof SyntaxError + ) { + vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + await vaultManager.openVault(vaultId); + } else { + throw err; + } + } + // TODO: Check the permissions here + const response = new vaultsPB.PackChunk(); + const [sideBand, progressStream] = await vaultManager.handlePackRequest( + vaultId, + Buffer.from(body), + ); + response.setChunk(Buffer.from('0008NAK\n')); + await write(response); + const responseBuffers: Buffer[] = []; + await new Promise((resolve, reject) => { + sideBand.on('data', async (data: Buffer) => { + responseBuffers.push(data); + }); + sideBand.on('end', async () => { + response.setChunk(Buffer.concat(responseBuffers)); + await write(response); + resolve(); + }); + sideBand.on('error', (err) => { + reject(err); + }); + progressStream.write(Buffer.from('0014progress is at 50%\n')); + progressStream.end(); + }); + call.end(); + }); + }; +} + +export default vaultsGitPackGet; diff --git a/src/agent/service/vaultsPermissionsCheck.ts b/src/agent/service/vaultsPermissionsCheck.ts new file mode 100644 index 000000000..4b47d8648 --- /dev/null +++ b/src/agent/service/vaultsPermissionsCheck.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsPermissionsCheck(_) { + return async ( + call: grpc.ServerUnaryCall< + vaultsPB.NodePermission, + vaultsPB.NodePermissionAllowed + >, + callback: grpc.sendUnaryData, + ): Promise => { + // Const response = new vaultsPB.NodePermissionAllowed(); + try { + // Const nodeId = makeNodeId(call.request.getNodeId()); + // const vaultId = 
makeVaultId(call.request.getVaultId()); + throw Error('Not Implemented'); + // FIXME: getVaultPermissions not implemented. + // const result = await vaultManager.getVaultPermissions(vaultId, nodeId); + // let result; + // if (result[nodeId] === undefined) { + // response.setPermission(false); + // } else if (result[nodeId]['pull'] === undefined) { + // response.setPermission(false); + // } else { + // response.setPermission(true); + // } + // callback(null, response); + } catch (err) { + callback(grpcUtils.fromError(err), null); + } + }; +} + +export default vaultsPermissionsCheck; diff --git a/src/agent/service/vaultsScan.ts b/src/agent/service/vaultsScan.ts new file mode 100644 index 000000000..84d84124b --- /dev/null +++ b/src/agent/service/vaultsScan.ts @@ -0,0 +1,33 @@ +import type * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +function vaultsScan(_) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + // Const response = new vaultsPB.Vault(); + // const id = makeNodeId(call.request.getNodeId()); + try { + throw Error('Not implemented'); + // FIXME: handleVaultNamesRequest doesn't exist. + // const listResponse = vaultManager.handleVaultNamesRequest(id); + // let listResponse; + // for await (const vault of listResponse) { + // if (vault !== null) { + // response.setNameOrId(vault); + // await genWritable.next(response); + // } else { + // await genWritable.next(null); + // } + // } + // await genWritable.next(null); + } catch (err) { + await genWritable.throw(err); + } + }; +} + +export default vaultsScan; diff --git a/src/bin/agent/CommandLockAll.ts b/src/bin/agent/CommandLockAll.ts index faa154072..865438286 100644 --- a/src/bin/agent/CommandLockAll.ts +++ b/src/bin/agent/CommandLockAll.ts @@ -53,7 +53,7 @@ class CommandLockAll extends CommandPolykey { }); const emptyMessage = new utilsPB.EmptyMessage(); await binUtils.retryAuthentication( - (auth) => pkClient.grpcClient.sessionsLockAll(emptyMessage, auth), + (auth) => pkClient.grpcClient.agentLockAll(emptyMessage, auth), meta, ); // Destroy local session diff --git a/src/bin/agent/CommandUnlock.ts b/src/bin/agent/CommandUnlock.ts index 3215253a5..b8804d9e6 100644 --- a/src/bin/agent/CommandUnlock.ts +++ b/src/bin/agent/CommandUnlock.ts @@ -41,7 +41,7 @@ class CommandUnlock extends CommandPolykey { }); const emptyMessage = new utilsPB.EmptyMessage(); await binUtils.retryAuthentication( - (auth) => pkClient.grpcClient.sessionsUnlock(emptyMessage, auth), + (auth) => pkClient.grpcClient.agentUnlock(emptyMessage, auth), meta, ); } finally { diff --git a/src/client/GRPCClientClient.ts b/src/client/GRPCClientClient.ts index 28f8577cb..7f7d36008 100644 --- a/src/client/GRPCClientClient.ts +++ b/src/client/GRPCClientClient.ts @@ -49,7 +49,6 @@ class GRPCClientClient extends GRPCClient { timeout?: number; logger?: Logger; }): Promise { - logger.info(`Creating ${this.name}`); const interceptors: Array = []; if (session != null) { interceptors.push(clientUtils.sessionInterceptor(session)); @@ -77,14 +76,11 @@ class GRPCClientClient extends GRPCClient { flowCountInterceptor, logger, }); - logger.info(`Created ${this.name}`); return grpcClientClient; } public async destroy() { - this.logger.info(`Destroying ${this.constructor.name}`); await super.destroy(); - this.logger.info(`Destroyed 
${this.constructor.name}`); } @ready(new clientErrors.ErrorClientClientDestroyed()) @@ -104,18 +100,18 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public sessionsUnlock(...args) { + public agentUnlock(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.sessionsUnlock, + this.client.agentUnlock, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public sessionsLockAll(...args) { + public agentLockAll(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.sessionsLockAll, + this.client.agentLockAll, )(...args); } diff --git a/src/client/clientService.ts b/src/client/clientService.ts deleted file mode 100644 index a46f1fa59..000000000 --- a/src/client/clientService.ts +++ /dev/null @@ -1,143 +0,0 @@ -import type * as grpc from '@grpc/grpc-js'; -import type PolykeyAgent from '../PolykeyAgent'; -import type { KeyManager } from '../keys'; -import type { VaultManager } from '../vaults'; -import type { NodeManager } from '../nodes'; -import type { IdentitiesManager } from '../identities'; -import type { GestaltGraph } from '../gestalts'; -import type { SessionManager } from '../sessions'; -import type { NotificationsManager } from '../notifications'; -import type { Discovery } from '../discovery'; -import type { Sigchain } from '../sigchain'; -import type { GRPCServer } from '../grpc'; -import type { ForwardProxy, ReverseProxy } from '../network'; -import type { FileSystem } from '../types'; -import type { IClientServiceServer } from '../proto/js/polykey/v1/client_service_grpc_pb'; -import createStatusRPC from './rpcStatus'; -import createSessionsRPC from './rpcSessions'; -import createVaultRPC from './rpcVaults'; -import createKeysRPC from './rpcKeys'; -import createNodesRPC from './rpcNodes'; -import createGestaltRPC from './rpcGestalts'; -import createIdentitiesRPC from './rpcIdentities'; -import createNotificationsRPC from './rpcNotifications'; -import * as clientUtils from './utils'; -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import { ClientServiceService } from '../proto/js/polykey/v1/client_service_grpc_pb'; - -/** - * Creates the client service for use with a GRPCServer - * @param domains An object representing all the domains / managers the client server uses. 
- * @returns an IClientServer object - */ -function createClientService({ - polykeyAgent, - keyManager, - vaultManager, - nodeManager, - identitiesManager, - gestaltGraph, - sessionManager, - notificationsManager, - discovery, - sigchain, - grpcServerClient, - grpcServerAgent, - fwdProxy, - revProxy, - fs, -}: { - polykeyAgent: PolykeyAgent; - keyManager: KeyManager; - vaultManager: VaultManager; - nodeManager: NodeManager; - identitiesManager: IdentitiesManager; - gestaltGraph: GestaltGraph; - sessionManager: SessionManager; - notificationsManager: NotificationsManager; - discovery: Discovery; - sigchain: Sigchain; - grpcServerClient: GRPCServer; - grpcServerAgent: GRPCServer; - fwdProxy: ForwardProxy; - revProxy: ReverseProxy; - fs: FileSystem; -}) { - const authenticate = clientUtils.authenticator(sessionManager, keyManager); - const clientService: IClientServiceServer = { - ...createStatusRPC({ - authenticate, - keyManager, - grpcServerClient, - grpcServerAgent, - fwdProxy, - revProxy, - }), - ...createSessionsRPC({ - authenticate, - sessionManager, - }), - ...createVaultRPC({ - vaultManager, - authenticate, - fs, - }), - ...createKeysRPC({ - keyManager, - nodeManager, - authenticate, - fwdProxy, - revProxy, - grpcServerClient, - }), - ...createIdentitiesRPC({ - identitiesManager, - sigchain, - nodeManager, - authenticate, - }), - ...createGestaltRPC({ - gestaltGraph, - authenticate, - discovery, - }), - ...createNodesRPC({ - nodeManager, - notificationsManager, - authenticate, - }), - ...createNotificationsRPC({ - notificationsManager, - authenticate, - }), - agentStop: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - if (!polykeyAgent.running) { - callback(null, response); - return; - } - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Respond first to close the GRPC connection - callback(null, response); - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - // Stop is called after GRPC resources are cleared - await polykeyAgent.stop(); - return; - }, - }; - - return clientService; -} - -export default createClientService; - -export { ClientServiceService }; diff --git a/src/client/index.ts b/src/client/index.ts index 44ff1f832..b2b36c024 100644 --- a/src/client/index.ts +++ b/src/client/index.ts @@ -1,12 +1,5 @@ -export { - default as createClientService, - ClientServiceService, -} from './clientService'; +export { default as createClientService, ClientServiceService } from './service'; export { default as GRPCClientClient } from './GRPCClientClient'; export * as errors from './errors'; export * as utils from './utils'; - -/** - * This allows us to create a MetaData() object without explicitly importing `@grpc/grpc-js`. 
- */ -export { Metadata } from '@grpc/grpc-js'; +export * as types from './types'; diff --git a/src/client/rpcGestalts.ts b/src/client/rpcGestalts.ts deleted file mode 100644 index ab3a21933..000000000 --- a/src/client/rpcGestalts.ts +++ /dev/null @@ -1,295 +0,0 @@ -import type { Discovery } from '../discovery'; -import type { GestaltGraph } from '../gestalts'; -import type { Gestalt } from '../gestalts/types'; -import type { IdentityId, ProviderId } from '../identities/types'; - -import type * as grpc from '@grpc/grpc-js'; -import type * as clientUtils from './utils'; -import type * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import type * as identitiesPB from '../proto/js/polykey/v1/identities/identities_pb'; - -import { makeGestaltAction } from '../gestalts/utils'; - -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as gestaltsPB from '../proto/js/polykey/v1/gestalts/gestalts_pb'; -import * as permissionsPB from '../proto/js/polykey/v1/permissions/permissions_pb'; -import { makeNodeId } from '../nodes/utils'; - -const createGestaltsRPC = ({ - gestaltGraph, - authenticate, - discovery, -}: { - gestaltGraph: GestaltGraph; - authenticate: clientUtils.Authenticate; - discovery: Discovery; -}) => { - return { - gestaltsGestaltGetByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new gestaltsPB.Graph(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const gestalt = await gestaltGraph.getGestaltByNode( - makeNodeId(call.request.getNodeId()), - ); - if (gestalt != null) { - response.setGestaltGraph(JSON.stringify(gestalt)); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsGestaltGetByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new gestaltsPB.Graph(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const gestalt = await gestaltGraph.getGestaltByIdentity( - call.request.getProviderId() as ProviderId, - call.request.getIdentityId() as IdentityId, - ); - if (gestalt != null) { - response.setGestaltGraph(JSON.stringify(gestalt)); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsGestaltList: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - let gestaltMessage: gestaltsPB.Gestalt; - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const certs: Array = await gestaltGraph.getGestalts(); - for (const cert of certs) { - gestaltMessage = new gestaltsPB.Gestalt(); - gestaltMessage.setName(JSON.stringify(cert)); - await genWritable.next(gestaltMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - gestaltsDiscoveryByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Constructing identity info. 
- const gen = discovery.discoverGestaltByNode( - makeNodeId(info.getNodeId()), - ); - for await (const _ of gen) { - // Empty - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsDiscoveryByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Constructing identity info. - const gen = discovery.discoverGestaltByIdentity( - info.getProviderId() as ProviderId, - info.getIdentityId() as IdentityId, - ); - for await (const _ of gen) { - // Empty - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsGetByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new permissionsPB.Actions(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const result = await gestaltGraph.getGestaltActionsByNode( - makeNodeId(info.getNodeId()), - ); - if (result == null) { - // Node doesn't exist, so no permissions. might throw error instead TBD. - response.setActionList([]); - } else { - // Contains permission - const actions = Object.keys(result); - response.setActionList(actions); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsGetByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new permissionsPB.Actions(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const providerId = info.getProviderId() as ProviderId; - const identityId = info.getIdentityId() as IdentityId; - const result = await gestaltGraph.getGestaltActionsByIdentity( - providerId, - identityId, - ); - if (result == null) { - // Node doesn't exist, so no permissions. might throw error instead TBD. - response.setActionList([]); - } else { - // Contains permission - const actions = Object.keys(result); - response.setActionList(actions); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsSetByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Setting the action. - const action = makeGestaltAction(info.getAction()); - const nodeId = makeNodeId(info.getNode()?.getNodeId()); - await gestaltGraph.setGestaltActionByNode(nodeId, action); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsSetByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Setting the action. 
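- // makeGestaltAction converts the action string from the request into a GestaltAction before it is applied to the identity.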
- const action = makeGestaltAction(info.getAction()); - const providerId = info.getIdentity()?.getProviderId() as ProviderId; - const identityId = info.getIdentity()?.getIdentityId() as IdentityId; - await gestaltGraph.setGestaltActionByIdentity( - providerId, - identityId, - action, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsUnsetByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Setting the action. - const action = makeGestaltAction(info.getAction()); - const nodeId = makeNodeId(info.getNode()?.getNodeId()); - await gestaltGraph.unsetGestaltActionByNode(nodeId, action); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsUnsetByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Setting the action. - const action = makeGestaltAction(info.getAction()); - const providerId = info.getIdentity()?.getProviderId() as ProviderId; - const identityId = info.getIdentity()?.getIdentityId() as IdentityId; - await gestaltGraph.unsetGestaltActionByIdentity( - providerId, - identityId, - action, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createGestaltsRPC; diff --git a/src/client/rpcIdentities.ts b/src/client/rpcIdentities.ts deleted file mode 100644 index aa13785fb..000000000 --- a/src/client/rpcIdentities.ts +++ /dev/null @@ -1,269 +0,0 @@ -import type * as utils from './utils'; -import type { NodeManager } from '../nodes'; -import type { Sigchain } from '../sigchain'; -import type { IdentitiesManager } from '../identities'; -import type { IdentityId, ProviderId, TokenData } from '../identities/types'; -import type * as grpc from '@grpc/grpc-js'; -import * as clientErrors from './errors'; -import * as claimsUtils from '../claims/utils'; -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as identitiesPB from '../proto/js/polykey/v1/identities/identities_pb'; -import * as identitiesErrors from '../identities/errors'; -import { never } from '../utils'; - -const createIdentitiesRPC = ({ - identitiesManager, - sigchain, - nodeManager, - authenticate, -}: { - identitiesManager: IdentitiesManager; - sigchain: Sigchain; - nodeManager: NodeManager; - authenticate: utils.Authenticate; -}) => { - return { - identitiesAuthenticate: async ( - call: grpc.ServerWritableStream< - identitiesPB.Provider, - identitiesPB.AuthenticationProcess - >, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - const provider = identitiesManager.getProvider( - call.request.getProviderId() as ProviderId, - ); - if (provider == null) { - throw new clientErrors.ErrorClientInvalidProvider(); - } - const authFlow = provider.authenticate(); - let authFlowResult = await authFlow.next(); - if (authFlowResult.done) { - never(); - } - const 
authProcess = new identitiesPB.AuthenticationProcess(); - const authRequest = new identitiesPB.AuthenticationRequest(); - authRequest.setUrl(authFlowResult.value.url); - const map = authRequest.getDataMap(); - for (const [k, v] of Object.entries(authFlowResult.value.data)) { - map.set(k, v); - } - authProcess.setRequest(authRequest); - await genWritable.next(authProcess); - authFlowResult = await authFlow.next(); - if (!authFlowResult.done) { - never(); - } - const authResponse = new identitiesPB.AuthenticationResponse(); - authResponse.setIdentityId(authFlowResult.value); - authProcess.setResponse(authResponse); - await genWritable.next(authProcess); - await genWritable.next(null); - return; - } catch (e) { - await genWritable.throw(e); - return; - } - }, - identitiesTokenPut: async ( - call: grpc.ServerUnaryCall< - identitiesPB.TokenSpecific, - utilsPB.EmptyMessage - >, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const provider = call.request.getProvider(); - await identitiesManager.putToken( - provider?.getProviderId() as ProviderId, - provider?.getIdentityId() as IdentityId, - { accessToken: call.request.getToken() } as TokenData, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - identitiesTokenGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new identitiesPB.Token(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const tokens = await identitiesManager.getToken( - call.request.getProviderId() as ProviderId, - call.request.getIdentityId() as IdentityId, - ); - response.setToken(JSON.stringify(tokens)); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - identitiesTokenDelete: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - await identitiesManager.delToken( - call.request.getProviderId() as ProviderId, - call.request.getIdentityId() as IdentityId, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - identitiesProvidersList: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new identitiesPB.Provider(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const providers = identitiesManager.getProviders(); - response.setProviderId(JSON.stringify(Object.keys(providers))); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - identitiesInfoGetConnected: async ( - call: grpc.ServerWritableStream< - identitiesPB.ProviderSearch, - identitiesPB.Info - >, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const providerId = call.request - .getProvider() - ?.getProviderId() as ProviderId; - const identityId = call.request - .getProvider() - ?.getIdentityId() as IdentityId; - const provider = identitiesManager.getProvider(providerId); - if (provider == null) - 
throw new clientErrors.ErrorClientInvalidProvider(); - - const identities = provider.getConnectedIdentityDatas( - identityId, - call.request.getSearchTermList(), - ); - - for await (const identity of identities) { - const identityInfoMessage = new identitiesPB.Info(); - const providerMessage = new identitiesPB.Provider(); - providerMessage.setProviderId(identity.providerId); - providerMessage.setIdentityId(identity.identityId); - identityInfoMessage.setProvider(providerMessage); - identityInfoMessage.setName(identity.name ?? ''); - identityInfoMessage.setEmail(identity.email ?? ''); - identityInfoMessage.setUrl(identity.url ?? ''); - await genWritable.next(identityInfoMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - /** - * Gets the first identityId of the local keynode. - */ - identitiesInfoGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new identitiesPB.Provider(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Get's an identity out of all identities. - const providerId = call.request.getProviderId() as ProviderId; - const provider = identitiesManager.getProvider(providerId); - if (provider !== undefined) { - const identities = await provider.getAuthIdentityIds(); - response.setProviderId(providerId); - if (identities.length !== 0) { - response.setIdentityId(identities[0]); - } - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - /** - * Augments the keynode with a new identity. - */ - identitiesClaim: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Check provider is authenticated - const providerId = call.request.getProviderId() as ProviderId; - const provider = identitiesManager.getProvider(providerId); - if (provider == null) - throw new clientErrors.ErrorClientInvalidProvider(); - const identityId = call.request.getIdentityId() as IdentityId; - const identities = await provider.getAuthIdentityIds(); - if (!identities.includes(identityId)) { - throw new identitiesErrors.ErrorProviderUnauthenticated(); - } - // Create identity claim on our node - const claim = await sigchain.addClaim({ - type: 'identity', - node: nodeManager.getNodeId(), - provider: providerId, - identity: identityId, - }); - // Publish claim on identity - const claimDecoded = claimsUtils.decodeClaim(claim); - await provider.publishClaim(identityId, claimDecoded); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createIdentitiesRPC; diff --git a/src/client/rpcKeys.ts b/src/client/rpcKeys.ts deleted file mode 100644 index 3363dd326..000000000 --- a/src/client/rpcKeys.ts +++ /dev/null @@ -1,249 +0,0 @@ -import type { KeyManager } from '../keys'; -import type { NodeManager } from '../nodes'; -import type { ForwardProxy, ReverseProxy } from '../network'; -import type { TLSConfig } from '../network/types'; -import type { GRPCServer } from '../grpc'; - -import type * as grpc from '@grpc/grpc-js'; -import type * as utils from './utils'; -import type * as sessionsPB from '../proto/js/polykey/v1/sessions/sessions_pb'; -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from 
'../proto/js/polykey/v1/utils/utils_pb'; -import * as keysPB from '../proto/js/polykey/v1/keys/keys_pb'; - -const createKeysRPC = ({ - keyManager, - nodeManager, - authenticate, - fwdProxy, - revProxy, - grpcServerClient, -}: { - keyManager: KeyManager; - nodeManager: NodeManager; - authenticate: utils.Authenticate; - fwdProxy: ForwardProxy; - revProxy: ReverseProxy; - grpcServerClient: GRPCServer; -}) => { - return { - keysKeyPairRoot: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.KeyPair(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const keyPair = keyManager.getRootKeyPairPem(); - response.setPublic(keyPair.publicKey); - response.setPrivate(keyPair.privateKey); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysKeyPairReset: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - // Lock the nodeManager - because we need to do a database refresh too - await nodeManager.transaction(async (nodeManager) => { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - await keyManager.resetRootKeyPair(call.request.getName()); - // Reset the TLS config with new keypair + certificate - const tlsConfig: TLSConfig = { - keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, - certChainPem: await keyManager.getRootCertChainPem(), - }; - fwdProxy.setTLSConfig(tlsConfig); - revProxy.setTLSConfig(tlsConfig); - grpcServerClient.setTLSConfig(tlsConfig); - // Finally, refresh the node buckets - await nodeManager.refreshBuckets(); - }); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysKeyPairRenew: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - // Lock the nodeManager - because we need to do a database refresh too - await nodeManager.transaction(async (nodeManager) => { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - await keyManager.renewRootKeyPair(call.request.getName()); - // Reset the TLS config with new keypair + certificate - const tlsConfig: TLSConfig = { - keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, - certChainPem: await keyManager.getRootCertChainPem(), - }; - fwdProxy.setTLSConfig(tlsConfig); - revProxy.setTLSConfig(tlsConfig); - grpcServerClient.setTLSConfig(tlsConfig); - // Finally, refresh the node buckets - await nodeManager.refreshBuckets(); - }); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysEncrypt: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.Crypto(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const data = await keyManager.encryptWithRootKeyPair( - Buffer.from(call.request.getData(), 'binary'), - ); - response.setData(data.toString('binary')); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysDecrypt: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.Crypto(); - try { - const metadata = await 
authenticate(call.metadata); - call.sendMetadata(metadata); - - const data = await keyManager.decryptWithRootKeyPair( - Buffer.from(call.request.getData(), 'binary'), - ); - response.setData(data.toString('binary')); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysSign: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.Crypto(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const signature = await keyManager.signWithRootKeyPair( - Buffer.from(call.request.getData(), 'binary'), - ); - response.setSignature(signature.toString('binary')); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysVerify: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const status = await keyManager.verifyWithRootKeyPair( - Buffer.from(call.request.getData(), 'binary'), - Buffer.from(call.request.getSignature(), 'binary'), - ); - response.setSuccess(status); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysPasswordChange: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - await keyManager.changePassword(call.request.getPassword()); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysCertsGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.Certificate(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const cert = keyManager.getRootCertPem(); - response.setCert(cert); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysCertsChainGet: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const certs: Array = await keyManager.getRootCertChainPems(); - let certMessage: keysPB.Certificate; - for (const cert of certs) { - certMessage = new keysPB.Certificate(); - certMessage.setCert(cert); - await genWritable.next(certMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - }; -}; - -export default createKeysRPC; diff --git a/src/client/rpcNodes.ts b/src/client/rpcNodes.ts deleted file mode 100644 index 34458e0ab..000000000 --- a/src/client/rpcNodes.ts +++ /dev/null @@ -1,155 +0,0 @@ -import type { NodeManager } from '../nodes'; -import type { NodeAddress } from '../nodes/types'; -import type { NotificationData } from '../notifications/types'; -import type { NotificationsManager } from '../notifications'; - -import type * as grpc from '@grpc/grpc-js'; -import type * as utils from '../client/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import 
{ utils as nodesUtils, errors as nodesErrors } from '../nodes'; -import * as grpcUtils from '../grpc/utils'; -import * as networkUtils from '../network/utils'; - -const createNodesRPC = ({ - nodeManager, - authenticate, - notificationsManager, -}: { - nodeManager: NodeManager; - authenticate: utils.Authenticate; - notificationsManager: NotificationsManager; -}) => { - return { - /** - * Adds a node ID -> node address mapping into the buckets database. - * This is an unrestricted add: no validity checks are made for the correctness - * of the passed ID or host/port. - */ - nodesAdd: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Validate the passed node ID and host - const validNodeId = nodesUtils.isNodeId(call.request.getNodeId()); - if (!validNodeId) { - throw new nodesErrors.ErrorInvalidNodeId(); - } - const validHost = networkUtils.isValidHost( - call.request.getAddress()!.getHost(), - ); - if (!validHost) { - throw new nodesErrors.ErrorInvalidHost(); - } - await nodeManager.setNode( - nodesUtils.makeNodeId(call.request.getNodeId()), - { - host: call.request.getAddress()!.getHost(), - port: call.request.getAddress()!.getPort(), - } as NodeAddress, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - /** - * Checks if a remote node is online. - */ - nodesPing: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const status = await nodeManager.pingNode( - nodesUtils.makeNodeId(call.request.getNodeId()), - ); - response.setSuccess(status); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - /** - * Checks whether there is an existing Gestalt Invitation from the other node. - * If not, send an invitation, if so, create a cryptolink claim between the - * other node and host node. - */ - nodesClaim: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const remoteNodeId = nodesUtils.makeNodeId(call.request.getNodeId()); - const gestaltInvite = await notificationsManager.findGestaltInvite( - remoteNodeId, - ); - - // Check first whether there is an existing gestalt invite from the remote node - // or if we want to force an invitation rather than a claim - if (gestaltInvite === undefined || call.request.getForceInvite()) { - const data = { - type: 'GestaltInvite', - } as NotificationData; - await notificationsManager.sendNotification(remoteNodeId, data); - response.setSuccess(false); - } else { - // There is an existing invitation, and we want to claim the node - await nodeManager.claimNode(remoteNodeId); - response.setSuccess(true); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - /** - * Attempts to get the node address of a provided node ID (by contacting - * keynodes in the wider Polykey network). 
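- * On success the response carries both the node ID and its resolved host and port.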
- * @throws ErrorNodeGraphNodeNotFound if node address cannot be found - */ - nodesFind: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new nodesPB.NodeAddress(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeId = nodesUtils.makeNodeId(call.request.getNodeId()); - const address = await nodeManager.findNode(nodeId); - response - .setNodeId(nodeId) - .setAddress( - new nodesPB.Address().setHost(address.host).setPort(address.port), - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createNodesRPC; diff --git a/src/client/rpcNotifications.ts b/src/client/rpcNotifications.ts deleted file mode 100644 index 62507d444..000000000 --- a/src/client/rpcNotifications.ts +++ /dev/null @@ -1,123 +0,0 @@ -import type { NotificationsManager } from '../notifications'; - -import type * as grpc from '@grpc/grpc-js'; -import type * as utils from './utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as notificationsPB from '../proto/js/polykey/v1/notifications/notifications_pb'; -import * as grpcUtils from '../grpc/utils'; -import * as notificationsUtils from '../notifications/utils'; -import { makeNodeId } from '../nodes/utils'; - -const createNotificationsRPC = ({ - notificationsManager, - authenticate, -}: { - notificationsManager: NotificationsManager; - authenticate: utils.Authenticate; -}) => { - return { - notificationsSend: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const receivingId = makeNodeId(call.request.getReceiverId()); - const data = { - type: 'General', - message: call.request.getData()?.getMessage(), - }; - const validatedData = - notificationsUtils.validateGeneralNotification(data); - await notificationsManager.sendNotification(receivingId, validatedData); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - notificationsRead: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new notificationsPB.List(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const unread = call.request.getUnread(); - const order = call.request.getOrder() as 'newest' | 'oldest'; - const numberField = call.request.getNumber(); - let number: number | 'all'; - if (numberField === 'all') { - number = numberField; - } else { - number = parseInt(numberField); - } - - const notifications = await notificationsManager.readNotifications({ - unread, - number, - order, - }); - - const notifMessages: Array = []; - for (const notif of notifications) { - const notificationsMessage = new notificationsPB.Notification(); - switch (notif.data.type) { - case 'General': { - const generalMessage = new notificationsPB.General(); - generalMessage.setMessage(notif.data.message); - notificationsMessage.setGeneral(generalMessage); - break; - } - case 'GestaltInvite': { - notificationsMessage.setGestaltInvite('GestaltInvite'); - break; - } - case 'VaultShare': { - const vaultShareMessage = new notificationsPB.Share(); - vaultShareMessage.setVaultId(notif.data.vaultId); - vaultShareMessage.setVaultName(notif.data.vaultName); - 
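- // Only the action names (the keys of the actions map) are included in the share message.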
vaultShareMessage.setActionsList(Object.keys(notif.data.actions)); - notificationsMessage.setVaultShare(vaultShareMessage); - break; - } - } - notificationsMessage.setSenderId(notif.senderId); - notificationsMessage.setIsRead(notif.isRead); - notifMessages.push(notificationsMessage); - } - response.setNotificationList(notifMessages); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - notificationsClear: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - await notificationsManager.clearNotifications(); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createNotificationsRPC; diff --git a/src/client/rpcSessions.ts b/src/client/rpcSessions.ts deleted file mode 100644 index e535c5f67..000000000 --- a/src/client/rpcSessions.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { SessionManager } from '../sessions'; -import type * as grpc from '@grpc/grpc-js'; -import type * as utils from './utils'; -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; - -const createSessionsRPC = ({ - authenticate, - sessionManager, -}: { - authenticate: utils.Authenticate; - sessionManager: SessionManager; -}) => { - return { - sessionsUnlock: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - sessionsLockAll: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - await sessionManager.resetKey(); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createSessionsRPC; diff --git a/src/client/rpcStatus.ts b/src/client/rpcStatus.ts deleted file mode 100644 index 0f7b6e449..000000000 --- a/src/client/rpcStatus.ts +++ /dev/null @@ -1,60 +0,0 @@ -import type * as grpc from '@grpc/grpc-js'; -import type { Authenticate } from './utils'; -import type { KeyManager } from '../keys'; -import type { GRPCServer } from '../grpc'; -import type { ForwardProxy, ReverseProxy } from '../network'; -import type * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import process from 'process'; -import * as grpcUtils from '../grpc/utils'; -import * as agentPB from '../proto/js/polykey/v1/agent/agent_pb'; - -const createStatusRPC = ({ - authenticate, - keyManager, - grpcServerClient, - grpcServerAgent, - fwdProxy, - revProxy, -}: { - authenticate: Authenticate; - keyManager: KeyManager; - grpcServerClient: GRPCServer; - grpcServerAgent: GRPCServer; - fwdProxy: ForwardProxy; - revProxy: ReverseProxy; -}) => { - return { - agentStatus: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new agentPB.InfoMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - 
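- // Populate the status response with the agent's process ID, node ID, network hosts/ports, and root key material.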
response.setPid(process.pid); - response.setNodeId(keyManager.getNodeId()); - response.setClientHost(grpcServerClient.host); - response.setClientPort(grpcServerClient.port); - response.setIngressHost(revProxy.getIngressHost()); - response.setIngressPort(revProxy.getIngressPort()); - response.setEgressHost(fwdProxy.getEgressHost()); - response.setEgressPort(fwdProxy.getEgressPort()); - response.setAgentHost(grpcServerAgent.host); - response.setAgentPort(grpcServerAgent.port); - response.setProxyHost(fwdProxy.getProxyHost()); - response.setProxyPort(fwdProxy.getProxyPort()); - response.setRootPublicKeyPem(keyManager.getRootKeyPairPem().publicKey); - response.setRootCertPem(keyManager.getRootCertPem()); - response.setRootCertChainPem(await keyManager.getRootCertChainPem()); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createStatusRPC; diff --git a/src/client/rpcVaults.ts b/src/client/rpcVaults.ts deleted file mode 100644 index 16be5875b..000000000 --- a/src/client/rpcVaults.ts +++ /dev/null @@ -1,690 +0,0 @@ -import type { Vault, VaultId, VaultName } from '../vaults/types'; -import type { VaultManager } from '../vaults'; -import type { FileSystem } from '../types'; - -import type * as utils from './utils'; -import type * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import { utils as idUtils } from '@matrixai/id'; -import * as grpc from '@grpc/grpc-js'; -import * as grpcUtils from '../grpc/utils'; -import { - vaultOps, - utils as vaultsUtils, - errors as vaultsErrors, -} from '../vaults'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as secretsPB from '../proto/js/polykey/v1/secrets/secrets_pb'; - -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? 
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} - -const createVaultRPC = ({ - vaultManager, - authenticate, - fs, -}: { - vaultManager: VaultManager; - authenticate: utils.Authenticate; - fs: FileSystem; -}) => { - return { - vaultsList: async ( - call: grpc.ServerWritableStream, - ): Promise => { - // Call.on('error', (e) => console.error(e)); - // call.on('close', () => console.log('Got close')); - // call.on('finish', () => console.log('Got finish')); - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaults = await vaultManager.listVaults(); - for await (const [vaultName, vaultId] of vaults) { - const vaultListMessage = new vaultsPB.List(); - vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); - await genWritable.next(((_) => vaultListMessage)()); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - vaultsCreate: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.Vault(); - let vault: Vault; - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - vault = await vaultManager.createVault( - call.request.getNameOrId() as VaultName, - ); - response.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsRename: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.Vault(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const newName = call.request.getNewName() as VaultName; - await vaultManager.renameVault(vaultId, newName); - response.setNameOrId(vaultsUtils.makeVaultIdPretty(vaultId)); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsDelete: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const vaultMessage = call.request; - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - await vaultManager.destroyVault(vaultId); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsClone: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: 
grpc.status.NOT_FOUND }, null); - return; - } - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Vault id - // const vaultId = parseVaultInput(vaultMessage, vaultManager); - // Node id - // const id = makeNodeId(nodeMessage.getNodeId()); - - throw Error('Not implemented'); - // FIXME, not fully implemented - // await vaultManager.cloneVault(vaultId, id); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsPull: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Vault name - // const vaultId = await parseVaultInput(vaultMessage, vaultManager); - // Node id - // const id = makeNodeId(nodeMessage.getNodeId()); - - // Await vaultManager.pullVault(vaultId, id); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsScan: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - // Const nodeId = makeNodeId(call.request.getNodeId()); - - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaults = await vaultManager.listVaults(); - vaults.forEach(async (vaultId, vaultName) => { - const vaultListMessage = new vaultsPB.List(); - vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); - await genWritable.next(vaultListMessage); - }); - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - vaultsSecretsList: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request; - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secrets = await vaultOps.listSecrets(vault); - let secretMessage: secretsPB.Secret; - for (const secret of secrets) { - secretMessage = new secretsPB.Secret(); - secretMessage.setSecretName(secret); - await genWritable.next(secretMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - vaultsSecretsMkdir: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMkdirMessge = call.request; - const vaultMessage = vaultMkdirMessge.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - 
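- // Resolve the vault by name first, then fall back to decoding the value as a raw vault ID.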
const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { - recursive: vaultMkdirMessge.getRecursive(), - }); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsStat: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.Stat(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - // Const vaultMessage = call.request; - // Const id = await parseVaultInput(vaultMessage, vaultManager); - // const vault = await vaultManager.openVault(id); - // FIXME, reimplement this. - throw Error('Not Implemented'); - // Const stats = await vaultManager.vaultStats(id); - // response.setStats(JSON.stringify(stats));); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsDelete: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secretName = call.request.getSecretName(); - await vaultOps.deleteSecret(vault, secretName); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsEdit: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const secretMessage = call.request; - if (secretMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const vaultMessage = secretMessage.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secretName = secretMessage.getSecretName(); - const secretContent = Buffer.from(secretMessage.getSecretContent()); - await vaultOps.updateSecret(vault, secretName, secretContent); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new secretsPB.Secret(); - try { - const metadata = await authenticate(call.metadata); - 
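- // Return the authentication metadata to the client before reading the secret.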
call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secretName = call.request.getSecretName(); - const secretContent = await vaultOps.getSecret(vault, secretName); - - response.setSecretContent(secretContent); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsRename: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const secretMessage = call.request.getOldSecret(); - if (!secretMessage) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const vaultMessage = secretMessage.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const oldSecret = secretMessage.getSecretName(); - const newSecret = call.request.getNewName(); - await vaultOps.renameSecret(vault, oldSecret, newSecret); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsNew: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secret = call.request.getSecretName(); - const content = Buffer.from(call.request.getSecretContent()); - await vaultOps.addSecret(vault, secret, content); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsNewDir: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const 
secretsPath = call.request.getSecretDirectory(); - await vaultOps.addSecretDirectory(vault, secretsPath, fs); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsPermissionsSet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const node = makeNodeId(nodeMessage.getNodeId()); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - throw Error('Not Implemented'); - // Await vaultManager.setVaultPermissions(node, id); // FIXME - const response = new utilsPB.StatusMessage(); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsPermissionsUnset: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const node = makeNodeId(nodeMessage.getNodeId()); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - throw Error('Not implemented'); - // Await vaultManager.unsetVaultPermissions(node, id); // FIXME - const response = new utilsPB.StatusMessage(); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsPermissions: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - // Const node = nodeMessage.getNodeId(); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - // let perms: Record; - throw Error('Not implemented'); - // FIXME - // if (isNodeId(node)) { - // Perms = await vaultManager.getVaultPermissions(id, node); - // } else { - // Perms = await vaultManager.getVaultPermissions(id); - // } - // const permissionMessage = new vaultsPB.Permission(); - // For (const nodeId in perms) { - // permissionMessage.setNodeId(nodeId); - // if (perms[nodeId]['pull'] !== undefined) { - // permissionMessage.setAction('pull'); - // } - // await genWritable.next(permissionMessage); - // } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - vaultsVersion: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.VersionResult(); - try { - // Checking session token - const 
metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultsVersionMessage = call.request; - - // Getting vault ID - const vaultMessage = vaultsVersionMessage.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - - // Doing the deed - const vault = await vaultManager.openVault(vaultId); - const latestOid = (await vault.log())[0].oid; - const versionId = vaultsVersionMessage.getVersionId(); - - await vault.version(versionId); - const currentVersionId = (await vault.log(0, versionId))[0]?.oid; - - // Checking if latest version ID. - const isLatestVersion = latestOid === currentVersionId; - - // Creating message - response.setIsLatestVersion(isLatestVersion); - - // Sending message - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsLog: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Getting the vault. - const vaultsLogMessage = call.request; - const vaultMessage = vaultsLogMessage.getVault(); - if (vaultMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - - // Getting the log - const depth = vaultsLogMessage.getLogDepth(); - let commitId: string | undefined = vaultsLogMessage.getCommitId(); - commitId = commitId ? 
commitId : undefined; - const log = await vault.log(depth, commitId); - - const vaultsLogEntryMessage = new vaultsPB.LogEntry(); - for (const entry of log) { - vaultsLogEntryMessage.setOid(entry.oid); - vaultsLogEntryMessage.setCommitter(entry.committer); - vaultsLogEntryMessage.setTimeStamp(entry.timeStamp); - vaultsLogEntryMessage.setMessage(entry.message); - await genWritable.next(vaultsLogEntryMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - }; -}; - -export default createVaultRPC; diff --git a/src/client/service/agentLockAll.ts b/src/client/service/agentLockAll.ts new file mode 100644 index 000000000..70ce95a87 --- /dev/null +++ b/src/client/service/agentLockAll.ts @@ -0,0 +1,32 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { SessionManager } from '../../sessions'; +import * as grpcUtils from '../../grpc/utils'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function agentLockAll ({ + sessionManager, + authenticate, +}: { + sessionManager: SessionManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + await sessionManager.resetKey(); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default agentLockAll; diff --git a/src/client/service/agentStatus.ts b/src/client/service/agentStatus.ts new file mode 100644 index 000000000..2b5c91116 --- /dev/null +++ b/src/client/service/agentStatus.ts @@ -0,0 +1,58 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type { GRPCServer } from '../../grpc'; +import type { ForwardProxy, ReverseProxy } from '../../network'; +import process from 'process'; +import * as grpcUtils from '../../grpc/utils'; +import * as agentPB from '../../proto/js/polykey/v1/agent/agent_pb'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function agentStatus({ + authenticate, + keyManager, + grpcServerClient, + grpcServerAgent, + fwdProxy, + revProxy, +}: { + authenticate: Authenticate; + keyManager: KeyManager; + grpcServerClient: GRPCServer; + grpcServerAgent: GRPCServer; + fwdProxy: ForwardProxy; + revProxy: ReverseProxy; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new agentPB.InfoMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + response.setPid(process.pid); + response.setNodeId(keyManager.getNodeId()); + response.setClientHost(grpcServerClient.host); + response.setClientPort(grpcServerClient.port); + response.setIngressHost(revProxy.getIngressHost()); + response.setIngressPort(revProxy.getIngressPort()); + response.setEgressHost(fwdProxy.getEgressHost()); + response.setEgressPort(fwdProxy.getEgressPort()); + response.setAgentHost(grpcServerAgent.host); + response.setAgentPort(grpcServerAgent.port); + response.setProxyHost(fwdProxy.getProxyHost()); + response.setProxyPort(fwdProxy.getProxyPort()); + response.setRootPublicKeyPem(keyManager.getRootKeyPairPem().publicKey); + response.setRootCertPem(keyManager.getRootCertPem()); + 
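+ // The full root certificate chain is included alongside the current root certificate.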
response.setRootCertChainPem(await keyManager.getRootCertChainPem()); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default agentStatus; diff --git a/src/client/service/agentStop.ts b/src/client/service/agentStop.ts new file mode 100644 index 000000000..ac030aa98 --- /dev/null +++ b/src/client/service/agentStop.ts @@ -0,0 +1,38 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type PolykeyAgent from '../../PolykeyAgent'; +import * as grpcUtils from '../../grpc/utils'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function agentStop ({ + authenticate, + pkAgent, +}: { + authenticate: Authenticate; + pkAgent: PolykeyAgent; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + if (!pkAgent.running) { + callback(null, response); + return; + } + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Respond first to close the GRPC connection + callback(null, response); + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + // Stop is called after GRPC resources are cleared + await pkAgent.stop(); + return; + }; +} + +export default agentStop; diff --git a/src/client/service/agentUnlock.ts b/src/client/service/agentUnlock.ts new file mode 100644 index 000000000..ca04af6e6 --- /dev/null +++ b/src/client/service/agentUnlock.ts @@ -0,0 +1,28 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import * as grpcUtils from '../../grpc/utils'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function agentUnlock ({ + authenticate, +}: { + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default agentUnlock; diff --git a/src/client/service/gestaltsActionsGetByIdentity.ts b/src/client/service/gestaltsActionsGetByIdentity.ts new file mode 100644 index 000000000..f70bb2391 --- /dev/null +++ b/src/client/service/gestaltsActionsGetByIdentity.ts @@ -0,0 +1,48 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; + +function gestaltsActionsGetByIdentity({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new permissionsPB.Actions(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const providerId = info.getProviderId() as ProviderId; + const identityId = info.getIdentityId() as IdentityId; + const result = await gestaltGraph.getGestaltActionsByIdentity( 
+ providerId, + identityId, + ); + if (result == null) { + // Node doesn't exist, so no permissions. might throw error instead TBD. + response.setActionList([]); + } else { + // Contains permission + const actions = Object.keys(result); + response.setActionList(actions); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsGetByIdentity; diff --git a/src/client/service/gestaltsActionsGetByNode.ts b/src/client/service/gestaltsActionsGetByNode.ts new file mode 100644 index 000000000..45a4e5190 --- /dev/null +++ b/src/client/service/gestaltsActionsGetByNode.ts @@ -0,0 +1,45 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; + +function gestaltsActionsGetByNode({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new permissionsPB.Actions(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const result = await gestaltGraph.getGestaltActionsByNode( + nodesUtils.makeNodeId(info.getNodeId()), + ); + if (result == null) { + // Node doesn't exist, so no permissions. might throw error instead TBD. + response.setActionList([]); + } else { + // Contains permission + const actions = Object.keys(result); + response.setActionList(actions); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsGetByNode; diff --git a/src/client/service/gestaltsActionsSetByIdentity.ts b/src/client/service/gestaltsActionsSetByIdentity.ts new file mode 100644 index 000000000..d6cccfe94 --- /dev/null +++ b/src/client/service/gestaltsActionsSetByIdentity.ts @@ -0,0 +1,44 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as gestaltsUtils } from '../../gestalts'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; + +function gestaltsActionsSetByIdentity({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Setting the action. 
+ const action = gestaltsUtils.makeGestaltAction(info.getAction()); + const providerId = info.getIdentity()?.getProviderId() as ProviderId; + const identityId = info.getIdentity()?.getIdentityId() as IdentityId; + await gestaltGraph.setGestaltActionByIdentity( + providerId, + identityId, + action, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsSetByIdentity; diff --git a/src/client/service/gestaltsActionsSetByNode.ts b/src/client/service/gestaltsActionsSetByNode.ts new file mode 100644 index 000000000..aec7b96bf --- /dev/null +++ b/src/client/service/gestaltsActionsSetByNode.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as gestaltsUtils } from '../../gestalts'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; + +function gestaltsActionsSetByNode({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Setting the action. + const action = gestaltsUtils.makeGestaltAction(info.getAction()); + const nodeId = nodesUtils.makeNodeId(info.getNode()?.getNodeId()); + await gestaltGraph.setGestaltActionByNode(nodeId, action); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsSetByNode; diff --git a/src/client/service/gestaltsActionsUnsetByIdentity.ts b/src/client/service/gestaltsActionsUnsetByIdentity.ts new file mode 100644 index 000000000..bf6b35c0a --- /dev/null +++ b/src/client/service/gestaltsActionsUnsetByIdentity.ts @@ -0,0 +1,44 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as gestaltsUtils } from '../../gestalts'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; + +function gestaltsActionsUnsetByIdentity({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Unsetting the action.
+ const action = gestaltsUtils.makeGestaltAction(info.getAction()); + const providerId = info.getIdentity()?.getProviderId() as ProviderId; + const identityId = info.getIdentity()?.getIdentityId() as IdentityId; + await gestaltGraph.unsetGestaltActionByIdentity( + providerId, + identityId, + action, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsUnsetByIdentity; diff --git a/src/client/service/gestaltsActionsUnsetByNode.ts b/src/client/service/gestaltsActionsUnsetByNode.ts new file mode 100644 index 000000000..6c62590cb --- /dev/null +++ b/src/client/service/gestaltsActionsUnsetByNode.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as gestaltsUtils } from '../../gestalts'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; + +function gestaltsActionsUnsetByNode({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Unsetting the action. + const action = gestaltsUtils.makeGestaltAction(info.getAction()); + const nodeId = nodesUtils.makeNodeId(info.getNode()?.getNodeId()); + await gestaltGraph.unsetGestaltActionByNode(nodeId, action); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsUnsetByNode; diff --git a/src/client/service/gestaltsDiscoveryByIdentity.ts b/src/client/service/gestaltsDiscoveryByIdentity.ts new file mode 100644 index 000000000..66de892ca --- /dev/null +++ b/src/client/service/gestaltsDiscoveryByIdentity.ts @@ -0,0 +1,42 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { Discovery } from '../../discovery'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; + +function gestaltsDiscoveryByIdentity({ + authenticate, + discovery, +}: { + authenticate: Authenticate; + discovery: Discovery; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Constructing identity info.
+ const gen = discovery.discoverGestaltByIdentity( + info.getProviderId() as ProviderId, + info.getIdentityId() as IdentityId, + ); + for await (const _ of gen) { + // Empty + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsDiscoveryByIdentity; diff --git a/src/client/service/gestaltsDiscoveryByNode.ts b/src/client/service/gestaltsDiscoveryByNode.ts new file mode 100644 index 000000000..3a43b7492 --- /dev/null +++ b/src/client/service/gestaltsDiscoveryByNode.ts @@ -0,0 +1,41 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { Discovery } from '../../discovery'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +function gestaltsDiscoveryByNode({ + authenticate, + discovery, +}: { + authenticate: Authenticate; + discovery: Discovery; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Constructing identity info. + const gen = discovery.discoverGestaltByNode( + nodesUtils.makeNodeId(info.getNodeId()), + ); + for await (const _ of gen) { + // Empty + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsDiscoveryByNode; diff --git a/src/client/service/gestaltsGestaltGetByIdentity.ts b/src/client/service/gestaltsGestaltGetByIdentity.ts new file mode 100644 index 000000000..a82e26c9e --- /dev/null +++ b/src/client/service/gestaltsGestaltGetByIdentity.ts @@ -0,0 +1,40 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; + +function gestaltsGestaltGetByIdentity({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new gestaltsPB.Graph(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const gestalt = await gestaltGraph.getGestaltByIdentity( + call.request.getProviderId() as ProviderId, + call.request.getIdentityId() as IdentityId, + ); + if (gestalt != null) { + response.setGestaltGraph(JSON.stringify(gestalt)); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsGestaltGetByIdentity; diff --git a/src/client/service/gestaltsGestaltGetByNode.ts b/src/client/service/gestaltsGestaltGetByNode.ts new file mode 100644 index 000000000..38b6bf4fd --- /dev/null +++ b/src/client/service/gestaltsGestaltGetByNode.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from 
'../../gestalts'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +function gestaltsGestaltGetByNode({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new gestaltsPB.Graph(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const gestalt = await gestaltGraph.getGestaltByNode( + nodesUtils.makeNodeId(call.request.getNodeId()), + ); + if (gestalt != null) { + response.setGestaltGraph(JSON.stringify(gestalt)); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsGestaltGetByNode; diff --git a/src/client/service/gestaltsGestaltList.ts b/src/client/service/gestaltsGestaltList.ts new file mode 100644 index 000000000..e97609215 --- /dev/null +++ b/src/client/service/gestaltsGestaltList.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { Gestalt } from '../../gestalts/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function gestaltsGestaltList({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + let gestaltMessage: gestaltsPB.Gestalt; + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const certs: Array = await gestaltGraph.getGestalts(); + for (const cert of certs) { + gestaltMessage = new gestaltsPB.Gestalt(); + gestaltMessage.setName(JSON.stringify(cert)); + await genWritable.next(gestaltMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default gestaltsGestaltList; diff --git a/src/client/service/identitiesAuthenticate.ts b/src/client/service/identitiesAuthenticate.ts new file mode 100644 index 000000000..6cb12f941 --- /dev/null +++ b/src/client/service/identitiesAuthenticate.ts @@ -0,0 +1,64 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { ProviderId } from '../../identities/types'; +import * as clientErrors from '../errors'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import { never } from '../../utils'; + +function identitiesAuthenticate({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream< + identitiesPB.Provider, + identitiesPB.AuthenticationProcess + >, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const provider = 
identitiesManager.getProvider( + call.request.getProviderId() as ProviderId, + ); + if (provider == null) { + throw new clientErrors.ErrorClientInvalidProvider(); + } + const authFlow = provider.authenticate(); + let authFlowResult = await authFlow.next(); + if (authFlowResult.done) { + never(); + } + const authProcess = new identitiesPB.AuthenticationProcess(); + const authRequest = new identitiesPB.AuthenticationRequest(); + authRequest.setUrl(authFlowResult.value.url); + const map = authRequest.getDataMap(); + for (const [k, v] of Object.entries(authFlowResult.value.data)) { + map.set(k, v); + } + authProcess.setRequest(authRequest); + await genWritable.next(authProcess); + authFlowResult = await authFlow.next(); + if (!authFlowResult.done) { + never(); + } + const authResponse = new identitiesPB.AuthenticationResponse(); + authResponse.setIdentityId(authFlowResult.value); + authProcess.setResponse(authResponse); + await genWritable.next(authProcess); + await genWritable.next(null); + return; + } catch (e) { + await genWritable.throw(e); + return; + } + }; +} + +export default identitiesAuthenticate; diff --git a/src/client/service/identitiesClaim.ts b/src/client/service/identitiesClaim.ts new file mode 100644 index 000000000..1304b6774 --- /dev/null +++ b/src/client/service/identitiesClaim.ts @@ -0,0 +1,65 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import type { Sigchain } from '../../sigchain'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import * as clientErrors from '../errors'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as claimsUtils } from '../../claims'; +import { errors as identitiesErrors } from '../../identities'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +/** + * Augments the keynode with a new identity. 
+ */ +function identitiesClaim({ + identitiesManager, + sigchain, + nodeManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + sigchain: Sigchain; + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Check provider is authenticated + const providerId = call.request.getProviderId() as ProviderId; + const provider = identitiesManager.getProvider(providerId); + if (provider == null) + throw new clientErrors.ErrorClientInvalidProvider(); + const identityId = call.request.getIdentityId() as IdentityId; + const identities = await provider.getAuthIdentityIds(); + if (!identities.includes(identityId)) { + throw new identitiesErrors.ErrorProviderUnauthenticated(); + } + // Create identity claim on our node + const claim = await sigchain.addClaim({ + type: 'identity', + node: nodeManager.getNodeId(), + provider: providerId, + identity: identityId, + }); + // Publish claim on identity + const claimDecoded = claimsUtils.decodeClaim(claim); + await provider.publishClaim(identityId, claimDecoded); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesClaim; diff --git a/src/client/service/identitiesInfoGet.ts b/src/client/service/identitiesInfoGet.ts new file mode 100644 index 000000000..1c16c10c2 --- /dev/null +++ b/src/client/service/identitiesInfoGet.ts @@ -0,0 +1,49 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import type { Sigchain } from '../../sigchain'; +import type { IdentitiesManager } from '../../identities'; +import type { ProviderId } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; + +/** + * Gets the first identityId of the local keynode. + */ +function identitiesInfoGet({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + sigchain: Sigchain; + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new identitiesPB.Provider(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Gets an identity out of all identities.
+ const providerId = call.request.getProviderId() as ProviderId; + const provider = identitiesManager.getProvider(providerId); + if (provider !== undefined) { + const identities = await provider.getAuthIdentityIds(); + response.setProviderId(providerId); + if (identities.length !== 0) { + response.setIdentityId(identities[0]); + } + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesInfoGet; diff --git a/src/client/service/identitiesInfoGetConnected.ts b/src/client/service/identitiesInfoGetConnected.ts new file mode 100644 index 000000000..faae81fe4 --- /dev/null +++ b/src/client/service/identitiesInfoGetConnected.ts @@ -0,0 +1,61 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId, TokenData } from '../../identities/types'; +import * as clientErrors from '../errors'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; + +function identitiesInfoGetConnected({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream< + identitiesPB.ProviderSearch, + identitiesPB.Info + >, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const providerId = call.request + .getProvider() + ?.getProviderId() as ProviderId; + const identityId = call.request + .getProvider() + ?.getIdentityId() as IdentityId; + const provider = identitiesManager.getProvider(providerId); + if (provider == null) + throw new clientErrors.ErrorClientInvalidProvider(); + + const identities = provider.getConnectedIdentityDatas( + identityId, + call.request.getSearchTermList(), + ); + + for await (const identity of identities) { + const identityInfoMessage = new identitiesPB.Info(); + const providerMessage = new identitiesPB.Provider(); + providerMessage.setProviderId(identity.providerId); + providerMessage.setIdentityId(identity.identityId); + identityInfoMessage.setProvider(providerMessage); + identityInfoMessage.setName(identity.name ?? ''); + identityInfoMessage.setEmail(identity.email ?? ''); + identityInfoMessage.setUrl(identity.url ?? 
''); + await genWritable.next(identityInfoMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default identitiesInfoGetConnected; diff --git a/src/client/service/identitiesProvidersList.ts b/src/client/service/identitiesProvidersList.ts new file mode 100644 index 000000000..0ccd1349e --- /dev/null +++ b/src/client/service/identitiesProvidersList.ts @@ -0,0 +1,36 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function identitiesProvidersList({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new identitiesPB.Provider(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const providers = identitiesManager.getProviders(); + response.setProviderId(JSON.stringify(Object.keys(providers))); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesProvidersList; + diff --git a/src/client/service/identitiesTokenDelete.ts b/src/client/service/identitiesTokenDelete.ts new file mode 100644 index 000000000..3313a59cc --- /dev/null +++ b/src/client/service/identitiesTokenDelete.ts @@ -0,0 +1,37 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId, TokenData } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function identitiesTokenDelete({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + await identitiesManager.delToken( + call.request.getProviderId() as ProviderId, + call.request.getIdentityId() as IdentityId, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesTokenDelete; diff --git a/src/client/service/identitiesTokenGet.ts b/src/client/service/identitiesTokenGet.ts new file mode 100644 index 000000000..d99cd3c08 --- /dev/null +++ b/src/client/service/identitiesTokenGet.ts @@ -0,0 +1,37 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId, TokenData } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; + +function identitiesTokenGet({ + identitiesManager, + authenticate, +}: { + identitiesManager: 
IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new identitiesPB.Token(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const tokens = await identitiesManager.getToken( + call.request.getProviderId() as ProviderId, + call.request.getIdentityId() as IdentityId, + ); + response.setToken(JSON.stringify(tokens)); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesTokenGet; diff --git a/src/client/service/identitiesTokenPut.ts b/src/client/service/identitiesTokenPut.ts new file mode 100644 index 000000000..def2f0dfe --- /dev/null +++ b/src/client/service/identitiesTokenPut.ts @@ -0,0 +1,43 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId, TokenData } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function identitiesTokenPut({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall< + identitiesPB.TokenSpecific, + utilsPB.EmptyMessage + >, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const provider = call.request.getProvider(); + await identitiesManager.putToken( + provider?.getProviderId() as ProviderId, + provider?.getIdentityId() as IdentityId, + { accessToken: call.request.getToken() } as TokenData, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesTokenPut; + diff --git a/src/client/service/index.ts b/src/client/service/index.ts new file mode 100644 index 000000000..b336fcea8 --- /dev/null +++ b/src/client/service/index.ts @@ -0,0 +1,186 @@ +import Logger from '@matrixai/logger'; +import type PolykeyAgent from '../../PolykeyAgent'; +import type { KeyManager } from '../../keys'; +import type { VaultManager } from '../../vaults'; +import type { NodeManager } from '../../nodes'; +import type { IdentitiesManager } from '../../identities'; +import type { GestaltGraph } from '../../gestalts'; +import type { SessionManager } from '../../sessions'; +import type { NotificationsManager } from '../../notifications'; +import type { Discovery } from '../../discovery'; +import type { Sigchain } from '../../sigchain'; +import type { GRPCServer } from '../../grpc'; +import type { ForwardProxy, ReverseProxy } from '../../network'; +import type { IClientServiceServer } from '../../proto/js/polykey/v1/client_service_grpc_pb'; +import type { FileSystem } from '../../types'; +import agentLockAll from './agentLockAll'; +import agentStatus from './agentStatus'; +import agentStop from './agentStop'; +import agentUnlock from './agentUnlock'; +import gestaltsActionsGetByIdentity from './gestaltsActionsGetByIdentity'; +import gestaltsActionsGetByNode from './gestaltsActionsGetByNode'; +import gestaltsActionsSetByIdentity from './gestaltsActionsSetByIdentity'; +import 
gestaltsActionsSetByNode from './gestaltsActionsSetByNode'; +import gestaltsActionsUnsetByIdentity from './gestaltsActionsUnsetByIdentity'; +import gestaltsActionsUnsetByNode from './gestaltsActionsUnsetByNode'; +import gestaltsDiscoveryByIdentity from './gestaltsDiscoveryByIdentity'; +import gestaltsDiscoveryByNode from './gestaltsDiscoveryByNode'; +import gestaltsGestaltGetByIdentity from './gestaltsGestaltGetByIdentity'; +import gestaltsGestaltGetByNode from './gestaltsGestaltGetByNode'; +import gestaltsGestaltList from './gestaltsGestaltList'; +import identitiesAuthenticate from './identitiesAuthenticate'; +import identitiesClaim from './identitiesClaim'; +import identitiesInfoGet from './identitiesInfoGet'; +import identitiesInfoGetConnected from './identitiesInfoGetConnected'; +import identitiesProvidersList from './identitiesProvidersList'; +import identitiesTokenDelete from './identitiesTokenDelete'; +import identitiesTokenGet from './identitiesTokenGet'; +import identitiesTokenPut from './identitiesTokenPut'; +import keysCertsChainGet from './keysCertsChainGet'; +import keysCertsGet from './keysCertsGet'; +import keysDecrypt from './keysDecrypt'; +import keysEncrypt from './keysEncrypt'; +import keysKeyPairRenew from './keysKeyPairRenew'; +import keysKeyPairReset from './keysKeyPairReset'; +import keysKeyPairRoot from './keysKeyPairRoot'; +import keysPasswordChange from './keysPasswordChange'; +import keysSign from './keysSign'; +import keysVerify from './keysVerify'; +import nodesAdd from './nodesAdd'; +import nodesClaim from './nodesClaim'; +import nodesFind from './nodesFind'; +import nodesPing from './nodesPing'; +import notificationsClear from './notificationsClear'; +import notificationsRead from './notificationsRead'; +import notificationsSend from './notificationsSend'; +import vaultsClone from './vaultsClone'; +import vaultsCreate from './vaultsCreate'; +import vaultsDelete from './vaultsDelete'; +import vaultsList from './vaultsList'; +import vaultsLog from './vaultsLog'; +import vaultsPermissions from './vaultsPermissions'; +import vaultsPermissionsSet from './vaultsPermissionsSet'; +import vaultsPermissionsUnset from './vaultsPermissionsUnset'; +import vaultsPull from './vaultsPull'; +import vaultsRename from './vaultsRename'; +import vaultsScan from './vaultsScan'; +import vaultsVersion from './vaultsVersion'; +import vaultsSecretsDelete from './vaultsSecretsDelete'; +import vaultsSecretsEdit from './vaultsSecretsEdit'; +import vaultsSecretsGet from './vaultsSecretsGet'; +import vaultsSecretsList from './vaultsSecretsList'; +import vaultsSecretsMkdir from './vaultsSecretsMkdir'; +import vaultsSecretsNew from './vaultsSecretsNew'; +import vaultsSecretsNewDir from './vaultsSecretsNewDir'; +import vaultsSecretsRename from './vaultsSecretsRename'; +import vaultsSecretsStat from './vaultsSecretsStat'; +import * as clientUtils from '../utils'; +import { ClientServiceService } from '../../proto/js/polykey/v1/client_service_grpc_pb'; + +function createService ( + { + keyManager, + sessionManager, + logger = new Logger(createService.name), + fs = require('fs'), + ...containerRest + }: { + pkAgent: PolykeyAgent; + keyManager: KeyManager; + vaultManager: VaultManager; + nodeManager: NodeManager; + identitiesManager: IdentitiesManager; + gestaltGraph: GestaltGraph; + sessionManager: SessionManager; + notificationsManager: NotificationsManager; + discovery: Discovery; + sigchain: Sigchain; + grpcServerClient: GRPCServer; + grpcServerAgent: GRPCServer; + fwdProxy: ForwardProxy; + 
revProxy: ReverseProxy; + logger?: Logger; + fs?: FileSystem; + } +) { + const authenticate = clientUtils.authenticator( + sessionManager, + keyManager + ); + const container = { + ...containerRest, + keyManager, + sessionManager, + logger, + fs, + authenticate, + }; + const service: IClientServiceServer ={ + agentLockAll: agentLockAll(container), + agentStatus: agentStatus(container), + agentStop: agentStop(container), + agentUnlock: agentUnlock(container), + gestaltsActionsGetByIdentity: gestaltsActionsGetByIdentity(container), + gestaltsActionsGetByNode: gestaltsActionsGetByNode(container), + gestaltsActionsSetByIdentity: gestaltsActionsSetByIdentity(container), + gestaltsActionsSetByNode: gestaltsActionsSetByNode(container), + gestaltsActionsUnsetByIdentity: gestaltsActionsUnsetByIdentity(container), + gestaltsActionsUnsetByNode: gestaltsActionsUnsetByNode(container), + gestaltsDiscoveryByIdentity: gestaltsDiscoveryByIdentity(container), + gestaltsDiscoveryByNode: gestaltsDiscoveryByNode(container), + gestaltsGestaltGetByIdentity: gestaltsGestaltGetByIdentity(container), + gestaltsGestaltGetByNode: gestaltsGestaltGetByNode(container), + gestaltsGestaltList: gestaltsGestaltList(container), + identitiesAuthenticate: identitiesAuthenticate(container), + identitiesClaim: identitiesClaim(container), + identitiesInfoGet: identitiesInfoGet(container), + identitiesInfoGetConnected: identitiesInfoGetConnected(container), + identitiesProvidersList: identitiesProvidersList(container), + identitiesTokenDelete: identitiesTokenDelete(container), + identitiesTokenGet: identitiesTokenGet(container), + identitiesTokenPut: identitiesTokenPut(container), + keysCertsChainGet: keysCertsChainGet(container), + keysCertsGet: keysCertsGet(container), + keysDecrypt: keysDecrypt(container), + keysEncrypt: keysEncrypt(container), + keysKeyPairRenew: keysKeyPairRenew(container), + keysKeyPairReset: keysKeyPairReset(container), + keysKeyPairRoot: keysKeyPairRoot(container), + keysPasswordChange: keysPasswordChange(container), + keysSign: keysSign(container), + keysVerify: keysVerify(container), + nodesAdd: nodesAdd(container), + nodesClaim: nodesClaim(container), + nodesFind: nodesFind(container), + nodesPing: nodesPing(container), + notificationsClear: notificationsClear(container), + notificationsRead: notificationsRead(container), + notificationsSend: notificationsSend(container), + vaultsClone: vaultsClone(container), + vaultsCreate: vaultsCreate(container), + vaultsDelete: vaultsDelete(container), + vaultsList: vaultsList(container), + vaultsLog: vaultsLog(container), + vaultsPermissions: vaultsPermissions(container), + vaultsPermissionsSet: vaultsPermissionsSet(container), + vaultsPermissionsUnset: vaultsPermissionsUnset(container), + vaultsPull: vaultsPull(container), + vaultsRename: vaultsRename(container), + vaultsScan: vaultsScan(container), + vaultsVersion: vaultsVersion(container), + vaultsSecretsDelete: vaultsSecretsDelete(container), + vaultsSecretsEdit: vaultsSecretsEdit(container), + vaultsSecretsGet: vaultsSecretsGet(container), + vaultsSecretsList: vaultsSecretsList(container), + vaultsSecretsMkdir: vaultsSecretsMkdir(container), + vaultsSecretsNew: vaultsSecretsNew(container), + vaultsSecretsNewDir: vaultsSecretsNewDir(container), + vaultsSecretsRename: vaultsSecretsRename(container), + vaultsSecretsStat: vaultsSecretsStat(container), + }; + return service; +} + +export default createService; + +export { ClientServiceService }; diff --git a/src/client/service/keysCertsChainGet.ts 
b/src/client/service/keysCertsChainGet.ts new file mode 100644 index 000000000..6de9e12d2 --- /dev/null +++ b/src/client/service/keysCertsChainGet.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysCertsChainGet({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const certs: Array = await keyManager.getRootCertChainPems(); + let certMessage: keysPB.Certificate; + for (const cert of certs) { + certMessage = new keysPB.Certificate(); + certMessage.setCert(cert); + await genWritable.next(certMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default keysCertsChainGet; diff --git a/src/client/service/keysCertsGet.ts b/src/client/service/keysCertsGet.ts new file mode 100644 index 000000000..fc9e438ae --- /dev/null +++ b/src/client/service/keysCertsGet.ts @@ -0,0 +1,34 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysCertsGet({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.Certificate(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const cert = keyManager.getRootCertPem(); + response.setCert(cert); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysCertsGet; diff --git a/src/client/service/keysDecrypt.ts b/src/client/service/keysDecrypt.ts new file mode 100644 index 000000000..2e5e601ee --- /dev/null +++ b/src/client/service/keysDecrypt.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysDecrypt({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.Crypto(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const data = await keyManager.decryptWithRootKeyPair( + Buffer.from(call.request.getData(), 'binary'), + ); + response.setData(data.toString('binary')); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysDecrypt; diff --git a/src/client/service/keysEncrypt.ts 
b/src/client/service/keysEncrypt.ts new file mode 100644 index 000000000..c092458b5 --- /dev/null +++ b/src/client/service/keysEncrypt.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysEncrypt({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.Crypto(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const data = await keyManager.encryptWithRootKeyPair( + Buffer.from(call.request.getData(), 'binary'), + ); + response.setData(data.toString('binary')); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysEncrypt; diff --git a/src/client/service/keysKeyPairRenew.ts b/src/client/service/keysKeyPairRenew.ts new file mode 100644 index 000000000..664e27637 --- /dev/null +++ b/src/client/service/keysKeyPairRenew.ts @@ -0,0 +1,58 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type { NodeManager } from '../../nodes'; +import type { GRPCServer } from '../../grpc'; +import type { ForwardProxy, ReverseProxy } from '../../network'; +import type { TLSConfig } from '../../network/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysKeyPairRenew({ + keyManager, + nodeManager, + fwdProxy, + revProxy, + grpcServerClient, + authenticate, +}: { + keyManager: KeyManager; + nodeManager: NodeManager; + fwdProxy: ForwardProxy; + revProxy: ReverseProxy; + grpcServerClient: GRPCServer; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + // Lock the nodeManager - because we need to do a database refresh too + await nodeManager.transaction(async (nodeManager) => { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + await keyManager.renewRootKeyPair(call.request.getName()); + // Reset the TLS config with new keypair + certificate + const tlsConfig: TLSConfig = { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }; + fwdProxy.setTLSConfig(tlsConfig); + revProxy.setTLSConfig(tlsConfig); + grpcServerClient.setTLSConfig(tlsConfig); + // Finally, refresh the node buckets + await nodeManager.refreshBuckets(); + }); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysKeyPairRenew; diff --git a/src/client/service/keysKeyPairReset.ts b/src/client/service/keysKeyPairReset.ts new file mode 100644 index 000000000..ceb662f0a --- /dev/null +++ b/src/client/service/keysKeyPairReset.ts @@ -0,0 +1,58 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type { NodeManager } from '../../nodes'; +import type { 
GRPCServer } from '../../grpc'; +import type { ForwardProxy, ReverseProxy } from '../../network'; +import type { TLSConfig } from '../../network/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysKeyPairReset({ + keyManager, + nodeManager, + fwdProxy, + revProxy, + grpcServerClient, + authenticate, +}: { + keyManager: KeyManager; + nodeManager: NodeManager; + fwdProxy: ForwardProxy; + revProxy: ReverseProxy; + grpcServerClient: GRPCServer; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + // Lock the nodeManager - because we need to do a database refresh too + await nodeManager.transaction(async (nodeManager) => { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + await keyManager.resetRootKeyPair(call.request.getName()); + // Reset the TLS config with new keypair + certificate + const tlsConfig: TLSConfig = { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }; + fwdProxy.setTLSConfig(tlsConfig); + revProxy.setTLSConfig(tlsConfig); + grpcServerClient.setTLSConfig(tlsConfig); + // Finally, refresh the node buckets + await nodeManager.refreshBuckets(); + }); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysKeyPairReset; diff --git a/src/client/service/keysKeyPairRoot.ts b/src/client/service/keysKeyPairRoot.ts new file mode 100644 index 000000000..7e785ec39 --- /dev/null +++ b/src/client/service/keysKeyPairRoot.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysKeyPairRoot({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.KeyPair(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const keyPair = keyManager.getRootKeyPairPem(); + response.setPublic(keyPair.publicKey); + response.setPrivate(keyPair.privateKey); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysKeyPairRoot; diff --git a/src/client/service/keysPasswordChange.ts b/src/client/service/keysPasswordChange.ts new file mode 100644 index 000000000..80b128d3e --- /dev/null +++ b/src/client/service/keysPasswordChange.ts @@ -0,0 +1,34 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type * as sessionsPB from '../../proto/js/polykey/v1/sessions/sessions_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function keysPasswordChange({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: 
grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + await keyManager.changePassword(call.request.getPassword()); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysPasswordChange; diff --git a/src/client/service/keysSign.ts b/src/client/service/keysSign.ts new file mode 100644 index 000000000..b48702e5a --- /dev/null +++ b/src/client/service/keysSign.ts @@ -0,0 +1,36 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysSign({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.Crypto(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const signature = await keyManager.signWithRootKeyPair( + Buffer.from(call.request.getData(), 'binary'), + ); + response.setSignature(signature.toString('binary')); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysSign; diff --git a/src/client/service/keysVerify.ts b/src/client/service/keysVerify.ts new file mode 100644 index 000000000..70b858279 --- /dev/null +++ b/src/client/service/keysVerify.ts @@ -0,0 +1,37 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysVerify({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const status = await keyManager.verifyWithRootKeyPair( + Buffer.from(call.request.getData(), 'binary'), + Buffer.from(call.request.getSignature(), 'binary'), + ); + response.setSuccess(status); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysVerify; diff --git a/src/client/service/nodesAdd.ts b/src/client/service/nodesAdd.ts new file mode 100644 index 000000000..8d65c03ff --- /dev/null +++ b/src/client/service/nodesAdd.ts @@ -0,0 +1,58 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import type { NodeAddress } from '../../nodes/types'; +import { utils as nodesUtils, errors as nodesErrors } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as networkUtils } from '../../network'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Adds a node ID -> node address mapping into 
the buckets database. + * This is an unrestricted add: no validity checks are made for the correctness + * of the passed ID or host/port. + */ +function nodesAdd ({ + nodeManager, + authenticate, +}: { + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Validate the passed node ID and host + const validNodeId = nodesUtils.isNodeId(call.request.getNodeId()); + if (!validNodeId) { + throw new nodesErrors.ErrorInvalidNodeId(); + } + const validHost = networkUtils.isValidHost( + call.request.getAddress()!.getHost(), + ); + if (!validHost) { + throw new nodesErrors.ErrorInvalidHost(); + } + await nodeManager.setNode( + nodesUtils.makeNodeId(call.request.getNodeId()), + { + host: call.request.getAddress()!.getHost(), + port: call.request.getAddress()!.getPort(), + } as NodeAddress + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default nodesAdd; diff --git a/src/client/service/nodesClaim.ts b/src/client/service/nodesClaim.ts new file mode 100644 index 000000000..58c47cbf1 --- /dev/null +++ b/src/client/service/nodesClaim.ts @@ -0,0 +1,59 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import type { NotificationData } from '../../notifications/types'; +import type { NotificationsManager } from '../../notifications'; +import { utils as nodesUtils, errors as nodesErrors } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Checks whether there is an existing Gestalt Invitation from the other node. + * If not, send an invitation, if so, create a cryptolink claim between the + * other node and host node. 
+ */ +function nodesClaim ({ + nodeManager, + notificationsManager, + authenticate, +}: { + nodeManager: NodeManager; + notificationsManager: NotificationsManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const remoteNodeId = nodesUtils.makeNodeId(call.request.getNodeId()); + const gestaltInvite = await notificationsManager.findGestaltInvite( + remoteNodeId, + ); + // Check first whether there is an existing gestalt invite from the remote node + // or if we want to force an invitation rather than a claim + if (gestaltInvite === undefined || call.request.getForceInvite()) { + const data = { + type: 'GestaltInvite', + } as NotificationData; + await notificationsManager.sendNotification(remoteNodeId, data); + response.setSuccess(false); + } else { + // There is an existing invitation, and we want to claim the node + await nodeManager.claimNode(remoteNodeId); + response.setSuccess(true); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default nodesClaim; diff --git a/src/client/service/nodesFind.ts b/src/client/service/nodesFind.ts new file mode 100644 index 000000000..2282fc350 --- /dev/null +++ b/src/client/service/nodesFind.ts @@ -0,0 +1,45 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import type { NodeAddress } from '../../nodes/types'; +import { utils as nodesUtils, errors as nodesErrors } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Attempts to get the node address of a provided node ID (by contacting + * keynodes in the wider Polykey network). + * @throws ErrorNodeGraphNodeNotFound if node address cannot be found + */ +function nodesFind ({ + nodeManager, + authenticate, +}: { + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new nodesPB.NodeAddress(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const nodeId = nodesUtils.makeNodeId(call.request.getNodeId()); + const address = await nodeManager.findNode(nodeId); + response + .setNodeId(nodeId) + .setAddress( + new nodesPB.Address().setHost(address.host).setPort(address.port), + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default nodesFind; diff --git a/src/client/service/nodesPing.ts b/src/client/service/nodesPing.ts new file mode 100644 index 000000000..d0ae7ea02 --- /dev/null +++ b/src/client/service/nodesPing.ts @@ -0,0 +1,40 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Checks if a remote node is online. 
+ */ +function nodesPing ({ + nodeManager, + authenticate, +}: { + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const status = await nodeManager.pingNode( + nodesUtils.makeNodeId(call.request.getNodeId()), + ); + response.setSuccess(status); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default nodesPing; diff --git a/src/client/service/notificationsClear.ts b/src/client/service/notificationsClear.ts new file mode 100644 index 000000000..dfb289648 --- /dev/null +++ b/src/client/service/notificationsClear.ts @@ -0,0 +1,33 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NotificationsManager } from '../../notifications'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function notificationsClear ({ + notificationsManager, + authenticate, +}: { + notificationsManager: NotificationsManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + await notificationsManager.clearNotifications(); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default notificationsClear; diff --git a/src/client/service/notificationsRead.ts b/src/client/service/notificationsRead.ts new file mode 100644 index 000000000..2673f1e6c --- /dev/null +++ b/src/client/service/notificationsRead.ts @@ -0,0 +1,73 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NotificationsManager } from '../../notifications'; +import { utils as grpcUtils } from '../../grpc'; +import * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; + +function notificationsRead ({ + notificationsManager, + authenticate, +}: { + notificationsManager: NotificationsManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new notificationsPB.List(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const unread = call.request.getUnread(); + const order = call.request.getOrder() as 'newest' | 'oldest'; + const numberField = call.request.getNumber(); + let number: number | 'all'; + if (numberField === 'all') { + number = numberField; + } else { + number = parseInt(numberField); + } + const notifications = await notificationsManager.readNotifications({ + unread, + number, + order, + }); + const notifMessages: Array = []; + for (const notif of notifications) { + const notificationsMessage = new notificationsPB.Notification(); + switch (notif.data.type) { + case 'General': { + const generalMessage = new notificationsPB.General(); + generalMessage.setMessage(notif.data.message); + notificationsMessage.setGeneral(generalMessage); + break; + } + case 'GestaltInvite': { + notificationsMessage.setGestaltInvite('GestaltInvite'); + break; + } + case 'VaultShare': 
{ + const vaultShareMessage = new notificationsPB.Share(); + vaultShareMessage.setVaultId(notif.data.vaultId); + vaultShareMessage.setVaultName(notif.data.vaultName); + vaultShareMessage.setActionsList(Object.keys(notif.data.actions)); + notificationsMessage.setVaultShare(vaultShareMessage); + break; + } + } + notificationsMessage.setSenderId(notif.senderId); + notificationsMessage.setIsRead(notif.isRead); + notifMessages.push(notificationsMessage); + } + response.setNotificationList(notifMessages); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default notificationsRead; diff --git a/src/client/service/notificationsSend.ts b/src/client/service/notificationsSend.ts new file mode 100644 index 000000000..9b2f35815 --- /dev/null +++ b/src/client/service/notificationsSend.ts @@ -0,0 +1,42 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NotificationsManager } from '../../notifications'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as notificationsUtils } from '../../notifications'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; + +function notificationsSend({ + notificationsManager, + authenticate, +}: { + notificationsManager: NotificationsManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const receivingId = nodesUtils.makeNodeId(call.request.getReceiverId()); + const data = { + type: 'General', + message: call.request.getData()?.getMessage(), + }; + const validatedData = + notificationsUtils.validateGeneralNotification(data); + await notificationsManager.sendNotification(receivingId, validatedData); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default notificationsSend; diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts new file mode 100644 index 000000000..294795fca --- /dev/null +++ b/src/client/service/vaultsClone.ts @@ -0,0 +1,49 @@ +import type { Authenticate } from '../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsClone({ + authenticate +}: { + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Vault id + // const vaultId = parseVaultInput(vaultMessage, vaultManager); + // Node id + // const id = makeNodeId(nodeMessage.getNodeId()); + + throw Error('Not implemented'); + // FIXME, not fully 
implemented + // await vaultManager.cloneVault(vaultId, id); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsClone; diff --git a/src/client/service/vaultsCreate.ts b/src/client/service/vaultsCreate.ts new file mode 100644 index 000000000..18a2cdef3 --- /dev/null +++ b/src/client/service/vaultsCreate.ts @@ -0,0 +1,42 @@ +import type { Authenticate } from '../types'; +import type { Vault, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type { FileSystem } from '../../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as vaultsUtils } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsCreate({ + vaultManager, + authenticate, + fs, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + fs: FileSystem; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new vaultsPB.Vault(); + let vault: Vault; + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + vault = await vaultManager.createVault( + call.request.getNameOrId() as VaultName, + ); + response.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsCreate; diff --git a/src/client/service/vaultsDelete.ts b/src/client/service/vaultsDelete.ts new file mode 100644 index 000000000..96f783faf --- /dev/null +++ b/src/client/service/vaultsDelete.ts @@ -0,0 +1,51 @@ +import type { Authenticate } from '../types'; +import type { Vault, VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type { FileSystem } from '../../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { errors as vaultsErrors, } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsDelete({ + vaultManager, + authenticate, + fs, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + fs: FileSystem; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const vaultMessage = call.request; + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + await vaultManager.destroyVault(vaultId); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsDelete; diff --git a/src/client/service/vaultsList.ts b/src/client/service/vaultsList.ts new file mode 100644 index 000000000..a5c9d11c4 --- /dev/null +++ b/src/client/service/vaultsList.ts @@ -0,0 +1,46 @@ +import type { Authenticate } from '../types'; +import type { VaultManager } from '../../vaults'; +import type { FileSystem } from '../../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as vaultsUtils } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsList({ + vaultManager, + authenticate, + fs, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + fs: FileSystem; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + // Call.on('error', (e) => console.error(e)); + // call.on('close', () => console.log('Got close')); + // call.on('finish', () => console.log('Got finish')); + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaults = await vaultManager.listVaults(); + for await (const [vaultName, vaultId] of vaults) { + const vaultListMessage = new vaultsPB.List(); + vaultListMessage.setVaultName(vaultName); + vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); + await genWritable.next(((_) => vaultListMessage)()); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsList; diff --git a/src/client/service/vaultsLog.ts b/src/client/service/vaultsLog.ts new file mode 100644 index 000000000..4c38bef69 --- /dev/null +++ b/src/client/service/vaultsLog.ts @@ -0,0 +1,69 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type { FileSystem } from '../../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsLog({ + vaultManager, + authenticate, + fs, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + fs: FileSystem; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Getting the vault. + const vaultsLogMessage = call.request; + const vaultMessage = vaultsLogMessage.getVault(); + if (vaultMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + + // Getting the log + const depth = vaultsLogMessage.getLogDepth(); + let commitId: string | undefined = vaultsLogMessage.getCommitId(); + commitId = commitId ? commitId : undefined; + const log = await vault.log(depth, commitId); + + const vaultsLogEntryMessage = new vaultsPB.LogEntry(); + for (const entry of log) { + vaultsLogEntryMessage.setOid(entry.oid); + vaultsLogEntryMessage.setCommitter(entry.committer); + vaultsLogEntryMessage.setTimeStamp(entry.timeStamp); + vaultsLogEntryMessage.setMessage(entry.message); + await genWritable.next(vaultsLogEntryMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsLog; diff --git a/src/client/service/vaultsPermissions.ts b/src/client/service/vaultsPermissions.ts new file mode 100644 index 000000000..ceafdf1c4 --- /dev/null +++ b/src/client/service/vaultsPermissions.ts @@ -0,0 +1,57 @@ +import type { Authenticate } from '../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsPermissions({ + authenticate, +}: { + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + // Const node = nodeMessage.getNodeId(); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + // Const id = await parseVaultInput(vaultMessage, vaultManager); + // let perms: Record; + throw Error('Not implemented'); + // FIXME + // if (isNodeId(node)) { + // Perms = await vaultManager.getVaultPermissions(id, node); + // } else { + // Perms = await vaultManager.getVaultPermissions(id); + // } + // const permissionMessage = new vaultsPB.Permission(); + // For (const nodeId in perms) { + // permissionMessage.setNodeId(nodeId); + // if (perms[nodeId]['pull'] !== undefined) { + // permissionMessage.setAction('pull'); + // } + // await genWritable.next(permissionMessage); + // } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsPermissions; diff --git a/src/client/service/vaultsPermissionsSet.ts 
b/src/client/service/vaultsPermissionsSet.ts new file mode 100644 index 000000000..976b80d1a --- /dev/null +++ b/src/client/service/vaultsPermissionsSet.ts @@ -0,0 +1,45 @@ +import type { Authenticate } from '../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsPermissionsSet({ + authenticate, +}: { + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Const node = makeNodeId(nodeMessage.getNodeId()); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Const id = await parseVaultInput(vaultMessage, vaultManager); + throw Error('Not Implemented'); + // Await vaultManager.setVaultPermissions(node, id); // FIXME + const response = new utilsPB.StatusMessage(); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsPermissionsSet; diff --git a/src/client/service/vaultsPermissionsUnset.ts b/src/client/service/vaultsPermissionsUnset.ts new file mode 100644 index 000000000..d1fc23b9e --- /dev/null +++ b/src/client/service/vaultsPermissionsUnset.ts @@ -0,0 +1,45 @@ +import type { Authenticate } from '../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsPermissionsUnset({ + authenticate, +}: { + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Const node = makeNodeId(nodeMessage.getNodeId()); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Const id = await parseVaultInput(vaultMessage, vaultManager); + throw Error('Not implemented'); + // Await vaultManager.unsetVaultPermissions(node, id); // FIXME + const response = new utilsPB.StatusMessage(); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsPermissionsUnset; diff --git a/src/client/service/vaultsPull.ts b/src/client/service/vaultsPull.ts new file mode 100644 index 000000000..81b43edf3 --- /dev/null +++ b/src/client/service/vaultsPull.ts @@ -0,0 +1,47 @@ +import type { Authenticate } from '../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsPull({ + authenticate, +}: { 
+ authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Vault name + // const vaultId = await parseVaultInput(vaultMessage, vaultManager); + // Node id + // const id = makeNodeId(nodeMessage.getNodeId()); + + // Await vaultManager.pullVault(vaultId, id); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsPull; diff --git a/src/client/service/vaultsRename.ts b/src/client/service/vaultsRename.ts new file mode 100644 index 000000000..69920a270 --- /dev/null +++ b/src/client/service/vaultsRename.ts @@ -0,0 +1,55 @@ +import type { Authenticate } from '../types'; +import type { Vault, VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + utils as vaultsUtils, + errors as vaultsErrors, +} from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsRename({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new vaultsPB.Vault(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const newName = call.request.getNewName() as VaultName; + await vaultManager.renameVault(vaultId, newName); + response.setNameOrId(vaultsUtils.makeVaultIdPretty(vaultId)); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsRename; diff --git a/src/client/service/vaultsScan.ts b/src/client/service/vaultsScan.ts new file mode 100644 index 000000000..226197cd6 --- /dev/null +++ b/src/client/service/vaultsScan.ts @@ -0,0 +1,42 @@ +import type { Authenticate } from '../types'; +import type { VaultManager } from '../../vaults'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as vaultsUtils } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsScan({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; 
+}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + // Const nodeId = makeNodeId(call.request.getNodeId()); + + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaults = await vaultManager.listVaults(); + for await (const [vaultName, vaultId] of vaults) { + const vaultListMessage = new vaultsPB.List(); + vaultListMessage.setVaultName(vaultName); + vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); + await genWritable.next(vaultListMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsScan; diff --git a/src/client/service/vaultsSecretsDelete.ts b/src/client/service/vaultsSecretsDelete.ts new file mode 100644 index 000000000..75e56083e --- /dev/null +++ b/src/client/service/vaultsSecretsDelete.ts @@ -0,0 +1,58 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + vaultOps, + errors as vaultsErrors, +} from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsDelete({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secretName = call.request.getSecretName(); + await vaultOps.deleteSecret(vault, secretName); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsDelete; diff --git a/src/client/service/vaultsSecretsEdit.ts b/src/client/service/vaultsSecretsEdit.ts new file mode 100644 index 000000000..91a5dfbb7 --- /dev/null +++ b/src/client/service/vaultsSecretsEdit.ts @@ -0,0 +1,64 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + vaultOps, + errors as vaultsErrors, +} from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined {
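+ // Decodes a multibase-encoded vault ID; returns undefined if decoding fails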
+ return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsEdit({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const secretMessage = call.request; + if (secretMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const vaultMessage = secretMessage.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secretName = secretMessage.getSecretName(); + const secretContent = Buffer.from(secretMessage.getSecretContent()); + await vaultOps.updateSecret(vault, secretName, secretContent); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsEdit; diff --git a/src/client/service/vaultsSecretsGet.ts b/src/client/service/vaultsSecretsGet.ts new file mode 100644 index 000000000..95bbf0a7d --- /dev/null +++ b/src/client/service/vaultsSecretsGet.ts @@ -0,0 +1,58 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + vaultOps, + errors as vaultsErrors, +} from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsGet({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new secretsPB.Secret(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secretName = call.request.getSecretName(); + const secretContent = await vaultOps.getSecret(vault, secretName); + response.setSecretContent(secretContent); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsGet; diff --git a/src/client/service/vaultsSecretsList.ts b/src/client/service/vaultsSecretsList.ts new file mode 100644 index 000000000..fd16e0d8a --- /dev/null +++ b/src/client/service/vaultsSecretsList.ts @@ -0,0 +1,59 @@ +import type { Authenticate } from '../types'; +import type { Vault, VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type { FileSystem } from '../../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + vaultOps, + errors as vaultsErrors, +} from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsList({ + vaultManager, + authenticate, + fs, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + fs: FileSystem; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const vaultMessage = call.request; + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secrets = await vaultOps.listSecrets(vault); + let secretMessage: secretsPB.Secret; + for (const secret of secrets) { + secretMessage = new secretsPB.Secret(); + secretMessage.setSecretName(secret); + await genWritable.next(secretMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsSecretsList; diff --git a/src/client/service/vaultsSecretsMkdir.ts b/src/client/service/vaultsSecretsMkdir.ts new file mode 100644 index 000000000..2d9aa22a1 --- /dev/null +++ b/src/client/service/vaultsSecretsMkdir.ts @@ -0,0 +1,60 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + vaultOps, + errors as vaultsErrors, +} from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsMkdir({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMkdirMessge = call.request; + const vaultMessage = vaultMkdirMessge.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { + recursive: vaultMkdirMessge.getRecursive(), + }); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsMkdir; diff --git a/src/client/service/vaultsSecretsNew.ts b/src/client/service/vaultsSecretsNew.ts new file mode 100644 index 000000000..cb8d2c1e3 --- /dev/null +++ b/src/client/service/vaultsSecretsNew.ts @@ -0,0 +1,59 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + vaultOps, + errors as vaultsErrors, +} from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsNew({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secret = call.request.getSecretName(); + const content = Buffer.from(call.request.getSecretContent()); + await vaultOps.addSecret(vault, secret, content); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsNew; diff --git a/src/client/service/vaultsSecretsNewDir.ts b/src/client/service/vaultsSecretsNewDir.ts new file mode 100644 index 000000000..fbe45597b --- /dev/null +++ b/src/client/service/vaultsSecretsNewDir.ts @@ -0,0 +1,61 @@ +import type { Authenticate } from '../types'; +import type { Vault, VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type { FileSystem } from '../../types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + vaultOps, + errors as vaultsErrors, +} from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsNewDir({ + vaultManager, + authenticate, + fs, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + fs: FileSystem; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secretsPath = call.request.getSecretDirectory(); + await vaultOps.addSecretDirectory(vault, secretsPath, fs); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsNewDir; diff --git a/src/client/service/vaultsSecretsRename.ts b/src/client/service/vaultsSecretsRename.ts new file mode 100644 index 000000000..6be7b6a91 --- /dev/null +++ b/src/client/service/vaultsSecretsRename.ts @@ -0,0 +1,63 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { + vaultOps, + errors as vaultsErrors, +} from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsRename({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const secretMessage = call.request.getOldSecret(); + if (!secretMessage) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const vaultMessage = secretMessage.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const oldSecret = secretMessage.getSecretName(); + const newSecret = call.request.getNewName(); + await vaultOps.renameSecret(vault, oldSecret, newSecret); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsRename; diff --git a/src/client/service/vaultsSecretsStat.ts b/src/client/service/vaultsSecretsStat.ts new file mode 100644 index 000000000..6fd5d136d --- /dev/null +++ b/src/client/service/vaultsSecretsStat.ts @@ -0,0 +1,37 @@ +import type { Authenticate } from '../types'; +import type { Vault } from '../../vaults/types'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsSecretsStat({ + authenticate, +}: { + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new vaultsPB.Stat(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + // Const vaultMessage = call.request; + // Const id = await parseVaultInput(vaultMessage, vaultManager); + // const vault = await vaultManager.openVault(id); + // FIXME, reimplement this. + throw Error('Not Implemented'); + // Const stats = await vaultManager.vaultStats(id); + // response.setStats(JSON.stringify(stats));); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsStat; diff --git a/src/client/service/vaultsVersion.ts b/src/client/service/vaultsVersion.ts new file mode 100644 index 000000000..c05f0ac1f --- /dev/null +++ b/src/client/service/vaultsVersion.ts @@ -0,0 +1,70 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsVersion({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new vaultsPB.VersionResult(); + try { + // Checking session token + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultsVersionMessage = call.request; + + // Getting vault ID + const vaultMessage = vaultsVersionMessage.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + + // Doing the deed + const vault = await vaultManager.openVault(vaultId); + const latestOid = (await vault.log())[0].oid; + const versionId = vaultsVersionMessage.getVersionId(); + + await vault.version(versionId); + const currentVersionId = (await vault.log(0, versionId))[0]?.oid; + + // Checking if latest version ID. + const isLatestVersion = latestOid === currentVersionId; + + // Creating message + response.setIsLatestVersion(isLatestVersion); + + // Sending message + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsVersion; diff --git a/src/client/types.ts b/src/client/types.ts new file mode 100644 index 000000000..61d5f0326 --- /dev/null +++ b/src/client/types.ts @@ -0,0 +1,10 @@ +import type * as grpc from '@grpc/grpc-js'; + +type Authenticate = ( + metadataClient: grpc.Metadata, + metadataServer?: grpc.Metadata, +) => Promise; + +export type { + Authenticate +}; diff --git a/src/client/utils/utils.ts b/src/client/utils/utils.ts index 581d511e0..3e2021cf9 100644 --- a/src/client/utils/utils.ts +++ b/src/client/utils/utils.ts @@ -6,6 +6,7 @@ import type { import type { KeyManager } from '../../keys'; import type { Session, SessionManager } from '../../sessions'; import type { SessionToken } from '../../sessions/types'; +import type { Authenticate } from '../types'; import * as grpc from '@grpc/grpc-js'; import * as base64 from 'multiformats/bases/base64'; import * as clientErrors from '../errors'; @@ -49,11 +50,6 @@ function sessionInterceptor(session: Session): Interceptor { return interceptor; } -type Authenticate = ( - metadataClient: grpc.Metadata, - metadataServer?: grpc.Metadata, -) => Promise; - function authenticator( sessionManager: SessionManager, keyManager: KeyManager, @@ -143,5 +139,3 @@ export { encodeAuthFromSession, decodeAuthToSession, }; - -export type { Authenticate }; diff --git a/src/config.ts b/src/config.ts index 1f48d623c..fa3bf0a81 100644 --- a/src/config.ts +++ b/src/config.ts @@ -55,7 +55,6 @@ const config = { */ defaults: { nodePath: getDefaultNodePath(), - statusBase: 'status.json', stateBase: 'state', stateVersionBase: 'version', diff --git a/src/grpc/GRPCServer.ts b/src/grpc/GRPCServer.ts index cb7b59b13..843cdab84 100644 --- a/src/grpc/GRPCServer.ts +++ b/src/grpc/GRPCServer.ts @@ -27,7 +27,7 @@ class GRPCServer { protected tlsConfig?: TLSConfig; protected _secured: boolean = false; - constructor({ logger }: { logger?: Logger }) { + constructor({ logger }: { logger?: Logger } = {}) { this.logger = logger ?? 
new Logger(this.constructor.name); } diff --git a/src/grpc/index.ts b/src/grpc/index.ts index 26d7451d0..16380a821 100644 --- a/src/grpc/index.ts +++ b/src/grpc/index.ts @@ -1,3 +1,9 @@ +/** + * Use this module when contacting Polykey + * If you use the upstream `@grpc/grpc-js`, it may give you mismatched dependencies + * For example the `Metadata` object has to be used when calling `PolykeyClient` + */ +export * as grpc from '@grpc/grpc-js'; export { default as GRPCServer } from './GRPCServer'; export { default as GRPCClient } from './GRPCClient'; export * as utils from './utils'; diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts index fa5375154..ea1c11386 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts @@ -16,7 +16,7 @@ interface IAgentServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } -interface IAgentServiceService_IVaultsPermisssionsCheck extends grpc.MethodDefinition { - path: "/polykey.v1.AgentService/VaultsPermisssionsCheck"; +interface IAgentServiceService_IVaultsPermissionsCheck extends grpc.MethodDefinition { + path: "/polykey.v1.AgentService/VaultsPermissionsCheck"; requestStream: false; responseStream: false; requestSerialize: grpc.serialize; @@ -132,7 +132,7 @@ export interface IAgentServiceServer extends grpc.UntypedServiceImplementation { vaultsGitInfoGet: grpc.handleServerStreamingCall; vaultsGitPackGet: grpc.handleBidiStreamingCall; vaultsScan: grpc.handleServerStreamingCall; - vaultsPermisssionsCheck: grpc.handleUnaryCall; + vaultsPermissionsCheck: grpc.handleUnaryCall; nodesClosestLocalNodesGet: grpc.handleUnaryCall; nodesClaimsGet: grpc.handleUnaryCall; nodesChainDataGet: grpc.handleUnaryCall; @@ -152,9 +152,9 @@ export interface IAgentServiceClient { vaultsGitPackGet(metadata: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: 
polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; @@ -186,9 +186,9 @@ export class AgentServiceClient extends grpc.Client implements IAgentServiceClie public vaultsGitPackGet(metadata?: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.js b/src/proto/js/polykey/v1/agent_service_grpc_pb.js index ad4682787..782ed2f8e 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.js @@ -209,8 
+209,8 @@ vaultsGitInfoGet: { responseSerialize: serialize_polykey_v1_vaults_Vault, responseDeserialize: deserialize_polykey_v1_vaults_Vault, }, - vaultsPermisssionsCheck: { - path: '/polykey.v1.AgentService/VaultsPermisssionsCheck', + vaultsPermissionsCheck: { + path: '/polykey.v1.AgentService/VaultsPermissionsCheck', requestStream: false, responseStream: false, requestType: polykey_v1_vaults_vaults_pb.NodePermission, diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts index e666e9903..2de9a871d 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts @@ -19,10 +19,10 @@ import * as polykey_v1_vaults_vaults_pb from "../../polykey/v1/vaults/vaults_pb" import * as polykey_v1_utils_utils_pb from "../../polykey/v1/utils/utils_pb"; interface IClientServiceService extends grpc.ServiceDefinition { + agentLockAll: IClientServiceService_IAgentLockAll; agentStatus: IClientServiceService_IAgentStatus; agentStop: IClientServiceService_IAgentStop; - sessionsUnlock: IClientServiceService_ISessionsUnlock; - sessionsLockAll: IClientServiceService_ISessionsLockAll; + agentUnlock: IClientServiceService_IAgentUnlock; nodesAdd: IClientServiceService_INodesAdd; nodesPing: IClientServiceService_INodesPing; nodesClaim: IClientServiceService_INodesClaim; @@ -82,6 +82,15 @@ interface IClientServiceService extends grpc.ServiceDefinition { + path: "/polykey.v1.ClientService/AgentLockAll"; + requestStream: false; + responseStream: false; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} interface IClientServiceService_IAgentStatus extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/AgentStatus"; requestStream: false; @@ -100,17 +109,8 @@ interface IClientServiceService_IAgentStop extends grpc.MethodDefinition; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_ISessionsUnlock extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/SessionsUnlock"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IClientServiceService_ISessionsLockAll extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/SessionsLockAll"; +interface IClientServiceService_IAgentUnlock extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/AgentUnlock"; requestStream: false; responseStream: false; requestSerialize: grpc.serialize; @@ -635,10 +635,10 @@ interface IClientServiceService_INotificationsClear extends grpc.MethodDefinitio export const ClientServiceService: IClientServiceService; export interface IClientServiceServer extends grpc.UntypedServiceImplementation { + agentLockAll: grpc.handleUnaryCall; agentStatus: grpc.handleUnaryCall; agentStop: grpc.handleUnaryCall; - sessionsUnlock: grpc.handleUnaryCall; - sessionsLockAll: grpc.handleUnaryCall; + agentUnlock: grpc.handleUnaryCall; nodesAdd: grpc.handleUnaryCall; nodesPing: grpc.handleUnaryCall; nodesClaim: grpc.handleUnaryCall; @@ -699,18 +699,18 @@ export interface IClientServiceServer extends grpc.UntypedServiceImplementation } export interface IClientServiceClient { + agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) 
=> void): grpc.ClientUnaryCall; + agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: 
grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; @@ -877,18 +877,18 @@ export interface IClientServiceClient { export class ClientServiceClient extends grpc.Client implements IClientServiceClient { constructor(address: string, credentials: grpc.ChannelCredentials, options?: Partial); + public agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; public agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; public agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; public agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): 
grpc.ClientUnaryCall; - public sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.js b/src/proto/js/polykey/v1/client_service_grpc_pb.js index 28e446dc1..7ab4f8f36 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.js @@ -479,31 +479,30 @@ function deserialize_polykey_v1_vaults_VersionResult(buffer_arg) { var ClientServiceService = exports.ClientServiceService = { // Agent -agentStatus: { - path: '/polykey.v1.ClientService/AgentStatus', +agentLockAll: { + path: '/polykey.v1.ClientService/AgentLockAll', requestStream: false, responseStream: false, requestType: polykey_v1_utils_utils_pb.EmptyMessage, - responseType: polykey_v1_agent_agent_pb.InfoMessage, + responseType: polykey_v1_utils_utils_pb.EmptyMessage, requestSerialize: serialize_polykey_v1_utils_EmptyMessage, requestDeserialize: deserialize_polykey_v1_utils_EmptyMessage, - responseSerialize: serialize_polykey_v1_agent_InfoMessage, - responseDeserialize: deserialize_polykey_v1_agent_InfoMessage, + responseSerialize: serialize_polykey_v1_utils_EmptyMessage, + responseDeserialize: deserialize_polykey_v1_utils_EmptyMessage, }, - agentStop: { - path: '/polykey.v1.ClientService/AgentStop', + agentStatus: { + path: '/polykey.v1.ClientService/AgentStatus', requestStream: false, responseStream: false, requestType: polykey_v1_utils_utils_pb.EmptyMessage, - responseType: polykey_v1_utils_utils_pb.EmptyMessage, + responseType: polykey_v1_agent_agent_pb.InfoMessage, requestSerialize: serialize_polykey_v1_utils_EmptyMessage, requestDeserialize: 
deserialize_polykey_v1_utils_EmptyMessage, - responseSerialize: serialize_polykey_v1_utils_EmptyMessage, - responseDeserialize: deserialize_polykey_v1_utils_EmptyMessage, + responseSerialize: serialize_polykey_v1_agent_InfoMessage, + responseDeserialize: deserialize_polykey_v1_agent_InfoMessage, }, - // Session -sessionsUnlock: { - path: '/polykey.v1.ClientService/SessionsUnlock', + agentStop: { + path: '/polykey.v1.ClientService/AgentStop', requestStream: false, responseStream: false, requestType: polykey_v1_utils_utils_pb.EmptyMessage, @@ -513,8 +512,8 @@ sessionsUnlock: { responseSerialize: serialize_polykey_v1_utils_EmptyMessage, responseDeserialize: deserialize_polykey_v1_utils_EmptyMessage, }, - sessionsLockAll: { - path: '/polykey.v1.ClientService/SessionsLockAll', + agentUnlock: { + path: '/polykey.v1.ClientService/AgentUnlock', requestStream: false, responseStream: false, requestType: polykey_v1_utils_utils_pb.EmptyMessage, diff --git a/src/proto/schemas/polykey/v1/agent_service.proto b/src/proto/schemas/polykey/v1/agent_service.proto index 5c27703ed..712ee5d8a 100644 --- a/src/proto/schemas/polykey/v1/agent_service.proto +++ b/src/proto/schemas/polykey/v1/agent_service.proto @@ -16,7 +16,7 @@ service AgentService { rpc VaultsGitInfoGet (polykey.v1.vaults.Vault) returns (stream polykey.v1.vaults.PackChunk); rpc VaultsGitPackGet(stream polykey.v1.vaults.PackChunk) returns (stream polykey.v1.vaults.PackChunk); rpc VaultsScan (polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.Vault); - rpc VaultsPermisssionsCheck (polykey.v1.vaults.NodePermission) returns (polykey.v1.vaults.NodePermissionAllowed); + rpc VaultsPermissionsCheck (polykey.v1.vaults.NodePermission) returns (polykey.v1.vaults.NodePermissionAllowed); // Nodes rpc NodesClosestLocalNodesGet (polykey.v1.nodes.Node) returns (polykey.v1.nodes.NodeTable); diff --git a/src/proto/schemas/polykey/v1/client_service.proto b/src/proto/schemas/polykey/v1/client_service.proto index 2f5d384fa..1edbd01d5 100644 --- a/src/proto/schemas/polykey/v1/client_service.proto +++ b/src/proto/schemas/polykey/v1/client_service.proto @@ -16,12 +16,10 @@ package polykey.v1; service ClientService { // Agent + rpc AgentLockAll (polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); rpc AgentStatus(polykey.v1.utils.EmptyMessage) returns (polykey.v1.agent.InfoMessage); rpc AgentStop(polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); - - // Session - rpc SessionsUnlock (polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); - rpc SessionsLockAll (polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); + rpc AgentUnlock (polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); // Nodes rpc NodesAdd(polykey.v1.nodes.NodeAddress) returns (polykey.v1.utils.EmptyMessage); diff --git a/tests/acl/ACL.test.ts b/tests/acl/ACL.test.ts index 6e1324ced..70491de03 100644 --- a/tests/acl/ACL.test.ts +++ b/tests/acl/ACL.test.ts @@ -12,8 +12,8 @@ import { ACL, errors as aclErrors } from '@/acl'; import { utils as keysUtils } from '@/keys'; import { utils as vaultsUtils } from '@/vaults'; -describe('ACL', () => { - const logger = new Logger(`${ACL.name} Test`, LogLevel.WARN, [ +describe(ACL.name, () => { + const logger = new Logger(`${ACL.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); let dataDir: string; diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index ca2572ca3..23cbfa487 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ 
b/tests/agent/GRPCClientAgent.test.ts
@@ -28,9 +28,9 @@ import * as testAgentUtils from './utils';
 import * as testUtils from '../utils';
 import TestNodeConnection from '../nodes/TestNodeConnection';
 
-describe('GRPCClientAgent', () => {
+describe(GRPCClientAgent.name, () => {
   const password = 'password';
-  const logger = new Logger(`${GRPCClientAgent.name} Test`, LogLevel.WARN, [
+  const logger = new Logger(`${GRPCClientAgent.name} test`, LogLevel.WARN, [
     new StreamHandler(),
   ]);
   const node1: NodeInfo = {
@@ -199,10 +199,10 @@ describe('GRPCClientAgent', () => {
     const vaultPermMessage = new vaultsPB.NodePermission();
     vaultPermMessage.setNodeId(node1.id);
     // VaultPermMessage.setVaultId(vault.vaultId);
-    const response = await client.vaultsPermisssionsCheck(vaultPermMessage);
+    const response = await client.vaultsPermissionsCheck(vaultPermMessage);
     expect(response.getPermission()).toBeFalsy();
     // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId);
-    const response2 = await client.vaultsPermisssionsCheck(vaultPermMessage);
+    const response2 = await client.vaultsPermissionsCheck(vaultPermMessage);
     expect(response2.getPermission()).toBeTruthy();
     // Await vaultManager.deleteVault(vault.vaultId);
   });
diff --git a/tests/bootstrap/utils.test.ts b/tests/bootstrap/utils.test.ts
index b0fbb60e9..2ef7253b3 100644
--- a/tests/bootstrap/utils.test.ts
+++ b/tests/bootstrap/utils.test.ts
@@ -2,8 +2,8 @@ import fs from 'fs';
 import path from 'path';
 import os from 'os';
 import Logger, { LogLevel, StreamHandler } from '@matrixai/logger';
-import PolykeyAgent from '@/PolykeyAgent';
-import { utils as bootstrapUtils, errors as bootstrapErrors } from '@/bootstrap';
+import * as bootstrapUtils from '@/bootstrap/utils';
+import * as bootstrapErrors from '@/bootstrap/errors';
 import { utils as keysUtils } from '@/keys';
 import { errors as statusErrors } from '@/status';
 import config from '@/config';
@@ -29,11 +29,8 @@ describe('bootstrap/utils', () => {
     mockedGenerateDeterministicKeyPair.mockRestore();
   });
   let dataDir: string;
-  // let nodePath: string;
   beforeEach(async () => {
     dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'polykey-test-'));
-    // nodePath = path.join(dataDir, 'Node');
-    // await fs.promises.mkdir(nodePath);
   });
   afterEach(async () => {
     await fs.promises.rm(dataDir, {
diff --git a/tests/claims/utils.test.ts b/tests/claims/utils.test.ts
index 49bcfaba7..f00a32bfb 100644
--- a/tests/claims/utils.test.ts
+++ b/tests/claims/utils.test.ts
@@ -13,8 +13,14 @@ import * as keysUtils from '@/keys/utils';
 import * as testUtils from '../utils';
 
 describe('claims/utils', () => {
-  const publicKey: PublicKeyPem = testUtils.globalKeyPairPem.publicKey;
-  const privateKey: PrivateKeyPem = testUtils.globalKeyPairPem.privateKey;
+  let publicKey: PublicKeyPem;
+  let privateKey: PrivateKeyPem;
+  beforeAll(async () => {
+    const globalKeyPair = await testUtils.setupGlobalKeypair();
+    const globalKeyPairPem = keysUtils.keyPairToPem(globalKeyPair);
+    publicKey = globalKeyPairPem.publicKey;
+    privateKey = globalKeyPairPem.privateKey;
+  });
   test('creates a claim (both node and identity)', async () => {
     const nodeClaim = await claimsUtils.createClaim({
       privateKey,
diff --git a/tests/client/GRPCClientClient.test.ts b/tests/client/GRPCClientClient.test.ts
index f32216641..97f29fe1a 100644
--- a/tests/client/GRPCClientClient.test.ts
+++ b/tests/client/GRPCClientClient.test.ts
@@ -21,15 +21,15 @@ jest.mock('@/keys/utils', () => ({
     jest.requireActual('@/keys/utils').generateKeyPair,
 }));
-describe('GRPCClientClient', () => { +describe(GRPCClientClient.name, () => { const password = 'password'; - const logger = new Logger('GRPCClientClientTest', LogLevel.WARN, [ + const logger = new Logger(`${GRPCClientClient.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); let client: GRPCClientClient; let server: grpc.Server; let port: number; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let dataDir: string; let nodePath: string; let nodeId: NodeId; @@ -39,18 +39,28 @@ describe('GRPCClientClient', () => { path.join(os.tmpdir(), 'polykey-test-'), ); nodePath = path.join(dataDir, 'node'); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + + // THIS IS TESTING FROM THE client side level + // this should be its own directory + // tests from the client side API + + // tests from the server side API + + + + + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath, logger: logger, }); - nodeId = polykeyAgent.nodeManager.getNodeId(); + nodeId = pkAgent.nodeManager.getNodeId(); [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, }); const sessionTokenPath = path.join(nodePath, 'sessionToken'); const session = new Session({ sessionTokenPath, fs, logger }); - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); await session.start({ sessionToken, }); @@ -58,8 +68,8 @@ describe('GRPCClientClient', () => { afterAll(async () => { await client.destroy(); await testUtils.closeTestClientServer(server); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, recursive: true, diff --git a/tests/client/rpcAgent.test.ts b/tests/client/rpcAgent.test.ts deleted file mode 100644 index ff33db8cb..000000000 --- a/tests/client/rpcAgent.test.ts +++ /dev/null @@ -1,124 +0,0 @@ -import type * as grpc from '@grpc/grpc-js'; -import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb'; -import os from 'os'; -import path from 'path'; -import fs from 'fs'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { PolykeyAgent } from '@'; -import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; -import { KeyManager } from '@/keys'; -import { ForwardProxy } from '@/network'; -import * as grpcUtils from '@/grpc/utils'; -import config from '@/config'; -import { Status } from '@/status'; -import * as testUtils from './utils'; - -// Mocks. -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); - -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. 
- * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ -describe('Agent client service', () => { - const password = 'password'; - const logger = new Logger('AgentClientServerTest', LogLevel.WARN, [ - new StreamHandler(), - ]); - let client: ClientServiceClient; - let server: grpc.Server; - let port: number; - let dataDir: string; - let polykeyAgent: PolykeyAgent; - let keyManager: KeyManager; - let passwordFile: string; - let callCredentials: grpc.Metadata; - - beforeAll(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - - passwordFile = path.join(dataDir, 'password'); - await fs.promises.writeFile(passwordFile, 'password'); - const keysPath = path.join(dataDir, 'keys'); - - keyManager = await KeyManager.createKeyManager({ - keysPath, - password, - logger, - }); - - const fwdProxy = new ForwardProxy({ - authToken: 'abc', - logger: logger, - }); - - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir, - logger, - fwdProxy, - keyManager, - }); - - [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, - secure: false, - }); - - client = await testUtils.openSimpleClientClient(port); - }, global.polykeyStartupTimeout); - afterAll(async () => { - await testUtils.closeTestClientServer(server); - testUtils.closeSimpleClientClient(client); - - await polykeyAgent.stop(); - await polykeyAgent.destroy(); - - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - await fs.promises.rm(passwordFile); - }); - beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); - callCredentials = testUtils.createCallCredentials(sessionToken); - }); - - test( - 'stopping the agent', - async () => { - const agentStop = grpcUtils.promisifyUnaryCall( - client, - client.agentStop, - ); - - const emptyMessage = new utilsPB.EmptyMessage(); - await agentStop(emptyMessage, callCredentials); - - const statusPath = path.join(polykeyAgent.nodePath, config.defaults.statusBase); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('DEAD', 10000); - }, - global.polykeyStartupTimeout * 2, - ); -}); diff --git a/tests/client/rpcGestalts.test.ts b/tests/client/rpcGestalts.test.ts index 3fd020e88..72e6fe96d 100644 --- a/tests/client/rpcGestalts.test.ts +++ b/tests/client/rpcGestalts.test.ts @@ -24,26 +24,12 @@ import * as nodesUtils from '@/nodes/utils'; import * as testUtils from './utils'; import TestProvider from '../identities/TestProvider'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. 
- * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ describe('Client service', () => { const password = 'password'; const logger = new Logger('ClientServerTest', LogLevel.WARN, [ @@ -53,7 +39,7 @@ describe('Client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let nodeManager: NodeManager; let gestaltGraph: GestaltGraph; @@ -110,7 +96,7 @@ describe('Client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -118,16 +104,16 @@ describe('Client service', () => { keyManager, }); - nodeManager = polykeyAgent.nodeManager; - gestaltGraph = polykeyAgent.gestaltGraph; - identitiesManager = polykeyAgent.identitiesManager; + nodeManager = pkAgent.nodeManager; + gestaltGraph = pkAgent.gestaltGraph; + identitiesManager = pkAgent.identitiesManager; // Adding provider const testProvider = new TestProvider(); identitiesManager.registerProvider(testProvider); [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -142,8 +128,8 @@ describe('Client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -152,7 +138,7 @@ describe('Client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); afterEach(async () => { diff --git a/tests/client/rpcIdentities.test.ts b/tests/client/rpcIdentities.test.ts index 47da0d94c..b31cb5e1a 100644 --- a/tests/client/rpcIdentities.test.ts +++ b/tests/client/rpcIdentities.test.ts @@ -15,26 +15,12 @@ import * as grpcUtils from '@/grpc/utils'; import * as testUtils from './utils'; import TestProvider from '../identities/TestProvider'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ describe('Identities Client service', () => { const password = 'password'; const logger = new Logger('IdentitiesClientServerTest', LogLevel.WARN, [ @@ -44,7 +30,7 @@ describe('Identities Client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let identitiesManager: IdentitiesManager; let passwordFile: string; @@ -79,7 +65,7 @@ describe('Identities Client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -87,14 +73,14 @@ describe('Identities Client service', () => { keyManager, }); - identitiesManager = polykeyAgent.identitiesManager; + identitiesManager = pkAgent.identitiesManager; // Adding provider. testProvider = new TestProvider(); identitiesManager.registerProvider(testProvider); [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -104,8 +90,8 @@ describe('Identities Client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -114,7 +100,7 @@ describe('Identities Client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); @@ -153,13 +139,13 @@ describe('Identities Client service', () => { step++; } expect( - await polykeyAgent.identitiesManager.getToken( + await pkAgent.identitiesManager.getToken( testToken.providerId, testToken.identityId, ), ).toEqual(testToken.tokenData); expect(genReadable.stream.destroyed).toBeTruthy(); - await polykeyAgent.identitiesManager.delToken( + await pkAgent.identitiesManager.delToken( testToken.providerId, testToken.identityId, ); @@ -293,9 +279,9 @@ describe('Identities Client service', () => { expect(claim.payload.data.type).toBe('identity'); expect(claim.payload.data.provider).toBe(testToken.providerId); expect(claim.payload.data.identity).toBe(testToken.identityId); - expect(claim.payload.data.node).toBe(polykeyAgent.nodeManager.getNodeId()); + expect(claim.payload.data.node).toBe(pkAgent.nodeManager.getNodeId()); - await polykeyAgent.identitiesManager.delToken( + await pkAgent.identitiesManager.delToken( testToken.providerId, testToken.identityId, ); diff --git a/tests/client/rpcKeys.test.ts b/tests/client/rpcKeys.test.ts index 14a926fba..b48c41832 100644 --- a/tests/client/rpcKeys.test.ts +++ b/tests/client/rpcKeys.test.ts @@ -14,26 +14,12 @@ import { ForwardProxy } from '@/network'; import * as grpcUtils from '@/grpc/utils'; import * as testUtils from './utils'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. 
- * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ describe('Keys client service', () => { const password = 'password'; const logger = new Logger('KeysClientServerTest', LogLevel.WARN, [ @@ -44,7 +30,7 @@ describe('Keys client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let nodeManager: NodeManager; let passwordFile: string; @@ -70,7 +56,7 @@ describe('Keys client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -78,10 +64,10 @@ describe('Keys client service', () => { keyManager, }); - nodeManager = polykeyAgent.nodeManager; + nodeManager = pkAgent.nodeManager; [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -91,8 +77,8 @@ describe('Keys client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -101,7 +87,7 @@ describe('Keys client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); @@ -133,11 +119,11 @@ describe('Keys client service', () => { const keyPair = keyManager.getRootKeyPairPem(); const nodeId1 = nodeManager.getNodeId(); // @ts-ignore - get protected property - const fwdTLSConfig1 = polykeyAgent.fwdProxy.tlsConfig; + const fwdTLSConfig1 = pkAgent.fwdProxy.tlsConfig; // @ts-ignore - get protected property - const revTLSConfig1 = polykeyAgent.revProxy.tlsConfig; + const revTLSConfig1 = pkAgent.revProxy.tlsConfig; // @ts-ignore - get protected property - const serverTLSConfig1 = polykeyAgent.grpcServerClient.tlsConfig; + const serverTLSConfig1 = pkAgent.grpcServerClient.tlsConfig; const expectedTLSConfig1: TLSConfig = { keyPrivatePem: keyPair.privateKey, certChainPem: await keyManager.getRootCertChainPem(), @@ -153,11 +139,11 @@ describe('Keys client service', () => { const key = await getRootKeyPair(emptyMessage, callCredentials); const nodeId2 = nodeManager.getNodeId(); // @ts-ignore - get protected property - const fwdTLSConfig2 = polykeyAgent.fwdProxy.tlsConfig; + const fwdTLSConfig2 = pkAgent.fwdProxy.tlsConfig; // @ts-ignore - get protected property - const revTLSConfig2 = polykeyAgent.revProxy.tlsConfig; + const revTLSConfig2 = pkAgent.revProxy.tlsConfig; // @ts-ignore - get protected property - const serverTLSConfig2 = polykeyAgent.grpcServerClient.tlsConfig; + const serverTLSConfig2 = pkAgent.grpcServerClient.tlsConfig; const expectedTLSConfig2: TLSConfig = { keyPrivatePem: key.getPrivate(), certChainPem: await 
keyManager.getRootCertChainPem(), @@ -182,11 +168,11 @@ describe('Keys client service', () => { const rootKeyPair1 = keyManager.getRootKeyPairPem(); const nodeId1 = nodeManager.getNodeId(); // @ts-ignore - get protected property - const fwdTLSConfig1 = polykeyAgent.fwdProxy.tlsConfig; + const fwdTLSConfig1 = pkAgent.fwdProxy.tlsConfig; // @ts-ignore - get protected property - const revTLSConfig1 = polykeyAgent.revProxy.tlsConfig; + const revTLSConfig1 = pkAgent.revProxy.tlsConfig; // @ts-ignore - get protected property - const serverTLSConfig1 = polykeyAgent.grpcServerClient.tlsConfig; + const serverTLSConfig1 = pkAgent.grpcServerClient.tlsConfig; const expectedTLSConfig1: TLSConfig = { keyPrivatePem: rootKeyPair1.privateKey, certChainPem: await keyManager.getRootCertChainPem(), @@ -200,11 +186,11 @@ describe('Keys client service', () => { const rootKeyPair2 = keyManager.getRootKeyPairPem(); const nodeId2 = nodeManager.getNodeId(); // @ts-ignore - get protected property - const fwdTLSConfig2 = polykeyAgent.fwdProxy.tlsConfig; + const fwdTLSConfig2 = pkAgent.fwdProxy.tlsConfig; // @ts-ignore - get protected property - const revTLSConfig2 = polykeyAgent.revProxy.tlsConfig; + const revTLSConfig2 = pkAgent.revProxy.tlsConfig; // @ts-ignore - get protected property - const serverTLSConfig2 = polykeyAgent.grpcServerClient.tlsConfig; + const serverTLSConfig2 = pkAgent.grpcServerClient.tlsConfig; const expectedTLSConfig2: TLSConfig = { keyPrivatePem: rootKeyPair2.privateKey, certChainPem: await keyManager.getRootCertChainPem(), diff --git a/tests/client/rpcNodes.test.ts b/tests/client/rpcNodes.test.ts index 913c8467e..c0f9e33ac 100644 --- a/tests/client/rpcNodes.test.ts +++ b/tests/client/rpcNodes.test.ts @@ -23,26 +23,12 @@ import * as testUtils from './utils'; import * as testKeynodeUtils from '../utils'; import { sleep } from '@/utils'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ describe('Client service', () => { const password = 'password'; const logger = new Logger('rpcNodes Test', LogLevel.DEBUG, [ @@ -54,7 +40,7 @@ describe('Client service', () => { let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let nodeManager: NodeManager; let polykeyServer: PolykeyAgent; @@ -91,7 +77,7 @@ describe('Client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -102,10 +88,10 @@ describe('Client service', () => { }, }); - nodeManager = polykeyAgent.nodeManager; + nodeManager = pkAgent.nodeManager; [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -114,7 +100,7 @@ describe('Client service', () => { polykeyServer = await testKeynodeUtils.setupRemoteKeynode({ logger: logger, }); - await polykeyAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { + await pkAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { gestalt: { notify: null, }, @@ -133,8 +119,8 @@ describe('Client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -143,18 +129,18 @@ describe('Client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); - await polykeyAgent.notificationsManager.clearNotifications(); + await pkAgent.notificationsManager.clearNotifications(); await polykeyServer.notificationsManager.clearNotifications(); }); afterEach(async () => { await polykeyServer.start({ password }); - await polykeyAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { + await pkAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { gestalt: {}, vaults: {}, }); - await polykeyAgent.nodeManager.clearDB(); + await pkAgent.nodeManager.clearDB(); await polykeyServer.nodeManager.clearDB(); }); @@ -183,7 +169,7 @@ describe('Client service', () => { 'should ping a node (online + offline)', async () => { const serverNodeId = polykeyServer.nodeManager.getNodeId(); - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); await polykeyServer.stop(); const statusPath = path.join(polykeyServer.nodePath, config.defaults.statusBase); const status = new Status({ @@ -207,7 +193,7 @@ describe('Client service', () => { await polykeyServer.start({ password: 'password' }); await status.waitFor('LIVE', 10000); // Update the details (changed because we started again) - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); const res2 = await nodesPing(nodeMessage, callCredentials); expect(res2.getSuccess()).toEqual(true); // Case 3: pre-existing connection no longer active, so offline @@ -243,7 +229,7 @@ describe('Client service', () => { test( 'should find a node (contacts remote node)', async () => { - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); // Case 2: 
node can be found on the remote node const nodeId = nodeId1; const nodeAddress: NodeAddress = { @@ -268,7 +254,7 @@ describe('Client service', () => { test( 'should fail to find a node (contacts remote node)', async () => { - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); // Case 3: node exhausts all contacts and cannot find node const nodeId = nodeId1; // Add a single dummy node to the server node graph database @@ -292,7 +278,7 @@ describe('Client service', () => { global.failedConnectionTimeout * 2, ); test('should send a gestalt invite (no existing invitation)', async () => { - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); // Node Claim Case 1: No invitations have been received const nodesClaim = grpcUtils.promisifyUnaryCall( client, @@ -306,8 +292,8 @@ describe('Client service', () => { expect(res.getSuccess()).not.toBeTruthy(); }); test('should send a gestalt invite (existing invitation)', async () => { - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); - await testKeynodeUtils.addRemoteDetails(polykeyServer, polykeyAgent); + await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); + await testKeynodeUtils.addRemoteDetails(polykeyServer, pkAgent); // Node Claim Case 2: Already received an invite; force invite await polykeyServer.notificationsManager.sendNotification( nodeManager.getNodeId(), @@ -327,7 +313,7 @@ describe('Client service', () => { expect(res.getSuccess()).not.toBeTruthy(); }); test('should claim node', async () => { - await polykeyAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { + await pkAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { gestalt: { notify: null, }, @@ -339,8 +325,8 @@ describe('Client service', () => { }, vaults: {}, }); - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); - await testKeynodeUtils.addRemoteDetails(polykeyServer, polykeyAgent); + await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); + await testKeynodeUtils.addRemoteDetails(polykeyServer, pkAgent); // Node Claim Case 3: Already received an invite; claim node await polykeyServer.notificationsManager.sendNotification( nodeManager.getNodeId(), diff --git a/tests/client/rpcNotifications.test.ts b/tests/client/rpcNotifications.test.ts index ce9187334..9fba16876 100644 --- a/tests/client/rpcNotifications.test.ts +++ b/tests/client/rpcNotifications.test.ts @@ -1,5 +1,5 @@ import type * as grpc from '@grpc/grpc-js'; -import type { NodeInfo } from '@/nodes/types'; +import type { NodeInfo, NodeAddress } from '@/nodes/types'; import type { NodeManager } from '@/nodes'; import type { NotificationData } from '@/notifications/types'; import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb'; @@ -8,7 +8,6 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { utils as idUtils } from '@matrixai/id'; - import { PolykeyAgent } from '@'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import * as notificationsPB from '@/proto/js/polykey/v1/notifications/notifications_pb'; @@ -19,26 +18,12 @@ import * as vaultsUtils from '@/vaults/utils'; import * as testUtils from './utils'; import * as testKeynodeUtils from '../utils'; -// Mocks. 
jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ describe('Notifications client service', () => { const password = 'password'; const logger = new Logger('NotificationsClientServerTest', LogLevel.WARN, [ @@ -48,7 +33,7 @@ describe('Notifications client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let nodeManager: NodeManager; let passwordFile: string; @@ -75,7 +60,7 @@ describe('Notifications client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -83,10 +68,10 @@ describe('Notifications client service', () => { keyManager, }); - nodeManager = polykeyAgent.nodeManager; + nodeManager = pkAgent.nodeManager; [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -101,8 +86,8 @@ describe('Notifications client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -111,7 +96,7 @@ describe('Notifications client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); @@ -123,16 +108,16 @@ describe('Notifications client service', () => { sender = await testKeynodeUtils.setupRemoteKeynode({ logger }); await sender.nodeManager.setNode(node1.id, { - host: polykeyAgent.revProxy.ingressHost, - port: polykeyAgent.revProxy.ingressPort, - }); + host: pkAgent.revProxy.getIngressHost(), + port: pkAgent.revProxy.getIngressPort(), + } as NodeAddress); await receiver.acl.setNodePerm(node1.id, { gestalt: { notify: null, }, vaults: {}, }); - await polykeyAgent.acl.setNodePerm(sender.nodeManager.getNodeId(), { + await pkAgent.acl.setNodePerm(sender.nodeManager.getNodeId(), { gestalt: { notify: null, }, @@ -146,11 +131,11 @@ describe('Notifications client service', () => { afterEach(async () => { await receiver.notificationsManager.clearNotifications(); await sender.notificationsManager.clearNotifications(); - await polykeyAgent.notificationsManager.clearNotifications(); + await pkAgent.notificationsManager.clearNotifications(); }); 
test('should send notifications.', async () => { // Set up a remote node receiver and add its details to agent - await testKeynodeUtils.addRemoteDetails(polykeyAgent, receiver); + await testKeynodeUtils.addRemoteDetails(pkAgent, receiver); const notificationsSend = grpcUtils.promisifyUnaryCall( @@ -171,7 +156,7 @@ describe('Notifications client service', () => { type: 'General', message: 'msg', }); - expect(notifs[0].senderId).toEqual(polykeyAgent.nodeManager.getNodeId()); + expect(notifs[0].senderId).toEqual(pkAgent.nodeManager.getNodeId()); expect(notifs[0].isRead).toBeTruthy(); }); test('should read all notifications.', async () => { @@ -293,7 +278,7 @@ describe('Notifications client service', () => { // Call read notifications to check there are none const notifs = - await polykeyAgent.notificationsManager.readNotifications(); + await pkAgent.notificationsManager.readNotifications(); expect(notifs).toEqual([]); }); }); diff --git a/tests/client/rpcSessions.test.ts b/tests/client/rpcSessions.test.ts index 947d87fd9..89240d8cb 100644 --- a/tests/client/rpcSessions.test.ts +++ b/tests/client/rpcSessions.test.ts @@ -27,7 +27,7 @@ describe('Sessions client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let passwordFile: string; let callCredentials: grpc.Metadata; @@ -52,7 +52,7 @@ describe('Sessions client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -61,7 +61,7 @@ describe('Sessions client service', () => { }); [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -71,8 +71,8 @@ describe('Sessions client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -81,30 +81,30 @@ describe('Sessions client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); test('can request a session', async () => { const unlock = grpcUtils.promisifyUnaryCall( client, - client.sessionsUnlock, + client.agentUnlock, ); const pCall = unlock(new utilsPB.EmptyMessage(), callCredentials); const meta = await pCall.meta; const token = clientUtils.decodeAuthToSession(meta); - const result = await polykeyAgent.sessionManager.verifyToken(token!); + const result = await pkAgent.sessionManager.verifyToken(token!); expect(result).toBeTruthy(); }); test('can lock all sessions', async () => { const lockall = grpcUtils.promisifyUnaryCall( client, - client.sessionsLockAll, + client.agentLockAll, ); await lockall(new utilsPB.EmptyMessage(), callCredentials); const prevToken = clientUtils.decodeAuthToSession(callCredentials); - const result = await polykeyAgent.sessionManager.verifyToken(prevToken!); + const result = await pkAgent.sessionManager.verifyToken(prevToken!); expect(result).toBeFalsy(); }); }); diff --git a/tests/client/rpcVaults.test.ts b/tests/client/rpcVaults.test.ts index 9c1c28542..5b051f25f 100644 --- a/tests/client/rpcVaults.test.ts +++ 
b/tests/client/rpcVaults.test.ts @@ -18,26 +18,12 @@ import * as vaultsUtils from '@/vaults/utils'; import { vaultOps } from '@/vaults'; import * as testUtils from './utils'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ describe('Vaults client service', () => { const password = 'password'; const logger = new Logger('VaultsClientServerTest', LogLevel.WARN, [ @@ -55,7 +41,7 @@ describe('Vaults client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let vaultManager: VaultManager; let passwordFile: string; @@ -81,7 +67,7 @@ describe('Vaults client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -89,10 +75,10 @@ describe('Vaults client service', () => { keyManager, }); - vaultManager = polykeyAgent.vaultManager; + vaultManager = pkAgent.vaultManager; [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -102,8 +88,8 @@ describe('Vaults client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -112,7 +98,7 @@ describe('Vaults client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); afterEach(async () => { diff --git a/tests/client/service/agentStop.test.ts b/tests/client/service/agentStop.test.ts new file mode 100644 index 000000000..d3f4dd9dc --- /dev/null +++ b/tests/client/service/agentStop.test.ts @@ -0,0 +1,167 @@ +import type { Host, Port } from '@/network/types'; +import type { SessionToken } from '@/sessions/types'; +import fs from 'fs'; +import path from 'path'; +import os from 'os'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { PolykeyAgent } from '@'; +import { utils as keysUtils } from '@/keys'; +import { GRPCServer } from '@/grpc'; +import { Status } from '@/status'; +import { + GRPCClientClient, + ClientServiceService, + utils as clientUtils, + errors as clientErrors +} from '@/client'; +import agentStop from '@/client/service/agentStop'; +import 
config from '@/config';
+import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb';
+import * as testUtils from '../../utils';
+
+describe('agentStop', () => {
+  const logger = new Logger('agentStop test', LogLevel.WARN, [
+    new StreamHandler(),
+  ]);
+  const password = 'helloworld';
+  let mockedGenerateKeyPair: jest.SpyInstance;
+  let mockedGenerateDeterministicKeyPair: jest.SpyInstance;
+  beforeAll(async () => {
+    const globalKeyPair = await testUtils.setupGlobalKeypair();
+    mockedGenerateKeyPair = jest
+      .spyOn(keysUtils, 'generateKeyPair')
+      .mockResolvedValue(globalKeyPair);
+    mockedGenerateDeterministicKeyPair = jest
+      .spyOn(keysUtils, 'generateDeterministicKeyPair')
+      .mockResolvedValue(globalKeyPair);
+  });
+  afterAll(async () => {
+    mockedGenerateKeyPair.mockRestore();
+    mockedGenerateDeterministicKeyPair.mockRestore();
+  });
+  let dataDir: string;
+  let nodePath: string;
+  let pkAgent: PolykeyAgent;
+  let grpcServer: GRPCServer;
+  let grpcClient: GRPCClientClient;
+  beforeEach(async () => {
+    dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'polykey-test-'));
+    nodePath = path.join(dataDir, 'polykey');
+    // Note that by doing this, the agent the call is stopping is a separate agent
+    pkAgent = await PolykeyAgent.createPolykeyAgent({
+      password,
+      nodePath,
+      logger,
+    });
+    const authenticate = clientUtils.authenticator(
+      pkAgent.sessionManager,
+      pkAgent.keyManager
+    );
+    const clientService = {
+      agentStop: agentStop({
+        authenticate,
+        pkAgent: pkAgent as unknown as PolykeyAgent,
+      })
+    };
+    grpcServer = new GRPCServer({ logger });
+    await grpcServer.start({
+      services: [[ClientServiceService, clientService]],
+      host: '127.0.0.1' as Host,
+      port: 0 as Port,
+    });
+    grpcClient = await GRPCClientClient.createGRPCClientClient({
+      nodeId: pkAgent.keyManager.getNodeId(),
+      host: '127.0.0.1' as Host,
+      port: grpcServer.port,
+      logger
+    });
+  });
+  afterEach(async () => {
+    await grpcClient.destroy();
+    await grpcServer.stop();
+    await pkAgent.stop();
+    await fs.promises.rm(dataDir, {
+      force: true,
+      recursive: true,
+    });
+  });
+  test('stops the agent with password', async () => {
+    const statusPath = path.join(nodePath, config.defaults.statusBase);
+    const status = new Status({
+      statusPath,
+      fs,
+      logger,
+    });
+    const request = new utilsPB.EmptyMessage();
+    const response = await grpcClient.agentStop(
+      request,
+      clientUtils.encodeAuthFromPassword(password)
+    );
+    expect(response).toBeInstanceOf(utilsPB.EmptyMessage);
+    // While the `agentStop` is asynchronous
+    // There is a synchronous switch to `running`
+    expect(pkAgent.running).toBe(false);
+    // It may already be stopping
+    expect(await status.readStatus()).toMatchObject({
+      status: expect.stringMatching(/LIVE|STOPPING|DEAD/)
+    });
+    await status.waitFor('DEAD');
+    expect(pkAgent.running).toBe(false);
+  });
+  test('stops the agent with token', async () => {
+    const token = await pkAgent.sessionManager.createToken();
+    const statusPath = path.join(nodePath, config.defaults.statusBase);
+    const status = new Status({
+      statusPath,
+      fs,
+      logger,
+    });
+    const request = new utilsPB.EmptyMessage();
+    const response = await grpcClient.agentStop(
+      request,
+      clientUtils.encodeAuthFromSession(token)
+    );
+    expect(response).toBeInstanceOf(utilsPB.EmptyMessage);
+    // While the `agentStop` is asynchronous
+    // There is a synchronous switch to `running`
+    expect(pkAgent.running).toBe(false);
+    // It may already be stopping
+    expect(await status.readStatus()).toMatchObject({
+      status:
expect.stringMatching(/LIVE|STOPPING|DEAD/) + }); + await status.waitFor('DEAD'); + expect(pkAgent.running).toBe(false); + }); + test('cannot stop the agent if not authenticated', async () => { + const statusPath = path.join(nodePath, config.defaults.statusBase); + const status = new Status({ + statusPath, + fs, + logger, + }); + const request = new utilsPB.EmptyMessage(); + await expect(async () => { + await grpcClient.agentStop( + request, + ); + }).rejects.toThrow(clientErrors.ErrorClientAuthMissing); + expect(pkAgent.running).toBe(true); + await expect(async () => { + await grpcClient.agentStop( + request, + clientUtils.encodeAuthFromPassword('wrong password') + ); + }).rejects.toThrow(clientErrors.ErrorClientAuthDenied); + expect(pkAgent.running).toBe(true); + await expect(async () => { + await grpcClient.agentStop( + request, + clientUtils.encodeAuthFromSession('wrong token' as SessionToken) + ); + }).rejects.toThrow(clientErrors.ErrorClientAuthDenied); + expect(pkAgent.running).toBe(true); + expect(await status.readStatus()).toMatchObject({ + status: 'LIVE' + }); + }); +}) diff --git a/tests/client/utils.ts b/tests/client/utils.ts index 7f7181daa..5ba0079cd 100644 --- a/tests/client/utils.ts +++ b/tests/client/utils.ts @@ -16,35 +16,35 @@ import { promisify } from '@/utils'; import * as grpcUtils from '@/grpc/utils'; async function openTestClientServer({ - polykeyAgent, + pkAgent, secure, }: { - polykeyAgent: PolykeyAgent; + pkAgent: PolykeyAgent; secure?: boolean; }) { const _secure = secure ?? true; const clientService: IClientServiceServer = createClientService({ - polykeyAgent, - keyManager: polykeyAgent.keyManager, - vaultManager: polykeyAgent.vaultManager, - nodeManager: polykeyAgent.nodeManager, - identitiesManager: polykeyAgent.identitiesManager, - gestaltGraph: polykeyAgent.gestaltGraph, - sessionManager: polykeyAgent.sessionManager, - notificationsManager: polykeyAgent.notificationsManager, - discovery: polykeyAgent.discovery, - sigchain: polykeyAgent.sigchain, - fwdProxy: polykeyAgent.fwdProxy, - revProxy: polykeyAgent.revProxy, - grpcServerClient: polykeyAgent.grpcServerClient, - grpcServerAgent: polykeyAgent.grpcServerAgent, - fs: polykeyAgent.fs, + pkAgent, + keyManager: pkAgent.keyManager, + vaultManager: pkAgent.vaultManager, + nodeManager: pkAgent.nodeManager, + identitiesManager: pkAgent.identitiesManager, + gestaltGraph: pkAgent.gestaltGraph, + sessionManager: pkAgent.sessionManager, + notificationsManager: pkAgent.notificationsManager, + discovery: pkAgent.discovery, + sigchain: pkAgent.sigchain, + fwdProxy: pkAgent.fwdProxy, + revProxy: pkAgent.revProxy, + grpcServerClient: pkAgent.grpcServerClient, + grpcServerAgent: pkAgent.grpcServerAgent, + fs: pkAgent.fs, }); const callCredentials = _secure ? 
grpcUtils.serverSecureCredentials( - polykeyAgent.keyManager.getRootKeyPairPem().privateKey, - await polykeyAgent.keyManager.getRootCertChainPem(), + pkAgent.keyManager.getRootKeyPairPem().privateKey, + await pkAgent.keyManager.getRootCertChainPem(), ) : grpcUtils.serverInsecureCredentials(); diff --git a/tests/grpc/GRPCServer.test.ts b/tests/grpc/GRPCServer.test.ts index ac7f18c69..a3fb2103c 100644 --- a/tests/grpc/GRPCServer.test.ts +++ b/tests/grpc/GRPCServer.test.ts @@ -1,26 +1,26 @@ +import type { Authenticate } from '@/client/types'; import type { Host, Port } from '@/network/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; - import { GRPCServer, utils as grpcUtils } from '@/grpc'; -import { KeyManager } from '@/keys'; -import { utils as networkUtils } from '@/network'; +import { KeyManager, utils as keysUtils } from '@/keys'; +import { SessionManager } from '@/sessions'; +import * as networkUtils from '@/network/utils'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import * as grpcErrors from '@/grpc/errors'; -import { SessionManager } from '@/sessions'; import * as clientUtils from '@/client/utils'; -import * as keysUtils from '@/keys/utils'; -import * as utils from './utils'; +import * as testGrpcUtils from './utils'; +import * as testUtils from '../utils'; -// Mocks. -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(testUtils.globalKeyPair); +jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(testUtils.globalKeyPair); describe('GRPCServer', () => { const password = 'password'; @@ -28,7 +28,7 @@ describe('GRPCServer', () => { let keyManager: KeyManager; let db: DB; let sessionManager: SessionManager; - let authenticate: clientUtils.Authenticate; + let authenticate: Authenticate; const logger = new Logger('GRPCServer Test', LogLevel.WARN, [ new StreamHandler(), ]); @@ -99,8 +99,8 @@ describe('GRPCServer', () => { await server.start({ services: [ [ - utils.TestServiceService, - utils.createTestService({ authenticate, logger }), + testGrpcUtils.TestServiceService, + testGrpcUtils.createTestService({ authenticate, logger }), ], ], host: '127.0.0.1' as Host, @@ -128,8 +128,8 @@ describe('GRPCServer', () => { await server.start({ services: [ [ - utils.TestServiceService, - utils.createTestService({ authenticate, logger }), + testGrpcUtils.TestServiceService, + testGrpcUtils.createTestService({ authenticate, logger }), ], ], host: '127.0.0.1' as Host, @@ -147,7 +147,7 @@ describe('GRPCServer', () => { clientKeyPair.privateKey, 31536000, ); - const client = await utils.openTestClientSecure( + const client = await testGrpcUtils.openTestClientSecure( nodeIdServer, server.port, keysUtils.privateKeyToPem(clientKeyPair.privateKey), @@ -163,7 +163,7 @@ describe('GRPCServer', () => { expect(pCall.call.getPeer()).toBe(`dns:127.0.0.1:${server.port}`); const m_ = await pCall; expect(m_.getChallenge()).toBe(m.getChallenge()); - utils.closeTestClientSecure(client); + testGrpcUtils.closeTestClientSecure(client); await server.stop(); }); test('changing the private key and certificate on the fly', async () => { @@ -180,8 +180,8 @@ describe('GRPCServer', () => { await server.start({ services: [ [ - utils.TestServiceService, - utils.createTestService({ 
authenticate, logger }), + testGrpcUtils.TestServiceService, + testGrpcUtils.createTestService({ authenticate, logger }), ], ], host: '127.0.0.1' as Host, @@ -200,7 +200,7 @@ describe('GRPCServer', () => { ); // First client connection const nodeIdServer1 = networkUtils.certNodeId(serverCert1); - const client1 = await utils.openTestClientSecure( + const client1 = await testGrpcUtils.openTestClientSecure( nodeIdServer1, server.port, keysUtils.privateKeyToPem(clientKeyPair.privateKey), @@ -237,7 +237,7 @@ describe('GRPCServer', () => { expect(m2_.getChallenge()).toBe(m2.getChallenge()); // Second client connection const nodeIdServer2 = networkUtils.certNodeId(serverCert2); - const client2 = await utils.openTestClientSecure( + const client2 = await testGrpcUtils.openTestClientSecure( nodeIdServer2, server.port, keysUtils.privateKeyToPem(clientKeyPair.privateKey), @@ -253,8 +253,8 @@ describe('GRPCServer', () => { expect(pCall3.call.getPeer()).toBe(`dns:127.0.0.1:${server.port}`); const m3_ = await pCall3; expect(m3_.getChallenge()).toBe(m3.getChallenge()); - utils.closeTestClientSecure(client1); - utils.closeTestClientSecure(client2); + testGrpcUtils.closeTestClientSecure(client1); + testGrpcUtils.closeTestClientSecure(client2); await server.stop(); }); test('authenticated commands acquire a token', async () => { @@ -271,8 +271,8 @@ describe('GRPCServer', () => { await server.start({ services: [ [ - utils.TestServiceService, - utils.createTestService({ authenticate, logger }), + testGrpcUtils.TestServiceService, + testGrpcUtils.createTestService({ authenticate, logger }), ], ], host: '127.0.0.1' as Host, @@ -290,7 +290,7 @@ describe('GRPCServer', () => { clientKeyPair.privateKey, 31536000, ); - const client = await utils.openTestClientSecure( + const client = await testGrpcUtils.openTestClientSecure( nodeIdServer, server.port, keysUtils.privateKeyToPem(clientKeyPair.privateKey), @@ -310,7 +310,7 @@ describe('GRPCServer', () => { expect(typeof token).toBe('string'); expect(token!.length > 0).toBe(true); expect(await sessionManager.verifyToken(token!)).toBe(true); - utils.closeTestClientSecure(client); + testGrpcUtils.closeTestClientSecure(client); await server.stop(); }); }); diff --git a/tests/utils.ts b/tests/utils.ts index f1d55e41b..0979f4e77 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -8,7 +8,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { PolykeyAgent } from '@'; import { Status } from '@/status'; import { utils as keysUtils } from '@/keys'; -import { GRPCClientClient, Metadata, utils as clientUtils } from '@/client'; +import { GRPCClientClient, utils as clientUtils } from '@/client'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import { sleep } from '@/utils'; import config from '@/config'; From 3c8860c8e391e3577c5fb7957b353e92c49d768a Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Mon, 20 Dec 2021 21:23:52 +1100 Subject: [PATCH 08/28] Fixing tests, grpc passing --- tests/grpc/GRPCServer.test.ts | 24 ++++++++++++++---------- tests/grpc/utils/testService.ts | 4 ++-- tests/grpc/utils/utils.ts | 6 +++--- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/tests/grpc/GRPCServer.test.ts b/tests/grpc/GRPCServer.test.ts index a3fb2103c..49ba78a28 100644 --- a/tests/grpc/GRPCServer.test.ts +++ b/tests/grpc/GRPCServer.test.ts @@ -15,24 +15,26 @@ import * as clientUtils from '@/client/utils'; import * as testGrpcUtils from './utils'; import * as testUtils from '../utils'; -jest - .spyOn(keysUtils, 'generateKeyPair') - 
.mockResolvedValue(testUtils.globalKeyPair); -jest - .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockResolvedValue(testUtils.globalKeyPair); - describe('GRPCServer', () => { + const logger = new Logger('GRPCServer Test', LogLevel.WARN, [ + new StreamHandler(), + ]); const password = 'password'; + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; let dataDir: string; let keyManager: KeyManager; let db: DB; let sessionManager: SessionManager; let authenticate: Authenticate; - const logger = new Logger('GRPCServer Test', LogLevel.WARN, [ - new StreamHandler(), - ]); beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -70,6 +72,8 @@ describe('GRPCServer', () => { force: true, recursive: true, }); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); }); test('GRPCServer readiness', async () => { const server = new GRPCServer({ diff --git a/tests/grpc/utils/testService.ts b/tests/grpc/utils/testService.ts index b6ee75236..8d126025d 100644 --- a/tests/grpc/utils/testService.ts +++ b/tests/grpc/utils/testService.ts @@ -4,7 +4,7 @@ * streaming, client streaming and bidirectional streaming * @module */ - +import type { Authenticate } from '@/client/types'; import type { SessionToken } from '@/sessions/types'; import type { ITestServiceServer } from '@/proto/js/polykey/v1/test_service_grpc_pb'; import Logger from '@matrixai/logger'; @@ -17,7 +17,7 @@ function createTestService({ authenticate, logger = new Logger('TestService'), }: { - authenticate: clientUtils.Authenticate; + authenticate: Authenticate; logger?: Logger; }) { const testService: ITestServiceServer = { diff --git a/tests/grpc/utils/utils.ts b/tests/grpc/utils/utils.ts index 8ab8e295a..9aa2bff36 100644 --- a/tests/grpc/utils/utils.ts +++ b/tests/grpc/utils/utils.ts @@ -1,6 +1,6 @@ import type Logger from '@matrixai/logger'; +import type { Authenticate } from '@/client/types'; import type { NodeId } from '@/nodes/types'; -import type * as clientUtils from '@/client/utils'; import * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '@/grpc'; import { promisify } from '@/utils'; @@ -11,7 +11,7 @@ import { import createTestService from './testService'; async function openTestServer( - authenticate: clientUtils.Authenticate, + authenticate: Authenticate, logger?: Logger, ): Promise<[grpc.Server, number]> { const testService = createTestService({ authenticate, logger }); @@ -80,7 +80,7 @@ function closeTestClientSecure(client: TestServiceClient) { async function openTestServerSecure( keyPrivatePem, certChainPem, - authenticate: clientUtils.Authenticate, + authenticate: Authenticate, logger?: Logger, ): Promise<[grpc.Server, number]> { const testService = createTestService({ authenticate, logger }); From c490e4689c564bd632c977ec8412f416ff14372c Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Mon, 20 Dec 2021 21:30:46 +1100 Subject: [PATCH 09/28] Adapting seed nodes test to global agent --- tests/bin/agent/start.test.ts | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/tests/bin/agent/start.test.ts b/tests/bin/agent/start.test.ts index 
2950d0f34..8af379c8f 100644 --- a/tests/bin/agent/start.test.ts +++ b/tests/bin/agent/start.test.ts @@ -8,6 +8,7 @@ import { Status, errors as statusErrors } from '@/status'; import config from '@/config'; import * as nodesUtils from '@/nodes/utils'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; describe('start', () => { const logger = new Logger('start test', LogLevel.WARN, [new StreamHandler()]); @@ -571,12 +572,8 @@ describe('start', () => { global.defaultTimeout * 2, ); describe('seed nodes', () => { - let seedNodeClose; const connTimeoutTime = 500; - let seedNodeId; const seedNodeHost = '127.0.0.1'; - let seedNodePort; - const dummySeed1Id = nodesUtils.makeNodeId( 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', ); @@ -587,26 +584,27 @@ describe('start', () => { ); const dummySeed2Host = '128.0.0.1'; const dummySeed2Port = 1314; - + let globalAgentDir; + let globalAgentClose; + let seedNodeId; + let seedNodePort; beforeAll(async () => { - seedNodeClose = await testBinUtils.pkAgent([ - '--connection-timeout', - connTimeoutTime.toString(), - '--ingress-host', - seedNodeHost, - ]); + ({ + globalAgentDir, + globalAgentClose + } = await testUtils.setupGlobalAgent(logger)); const status = new Status({ - statusPath: path.join(global.binAgentDir, config.defaults.statusBase), + statusPath: path.join(globalAgentDir, config.defaults.statusBase), fs, logger, }); - const statusInfo = await status.waitFor('LIVE', 5000); + const statusInfo = await status.waitFor('LIVE'); // Get the dynamic seed node components seedNodeId = statusInfo.data.nodeId; seedNodePort = statusInfo.data.ingressPort; }, global.maxTimeout); afterAll(async () => { - await seedNodeClose(); + await globalAgentClose(); }); test( From 84823f52ddddc28fec11d3fbe3319ce4bee27165 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Mon, 20 Dec 2021 21:49:24 +1100 Subject: [PATCH 10/28] Fixing tests, notifications, sessions and sigchain is working --- tests/client/GRPCClientClient.test.ts | 43 +++++++++---------- .../NotificationsManager.test.ts | 2 +- tests/sessions/SessionManager.test.ts | 20 ++++++--- tests/sigchain/Sigchain.test.ts | 39 +++++++++++------ 4 files changed, 60 insertions(+), 44 deletions(-) diff --git a/tests/client/GRPCClientClient.test.ts b/tests/client/GRPCClientClient.test.ts index 97f29fe1a..b3a43ec7a 100644 --- a/tests/client/GRPCClientClient.test.ts +++ b/tests/client/GRPCClientClient.test.ts @@ -6,26 +6,24 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { GRPCClientClient } from '@/client'; -import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import { PolykeyAgent } from '@'; +import { utils as keysUtils } from '@/keys'; import { Status } from '@/status'; -import * as binProcessors from '@/bin/utils/processors'; import { Session } from '@/sessions'; -import config from '@/config'; import { errors as clientErrors } from '@/client'; -import * as testUtils from './utils'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import config from '@/config'; +import * as binProcessors from '@/bin/utils/processors'; +import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; +import * as testClientUtils from './utils'; +import * as testUtils from '../utils'; describe(GRPCClientClient.name, () => { const password = 'password'; const logger = new 
Logger(`${GRPCClientClient.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; let client: GRPCClientClient; let server: grpc.Server; let port: number; @@ -35,27 +33,24 @@ describe(GRPCClientClient.name, () => { let nodeId: NodeId; let session: Session; beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); nodePath = path.join(dataDir, 'node'); - - // THIS IS TESTING FROM THE client side level - // this should be its own directory - // tests from the client side API - - // tests from the server side API - - - - pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath, logger: logger, }); nodeId = pkAgent.nodeManager.getNodeId(); - [server, port] = await testUtils.openTestClientServer({ + [server, port] = await testClientUtils.openTestClientServer({ pkAgent, }); const sessionTokenPath = path.join(nodePath, 'sessionToken'); @@ -64,16 +59,18 @@ describe(GRPCClientClient.name, () => { await session.start({ sessionToken, }); - }, global.polykeyStartupTimeout); + }); afterAll(async () => { await client.destroy(); - await testUtils.closeTestClientServer(server); + await testClientUtils.closeTestClientServer(server); await pkAgent.stop(); await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, recursive: true, }); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); }); test('cannot be called when destroyed', async () => { client = await GRPCClientClient.createGRPCClientClient({ diff --git a/tests/notifications/NotificationsManager.test.ts b/tests/notifications/NotificationsManager.test.ts index 6ef1a048a..4077082a6 100644 --- a/tests/notifications/NotificationsManager.test.ts +++ b/tests/notifications/NotificationsManager.test.ts @@ -204,7 +204,7 @@ describe('NotificationsManager', () => { ingressHost: receiverHost, tlsConfig: revTLSConfig, }); - receiverIngressPort = revProxy.ingressPort; + receiverIngressPort = revProxy.getIngressPort(); }, global.polykeyStartupTimeout * 2); beforeEach(async () => { diff --git a/tests/sessions/SessionManager.test.ts b/tests/sessions/SessionManager.test.ts index beaca9f0f..d8688da85 100644 --- a/tests/sessions/SessionManager.test.ts +++ b/tests/sessions/SessionManager.test.ts @@ -6,20 +6,17 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { KeyManager } from '@/keys'; import SessionManager from '@/sessions/SessionManager'; import * as sessionsErrors from '@/sessions/errors'; -import { sleep } from '@/utils'; import * as keysUtils from '@/keys/utils'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import { sleep } from '@/utils'; +import * as testUtils from '../utils'; describe('SessionManager', () => { const password = 'password'; const logger = new Logger(`${SessionManager.name} Test`, LogLevel.WARN, [ new StreamHandler(), ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; /** * Shared db, keyManager for all tests */ @@ -27,6 +24,13 
@@ describe('SessionManager', () => { let db: DB; let keyManager: KeyManager; beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -56,6 +60,8 @@ describe('SessionManager', () => { force: true, recursive: true, }); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); }); test('session manager readiness', async () => { const sessionManager = await SessionManager.createSessionManager({ diff --git a/tests/sigchain/Sigchain.test.ts b/tests/sigchain/Sigchain.test.ts index c38a92333..69f690670 100644 --- a/tests/sigchain/Sigchain.test.ts +++ b/tests/sigchain/Sigchain.test.ts @@ -6,29 +6,36 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import { Sigchain } from '@/sigchain'; import * as claimsUtils from '@/claims/utils'; import * as sigchainErrors from '@/sigchain/errors'; -import { makeCrypto } from '../utils'; - -// Mocks. -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import * as testUtils from '../utils'; describe('Sigchain', () => { - const password = 'password'; const logger = new Logger('Sigchain Test', LogLevel.WARN, [ new StreamHandler(), ]); + const password = 'password'; + const srcNodeId = 'NodeId1' as NodeId; + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + }); let dataDir: string; let keyManager: KeyManager; let db: DB; - const srcNodeId = 'NodeId1' as NodeId; - beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), @@ -43,7 +50,13 @@ describe('Sigchain', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + } }); }); afterEach(async () => { From b6f3ace97d86db0017ee513bd229dd8e006a45fd Mon Sep 17 00:00:00 2001 From: Joshua Karp Date: Tue, 21 Dec 2021 11:05:14 +1100 Subject: [PATCH 11/28] Adding comment about empty default seed nodes in pkStdio, pkExec, etc, and defaulting as empty in setupGlobalAgent --- tests/bin/utils.ts | 16 ++++++++++++++++ tests/utils.ts | 1 + 2 files changed, 17 insertions(+) diff --git a/tests/bin/utils.ts b/tests/bin/utils.ts index 24905b260..a5d74f300 100644 --- a/tests/bin/utils.ts +++ b/tests/bin/utils.ts @@ -43,6 +43,10 @@ async function pkStdio( }> { cwd = cwd ?? 
(await fs.promises.mkdtemp(path.join(os.tmpdir(), 'polykey-test-'))); + // Recall that we attempt to connect to all specified seed nodes on agent start. + // Therefore, for testing purposes only, we default the seed nodes as empty + // (if not defined in the env) to ensure no attempted connections. A regular + // PolykeyAgent is expected to initially connect to the mainnet seed nodes env['PK_SEED_NODES'] = env['PK_SEED_NODES'] ?? ''; // Parse the arguments of process.stdout.write and process.stderr.write const parseArgs = (args) => { @@ -132,6 +136,10 @@ async function pkExec( ...process.env, ...env, }; + // Recall that we attempt to connect to all specified seed nodes on agent start. + // Therefore, for testing purposes only, we default the seed nodes as empty + // (if not defined in the env) to ensure no attempted connections. A regular + // PolykeyAgent is expected to initially connect to the mainnet seed nodes env['PK_SEED_NODES'] = env['PK_SEED_NODES'] ?? ''; const tsConfigPath = path.resolve( path.join(global.projectDir, 'tsconfig.json'), @@ -197,6 +205,10 @@ async function pkSpawn( ...process.env, ...env, }; + // Recall that we attempt to connect to all specified seed nodes on agent start. + // Therefore, for testing purposes only, we default the seed nodes as empty + // (if not defined in the env) to ensure no attempted connections. A regular + // PolykeyAgent is expected to initially connect to the mainnet seed nodes env['PK_SEED_NODES'] = env['PK_SEED_NODES'] ?? ''; const tsConfigPath = path.resolve( path.join(global.projectDir, 'tsconfig.json'), @@ -260,6 +272,10 @@ async function pkExpect({ ...process.env, ...env, }; + // Recall that we attempt to connect to all specified seed nodes on agent start. + // Therefore, for testing purposes only, we default the seed nodes as empty + // (if not defined in the env) to ensure no attempted connections. A regular + // PolykeyAgent is expected to initially connect to the mainnet seed nodes env['PK_SEED_NODES'] = env['PK_SEED_NODES'] ?? 
''; const tsConfigPath = path.resolve( path.join(global.projectDir, 'tsconfig.json'), diff --git a/tests/utils.ts b/tests/utils.ts index 0979f4e77..5b4742ee9 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -114,6 +114,7 @@ async function setupGlobalAgent( keysConfig: { rootKeyPairBits: 1024 }, + seedNodes: {}, // explicitly no seed nodes on startup logger, }); statusInfo = await status.readStatus(); From 9d269912f70d811e7364fde07774bde96c86e6bb Mon Sep 17 00:00:00 2001 From: Joshua Karp Date: Tue, 21 Dec 2021 13:37:24 +1100 Subject: [PATCH 12/28] Splitting nodes.test.ts --- tests/bin/nodes/add.test.ts | 123 +++++++++++ tests/bin/nodes/claim.test.ts | 138 ++++++++++++ tests/bin/nodes/find.test.ts | 185 ++++++++++++++++ tests/bin/nodes/nodes.test.ts | 394 ---------------------------------- tests/bin/nodes/ping.test.ts | 157 ++++++++++++++ 5 files changed, 603 insertions(+), 394 deletions(-) create mode 100644 tests/bin/nodes/add.test.ts create mode 100644 tests/bin/nodes/claim.test.ts create mode 100644 tests/bin/nodes/find.test.ts delete mode 100644 tests/bin/nodes/nodes.test.ts create mode 100644 tests/bin/nodes/ping.test.ts diff --git a/tests/bin/nodes/add.test.ts b/tests/bin/nodes/add.test.ts new file mode 100644 index 000000000..e715e1481 --- /dev/null +++ b/tests/bin/nodes/add.test.ts @@ -0,0 +1,123 @@ +import type { Host, Port } from '@/network/types'; +import type { NodeId } from '@/nodes/types'; +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import * as nodesUtils from '@/nodes/utils'; +import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + +describe('add', () => { + const password = 'password'; + const logger = new Logger('add test', LogLevel.WARN, [ + new StreamHandler(), + ]); + let dataDir: string; + let nodePath: string; + let passwordFile: string; + let polykeyAgent: PolykeyAgent; + + const validNodeId = nodesUtils.makeNodeId( + 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', + ); + const invalidNodeId = 'INVALIDID' as NodeId; + const validHost = '0.0.0.0'; + const invalidHost = 'INVALIDHOST'; + const port = 55555; + + // Helper functions + function genCommands(options: Array) { + return ['nodes', ...options, '-np', nodePath]; + } + + beforeAll(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'keynode'); + passwordFile = path.join(dataDir, 'passwordFile'); + await fs.promises.writeFile(passwordFile, 'password'); + polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: nodePath, + logger: logger, + }); + + // Authorize session + await testBinUtils.pkStdio( + ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], + {}, + nodePath, + ); + }, global.polykeyStartupTimeout * 3); + afterEach(async () => { + await polykeyAgent.nodeManager.clearDB(); + }); + afterAll(async () => { + await polykeyAgent.stop(); + await polykeyAgent.destroy(); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + + test('add a node', async () => { + const commands = genCommands([ + 'add', + validNodeId, + validHost, + port.toString(), + ]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + 
expect(result.exitCode).toBe(0); + + // Checking if node was added. + const res = await polykeyAgent.nodeManager.getNode(validNodeId); + expect(res).toBeTruthy(); + expect(res!.host).toEqual(validHost); + expect(res!.port).toEqual(port); + }); + test( + 'fail to add a node (invalid node ID)', + async () => { + const commands = genCommands([ + 'add', + invalidNodeId, + validHost, + port.toString(), + ]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).not.toBe(0); + expect(result.stderr).toContain('Invalid node ID.'); + }, + global.failedConnectionTimeout, + ); + test( + 'fail to add a node (invalid IP address)', + async () => { + const commands = genCommands([ + 'add', + validNodeId, + invalidHost, + port.toString(), + ]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).not.toBe(0); + expect(result.stderr).toContain('Invalid IP address.'); + + // Checking if node was added. + const res = await polykeyAgent.nodeManager.getNode(validNodeId); + expect(res).toBeUndefined(); + }, + global.failedConnectionTimeout, + ); +}); diff --git a/tests/bin/nodes/claim.test.ts b/tests/bin/nodes/claim.test.ts new file mode 100644 index 000000000..a0dd54331 --- /dev/null +++ b/tests/bin/nodes/claim.test.ts @@ -0,0 +1,138 @@ +import type { Host, Port } from '@/network/types'; +import type { NodeId } from '@/nodes/types'; +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + +describe('claim', () => { + const password = 'password'; + const logger = new Logger('claim test', LogLevel.WARN, [ + new StreamHandler(), + ]); + let dataDir: string; + let nodePath: string; + let passwordFile: string; + let polykeyAgent: PolykeyAgent; + let remoteOnline: PolykeyAgent; + + let keynodeId: NodeId; + let remoteOnlineNodeId: NodeId; + let remoteOnlineHost: Host; + let remoteOnlinePort: Port; + + // Helper functions + function genCommands(options: Array) { + return ['nodes', ...options, '-np', nodePath]; + } + + beforeAll(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'keynode'); + passwordFile = path.join(dataDir, 'passwordFile'); + await fs.promises.writeFile(passwordFile, 'password'); + polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: nodePath, + logger: logger, + }); + keynodeId = polykeyAgent.nodeManager.getNodeId(); + // Setting up a remote keynode + remoteOnline = await testUtils.setupRemoteKeynode({ + logger, + }); + remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); + remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); + remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); + await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); + + await remoteOnline.nodeManager.setNode(keynodeId, { + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }); + await polykeyAgent.acl.setNodePerm(remoteOnlineNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + await remoteOnline.acl.setNodePerm(keynodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + // 
Authorize session + await testBinUtils.pkStdio( + ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], + {}, + nodePath, + ); + }, global.polykeyStartupTimeout * 2); + + afterEach(async () => { + await polykeyAgent.notificationsManager.clearNotifications(); + await remoteOnline.notificationsManager.clearNotifications(); + await polykeyAgent.sigchain.clearDB(); + await remoteOnline.sigchain.clearDB(); + }); + afterAll(async () => { + await polykeyAgent.stop(); + await polykeyAgent.destroy(); + await testUtils.cleanupRemoteKeynode(remoteOnline); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + test( + 'send a gestalt invite', + async () => { + const commands = genCommands(['claim', remoteOnlineNodeId]); + const result = await testBinUtils.pkStdio(commands); + expect(result.exitCode).toBe(0); // Succeeds. + expect(result.stdout).toContain('Gestalt Invite'); + expect(result.stdout).toContain(remoteOnlineNodeId); + }, + global.polykeyStartupTimeout * 4, + ); + test('send a gestalt invite (force invite)', async () => { + await remoteOnline.notificationsManager.sendNotification(keynodeId, { + type: 'GestaltInvite', + }); + // Needs to be forced, as the local node has already received an invitation + const commands = genCommands([ + 'claim', + remoteOnlineNodeId, + '--force-invite', + ]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); // Succeeds. + expect(result.stdout).toContain('Gestalt Invite'); + expect(result.stdout).toContain(remoteOnlineNodeId); + }); + test('claim the remote node', async () => { + await remoteOnline.notificationsManager.sendNotification(keynodeId, { + type: 'GestaltInvite', + }); + // Received an invitation, so will attempt to perform the claiming process + const commands = genCommands(['claim', remoteOnlineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + console.log('result', result.exitCode, result.stderr, result.stdout); + expect(result.exitCode).toBe(0); // Succeeds. 
+ expect(result.stdout).toContain('cryptolink claim'); + expect(result.stdout).toContain(remoteOnlineNodeId); + }); +}); diff --git a/tests/bin/nodes/find.test.ts b/tests/bin/nodes/find.test.ts new file mode 100644 index 000000000..2c346576c --- /dev/null +++ b/tests/bin/nodes/find.test.ts @@ -0,0 +1,185 @@ +import type { Host, Port } from '@/network/types'; +import type { NodeId } from '@/nodes/types'; +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import * as nodesUtils from '@/nodes/utils'; +import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + +describe('find', () => { + const password = 'password'; + const logger = new Logger('find test', LogLevel.WARN, [ + new StreamHandler(), + ]); + let dataDir: string; + let nodePath: string; + let passwordFile: string; + let polykeyAgent: PolykeyAgent; + let remoteOnline: PolykeyAgent; + let remoteOffline: PolykeyAgent; + + let keynodeId: NodeId; + let remoteOnlineNodeId: NodeId; + let remoteOfflineNodeId: NodeId; + + let remoteOnlineHost: Host; + let remoteOnlinePort: Port; + let remoteOfflineHost: Host; + let remoteOfflinePort: Port; + + // Helper functions + function genCommands(options: Array) { + return ['nodes', ...options, '-np', nodePath]; + } + + beforeAll(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'keynode'); + passwordFile = path.join(dataDir, 'passwordFile'); + await fs.promises.writeFile(passwordFile, 'password'); + polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: nodePath, + logger: logger, + }); + keynodeId = polykeyAgent.nodeManager.getNodeId(); + + // Setting up a remote keynode + remoteOnline = await testUtils.setupRemoteKeynode({ + logger, + }); + remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); + remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); + remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); + await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); + + // Setting up an offline remote keynode + remoteOffline = await testUtils.setupRemoteKeynode({ + logger, + }); + remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); + remoteOfflineHost = remoteOffline.revProxy.getIngressHost(); + remoteOfflinePort = remoteOffline.revProxy.getIngressPort(); + await testUtils.addRemoteDetails(polykeyAgent, remoteOffline); + await remoteOffline.stop(); + + // Authorize session + await testBinUtils.pkStdio( + ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], + {}, + nodePath, + ); + }, global.polykeyStartupTimeout * 3); + afterAll(async () => { + await polykeyAgent.stop(); + await polykeyAgent.destroy(); + await testUtils.cleanupRemoteKeynode(remoteOnline); + await testUtils.cleanupRemoteKeynode(remoteOffline); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + + test('find an online node', async () => { + const commands = genCommands(['find', remoteOnlineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('Found node at'); + expect(result.stdout).toContain(remoteOnlineHost); + 
expect(result.stdout).toContain(remoteOnlinePort); + + // Checking json format. + const commands2 = genCommands([ + 'find', + remoteOnlineNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(0); + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('true'); + expect(result2.stdout).toContain('message'); + expect(result2.stdout).toContain( + `Found node at ${remoteOnlineHost}:${remoteOnlinePort}`, + ); + expect(result2.stdout).toContain('host'); + expect(result2.stdout).toContain('port'); + expect(result2.stdout).toContain('id'); + expect(result2.stdout).toContain(remoteOnlineNodeId); + }); + test('find an offline node', async () => { + const commands = genCommands(['find', remoteOfflineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('Found node at'); + expect(result.stdout).toContain(remoteOfflineHost); + expect(result.stdout).toContain(remoteOfflinePort); + + // Checking json format. + const commands2 = genCommands([ + 'find', + remoteOfflineNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(0); + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('true'); + expect(result2.stdout).toContain('message'); + expect(result2.stdout).toContain( + `Found node at ${remoteOfflineHost}:${remoteOfflinePort}`, + ); + expect(result2.stdout).toContain('host'); + expect(result2.stdout).toContain('port'); + expect(result2.stdout).toContain('id'); + expect(result2.stdout).toContain(remoteOfflineNodeId); + }); + test( + 'fail to find an unknown node', + async () => { + const unknownNodeId = nodesUtils.makeNodeId( + 'vrcacp9vsb4ht25hds6s4lpp2abfaso0mptcfnh499n35vfcn2gkg', + ); + const commands = genCommands(['find', unknownNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(1); + expect(result.stdout).toContain(`Failed to find node ${unknownNodeId}`); + + // Checking json format. 
+ const commands2 = genCommands([ + 'find', + unknownNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(1); + expect(result2.stdout).toContain(`message`); + expect(result2.stdout).toContain( + `Failed to find node ${unknownNodeId}`, + ); + expect(result2.stdout).toContain('id'); + expect(result2.stdout).toContain(unknownNodeId); + expect(result2.stdout).toContain('port'); + expect(result2.stdout).toContain('0'); + expect(result2.stdout).toContain('host'); + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('false'); + }, + global.failedConnectionTimeout * 2, + ); +}); diff --git a/tests/bin/nodes/nodes.test.ts b/tests/bin/nodes/nodes.test.ts deleted file mode 100644 index d345d5528..000000000 --- a/tests/bin/nodes/nodes.test.ts +++ /dev/null @@ -1,394 +0,0 @@ -import type { Host, Port } from '@/network/types'; -import type { NodeId, NodeAddress } from '@/nodes/types'; -import os from 'os'; -import path from 'path'; -import fs from 'fs'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import PolykeyAgent from '@/PolykeyAgent'; -import { makeNodeId } from '@/nodes/utils'; -import * as testBinUtils from '../utils'; -import * as testUtils from '../../utils'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); - -describe('CLI Nodes', () => { - const password = 'password'; - const logger = new Logger('pkStdio Test', LogLevel.WARN, [ - new StreamHandler(), - ]); - let dataDir: string; - let nodePath: string; - let passwordFile: string; - let polykeyAgent: PolykeyAgent; - let remoteOnline: PolykeyAgent; - let remoteOffline: PolykeyAgent; - - let keynodeId: NodeId; - let remoteOnlineNodeId: NodeId; - let remoteOfflineNodeId: NodeId; - - let remoteOnlineHost: Host; - let remoteOnlinePort: Port; - let remoteOfflineHost: Host; - let remoteOfflinePort: Port; - - const nodeId1 = makeNodeId( - 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', - ); - const nodeId2 = makeNodeId( - 'vrcacp9vsb4ht25hds6s4lpp2abfaso0mptcfnh499n35vfcn2gkg', - ); - const nodeId3 = makeNodeId( - 'v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug', - ); - - // Helper functions - function genCommands(options: Array) { - return ['nodes', ...options, '-np', nodePath]; - } - - beforeAll(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - nodePath = path.join(dataDir, 'keynode'); - passwordFile = path.join(dataDir, 'passwordFile'); - await fs.promises.writeFile(passwordFile, 'password'); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: nodePath, - logger: logger, - }); - keynodeId = polykeyAgent.nodeManager.getNodeId(); - - // Setting up a remote keynode - remoteOnline = await testUtils.setupRemoteKeynode({ - logger, - }); - remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); - remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); - remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); - await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); - - // Setting up an offline remote keynode - remoteOffline = await testUtils.setupRemoteKeynode({ - logger, - }); - remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); - remoteOfflineHost = remoteOffline.revProxy.getIngressHost(); - remoteOfflinePort = remoteOffline.revProxy.getIngressPort(); - await 
testUtils.addRemoteDetails(polykeyAgent, remoteOffline); - await remoteOffline.stop(); - - // Authorize session - await testBinUtils.pkStdio( - ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], - {}, - nodePath, - ); - }, global.polykeyStartupTimeout * 3); - afterAll(async () => { - await polykeyAgent.stop(); - await polykeyAgent.destroy(); - await testUtils.cleanupRemoteKeynode(remoteOnline); - await testUtils.cleanupRemoteKeynode(remoteOffline); - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - describe('commandClaimNode', () => { - beforeAll(async () => { - await remoteOnline.nodeManager.setNode(keynodeId, { - host: polykeyAgent.revProxy.getIngressHost(), - port: polykeyAgent.revProxy.getIngressPort(), - } as NodeAddress); - await polykeyAgent.acl.setNodePerm(remoteOnlineNodeId, { - gestalt: { - notify: null, - }, - vaults: {}, - }); - await remoteOnline.acl.setNodePerm(keynodeId, { - gestalt: { - notify: null, - }, - vaults: {}, - }); - }); - afterEach(async () => { - await polykeyAgent.notificationsManager.clearNotifications(); - await remoteOnline.notificationsManager.clearNotifications(); - await polykeyAgent.sigchain.clearDB(); - await remoteOnline.sigchain.clearDB(); - }); - afterAll(async () => { - await polykeyAgent.acl.setNodePerm(remoteOnlineNodeId, { - gestalt: {}, - vaults: {}, - }); - await remoteOnline.acl.setNodePerm(keynodeId, { - gestalt: {}, - vaults: {}, - }); - await remoteOnline.nodeManager.clearDB(); - }); - test( - 'Should send a gestalt invite', - async () => { - const commands = genCommands(['claim', remoteOnlineNodeId]); - const result = await testBinUtils.pkStdio(commands); - expect(result.exitCode).toBe(0); // Succeeds. - expect(result.stdout).toContain('Gestalt Invite'); - expect(result.stdout).toContain(remoteOnlineNodeId); - }, - global.polykeyStartupTimeout * 4, - ); - test('Should send a gestalt invite (force invite)', async () => { - await remoteOnline.notificationsManager.sendNotification(keynodeId, { - type: 'GestaltInvite', - }); - const commands = genCommands([ - 'claim', - remoteOnlineNodeId, - '--force-invite', - ]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); // Succeeds. - expect(result.stdout).toContain('Gestalt Invite'); - expect(result.stdout).toContain(remoteOnlineNodeId); - }); - test('Should claim remote node', async () => { - await remoteOnline.notificationsManager.sendNotification(keynodeId, { - type: 'GestaltInvite', - }); - const commands = genCommands(['claim', remoteOnlineNodeId]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); // Succeeds. - expect(result.stdout).toContain('cryptolink claim'); - expect(result.stdout).toContain(remoteOnlineNodeId); - }); - }); - describe('commandPingNode', () => { - test( - 'Should return failure when pinging an offline node', - async () => { - const commands = genCommands(['ping', remoteOfflineNodeId]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(1); // Should fail with no response. for automation purposes. - expect(result.stdout).toContain('No response received'); - - // Checking for json output - const commands2 = genCommands([ - 'ping', - remoteOfflineNodeId, - '--format', - 'json', - ]); - const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(1); // Should fail with no response. for automation purposes. 
- expect(result2.stdout).toContain('No response received'); - }, - global.failedConnectionTimeout * 2, - ); - test( - "Should return failure if can't find the node", - async () => { - const fakeNodeId = nodeId1; - const commands = genCommands(['ping', fakeNodeId]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).not.toBe(0); // Should fail if node doesn't exist. - expect(result.stdout).toContain('Failed to resolve node ID'); - - // Json format. - const commands2 = genCommands(['ping', fakeNodeId, '--format', 'json']); - const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).not.toBe(0); // Should fail if node doesn't exist. - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('false'); - expect(result2.stdout).toContain('message'); - expect(result2.stdout).toContain('Failed to resolve node ID'); - }, - global.failedConnectionTimeout * 2, - ); - test('Should return success when pinging a live node', async () => { - const commands = genCommands(['ping', remoteOnlineNodeId]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('Node is Active.'); - - // Checking for Json output. - const commands2 = genCommands([ - 'ping', - remoteOnlineNodeId, - '--format', - 'json', - ]); - const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(0); - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('true'); - expect(result2.stdout).toContain('message'); - expect(result2.stdout).toContain('Node is Active'); - }); - }); - describe('commandFindNode', () => { - test('Should find an online node', async () => { - const commands = genCommands(['find', remoteOnlineNodeId]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('Found node at'); - expect(result.stdout).toContain(remoteOnlineHost); - expect(result.stdout).toContain(remoteOnlinePort); - - // Checking json format. - const commands2 = genCommands([ - 'find', - remoteOnlineNodeId, - '--format', - 'json', - ]); - const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(0); - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('true'); - expect(result2.stdout).toContain('message'); - expect(result2.stdout).toContain( - `Found node at ${remoteOnlineHost}:${remoteOnlinePort}`, - ); - expect(result2.stdout).toContain('host'); - expect(result2.stdout).toContain('port'); - expect(result2.stdout).toContain('id'); - expect(result2.stdout).toContain(remoteOnlineNodeId); - }); - test('Should find an offline node', async () => { - const commands = genCommands(['find', remoteOfflineNodeId]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('Found node at'); - expect(result.stdout).toContain(remoteOfflineHost); - expect(result.stdout).toContain(remoteOfflinePort); - - // Checking json format. 
- const commands2 = genCommands([ - 'find', - remoteOfflineNodeId, - '--format', - 'json', - ]); - const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(0); - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('true'); - expect(result2.stdout).toContain('message'); - expect(result2.stdout).toContain( - `Found node at ${remoteOfflineHost}:${remoteOfflinePort}`, - ); - expect(result2.stdout).toContain('host'); - expect(result2.stdout).toContain('port'); - expect(result2.stdout).toContain('id'); - expect(result2.stdout).toContain(remoteOfflineNodeId); - }); - test( - 'Should fail to find an unknown node', - async () => { - const unknownNodeId = nodeId2; - const commands = genCommands(['find', unknownNodeId]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(1); - expect(result.stdout).toContain(`Failed to find node ${unknownNodeId}`); - - // Checking json format. - const commands2 = genCommands([ - 'find', - unknownNodeId, - '--format', - 'json', - ]); - const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(1); - expect(result2.stdout).toContain(`message`); - expect(result2.stdout).toContain( - `Failed to find node ${unknownNodeId}`, - ); - expect(result2.stdout).toContain('id'); - expect(result2.stdout).toContain(unknownNodeId); - expect(result2.stdout).toContain('port'); - expect(result2.stdout).toContain('0'); - expect(result2.stdout).toContain('host'); - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('false'); - }, - global.failedConnectionTimeout * 2, - ); - }); - describe('commandAddNode', () => { - const validNodeId = nodeId3; - const invalidNodeId = 'INVALIDID' as NodeId; - const validHost = '0.0.0.0'; - const invalidHost = 'INVALIDHOST'; - const port = 55555; - afterEach(async () => { - await polykeyAgent.nodeManager.clearDB(); - }); - afterAll(async () => { - // Restore removed nodes - await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); - await testUtils.addRemoteDetails(polykeyAgent, remoteOffline); - }); - test('Should add the node', async () => { - const commands = genCommands([ - 'add', - validNodeId, - validHost, - port.toString(), - ]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); - - // Checking if node was added. - const res = await polykeyAgent.nodeManager.getNode(validNodeId); - expect(res).toBeTruthy(); - expect(res!.host).toEqual(validHost); - expect(res!.port).toEqual(port); - }); - test( - 'Should fail to add the node (invalid node ID)', - async () => { - const commands = genCommands([ - 'add', - invalidNodeId, - validHost, - port.toString(), - ]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).not.toBe(0); - expect(result.stderr).toContain('Invalid node ID.'); - }, - global.failedConnectionTimeout, - ); - test( - 'Should fail to add the node (invalid IP address)', - async () => { - const commands = genCommands([ - 'add', - validNodeId, - invalidHost, - port.toString(), - ]); - const result = await testBinUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).not.toBe(0); - expect(result.stderr).toContain('Invalid IP address.'); - - // Checking if node was added. 
- const res = await polykeyAgent.nodeManager.getNode(validNodeId); - expect(res).toBeUndefined(); - }, - global.failedConnectionTimeout, - ); - }); -}); diff --git a/tests/bin/nodes/ping.test.ts b/tests/bin/nodes/ping.test.ts new file mode 100644 index 000000000..1c094a1d2 --- /dev/null +++ b/tests/bin/nodes/ping.test.ts @@ -0,0 +1,157 @@ +import type { Host, Port } from '@/network/types'; +import type { NodeId } from '@/nodes/types'; +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import * as nodesUtils from '@/nodes/utils'; +import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + +describe('ping', () => { + const password = 'password'; + const logger = new Logger('ping test', LogLevel.WARN, [ + new StreamHandler(), + ]); + let dataDir: string; + let nodePath: string; + let passwordFile: string; + let polykeyAgent: PolykeyAgent; + let remoteOnline: PolykeyAgent; + let remoteOffline: PolykeyAgent; + + let keynodeId: NodeId; + let remoteOnlineNodeId: NodeId; + let remoteOfflineNodeId: NodeId; + + let remoteOnlineHost: Host; + let remoteOnlinePort: Port; + let remoteOfflineHost: Host; + let remoteOfflinePort: Port; + + // Helper functions + function genCommands(options: Array) { + return ['nodes', ...options, '-np', nodePath]; + } + + beforeAll(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'keynode'); + passwordFile = path.join(dataDir, 'passwordFile'); + await fs.promises.writeFile(passwordFile, 'password'); + polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: nodePath, + logger: logger, + }); + keynodeId = polykeyAgent.nodeManager.getNodeId(); + + // Setting up a remote keynode + remoteOnline = await testUtils.setupRemoteKeynode({ + logger, + }); + remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); + remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); + remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); + await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); + + // Setting up an offline remote keynode + remoteOffline = await testUtils.setupRemoteKeynode({ + logger, + }); + remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); + remoteOfflineHost = remoteOffline.revProxy.getIngressHost(); + remoteOfflinePort = remoteOffline.revProxy.getIngressPort(); + await testUtils.addRemoteDetails(polykeyAgent, remoteOffline); + await remoteOffline.stop(); + + // Authorize session + await testBinUtils.pkStdio( + ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], + {}, + nodePath, + ); + }, global.polykeyStartupTimeout * 3); + afterAll(async () => { + await polykeyAgent.stop(); + await polykeyAgent.destroy(); + await testUtils.cleanupRemoteKeynode(remoteOnline); + await testUtils.cleanupRemoteKeynode(remoteOffline); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + test( + 'fail when pinging an offline node', + async () => { + const commands = genCommands(['ping', remoteOfflineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(1); // Should fail with no response. for automation purposes. 
+ expect(result.stdout).toContain('No response received'); + + // Checking for json output + const commands2 = genCommands([ + 'ping', + remoteOfflineNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(1); // Should fail with no response. for automation purposes. + expect(result2.stdout).toContain('No response received'); + }, + global.failedConnectionTimeout * 2, + ); + test( + 'fail if node cannot be found', + async () => { + const fakeNodeId = nodesUtils.makeNodeId( + 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', + ); + const commands = genCommands(['ping', fakeNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).not.toBe(0); // Should fail if node doesn't exist. + expect(result.stdout).toContain('Failed to resolve node ID'); + + // Json format. + const commands2 = genCommands(['ping', fakeNodeId, '--format', 'json']); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).not.toBe(0); // Should fail if node doesn't exist. + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('false'); + expect(result2.stdout).toContain('message'); + expect(result2.stdout).toContain('Failed to resolve node ID'); + }, + global.failedConnectionTimeout * 2, + ); + test('succeed when pinging a live node', async () => { + const commands = genCommands(['ping', remoteOnlineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('Node is Active.'); + + // Checking for Json output. + const commands2 = genCommands([ + 'ping', + remoteOnlineNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(0); + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('true'); + expect(result2.stdout).toContain('message'); + expect(result2.stdout).toContain('Node is Active'); + }); +}); From 613212e378fc06bce83fed5b272e82cf006092c0 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Tue, 21 Dec 2021 13:55:03 +1100 Subject: [PATCH 13/28] Refactoring ForwardProxy and ReverseProxy with more expansive error handling and timeout handling --- src/http/utils.ts | 4 +- src/network/Connection.ts | 34 +- src/network/ConnectionForward.ts | 222 ++- src/network/ConnectionReverse.ts | 261 ++- src/network/ForwardProxy.ts | 110 +- src/network/ReverseProxy.ts | 56 +- src/network/errors.ts | 8 +- test-destroy.ts | 50 + test-end-destroy-client.ts | 44 + test-end-destroy.ts | 44 + tests/grpc/utils/GRPCClientTest.ts | 2 - tests/network/ForwardProxy.test.ts | 2926 +++++++++++++++++++--------- tests/network/ReverseProxy.test.ts | 385 ++-- tests/network/index.test.ts | 99 +- 14 files changed, 2972 insertions(+), 1273 deletions(-) create mode 100644 test-destroy.ts create mode 100644 test-end-destroy-client.ts create mode 100644 test-end-destroy.ts diff --git a/src/http/utils.ts b/src/http/utils.ts index 5308a92b4..a83e5dc21 100644 --- a/src/http/utils.ts +++ b/src/http/utils.ts @@ -58,8 +58,6 @@ function terminatingHttpServer( return terminating; } - // Why the fuck do we take this out of promise... - // Even tho it works... let resolveTerminating; let rejectTerminating; terminating = new Promise((resolve, reject) => { @@ -69,7 +67,7 @@ function terminatingHttpServer( // On new request. 
server.on('request', (incomingMessage, outgoingMessage) => { - // If this new request have not been responded. Close Connection. + // If this new request have not been responded. Close Connection. if (!outgoingMessage.headersSent) { outgoingMessage.setHeader('connection', 'close'); } diff --git a/src/network/Connection.ts b/src/network/Connection.ts index 34e31b290..97aa44091 100644 --- a/src/network/Connection.ts +++ b/src/network/Connection.ts @@ -10,7 +10,24 @@ abstract class Connection { public readonly port: Port; public readonly address: Address; public readonly tlsConfig: Readonly; - public readonly timeoutTime: number; + /** + * Time used for keep-alive timeout + */ + public readonly keepAliveTimeoutTime: number; + /** + * Time used to gracefully wait for teardown + * Used for both UTP and client sockets in forward + * Used for both UTP and server sockets in reverse + */ + public readonly endTime: number; + /** + * Time used between each ping or pong message for hole-punching + */ + public readonly punchIntervalTime: number; + /** + * Time used between each ping or pong message for keep-alive + */ + public readonly keepAliveIntervalTime: number; protected logger: Logger; protected timeout: ReturnType; @@ -21,14 +38,20 @@ abstract class Connection { host, port, tlsConfig, - timeoutTime = 20000, + keepAliveTimeoutTime = 20000, + endTime = 1000, + punchIntervalTime = 1000, + keepAliveIntervalTime = 1000, logger, }: { utpSocket: UTP; host: Host; port: Port; tlsConfig: TLSConfig; - timeoutTime?: number; + keepAliveTimeoutTime?: number; + endTime?: number; + punchIntervalTime?: number; + keepAliveIntervalTime?: number; logger?: Logger; }) { const address = networkUtils.buildAddress(host, port); @@ -41,7 +64,10 @@ abstract class Connection { this.port = port; this.tlsConfig = tlsConfig; this.address = address; - this.timeoutTime = timeoutTime; + this.keepAliveTimeoutTime = keepAliveTimeoutTime; + this.endTime = endTime; + this.punchIntervalTime = punchIntervalTime; + this.keepAliveIntervalTime = keepAliveIntervalTime; } get composed(): boolean { diff --git a/src/network/ConnectionForward.ts b/src/network/ConnectionForward.ts index 0d7c2c7f6..b3d92c65a 100644 --- a/src/network/ConnectionForward.ts +++ b/src/network/ConnectionForward.ts @@ -12,7 +12,7 @@ import Connection from './Connection'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; import { utils as keysUtils } from '../keys'; -import { promise } from '../utils'; +import { promise, timerStart, timerStop } from '../utils'; type ConnectionsForward = { ingress: Map; @@ -23,32 +23,89 @@ interface ConnectionForward extends StartStop {} @StartStop() class ConnectionForward extends Connection { public readonly nodeId: NodeId; - public readonly pingIntervalTime: number; + public readonly endTime: number; protected connections: ConnectionsForward; protected pingInterval: ReturnType; protected utpConn: UTPConnection; protected tlsSocket: TLSSocket; + protected clientSocket?: Socket; protected clientHost: Host; protected clientPort: Port; protected clientAddress: Address; protected serverCertChain: Array; protected resolveReadyP: (value: void) => void; + protected handleMessage = async ( + data: Buffer, + remoteInfo: { address: string; port: number }, + ) => { + // Ignore messages not intended for this target + if (remoteInfo.address !== this.host || remoteInfo.port !== this.port) { + return; + } + let msg: NetworkMessage; + try { + msg = networkUtils.unserializeNetworkMessage(data); + } catch (e) { + 
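+      // Malformed messages are dropped here; only messages that parse successfully reset the keep-alive timeout below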
return; + } + // Don't reset timeout until timeout is initialised + if (this.timeout != null) { + // Any message should reset the timeout + this.stopKeepAliveTimeout(); + this.startKeepAliveTimeout(); + } + if (msg.type === 'ping') { + this.resolveReadyP(); + // Respond with ready message + await this.send(networkUtils.pongBuffer); + } + }; + + protected handleError = async (e: Error) => { + this.logger.warn(`Forward Error: ${e.toString()}`); + await this.stop(); + }; + + /** + * Handles receiving `end` event for `this.tlsSocket` from reverse + * Handler is removed and not executed when `end` is initiated here + */ + protected handleEnd = async () => { + this.logger.debug('Receives tlsSocket ending'); + if (this.utpConn.destroyed) { + this.tlsSocket.destroy(); + this.logger.debug('Destroyed tlsSocket'); + } else { + this.logger.debug('Responds tlsSocket ending'); + this.tlsSocket.end(); + this.tlsSocket.destroy(); + this.logger.debug('Responded tlsSocket ending'); + } + await this.stop(); + }; + + /** + * Handles `close` event for `this.tlsSocket` + * Destroying `this.tlsSocket` triggers the close event + * If already stopped, then this does nothing + */ + protected handleClose = async () => { + await this.stop(); + }; + public constructor({ nodeId, connections, - pingIntervalTime = 1000, ...rest }: { nodeId: NodeId; connections: ConnectionsForward; - pingIntervalTime?: number; } & AbstractConstructorParameters[0]) { super(rest); this.nodeId = nodeId; this.connections = connections; - this.pingIntervalTime = pingIntervalTime; } public async start({ @@ -69,7 +126,12 @@ class ConnectionForward extends Connection { const handleStartError = (e) => { rejectErrorP(e); }; - this.utpConn = this.utpSocket.connect(this.port, this.host); + // Normal sockets defaults to `allowHalfOpen: false` + // But UTP defaults to `allowHalfOpen: true` + // Setting `allowHalfOpen: false` on UTP is buggy and cannot be used + this.utpConn = this.utpSocket.connect(this.port, this.host, { + allowHalfOpen: true, + }); this.tlsSocket = tls.connect( { key: Buffer.from(this.tlsConfig.keyPrivatePem, 'ascii'), @@ -90,13 +152,18 @@ class ConnectionForward extends Connection { await this.send(networkUtils.pingBuffer); punchInterval = setInterval(async () => { await this.send(networkUtils.pingBuffer); - }, 1000); + }, this.punchIntervalTime); await Promise.race([ Promise.all([readyP, secureConnectP]).then(() => {}), errorP, ...(timer != null ? 
[timer.timerP] : []), ]); } catch (e) { + // Destroy the socket before calling stop + // The stop will try to do a graceful end + // if the socket is not already destroyed + // However at this point the socket is not actually established + this.tlsSocket.destroy(); await this.stop(); throw new networkErrors.ErrorConnectionStart(e.message, { code: e.code, @@ -106,7 +173,12 @@ class ConnectionForward extends Connection { } finally { clearInterval(punchInterval); } + this.tlsSocket.on('error', this.handleError); + this.tlsSocket.off('error', handleStartError); if (timer?.timedOut) { + // Destroy the socket + // At this point the socket is not actually established + this.tlsSocket.destroy(); await this.stop(); throw new networkErrors.ErrorConnectionStartTimeout(); } @@ -117,25 +189,38 @@ class ConnectionForward extends Connection { await this.stop(); throw e; } - this.tlsSocket.off('error', handleStartError); - this.tlsSocket.on('error', this.handleError); - await this.startPingInterval(); + await this.startKeepAliveInterval(); this.serverCertChain = serverCertChain; this.connections.ingress.set(this.address, this); - this.startTimeout(); + this.startKeepAliveTimeout(); this.logger.info('Started Connection Forward'); } + /** + * Repeated invocations are noops + */ public async stop(): Promise { this.logger.info('Stopping Connection Forward'); this._composed = false; - this.stopTimeout(); - this.stopPingInterval(); + this.stopKeepAliveTimeout(); + this.stopKeepAliveInterval(); this.utpSocket.off('message', this.handleMessage); + const endPs: Array> = []; if (!this.tlsSocket.destroyed) { - this.tlsSocket.end(); - this.tlsSocket.destroy(); + this.logger.debug('Sends tlsSocket ending'); + this.tlsSocket.unpipe(); + // Graceful exit has its own end handler + this.tlsSocket.removeAllListeners('end'); + endPs.push(this.endGracefully(this.tlsSocket, this.endTime)); + } + if (this.clientSocket != null && !this.clientSocket.destroyed) { + this.logger.debug('Sends clientSocket ending'); + this.clientSocket.unpipe(); + // Graceful exit has its own end handler + this.clientSocket.removeAllListeners('end'); + endPs.push(this.endGracefully(this.clientSocket, this.endTime)); } + await Promise.all(endPs); this.connections.ingress.delete(this.address); this.connections.client.delete(this.clientAddress); this.logger.info('Stopped Connection Forward'); @@ -148,29 +233,25 @@ class ConnectionForward extends Connection { throw new networkErrors.ErrorConnectionComposed(); } this._composed = true; + this.clientSocket = clientSocket; this.logger.info('Composing Connection Forward'); - this.tlsSocket.on('error', (e) => { - if (!clientSocket.destroyed) { - clientSocket.destroy(e); - } + clientSocket.on('error', async (e) => { + this.logger.warn(`Client Error: ${e.toString()}`); + await this.stop(); }); - this.tlsSocket.on('close', () => { - clientSocket.destroy(); - }); - clientSocket.on('end', () => { + clientSocket.on('end', async () => { + this.logger.debug('Receives clientSocket ending'); + this.logger.debug('Responds clientSocket ending'); clientSocket.end(); - }); - clientSocket.on('error', (e) => { - if (!this.tlsSocket.destroyed) { - this.tlsSocket.emit('error', e); - } clientSocket.destroy(); + this.logger.debug('Responded clientSocket ending'); + await this.stop(); }); - clientSocket.on('close', () => { - this.tlsSocket.destroy(); + clientSocket.on('close', async () => { + await this.stop(); }); - this.tlsSocket.pipe(clientSocket); - clientSocket.pipe(this.tlsSocket); + this.tlsSocket.pipe(clientSocket, { end: 
false }); + clientSocket.pipe(this.tlsSocket, { end: false }); const clientAddressInfo = clientSocket.address() as AddressInfo; this.clientHost = clientAddressInfo.address as Host; this.clientPort = clientAddressInfo.port as Port; @@ -210,78 +291,43 @@ class ConnectionForward extends Connection { return this.serverCertChain.map((c) => networkUtils.certNodeId(c)); } - protected async startPingInterval(): Promise { + protected async startKeepAliveInterval(): Promise { await this.send(networkUtils.pingBuffer); this.pingInterval = setInterval(async () => { await this.send(networkUtils.pingBuffer); - }, this.pingIntervalTime); + }, this.keepAliveIntervalTime); } - protected stopPingInterval() { + protected stopKeepAliveInterval() { clearInterval(this.pingInterval); } - protected startTimeout() { + protected startKeepAliveTimeout() { this.timeout = setTimeout(() => { - this.tlsSocket.emit( - 'error', - new networkErrors.ErrorConnectionTimeout() - ); - }, this.timeoutTime); + this.tlsSocket.emit('error', new networkErrors.ErrorConnectionTimeout()); + }, this.keepAliveTimeoutTime); } - protected stopTimeout() { + protected stopKeepAliveTimeout() { clearTimeout(this.timeout); } - protected handleMessage = async ( - data: Buffer, - remoteInfo: { address: string; port: number }, - ) => { - // Ignore messages not intended for this target - if (remoteInfo.address !== this.host || remoteInfo.port !== this.port) { - return; - } - let msg: NetworkMessage; - try { - msg = networkUtils.unserializeNetworkMessage(data); - } catch (e) { - return; - } - // Don't reset timeout until timeout is initialised - if (this.timeout != null) { - // Any message should reset the timeout - this.stopTimeout(); - this.startTimeout(); - } - if (msg.type === 'ping') { - this.resolveReadyP(); - // Respond with ready message - await this.send(networkUtils.pongBuffer); - } - }; - - protected handleError = (e: Error) => { - this.logger.warn(`Connection Error: ${e.toString()}`); - this.tlsSocket.destroy(); - }; - - /** - * Destroying the server socket triggers the close event - */ - protected handleClose = async () => { - await this.stop(); - }; - - protected handleEnd = () => { - if (this.utpConn.destroyed) { - // The utp connection may already be destroyed - this.tlsSocket.destroy(); + protected async endGracefully(socket: Socket, timeout: number) { + const { p: endP, resolveP: resolveEndP } = promise(); + socket.once('end', resolveEndP); + socket.end(); + const timer = timerStart(timeout); + await Promise.race([endP, timer.timerP]); + socket.removeListener('end', resolveEndP); + if (timer.timedOut) { + socket.emit('error', new networkErrors.ErrorConnectionEndTimeout()); } else { - // Prevent half open connections - this.tlsSocket.end(); + timerStop(timer); } - }; + // Must be destroyed if timed out + // If not timed out, force destroy the socket due to buggy tlsSocket and utpConn + socket.destroy(); + } } export default ConnectionForward; diff --git a/src/network/ConnectionReverse.ts b/src/network/ConnectionReverse.ts index 698dda4d5..60b071a9f 100644 --- a/src/network/ConnectionReverse.ts +++ b/src/network/ConnectionReverse.ts @@ -12,7 +12,7 @@ import Connection from './Connection'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; import { utils as keysUtils } from '../keys'; -import { promise } from '../utils'; +import { promise, timerStart, timerStop } from '../utils'; type ConnectionsReverse = { egress: Map; @@ -27,12 +27,67 @@ class ConnectionReverse extends Connection { protected 
connections: ConnectionsReverse; protected serverSocket: Socket; + protected tlsSocket?: Socket; protected proxyHost: Host; protected proxyPort: Port; protected proxyAddress: Address; protected clientCertChain: Array; protected resolveReadyP: (value: void) => void; + protected handleMessage = async ( + data: Buffer, + remoteInfo: { address: string; port: number }, + ) => { + // Ignore messages not intended for this target + if (remoteInfo.address !== this.host || remoteInfo.port !== this.port) { + return; + } + let msg: NetworkMessage; + try { + msg = networkUtils.unserializeNetworkMessage(data); + } catch (e) { + return; + } + // Don't reset timeout until timeout is initialised + if (this.timeout != null) { + // Any message should reset the timeout + this.stopKeepAliveTimeout(); + this.startKeepAliveTimeout(); + } + if (msg.type === 'ping') { + await this.send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + this.resolveReadyP(); + } + }; + + protected handleError = async (e: Error) => { + this.logger.warn(`Server Error: ${e.toString()}`); + await this.stop(); + }; + + /** + * Handles receiving `end` event for `this.serverSocket` from server + * Handler is removed and not executed when `end` is initiated here + */ + protected handleEnd = async () => { + this.logger.debug('Receives serverSocket ending'); + this.logger.debug('Responds serverSocket ending'); + this.serverSocket.end(); + this.serverSocket.destroy(); + this.logger.debug('Responded serverSocket ending'); + await this.stop(); + }; + + /** + * Handles `close` event for `this.serverSocket` + * Destroying `this.serverSocket` triggers the close event + * If already stopped, then this does nothing + */ + protected handleClose = async () => { + await this.stop(); + }; + public constructor({ serverHost, serverPort, @@ -90,13 +145,18 @@ class ConnectionReverse extends Connection { await this.send(networkUtils.pingBuffer); punchInterval = setInterval(async () => { await this.send(networkUtils.pingBuffer); - }, 1000); + }, this.punchIntervalTime); await Promise.race([ readyP, errorP, ...(timer != null ? 
[timer.timerP] : []), ]); } catch (e) { + // Destroy the socket before calling stop + // The stop will try to do a graceful end + // if the socket is not already destroyed + // However at this point the socket is not actually established + this.serverSocket.destroy(); await this.stop(); throw new networkErrors.ErrorConnectionStart(e.message, { code: e.code, @@ -106,32 +166,42 @@ class ConnectionReverse extends Connection { } finally { clearInterval(punchInterval); } + this.serverSocket.on('error', this.handleError); + this.serverSocket.off('error', handleStartError); if (timer?.timedOut) { await this.stop(); throw new networkErrors.ErrorConnectionStartTimeout(); } - this.serverSocket.off('error', handleStartError); - this.serverSocket.on('error', this.handleError); this.connections.egress.set(this.address, this); this.connections.proxy.set(this.proxyAddress, this); - this.startTimeout(); + this.startKeepAliveTimeout(); this.logger.info('Started Connection Reverse'); } /** - * The close event should run the stop * Repeated invocations are noops */ public async stop() { this.logger.info('Stopping Connection Reverse'); this._composed = false; - this.stopTimeout(); + this.stopKeepAliveTimeout(); this.utpSocket.off('message', this.handleMessage); + const endPs: Array> = []; if (!this.serverSocket.destroyed) { - // console.log('SENDING END TO serverSocket'); - this.serverSocket.end(); - this.serverSocket.destroy(); + this.logger.debug('Sends serverSocket ending'); + this.serverSocket.unpipe(); + // Graceful exit has its own end handler + this.serverSocket.removeAllListeners('end'); + endPs.push(this.endGracefully(this.serverSocket, this.endTime)); } + if (this.tlsSocket != null && !this.tlsSocket.destroyed) { + this.logger.debug('Sends tlsSocket ending'); + this.tlsSocket.unpipe(); + // Graceful exit has its own end handler + this.tlsSocket.removeAllListeners('end'); + endPs.push(this.endGracefully(this.tlsSocket, this.endTime)); + } + await Promise.all(endPs); this.connections.egress.delete(this.address); this.connections.proxy.delete(this.proxyAddress); this.logger.info('Stopped Connection Reverse'); @@ -173,69 +243,68 @@ class ConnectionReverse extends Connection { ...(timer != null ? 
[timer.timerP] : []), ]); } catch (e) { + // Hard close the tls socket + if (!tlsSocket.destroyed) { + tlsSocket.end(); + tlsSocket.destroy(); + } throw new networkErrors.ErrorConnectionCompose(e.message, { code: e.code, errno: e.errno, syscall: e.syscall, }); } + tlsSocket.on('error', async (e) => { + this.logger.warn(`Reverse Error: ${e.toString()}`); + await this.stop(); + }); + tlsSocket.off('error', handleComposeError); + // TODO:, this location is problematic + // IF stop is called in the middle of composition + // It feels like we need to LOCK the operations + // So that if you're composing, you cannot stop + // And when stopping you cannot compose + // At this point, graceful exit can be done for the tls socket + this.tlsSocket = tlsSocket; if (timer?.timedOut) { + // Hard close the tls socket + if (!tlsSocket.destroyed) { + tlsSocket.end(); + tlsSocket.destroy(); + } throw new networkErrors.ErrorConnectionComposeTimeout(); } const clientCertChain = networkUtils.getCertificateChain(tlsSocket); - networkUtils.verifyClientCertificateChain(clientCertChain); - tlsSocket.off('error', handleComposeError); - // Propagate end, error, close and data - tlsSocket.on('end', () => { - if (utpConn.destroyed) { - // The utp connection may already be destroyed - tlsSocket.destroy(); - } else { - - // console.log('DESTROYED', tlsSocket.destroyed); - // // @ts-ignore - // console.log('PENDING', tlsSocket.pending); - // // @ts-ignore - // console.log('READYSTATE', tlsSocket.readyState); - // console.log('CONNECTING', tlsSocket.connecting); - // console.log('ALLOW HALF OPEN', tlsSocket.allowHalfOpen); - // console.log('ALLOW HALF OPEN utpConn', utpConn.allowHalfOpen); - // console.log('ENDED?', tlsSocket.writableEnded); - // console.log('FINISHED?', tlsSocket.writableFinished); - // console.log('ENDED?', utpConn.writableEnded); - // console.log('FINISHED?', utpConn.writableFinished); - // console.log(utpConn); - - // Prevent half open connections + try { + networkUtils.verifyClientCertificateChain(clientCertChain); + } catch (e) { + // Hard close the tls socket + if (!tlsSocket.destroyed) { tlsSocket.end(); - - // utpConn.end(); - // console.log("ENDED TLS SOCKET AGAIN?"); - + tlsSocket.destroy(); } - }); - tlsSocket.on('error', (e) => { + throw e; + } - // console.log('EMITTING ERROR ON TLS SOCKET', e); - if (!this.serverSocket.destroyed) { - this.serverSocket.emit('error', e); - } - tlsSocket.destroy(); - }); - tlsSocket.on('close', () => { - this.serverSocket.destroy(); - }); - this.serverSocket.on('error', (e) => { - if (!tlsSocket.destroyed) { - tlsSocket.destroy(e); + tlsSocket.on('end', async () => { + this.logger.debug('Receives tlsSocket ending'); + if (utpConn.destroyed) { + tlsSocket.destroy(); + this.logger.debug('Destroyed tlsSocket'); + } else { + this.logger.debug('Responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + this.logger.debug('Responded tlsSocket ending'); } + await this.stop(); }); - this.serverSocket.on('close', () => { - tlsSocket.destroy(); + tlsSocket.on('close', async () => { + await this.stop(); }); - tlsSocket.pipe(this.serverSocket); - this.serverSocket.pipe(tlsSocket); + tlsSocket.pipe(this.serverSocket, { end: false }); + this.serverSocket.pipe(tlsSocket, { end: false }); this.clientCertChain = clientCertChain; this.logger.info('Composed Connection Reverse'); } catch (e) { @@ -268,62 +337,42 @@ class ConnectionReverse extends Connection { return this.clientCertChain.map((c) => networkUtils.certNodeId(c)); } - protected startTimeout() { - 
this.timeout = setTimeout(() => { - this.serverSocket.emit( - 'error', - new networkErrors.ErrorConnectionTimeout(), - ); - }, this.timeoutTime); + protected startKeepAliveTimeout() { + this.timeout = setTimeout(async () => { + // This is more precisely an error for reverse + // However it may not yet be established + const e = new networkErrors.ErrorConnectionTimeout(); + if (this.tlsSocket != null && !this.tlsSocket.destroyed) { + this.tlsSocket.emit('error', e); + } else { + // The composition has not occurred yet + // This means we have timed out waiting for a composition + this.logger.warn(`Reverse Error: ${e.toString()}`); + await this.stop(); + } + }, this.keepAliveTimeoutTime); } - protected stopTimeout() { + protected stopKeepAliveTimeout() { clearTimeout(this.timeout); } - protected handleMessage = async ( - data: Buffer, - remoteInfo: { address: string; port: number }, - ) => { - // Ignore messages not intended for this target - if (remoteInfo.address !== this.host || remoteInfo.port !== this.port) { - return; + protected async endGracefully(socket: Socket, timeout: number) { + const { p: endP, resolveP: resolveEndP } = promise(); + socket.once('end', resolveEndP); + socket.end(); + const timer = timerStart(timeout); + await Promise.race([endP, timer.timerP]); + socket.removeListener('end', resolveEndP); + if (timer.timedOut) { + socket.emit('error', new networkErrors.ErrorConnectionEndTimeout()); + } else { + timerStop(timer); } - let msg: NetworkMessage; - try { - msg = networkUtils.unserializeNetworkMessage(data); - } catch (e) { - return; - } - // Don't reset timeout until timeout is initialised - if (this.timeout != null) { - // Any message should reset the timeout - this.stopTimeout(); - this.startTimeout(); - } - if (msg.type === 'ping') { - await this.send(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - this.resolveReadyP(); - } - }; - - protected handleError = (e: Error) => { - this.logger.warn(`Connection Error: ${e.toString()}`); - this.serverSocket.destroy(); - }; - - /** - * Destroying the server socket triggers the close event - */ - protected handleClose = async () => { - await this.stop(); - }; - - protected handleEnd = () => { - // Prevent half open connections - this.serverSocket.end(); - }; + // Must be destroyed if timed out + // If not timed out, force destroy the socket due to buggy tlsSocket and utpConn + socket.destroy(); + } } export default ConnectionReverse; diff --git a/src/network/ForwardProxy.ts b/src/network/ForwardProxy.ts index 35e9587ee..4d10f8b2b 100644 --- a/src/network/ForwardProxy.ts +++ b/src/network/ForwardProxy.ts @@ -12,15 +12,17 @@ import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import ConnectionForward from './ConnectionForward'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; -import { promisify, sleep, timerStart, timerStop } from '../utils'; +import { promisify, timerStart, timerStop } from '../utils'; interface ForwardProxy extends StartStop {} @StartStop() class ForwardProxy { public readonly authToken: string; public readonly connConnectTime: number; - public readonly connTimeoutTime: number; - public readonly connPingIntervalTime: number; + public readonly connKeepAliveTimeoutTime: number; + public readonly connEndTime: number; + public readonly connPunchIntervalTime: number; + public readonly connKeepAliveIntervalTime: number; protected logger: Logger; protected proxyHost: Host; @@ -39,23 +41,28 @@ class ForwardProxy { constructor({ authToken, 
connConnectTime = 20000, - connTimeoutTime = 20000, - connPingIntervalTime = 1000, + connKeepAliveTimeoutTime = 20000, + connEndTime = 1000, + connPunchIntervalTime = 1000, + connKeepAliveIntervalTime = 1000, logger, }: { authToken: string; connConnectTime?: number; - connTimeoutTime?: number; - connPingIntervalTime?: number; - + connKeepAliveTimeoutTime?: number; + connEndTime?: number; + connPunchIntervalTime?: number; + connKeepAliveIntervalTime?: number; logger?: Logger; }) { this.logger = logger ?? new Logger(ForwardProxy.name); this.logger.info('Creating Forward Proxy'); this.authToken = authToken; this.connConnectTime = connConnectTime; - this.connTimeoutTime = connTimeoutTime; - this.connPingIntervalTime = connPingIntervalTime; + this.connKeepAliveTimeoutTime = connKeepAliveTimeoutTime; + this.connEndTime = connEndTime; + this.connPunchIntervalTime = connPunchIntervalTime; + this.connKeepAliveIntervalTime = connKeepAliveIntervalTime; this.server = http.createServer(); this.server.on('request', this.handleRequest); this.server.on('connect', this.handleConnect); @@ -83,7 +90,10 @@ class ForwardProxy { this.logger.info( `Starting Forward Proxy from ${proxyAddress} to ${egressAddress}`, ); - const utpSocket = UTP({ allowHalfOpen: false }); + // Normal sockets defaults to `allowHalfOpen: false` + // But UTP defaults to `allowHalfOpen: true` + // Setting `allowHalfOpen: false` on UTP is buggy and cannot be used + const utpSocket = UTP({ allowHalfOpen: true }); const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); await utpSocketBind(egressPort, egressHost); egressPort = utpSocket.address().port; @@ -105,15 +115,20 @@ class ForwardProxy { public async stop(): Promise { this.logger.info('Stopping Forward Proxy Server'); + // Ensure no new connections are created + this.server.removeAllListeners('connect'); + this.server.on('connect', async (_request, clientSocket) => { + const clientSocketEnd = promisify(clientSocket.end).bind(clientSocket); + await clientSocketEnd('HTTP/1.1 503 Service Unavailable\r\n' + '\r\n'); + clientSocket.destroy(); + }); + const connStops: Array> = []; + for (const [_, conn] of this.connections.ingress) { + connStops.push(conn.stop()); + } const serverClose = promisify(this.server.close).bind(this.server); await serverClose(); - // Ensure no new connections are created while this is iterating - await Promise.all( - Array.from(this.connections.ingress, ([, conn]) => conn.stop()), - ); - // Delay socket close by about 1 second - // this gives some time for the end/FIN packets to be sent - await sleep(1000); + await Promise.all(connStops); // Even when all connections are destroyed // the utp socket sometimes hangs in closing // here we asynchronously close and unreference it @@ -195,6 +210,14 @@ class ForwardProxy { this.tlsConfig = tlsConfig; } + /** + * Manually opens a connection with the ForwardProxy + * Usually you just use HTTP Connect requests to trigger handleConnect + * This will default to using `this.connConnectTime` if + * timer is not set or set to `undefined` + * It will only stop the timer if using the default timer + * Set timer to `null` explicitly to wait forever + */ @ready(new networkErrors.ErrorForwardProxyNotRunning()) public async openConnection( nodeId: NodeId, @@ -202,6 +225,10 @@ class ForwardProxy { ingressPort: Port, timer?: Timer, ): Promise { + let timer_ = timer; + if (timer === undefined) { + timer_ = timerStart(this.connConnectTime); + } const ingressAddress = networkUtils.buildAddress(ingressHost, ingressPort); let lock = 
this.connectionLocks.get(ingressAddress); if (lock == null) { @@ -210,8 +237,11 @@ class ForwardProxy { } const release = await lock.acquire(); try { - await this.establishConnection(nodeId, ingressHost, ingressPort, timer); + await this.establishConnection(nodeId, ingressHost, ingressPort, timer_); } finally { + if (timer === undefined) { + timerStop(timer_!); + } release(); this.connectionLocks.delete(ingressAddress); } @@ -311,9 +341,43 @@ class ForwardProxy { timer, ); } catch (e) { + if (e instanceof networkErrors.ErrorConnectionStartTimeout) { + if (!clientSocket.destroyed) { + await clientSocketEnd('HTTP/1.1 504 Gateway Timeout\r\n' + '\r\n'); + clientSocket.destroy(e); + } + return; + } + if (e instanceof networkErrors.ErrorConnectionStart) { + if (!clientSocket.destroyed) { + await clientSocketEnd('HTTP/1.1 502 Bad Gateway\r\n' + '\r\n'); + clientSocket.destroy(e); + } + return; + } + if (e instanceof networkErrors.ErrorCertChain) { + if (!clientSocket.destroyed) { + await clientSocketEnd( + 'HTTP/1.1 526 Invalid SSL Certificate\r\n' + '\r\n', + ); + clientSocket.destroy(e); + } + return; + } + if (e instanceof networkErrors.ErrorConnectionTimeout) { + if (!clientSocket.destroyed) { + await clientSocketEnd( + 'HTTP/1.1 524 A Timeout Occurred\r\n' + '\r\n', + ); + clientSocket.destroy(e); + } + return; + } if (e instanceof networkErrors.ErrorConnection) { if (!clientSocket.destroyed) { - await clientSocketEnd('HTTP/1.1 400 Bad Request\r\n' + '\r\n'); + await clientSocketEnd( + 'HTTP/1.1 500 Internal Server Error\r\n' + '\r\n', + ); clientSocket.destroy(e); } return; @@ -372,12 +436,14 @@ class ForwardProxy { conn = new ConnectionForward({ nodeId, connections: this.connections, - pingIntervalTime: this.connPingIntervalTime, utpSocket: this.utpSocket, host: ingressHost, port: ingressPort, tlsConfig: this.tlsConfig, - timeoutTime: this.connTimeoutTime, + keepAliveTimeoutTime: this.connKeepAliveTimeoutTime, + endTime: this.connEndTime, + punchIntervalTime: this.connPunchIntervalTime, + keepAliveIntervalTime: this.connKeepAliveIntervalTime, logger: this.logger.getChild( `${ConnectionForward.name} ${ingressAddress}`, ), diff --git a/src/network/ReverseProxy.ts b/src/network/ReverseProxy.ts index af443da57..c00c9b05d 100644 --- a/src/network/ReverseProxy.ts +++ b/src/network/ReverseProxy.ts @@ -10,13 +10,15 @@ import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import ConnectionReverse from './ConnectionReverse'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; -import { promisify, sleep, timerStart, timerStop } from '../utils'; +import { promisify, timerStart, timerStop } from '../utils'; interface ReverseProxy extends StartStop {} @StartStop() class ReverseProxy { public readonly connConnectTime: number; - public readonly connTimeoutTime: number; + public readonly connKeepAliveTimeoutTime: number; + public readonly connEndTime: number; + public readonly connPunchIntervalTime: number; protected logger: Logger; protected ingressHost: Host; @@ -33,17 +35,23 @@ class ReverseProxy { constructor({ connConnectTime = 20000, - connTimeoutTime = 20000, + connKeepAliveTimeoutTime = 20000, + connEndTime = 1000, + connPunchIntervalTime = 1000, logger, }: { connConnectTime?: number; - connTimeoutTime?: number; + connKeepAliveTimeoutTime?: number; + connEndTime?: number; + connPunchIntervalTime?: number; logger?: Logger; }) { this.logger = logger ?? 
new Logger(ReverseProxy.name); this.logger.info('Creating Reverse Proxy'); this.connConnectTime = connConnectTime; - this.connTimeoutTime = connTimeoutTime; + this.connKeepAliveTimeoutTime = connKeepAliveTimeoutTime; + this.connEndTime = connEndTime; + this.connPunchIntervalTime = connPunchIntervalTime; this.logger.info('Created Reverse Proxy'); } @@ -68,11 +76,14 @@ class ReverseProxy { this.logger.info( `Starting Reverse Proxy from ${ingressAddress} to ${serverAddress}`, ); + // Normal sockets defaults to `allowHalfOpen: false` + // But UTP defaults to `allowHalfOpen: true` + // Setting `allowHalfOpen: false` on UTP is buggy and cannot be used const utpSocket = UTP.createServer( { allowHalfOpen: true, }, - this.handleConnection + this.handleConnection, ); const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); await utpSocketListen(ingressPort, ingressHost); @@ -92,13 +103,17 @@ class ReverseProxy { public async stop(): Promise { this.logger.info('Stopping Reverse Proxy'); - // Ensure no new connections are created while this is iterating - await Promise.all( - Array.from(this.connections.egress, ([, conn]) => conn.stop()), - ); - // Delay socket close by about 1 second - // this gives some time for the end/FIN packets to be sent - await sleep(1000); + // Ensure no new connections are created + this.utpSocket.removeAllListeners('connection'); + this.utpSocket.on('connection', (utpConn: UTPConnection) => { + utpConn.end(); + utpConn.destroy(); + }); + const connStops: Array> = []; + for (const [_, conn] of this.connections.egress) { + connStops.push(conn.stop()); + } + await Promise.all(connStops); // Even when all connections are destroyed // the utp socket sometimes hangs in closing // here we asynchronously close and unreference it @@ -187,6 +202,10 @@ class ReverseProxy { egressPort: Port, timer?: Timer, ): Promise { + let timer_ = timer; + if (timer === undefined) { + timer_ = timerStart(this.connConnectTime); + } const egressAddress = networkUtils.buildAddress(egressHost, egressPort); let lock = this.connectionLocks.get(egressAddress); if (lock == null) { @@ -195,8 +214,11 @@ class ReverseProxy { } const release = await lock.acquire(); try { - await this.establishConnection(egressHost, egressPort, timer); + await this.establishConnection(egressHost, egressPort, timer_); } finally { + if (timer === undefined) { + timerStop(timer_!); + } release(); this.connectionLocks.delete(egressAddress); } @@ -261,7 +283,7 @@ class ReverseProxy { throw e; } if (!utpConn.destroyed) { - utpConn.destroy(e); + utpConn.emit('error', e); } else { this.logger.warn( `Failed connection from ${egressAddress} - ${e.toString()}`, @@ -306,7 +328,9 @@ class ReverseProxy { host: egressHost, port: egressPort, tlsConfig: this.tlsConfig, - timeoutTime: this.connTimeoutTime, + keepAliveTimeoutTime: this.connKeepAliveTimeoutTime, + endTime: this.connEndTime, + punchIntervalTime: this.connPunchIntervalTime, logger: this.logger.getChild( `${ConnectionReverse.name} ${egressAddress}`, ), diff --git a/src/network/errors.ts b/src/network/errors.ts index d017704d2..868b482d0 100644 --- a/src/network/errors.ts +++ b/src/network/errors.ts @@ -58,8 +58,13 @@ class ErrorConnectionTimeout extends ErrorConnection { exitCode = sysexits.UNAVAILABLE; } +class ErrorConnectionEndTimeout extends ErrorConnection { + description = 'Connection end timed out'; + exitCode = sysexits.UNAVAILABLE; +} + /** - * Used by ConnectionForward + * Used by ConnectionForward and ConnectionReverse */ class ErrorConnectionStart extends 
ErrorConnection { description = 'Connection start failed'; @@ -141,6 +146,7 @@ export { ErrorConnectionNotComposed, ErrorConnectionMessageParse, ErrorConnectionTimeout, + ErrorConnectionEndTimeout, ErrorConnectionStart, ErrorConnectionStartTimeout, ErrorConnectionCompose, diff --git a/test-destroy.ts b/test-destroy.ts new file mode 100644 index 000000000..e074e7dab --- /dev/null +++ b/test-destroy.ts @@ -0,0 +1,50 @@ +import net from 'net'; +import { utils as keysUtils } from './src/keys'; +import { utils as networkUtils } from './src/network'; + +async function main () { + + const clientKeyPair = await keysUtils.generateKeyPair(1024); + const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); + const clientCert = keysUtils.generateCertificate( + clientKeyPair.publicKey, + clientKeyPair.privateKey, + clientKeyPair.privateKey, + 12332432423, + ); + const clientCertPem = keysUtils.certToPem(clientCert); + const clientNodeId = networkUtils.certNodeId(clientCert); + + let socket; + const p = new Promise((resolve) => { + socket = net.connect( + { + port: 80, + host: '142.250.66.206', + allowHalfOpen: false + }, + () => { + resolve(); + } + ); + socket.on('close', () => { + console.log('CLOSE EVENT EMITTED'); + }); + }); + + const p2 = new Promise((resolve) => { + socket.on('end', () => { + resolve(); + }); + }); + socket.end(); + await p2; + + console.log('allow half open', socket.allowHalfOpen); + console.log('ready state', socket.readyState); + console.log('destroyed', socket.destroyed); + + +} + +main(); diff --git a/test-end-destroy-client.ts b/test-end-destroy-client.ts new file mode 100644 index 000000000..46ada616e --- /dev/null +++ b/test-end-destroy-client.ts @@ -0,0 +1,44 @@ +import { utils as keysUtils } from './src/keys'; +import net from 'net'; +import tls from 'tls'; + +async function main () { + + const clientKeyPair = await keysUtils.generateKeyPair(1024); + const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); + const clientCert = keysUtils.generateCertificate( + clientKeyPair.publicKey, + clientKeyPair.privateKey, + clientKeyPair.privateKey, + 86400, + ); + const clientCertPem = keysUtils.certToPem(clientCert); + + const socket = net.createConnection({ + port: 55555, + host: '127.0.0.1', + allowHalfOpen: true + }, () => { + + const tlsSocket = tls.connect( + { + key: Buffer.from(clientKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(clientCertPem, 'ascii'), + socket: socket, + rejectUnauthorized: false, + }, + () => { + + tlsSocket.on('end', () => { + console.log('RECEIVED END AFTER SENDING end'); + }); + + console.log('SENDING END'); + tlsSocket.end(); + + }, + ); + }); +} + +main(); diff --git a/test-end-destroy.ts b/test-end-destroy.ts new file mode 100644 index 000000000..2d38217ed --- /dev/null +++ b/test-end-destroy.ts @@ -0,0 +1,44 @@ +import { utils as keysUtils } from './src/keys'; +import net from 'net'; +import tls from 'tls'; + +async function main () { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const server = net.createServer({ allowHalfOpen: true }, (c) => { + console.log('received connection'); + const tlsSocket = new tls.TLSSocket(c, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: 
true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.once('secure', () => { + console.log('established secure conn'); + }); + tlsSocket.on('end', async () => { + console.log('received end'); + tlsSocket.end(() => { + console.log('HELLO WORLD'); + }); + console.log('destroying'); + tlsSocket.destroy(); + }); + tlsSocket.on('close', () => { + console.log('destroyed'); + }); + }); + server.listen(55555, () => { + console.log('server bound'); + }); +} + +main(); diff --git a/tests/grpc/utils/GRPCClientTest.ts b/tests/grpc/utils/GRPCClientTest.ts index ba96a4882..ddcc21dbe 100644 --- a/tests/grpc/utils/GRPCClientTest.ts +++ b/tests/grpc/utils/GRPCClientTest.ts @@ -63,9 +63,7 @@ class GRPCClientTest extends GRPCClient { } public async destroy() { - this.logger.info(`Destroying ${this.constructor.name}`); await super.destroy(); - this.logger.info(`Destroyed ${this.constructor.name}`); } @ready() diff --git a/tests/network/ForwardProxy.test.ts b/tests/network/ForwardProxy.test.ts index bfa787330..7f2d9d641 100644 --- a/tests/network/ForwardProxy.test.ts +++ b/tests/network/ForwardProxy.test.ts @@ -1,5 +1,5 @@ import type { Socket } from 'net'; - +import type { KeyPairPem } from '@/keys/types'; import type { Host, Port } from '@/network/types'; import type { NodeId } from '@/nodes/types'; import http from 'http'; @@ -13,80 +13,69 @@ import { errors as networkErrors, } from '@/network'; import * as keysUtils from '@/keys/utils'; -import { promisify, promise, timerStart, timerStop } from '@/utils'; +import { promisify, promise, timerStart, timerStop, poll } from '@/utils'; import * as testUtils from '../utils'; -describe('ForwardProxy', () => { - - // @ts-ignore - console.log(global.globalDataDir); +/** + * Mock HTTP Connect Request + * This is what clients to the ForwardProxy should be doing + * Returns the network socket established + * @throws Error on failure to connect, may contain status code as message + */ +async function httpConnect( + host: string, + port: number, + token: string, + path: string, +): Promise { + const tokenEncoded = Buffer.from(token, 'utf-8').toString('base64'); + const socket = await new Promise((resolve, reject) => { + const req = http.request({ + method: 'CONNECT', + path: path, + host: host, + port: port, + headers: { + 'Proxy-Authorization': `Basic ${tokenEncoded}`, + }, + }); + req.end(); + req.once('connect', (res, clientSocket) => { + if (res.statusCode === 200) { + resolve(clientSocket); + } else { + reject(new Error(res.statusCode!.toString())); + } + }); + req.once('error', (e) => { + reject(e); + }); + }); + return socket; +} - const logger = new Logger('ForwardProxy Test', LogLevel.WARN, [ +describe(ForwardProxy.name, () => { + const logger = new Logger(`${ForwardProxy.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); - const keyPairPem = testUtils.globalKeyPairPem; - const cert = keysUtils.generateCertificate( - testUtils.globalKeyPair.publicKey, - testUtils.globalKeyPair.privateKey, - testUtils.globalKeyPair.privateKey, - 86400 - ); - const certPem = keysUtils.certToPem(cert); - - // Helper functions - // async function connect( - // host: string, - // port: number, - // token: string, - // path: string, - // ): Promise { - // const socket = await new Promise((resolve, reject) => { - // const req = http.request({ - // method: 'CONNECT', - // path: path, - // host: host, - // port: port, - // headers: { - // 'Proxy-Authorization': `Basic ${token}`, - // }, - // }); - // req.end(); - // req.once('connect', (res, clientSocket) => { - 
// if (res.statusCode === 200) { - // resolve(clientSocket); - // } else { - // reject(new Error(res.statusCode!.toString())); - // } - // }); - // req.once('error', (e) => { - // reject(e); - // }); - // }); - // return socket; - // } - - // beforeEach(async () => { - // authToken = 'sdafjs8'; - // fwdProxy = new ForwardProxy({ - // authToken, - // logger, - // }); - // await fwdProxy.start({ - // proxyHost: '::1' as Host, - // tlsConfig: { - // keyPrivatePem: keyPairPem.privateKey, - // certChainPem: certPem, - // }, - // }); - // }); - // afterEach(async () => { - // await fwdProxy.stop(); - // }); - + const authToken = 'abc123'; + let keyPairPem: KeyPairPem; + let certPem: string; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + keyPairPem = keysUtils.keyPairToPem(globalKeyPair); + const cert = keysUtils.generateCertificate( + globalKeyPair.publicKey, + globalKeyPair.privateKey, + globalKeyPair.privateKey, + 86400 + ); + certPem = keysUtils.certToPem(cert); + }); test('forward proxy readiness', async () => { const fwdProxy = new ForwardProxy({ - authToken: '', - logger: logger, + authToken, + logger, }); // Should be a noop (already stopped) await fwdProxy.stop(); @@ -96,6 +85,13 @@ describe('ForwardProxy', () => { certChainPem: certPem, }, }); + expect(typeof fwdProxy.getProxyHost()).toBe('string'); + expect(typeof fwdProxy.getProxyPort()).toBe('number'); + expect(fwdProxy.getProxyPort()).toBeGreaterThan(0); + expect(typeof fwdProxy.getEgressHost()).toBe('string'); + expect(typeof fwdProxy.getEgressPort()).toBe('number'); + expect(fwdProxy.getEgressPort()).toBeGreaterThan(0); + expect(fwdProxy.getConnectionCount()).toBe(0); // Should be a noop (already started) await fwdProxy.start({ tlsConfig: { @@ -110,11 +106,1385 @@ describe('ForwardProxy', () => { await expect(async () => { await fwdProxy.closeConnection('::1' as Host, 1 as Port); }).rejects.toThrow(networkErrors.ErrorForwardProxyNotRunning); + // Start it again + await fwdProxy.start({ + proxyHost: '::1' as Host, + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + expect(fwdProxy.getProxyHost()).toBe('::1'); + await fwdProxy.stop(); }); - - test('starting and stopping the forward proxy', async () => { + test('HTTP CONNECT bad request failures to the forward proxy', async () => { + // The forward proxy will emit error logs when this occurs + // In production these connect errors should never happen + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy CONNECT bad request'), + }); + await fwdProxy.start({ + proxyHost: '::1' as Host, + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + // Incorrect auth token + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + 'incorrect auth token', + `127.0.0.1:80?nodeId=${encodeURIComponent('SOMENODEID')}`, + ), + ).rejects.toThrow('407'); + // No node id + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + '127.0.0.1:80', + ), + ).rejects.toThrow('400'); + // Missing target + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `?nodeId=${encodeURIComponent('SOMENODEID')}`, + ), + ).rejects.toThrow('400'); + await fwdProxy.stop(); + }); + test('connection to port 0 fails', async () => { + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy port 0'), + }); + await 
fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + // Cannot open connection to port 0 + await expect(() => + fwdProxy.openConnection('abc' as NodeId, '127.0.0.1' as Host, 0 as Port), + ).rejects.toThrow(networkErrors.ErrorConnectionStart); + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `127.0.0.1:0?nodeId=${encodeURIComponent('abc')}`, + ) + ).rejects.toThrow('502'); + await fwdProxy.stop(); + }); + test('connection start timeout due to hanging remote', async () => { + // 1 seconds to wait to establish a connection + // Must reduce the ping interval time to 100ms + // Also reduce the end tome to 100ms + // So that we can test timeouts quicker + const fwdProxy = new ForwardProxy({ + authToken, + connConnectTime: 1000, + connKeepAliveIntervalTime: 100, + connEndTime: 100, + logger: logger.getChild('ForwardProxy connection timeout'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + // This UTP server will just hang and not respond + let recievedCount = 0; + const utpSocketHang = UTP.createServer(() => { + recievedCount++; + }); + const utpSocketHangListen = promisify(utpSocketHang.listen).bind( + utpSocketHang, + ); + await utpSocketHangListen(0, '127.0.0.1'); + const utpSocketHangPort = utpSocketHang.address().port; + await expect(() => + fwdProxy.openConnection( + 'abc' as NodeId, + '127.0.0.1' as Host, + utpSocketHangPort as Port, + ), + ).rejects.toThrow(networkErrors.ErrorConnectionStartTimeout); + expect(recievedCount).toBe(1); + // Can override the timer + const timer = timerStart(2000); + await expect(() => + fwdProxy.openConnection( + 'abc' as NodeId, + '127.0.0.1' as Host, + utpSocketHangPort as Port, + timer, + ), + ).rejects.toThrow(networkErrors.ErrorConnectionStartTimeout); + timerStop(timer); + expect(recievedCount).toBe(2); + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `127.0.0.1:${utpSocketHangPort}?nodeId=${encodeURIComponent('abc')}`, + ), + ).rejects.toThrow('504'); + expect(recievedCount).toBe(3); + utpSocketHang.close(); + utpSocketHang.unref(); + await fwdProxy.stop(); + }); + test('connection reset due to ending remote', async () => { const fwdProxy = new ForwardProxy({ - authToken: 'abc', + authToken, + logger: logger.getChild('ForwardProxy connection reset'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + // This UTP Server will immediately end and destroy + // the connection upon receiving a connection + let recievedCount = 0; + const utpSocketEnd = UTP.createServer((utpConn) => { + recievedCount++; + utpConn.end(); + utpConn.destroy(); + }); + const utpSocketEndListen = promisify(utpSocketEnd.listen).bind( + utpSocketEnd, + ); + await utpSocketEndListen(0, '127.0.0.1'); + const utpSocketEndPort = utpSocketEnd.address().port; + await expect(() => + fwdProxy.openConnection( + 'abc' as NodeId, + '127.0.0.1' as Host, + utpSocketEndPort as Port, + ), + ).rejects.toThrow(networkErrors.ErrorConnectionStart); + expect(recievedCount).toBe(1); + // The actual error is UTP_ECONNRESET to be precise + await expect(() => + fwdProxy.openConnection( + 'abc' as NodeId, + '127.0.0.1' as Host, + utpSocketEndPort as Port, + ), + ).rejects.toThrow(/UTP_ECONNRESET/); + expect(recievedCount).toBe(2); + // 502 Bad Gateway on HTTP Connect + await expect(() => + 
httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `127.0.0.1:${utpSocketEndPort}?nodeId=${encodeURIComponent('abc')}`, + ), + ).rejects.toThrow('502'); + expect(recievedCount).toBe(3); + utpSocketEnd.close(); + utpSocketEnd.unref(); + await fwdProxy.stop(); + }); + test('open connection fails due to missing certificates', async () => { + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy missing certificates'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + // All TLS servers must have a certificate and associated key + // This is TLS socket is therefore dead on arrival by not providing + // any certificate nor key + const tlsSocket = new tls.TLSSocket(utpConn, { + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + // TLS socket will be closed as soon as error is emitted + // Therefore this will never be called + // However the socket is ended anyway automatically + tlsSocket.on('end', () => { + tlsSocketEnd(); + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + // This is a TLS handshake failure + await expect(() => + fwdProxy.openConnection( + 'somerandomnodeid' as NodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ), + ).rejects.toThrow(networkErrors.ErrorConnectionStart); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + // The TLS socket throw an error because there's no suitable signature algorithm + expect(tlsSocketError.mock.calls.length).toBe(1); + // expect(tlsSocketError.mock.calls[0][0]).toBeInstanceOf(Error); + expect(tlsSocketError.mock.calls[0][0]).toHaveProperty('code', 'ERR_SSL_NO_SUITABLE_SIGNATURE_ALGORITHM'); + // The TLS socket end event never was emitted + 
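+    // because the handshake failed before the secure stream was ever established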
expect(tlsSocketEnd.mock.calls.length).toBe(0); + // The TLS socket close event is emitted with error + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(true); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT fails due to missing certificates', async () => { + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy missing certificates'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + // All TLS servers must have a certificate and associated key + // This is TLS socket is therefore dead on arrival by not providing + // any certificate nor key + const tlsSocket = new tls.TLSSocket(utpConn, { + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + // TLS socket will be closed as soon as error is emitted + // Therefore this will never be called + // However the socket is ended anyway automatically + tlsSocket.on('end', () => { + tlsSocketEnd(); + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + // This is an TLS handshake failure + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent('somerandomnodeid')}`, + ) + ).rejects.toThrow('502'); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + // The TLS socket throw an error because there's no suitable signature algorithm + expect(tlsSocketError.mock.calls.length).toBe(1); + // expect(tlsSocketError.mock.calls[0][0]).toBeInstanceOf(Error); + expect(tlsSocketError.mock.calls[0][0]).toHaveProperty('code', 
'ERR_SSL_NO_SUITABLE_SIGNATURE_ALGORITHM'); + // The TLS socket end event never was emitted + expect(tlsSocketEnd.mock.calls.length).toBe(0); + // The TLS socket close event is emitted with error + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(true); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('open connection fails due to invalid node id', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy invalid node id'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + let secured = false; + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + secured = true; + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(() => + fwdProxy.openConnection( + 'somerandomnodeid' as NodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ), + ).rejects.toThrow(networkErrors.ErrorCertChainUnclaimed); + await 
expect(remoteReadyP).resolves.toBeUndefined(); + expect(secured).toBe(true); + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + // No TLS socket errors this time + // The client side figured that the node id is incorect + expect(tlsSocketError.mock.calls.length).toBe(0); + // This time the tls socket is ended from the client side + expect(tlsSocketEnd.mock.calls.length).toBe(1); + // The TLS socket close event is emitted without error + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT fails due to invalid node id', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy invalid node id'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + let secured = false; + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + secured = true; + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = 
utpSocket.address().address;
+ const utpSocketPort = utpSocket.address().port;
+ expect(fwdProxy.getConnectionCount()).toBe(0);
+ await expect(() =>
+ httpConnect(
+ fwdProxy.getProxyHost(),
+ fwdProxy.getProxyPort(),
+ authToken,
+ `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent('somerandomnodeid')}`,
+ )
+ ).rejects.toThrow('526');
+ await expect(remoteReadyP).resolves.toBeUndefined();
+ expect(secured).toBe(true);
+ expect(fwdProxy.getConnectionCount()).toBe(0);
+ await expect(remoteClosedP).resolves.toBeUndefined();
+ expect(utpConnError.mock.calls.length).toBe(0);
+ // No TLS socket errors this time
+ // The client side figured that the node id is incorrect
+ expect(tlsSocketError.mock.calls.length).toBe(0);
+ // This time the tls socket is ended from the client side
+ expect(tlsSocketEnd.mock.calls.length).toBe(1);
+ // The TLS socket close event is emitted without error
+ expect(tlsSocketClose.mock.calls.length).toBe(1);
+ expect(tlsSocketClose.mock.calls[0][0]).toBe(false);
+ utpSocket.off('message', handleMessage);
+ utpSocket.close();
+ utpSocket.unref();
+ await fwdProxy.stop();
+ });
+ test('open connection success - forward initiates end', async () => {
+ const serverKeyPair = await keysUtils.generateKeyPair(1024);
+ const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair);
+ const serverCert = keysUtils.generateCertificate(
+ serverKeyPair.publicKey,
+ serverKeyPair.privateKey,
+ serverKeyPair.privateKey,
+ 86400,
+ );
+ const serverCertPem = keysUtils.certToPem(serverCert);
+ const serverNodeId = networkUtils.certNodeId(serverCert);
+ const fwdProxy = new ForwardProxy({
+ authToken,
+ logger: logger.getChild('ForwardProxy open connection success - forward initiates end'),
+ });
+ await fwdProxy.start({
+ tlsConfig: {
+ keyPrivatePem: keyPairPem.privateKey,
+ certChainPem: certPem,
+ },
+ });
+ const egressHost = fwdProxy.getEgressHost();
+ const egressPort = fwdProxy.getEgressPort();
+ const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise();
+ const { p: remoteSecureP, resolveP: resolveRemoteSecureP } =
+ promise();
+ const { p: remoteClosedP, resolveP: resolveRemoteClosedP } =
+ promise();
+ const utpConnError = jest.fn();
+ const tlsSocketError = jest.fn();
+ const tlsSocketEnd = jest.fn();
+ const tlsSocketClose = jest.fn();
+ // This UTP server will hold the connection
+ const utpSocket = UTP.createServer(
+ async (utpConn) => {
+ utpConn.on('error', (e) => {
+ utpConnError(e);
+ });
+ const tlsSocket = new tls.TLSSocket(utpConn, {
+ key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'),
+ cert: Buffer.from(serverCertPem, 'ascii'),
+ isServer: true,
+ requestCert: true,
+ rejectUnauthorized: false,
+ });
+ tlsSocket.on('secure', () => {
+ resolveRemoteSecureP();
+ });
+ tlsSocket.on('error', (e) => {
+ tlsSocketError(e);
+ });
+ tlsSocket.on('end', () => {
+ logger.debug('Reverse: receives tlsSocket ending');
+ tlsSocketEnd();
+ if (utpConn.destroyed) {
+ logger.debug('Reverse: destroys tlsSocket');
+ tlsSocket.destroy();
+ } else {
+ logger.debug('Reverse: responds tlsSocket ending');
+ tlsSocket.end();
+ tlsSocket.destroy();
+ logger.debug('Reverse: responded tlsSocket ending');
+ }
+ });
+ tlsSocket.on('close', (hadError) => {
+ tlsSocketClose(hadError);
+ resolveRemoteClosedP();
+ });
+ await send(networkUtils.pingBuffer);
+ const punchInterval = setInterval(async () => {
+ await send(networkUtils.pingBuffer);
+ }, 1000);
+ await remoteReadyP;
+ clearInterval(punchInterval);
+ }
+ );
+ const handleMessage = async (data: Buffer) => {
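// Ping/pong hole punching: a ping from the proxy's egress socket is answered
// with a pong, and a pong coming back from the proxy resolves remoteReadyP so
// that the punch interval in the connection handler above can be cleared.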
+ const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + await fwdProxy.closeConnection( + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('open connection success - reverse initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + connEndTime: 5000, + logger: logger.getChild('ForwardProxy open connection success - reverse initiates end'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // Will use this to simulate reverse initiating end + let tlsSocket_: tls.TLSSocket; + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket_ = tlsSocket; + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + 
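// Half-open handling: if the underlying utpConn is already destroyed the TLS
// socket can only be destroyed, otherwise the end is acknowledged on the
// connection first and the TLS socket destroyed afterwards, so neither side
// is left waiting on a half-open stream.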
logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + utpConn.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + // Start the graceful ending of the tls socket + logger.debug('Reverse: begins tlsSocket ending'); + const { p: endP, resolveP: resolveEndP, } = promise(); + tlsSocket_!.removeAllListeners('end'); + tlsSocket_!.once('end', resolveEndP); + tlsSocket_!.end(); + await endP; + // Force destroy the socket due to buggy tlsSocket and utpConn + tlsSocket_!.destroy(); + logger.debug('Reverse: finishes tlsSocket ending'); + await expect(remoteClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect(poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100 + )).resolves.toBe(0); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + // This time the reverse side initiates the end + // Therefore, this handler is removed + expect(tlsSocketEnd.mock.calls.length).toBe(0); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT success - forward initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy HTTP CONNECT success - forward 
initiates end'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent(serverNodeId)}`, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + // Normal sockets defaults to `allowHalfOpen: false` + // Therefore this isn't strictly necessary + // Here we are just adding it in ensure consistent behaviour + // If this wasn't done by default, then there should be an error + // emitted on the ConnectionForward tlsSocket as ErrorConnectionEndTimeout + const clientSocketEnd = jest.fn(); + clientSocket.on('end', () => { + clientSocketEnd(); + clientSocket.end(); + }); + clientSocket.on('close', () => { + 
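// localClosedP resolves once the client-facing socket returned by httpConnect
// has fully closed; the test awaits it further down to confirm that closing
// the forward connection also tears down the HTTP CONNECT tunnel.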
resolveLocalClosedP(); + }); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + await fwdProxy.closeConnection( + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(0); + expect(clientSocketEnd.mock.calls.length).toBe(1); + await expect(localClosedP).resolves.toBeUndefined(); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT success - reverse initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy HTTP CONNECT success - reverse initiates end'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // Will use this to simulate reverse initiating end + let tlsSocket_: tls.TLSSocket; + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket_ = tlsSocket; + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await 
send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent(serverNodeId)}`, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + // Normal sockets defaults to `allowHalfOpen: false` + // Therefore this isn't strictly necessary + // Here we are just adding it in ensure consistent behaviour + // If this wasn't done by default, then there should be an error + // emitted on the ConnectionForward tlsSocket as ErrorConnectionEndTimeout + const clientSocketEnd = jest.fn(); + clientSocket.on('end', () => { + clientSocketEnd(); + clientSocket.end(); + }); + clientSocket.on('close', () => { + resolveLocalClosedP(); + }); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + // Start the graceful ending of the tls socket + logger.debug('Reverse: begins tlsSocket ending'); + const { p: endP, resolveP: resolveEndP, } = promise(); + tlsSocket_!.removeAllListeners('end'); + tlsSocket_!.once('end', resolveEndP); + tlsSocket_!.end(); + await endP; + // Force destroy the socket due to buggy tlsSocket and utpConn + tlsSocket_!.destroy(); + logger.debug('Reverse: finishes tlsSocket ending'); + await expect(localClosedP).resolves.toBeUndefined(); + expect(clientSocketEnd.mock.calls.length).toBe(1); + await expect(remoteClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect(poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100 + )).resolves.toBe(0); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + // This time the reverse side initiates the end + // Therefore, this handler is removed + expect(tlsSocketEnd.mock.calls.length).toBe(0); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT success - client initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = 
networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy HTTP CONNECT success - client initiates end'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent(serverNodeId)}`, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + clientSocket.on('close', () => { + resolveLocalClosedP(); + }); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + const { p: endP, resolveP: resolveEndP, } = 
promise(); + // By default net sockets have `allowHalfOpen: false` + // Here we override the behaviour by removing the end listener + // And replacing it with our own, and remember to also force destroy + clientSocket.removeAllListeners('end'); + clientSocket.on('end', () => { + resolveEndP(); + clientSocket.destroy(); + }); + logger.debug('Client: begins clientSocket ending'); + clientSocket.end(); + await endP; + logger.debug('Client: finishes clientSocket ending'); + await expect(localClosedP).resolves.toBeUndefined(); + await expect(remoteClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect(poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100 + )).resolves.toBe(0); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT success by opening connection first', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, logger, }); await fwdProxy.start({ @@ -123,812 +1493,630 @@ describe('ForwardProxy', () => { certChainPem: certPem, }, }); - expect(typeof fwdProxy.getProxyHost()).toBe('string'); - expect(typeof fwdProxy.getProxyPort()).toBe('number'); - expect(fwdProxy.getProxyPort()).toBeGreaterThan(0); - expect(typeof fwdProxy.getEgressHost()).toBe('string'); - expect(typeof fwdProxy.getEgressPort()).toBe('number'); - expect(fwdProxy.getEgressPort()).toBeGreaterThan(0); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); 
+ }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, + ); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + clientSocket.on('close', () => { + resolveLocalClosedP(); + }); + await fwdProxy.closeConnection( + utpSocketHost as Host, + utpSocketPort as Port, + ); + await expect(localClosedP).resolves.toBeUndefined(); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('open connection keepalive timeout', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + connKeepAliveTimeoutTime: 1000, + connKeepAliveIntervalTime: 100, + logger: logger.getChild('ForwardProxy open connection keepalive timeout'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + 
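// Each incoming UTP connection is wrapped in a server-side TLS socket that
// presents serverCert, and pings are punched at the proxy's egress address
// until the proxy answers with a pong (remoteReadyP); the message handler in
// this test deliberately ignores pings so the proxy's keep-alive times out.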
utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + // Do nothing here + // To trigger keep alive timeout + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; expect(fwdProxy.getConnectionCount()).toBe(0); + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(fwdProxy.getConnectionCount()).toBe(1); + // When ErrorConnectionTimeout is triggered + // This results in the destruction of the socket + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); await fwdProxy.stop(); + }); + test('HTTP CONNECT keepalive timeout', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + connKeepAliveTimeoutTime: 1000, + connKeepAliveIntervalTime: 100, + logger: logger.getChild('ForwardProxy HTTP CONNECT keepalive timeout'), + }); await fwdProxy.start({ - proxyHost: '::1' as Host, tlsConfig: { keyPrivatePem: keyPairPem.privateKey, certChainPem: certPem, }, }); - expect(fwdProxy.getProxyHost()).toBe('::1'); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = 
promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer( + async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + // Do nothing here + // To trigger keep alive timeout + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent(serverNodeId)}`, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + clientSocket.on('close', () => { + resolveLocalClosedP(); + }); + expect(fwdProxy.getConnectionCount()).toBe(1); + // When ErrorConnectionTimeout is triggered + // This results in the destruction of the socket + await expect(localClosedP).resolves.toBeUndefined(); + await expect(remoteClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect(poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100 + )).resolves.toBe(0); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + 
expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('stopping the proxy with open connections', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger, + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpSocket = UTP.createServer( + async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP(); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + } + ); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(fwdProxy.getConnectionCount()).toBe(1); + await fwdProxy.stop(); + expect(fwdProxy.getConnectionCount()).toBe(0); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await expect(remoteClosedP).resolves.toBeUndefined(); + }); + test('open connection to multiple servers', async () => { + // First server keys + const serverKeyPair1 = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem1 = keysUtils.keyPairToPem(serverKeyPair1); + const serverCert1 = keysUtils.generateCertificate( + 
serverKeyPair1.publicKey, + serverKeyPair1.privateKey, + serverKeyPair1.privateKey, + 86400, + ); + const serverCertPem1 = keysUtils.certToPem(serverCert1); + const serverNodeId1 = networkUtils.certNodeId(serverCert1); + // Second server keys + const serverKeyPair2 = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem2 = keysUtils.keyPairToPem(serverKeyPair2); + const serverCert2 = keysUtils.generateCertificate( + serverKeyPair2.publicKey, + serverKeyPair2.privateKey, + serverKeyPair2.privateKey, + 86400, + ); + const serverCertPem2 = keysUtils.certToPem(serverCert2); + const serverNodeId2 = networkUtils.certNodeId(serverCert2); + const fwdProxy = new ForwardProxy({ + authToken, + logger, + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + // First signals + const { p: remoteReadyP1, resolveP: resolveRemoteReadyP1 } = + promise(); + const { p: remoteClosedP1, resolveP: resolveRemoteClosedP1 } = + promise(); + // Second signals + const { p: remoteReadyP2, resolveP: resolveRemoteReadyP2 } = + promise(); + const { p: remoteClosedP2, resolveP: resolveRemoteClosedP2 } = + promise(); + const utpSocket1 = UTP.createServer( + async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem1.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem1, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP1(); + }); + tlsSocket.on('end', () => { + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + await send1(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send1(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP1; + clearInterval(punchInterval); + } + ); + const handleMessage1 = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send1(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP1(); + } + }; + utpSocket1.on('message', handleMessage1); + const send1 = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket1.send).bind(utpSocket1); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen1 = promisify(utpSocket1.listen).bind(utpSocket1); + await utpSocketListen1(0, '127.0.0.1'); + const utpSocketHost1 = utpSocket1.address().address; + const utpSocketPort1 = utpSocket1.address().port; + const utpSocket2 = UTP.createServer( + async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem2.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem2, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP2(); + }); + tlsSocket.on('end', () => { + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + await send2(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send2(networkUtils.pingBuffer); + }, 2000); + await remoteReadyP2; + clearInterval(punchInterval); + } + ); + const handleMessage2 = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await 
send2(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP2(); + } + }; + utpSocket2.on('message', handleMessage2); + const send2 = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket2.send).bind(utpSocket2); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen2 = promisify(utpSocket2.listen).bind(utpSocket2); + await utpSocketListen2(0, '127.0.0.1'); + const utpSocketHost2 = utpSocket2.address().address; + const utpSocketPort2 = utpSocket2.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await fwdProxy.openConnection( + serverNodeId1, + utpSocketHost1 as Host, + utpSocketPort1 as Port, + ); + await fwdProxy.openConnection( + serverNodeId2, + utpSocketHost2 as Host, + utpSocketPort2 as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(2); + await expect(remoteReadyP1).resolves.toBeUndefined(); + await expect(remoteReadyP2).resolves.toBeUndefined(); + await fwdProxy.closeConnection( + utpSocketHost1 as Host, + utpSocketPort1 as Port, + ); + await fwdProxy.closeConnection( + utpSocketHost2 as Host, + utpSocketPort2 as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(remoteClosedP1).resolves.toBeUndefined(); + await expect(remoteClosedP2).resolves.toBeUndefined(); + utpSocket1.off('message', handleMessage1); + utpSocket1.close(); + utpSocket1.unref(); + utpSocket2.off('message', handleMessage2); + utpSocket2.close(); + utpSocket2.unref(); await fwdProxy.stop(); }); - - - // test('connect failures to the forward proxy', async () => { - // const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); - // // Incorrect auth token - // await expect(() => - // connect( - // fwdProxy.proxyHost, - // fwdProxy.proxyPort, - // 'sdfisojfo', - // `127.0.0.1:80?nodeId=${encodeURIComponent('SOMENODEID')}`, - // ), - // ).rejects.toThrow('407'); - // // No node id - // await expect(() => - // connect( - // fwdProxy.proxyHost, - // fwdProxy.proxyPort, - // authTokenEncoded, - // '127.0.0.1:80', - // ), - // ).rejects.toThrow('400'); - // // Missing target - // await expect(() => - // connect( - // fwdProxy.proxyHost, - // fwdProxy.proxyPort, - // authTokenEncoded, - // `?nodeId=${encodeURIComponent('123')}`, - // ), - // ).rejects.toThrow('400'); - // // Targetting an un-used port - // await expect(() => - // connect( - // fwdProxy.proxyHost, - // fwdProxy.proxyPort, - // authTokenEncoded, - // `127.0.0.1:0?nodeId=${encodeURIComponent('123')}`, - // ), - // ).rejects.toThrow('400'); - // await fwdProxy.stop(); - // }); - // test('open connection to port 0 fails', async () => { - // // Cannot open connection to port 0 - // await expect(() => - // fwdProxy.openConnection('abc' as NodeId, '127.0.0.1' as Host, 0 as Port), - // ).rejects.toThrow(networkErrors.ErrorConnectionStart); - // await fwdProxy.stop(); - // }); - // test('open connection timeout due to hanging remote', async () => { - // // This UTP server will just hang and not respond - // let receivedConnection = false; - // const utpSocketHang = UTP.createServer(() => { - // receivedConnection = true; - // }); - // const utpSocketHangListen = promisify(utpSocketHang.listen).bind( - // utpSocketHang, - // ); - // await utpSocketHangListen(0, '127.0.0.1'); - // const utpSocketHangPort = utpSocketHang.address().port; - // const timer = timerStart(3000); - // await expect(() => - // fwdProxy.openConnection( - // 'abc' as NodeId, - // '127.0.0.1' as Host, - // utpSocketHangPort as Port, - // 
timer, - // ), - // ).rejects.toThrow(networkErrors.ErrorConnectionStartTimeout); - // timerStop(timer); - // expect(receivedConnection).toBe(true); - // utpSocketHang.close(); - // utpSocketHang.unref(); - // await fwdProxy.stop(); - // }); - // test('open connection reset due to ending remote', async () => { - // // This UTP Server will immediately end and destroy - // // the connection upon receiving a connection - // let receivedConnection = false; - // const utpSocketEnd = UTP.createServer((utpConn) => { - // receivedConnection = true; - // utpConn.end(); - // utpConn.destroy(); - // }); - // const utpSocketEndListen = promisify(utpSocketEnd.listen).bind( - // utpSocketEnd, - // ); - // await utpSocketEndListen(0, '127.0.0.1'); - // const utpSocketEndPort = utpSocketEnd.address().port; - // await expect(() => - // fwdProxy.openConnection( - // 'abc' as NodeId, - // '127.0.0.1' as Host, - // utpSocketEndPort as Port, - // ), - // ).rejects.toThrow(networkErrors.ErrorConnectionStart); - // expect(receivedConnection).toBe(true); - // // The actual error is UTP_ECONNRESET to be precise - // await expect(() => - // fwdProxy.openConnection( - // 'abc' as NodeId, - // '127.0.0.1' as Host, - // utpSocketEndPort as Port, - // ), - // ).rejects.toThrow(/UTP_ECONNRESET/); - // utpSocketEnd.close(); - // utpSocketEnd.unref(); - // await fwdProxy.stop(); - // }); - // test.only('open connection fails due to missing certificates', async () => { - // const egressHost = fwdProxy.egressHost; - // const egressPort = fwdProxy.egressPort; - // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - // promise(); - // // This UTP server will hold the connection - // const utpSocket = UTP.createServer( - // { - // allowHalfOpen: false, - // }, - // async (utpConn) => { - // const tlsSocket = new tls.TLSSocket(utpConn, { - // isServer: true, - // requestCert: true, - // rejectUnauthorized: false, - // }); - // tlsSocket.on('close', () => { - // console.log('CLOSE EVENT'); - // resolveRemoteClosedP(); - // }); - // // AllowHalfOpen is buggy - // // this ends the connection in case it doesn't work - // tlsSocket.on('end', () => { - // console.log('END EVENT'); - // tlsSocket.end(); - // }); - // await send(networkUtils.pingBuffer); - // const punchInterval = setInterval(async () => { - // await send(networkUtils.pingBuffer); - // }, 1000); - // await remoteReadyP; - // clearInterval(punchInterval); - // } - // ); - // const handleMessage = async (data: Buffer) => { - // const msg = networkUtils.unserializeNetworkMessage(data); - // if (msg.type === 'ping') { - // await send(networkUtils.pongBuffer); - // } else if (msg.type === 'pong') { - // resolveRemoteReadyP(); - // } - // }; - // utpSocket.on('message', handleMessage); - // const send = async (data: Buffer) => { - // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - // }; - // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - // await utpSocketListen(0, '127.0.0.1'); - // const utpSocketHost = utpSocket.address().address; - // const utpSocketPort = utpSocket.address().port; - // expect(fwdProxy.connectionCount).toBe(0); - // // This is an SSL handshake failure - // await expect(() => - // fwdProxy.openConnection( - // 'somerandomnodeid' as NodeId, - // utpSocketHost as Host, - // utpSocketPort as Port, - // ), - // 
).rejects.toThrow(networkErrors.ErrorConnectionStart); - // await expect(remoteClosedP).resolves.toBeUndefined(); - // utpSocket.off('message', handleMessage); - // utpSocket.close(); - // utpSocket.unref(); - // await fwdProxy.stop(); - // }); - // test('open connection fails due to invalid node id', async () => { - // const serverKeyPair = await keysUtils.generateKeyPair(4096); - // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - // const serverCert = keysUtils.generateCertificate( - // serverKeyPair.publicKey, - // serverKeyPair.privateKey, - // serverKeyPair.privateKey, - // 86400, - // ); - // const serverCertPem = keysUtils.certToPem(serverCert); - // const egressHost = fwdProxy.egressHost; - // const egressPort = fwdProxy.egressPort; - // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - // promise(); - // // This UTP server will hold the connection - // let secured = false; - // const utpSocket = UTP.createServer( - // { - // allowHalfOpen: false, - // }, - // async (utpConn) => { - // const tlsSocket = new tls.TLSSocket(utpConn, { - // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - // cert: Buffer.from(serverCertPem, 'ascii'), - // isServer: true, - // requestCert: true, - // rejectUnauthorized: false, - // }); - // tlsSocket.on('secure', () => { - // secured = true; - // }); - // tlsSocket.on('close', () => { - // resolveRemoteClosedP(); - // }); - // // AllowHalfOpen is buggy - // // this ends the connection in case it doesn't work - // tlsSocket.on('end', () => { - // tlsSocket.end(); - // }); - // await send(networkUtils.pingBuffer); - // const punchInterval = setInterval(async () => { - // await send(networkUtils.pingBuffer); - // }, 1000); - // await remoteReadyP; - // clearInterval(punchInterval); - // } - // ); - // const handleMessage = async (data: Buffer) => { - // const msg = networkUtils.unserializeNetworkMessage(data); - // if (msg.type === 'ping') { - // await send(networkUtils.pongBuffer); - // } else if (msg.type === 'pong') { - // resolveRemoteReadyP(); - // } - // }; - // utpSocket.on('message', handleMessage); - // const send = async (data: Buffer) => { - // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - // }; - // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - // await utpSocketListen(0, '127.0.0.1'); - // const utpSocketHost = utpSocket.address().address; - // const utpSocketPort = utpSocket.address().port; - // expect(fwdProxy.connectionCount).toBe(0); - // await expect(() => - // fwdProxy.openConnection( - // 'somerandomnodeid' as NodeId, - // utpSocketHost as Host, - // utpSocketPort as Port, - // ), - // ).rejects.toThrow(networkErrors.ErrorCertChainUnclaimed); - // await expect(remoteReadyP).resolves.toBeUndefined(); - // // The secure event won't be fired - // // because the connection will be ended before that happens - // expect(secured).toBe(false); - // expect(fwdProxy.connectionCount).toBe(0); - // await expect(remoteClosedP).resolves.toBeUndefined(); - // utpSocket.off('message', handleMessage); - // utpSocket.close(); - // utpSocket.unref(); - // await fwdProxy.stop(); - // }); - // test('open connection success', async () => { - // const serverKeyPair = await keysUtils.generateKeyPair(4096); - // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - // const serverCert = keysUtils.generateCertificate( 
- // serverKeyPair.publicKey, - // serverKeyPair.privateKey, - // serverKeyPair.privateKey, - // 86400, - // ); - // const serverCertPem = keysUtils.certToPem(serverCert); - // const serverNodeId = networkUtils.certNodeId(serverCert); - // const egressHost = fwdProxy.egressHost; - // const egressPort = fwdProxy.egressPort; - // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - // const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = - // promise(); - // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - // promise(); - // // This UTP server will hold the connection - // const utpSocket = UTP.createServer( - // { - // allowHalfOpen: false, - // }, - // async (utpConn) => { - // const tlsSocket = new tls.TLSSocket(utpConn, { - // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - // cert: Buffer.from(serverCertPem, 'ascii'), - // isServer: true, - // requestCert: true, - // rejectUnauthorized: false, - // }); - // tlsSocket.on('secure', () => { - // resolveRemoteSecureP(); - // }); - // tlsSocket.on('close', () => { - // resolveRemoteClosedP(); - // }); - // // AllowHalfOpen is buggy - // // this ends the connection in case it doesn't work - // tlsSocket.on('end', () => { - // tlsSocket.end(); - // }); - // await send(networkUtils.pingBuffer); - // const punchInterval = setInterval(async () => { - // await send(networkUtils.pingBuffer); - // }, 1000); - // await remoteReadyP; - // clearInterval(punchInterval); - // } - // ); - // const handleMessage = async (data: Buffer) => { - // const msg = networkUtils.unserializeNetworkMessage(data); - // if (msg.type === 'ping') { - // await send(networkUtils.pongBuffer); - // } else if (msg.type === 'pong') { - // resolveRemoteReadyP(); - // } - // }; - // utpSocket.on('message', handleMessage); - // const send = async (data: Buffer) => { - // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - // }; - // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - // await utpSocketListen(0, '127.0.0.1'); - // const utpSocketHost = utpSocket.address().address; - // const utpSocketPort = utpSocket.address().port; - // expect(fwdProxy.connectionCount).toBe(0); - // await fwdProxy.openConnection( - // serverNodeId, - // utpSocketHost as Host, - // utpSocketPort as Port, - // ); - // await expect(remoteReadyP).resolves.toBeUndefined(); - // await expect(remoteSecureP).resolves.toBeUndefined(); - // // Opening a duplicate connection is noop - // await fwdProxy.openConnection( - // serverNodeId, - // utpSocketHost as Host, - // utpSocketPort as Port, - // ); - // expect(fwdProxy.connectionCount).toBe(1); - // await fwdProxy.closeConnection( - // utpSocketHost as Host, - // utpSocketPort as Port, - // ); - // expect(fwdProxy.connectionCount).toBe(0); - // await expect(remoteClosedP).resolves.toBeUndefined(); - // utpSocket.off('message', handleMessage); - // utpSocket.close(); - // utpSocket.unref(); - // await fwdProxy.stop(); - // }); - // test('connect success by opening connection first', async () => { - // const serverKeyPair = await keysUtils.generateKeyPair(4096); - // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - // const serverCert = keysUtils.generateCertificate( - // serverKeyPair.publicKey, - // serverKeyPair.privateKey, - // serverKeyPair.privateKey, - // 86400, - // ); - // const serverCertPem = keysUtils.certToPem(serverCert); - // const serverNodeId = 
networkUtils.certNodeId(serverCert); - // const egressHost = fwdProxy.egressHost; - // const egressPort = fwdProxy.egressPort; - // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - // const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = - // promise(); - // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - // promise(); - // // This UTP server will hold the connection - // const utpSocket = UTP.createServer( - // { - // allowHalfOpen: false, - // }, - // async (utpConn) => { - // const tlsSocket = new tls.TLSSocket(utpConn, { - // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - // cert: Buffer.from(serverCertPem, 'ascii'), - // isServer: true, - // requestCert: true, - // rejectUnauthorized: false, - // }); - // tlsSocket.on('secure', () => { - // resolveRemoteSecureP(); - // }); - // tlsSocket.on('close', () => { - // resolveRemoteClosedP(); - // }); - // // AllowHalfOpen is buggy - // // this ends the connection in case it doesn't work - // tlsSocket.on('end', () => { - // tlsSocket.end(); - // }); - // await send(networkUtils.pingBuffer); - // const punchInterval = setInterval(async () => { - // await send(networkUtils.pingBuffer); - // }, 1000); - // await remoteReadyP; - // clearInterval(punchInterval); - // } - // ); - // const handleMessage = async (data: Buffer) => { - // const msg = networkUtils.unserializeNetworkMessage(data); - // if (msg.type === 'ping') { - // await send(networkUtils.pongBuffer); - // } else if (msg.type === 'pong') { - // resolveRemoteReadyP(); - // } - // }; - // utpSocket.on('message', handleMessage); - // const send = async (data: Buffer) => { - // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - // }; - // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - // await utpSocketListen(0, '127.0.0.1'); - // const utpSocketHost = utpSocket.address().address; - // const utpSocketPort = utpSocket.address().port; - // await fwdProxy.openConnection( - // serverNodeId, - // utpSocketHost as Host, - // utpSocketPort as Port, - // ); - // await expect(remoteReadyP).resolves.toBeUndefined(); - // await expect(remoteSecureP).resolves.toBeUndefined(); - // const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); - // const clientSocket = await connect( - // fwdProxy.proxyHost, - // fwdProxy.proxyPort, - // authTokenEncoded, - // `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( - // serverNodeId, - // )}`, - // ); - // expect(clientSocket).toBeInstanceOf(net.Socket); - // expect(clientSocket.remoteAddress).toBe(fwdProxy.proxyHost); - // expect(clientSocket.remotePort).toBe(fwdProxy.proxyPort); - // const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); - // clientSocket.on('close', () => { - // resolveLocalClosedP(); - // }); - // await fwdProxy.closeConnection( - // utpSocketHost as Host, - // utpSocketPort as Port, - // ); - // await expect(localClosedP).resolves.toBeUndefined(); - // await expect(remoteClosedP).resolves.toBeUndefined(); - // utpSocket.off('message', handleMessage); - // utpSocket.close(); - // utpSocket.unref(); - // await fwdProxy.stop(); - // }); - // test('connect success by direct connection', async () => { - // const serverKeyPair = await keysUtils.generateKeyPair(4096); - // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - // const serverCert = keysUtils.generateCertificate( - // serverKeyPair.publicKey, - // 
serverKeyPair.privateKey, - // serverKeyPair.privateKey, - // 86400, - // ); - // const serverCertPem = keysUtils.certToPem(serverCert); - // const serverNodeId = networkUtils.certNodeId(serverCert); - // const egressHost = fwdProxy.egressHost; - // const egressPort = fwdProxy.egressPort; - // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - // const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = - // promise(); - // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - // promise(); - // // This UTP server will hold the connection - // const utpSocket = UTP.createServer( - // { - // allowHalfOpen: false, - // }, - // async (utpConn) => { - // const tlsSocket = new tls.TLSSocket(utpConn, { - // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - // cert: Buffer.from(serverCertPem, 'ascii'), - // isServer: true, - // requestCert: true, - // rejectUnauthorized: false, - // }); - // tlsSocket.on('secure', () => { - // resolveRemoteSecureP(); - // }); - // tlsSocket.on('close', () => { - // resolveRemoteClosedP(); - // }); - // // AllowHalfOpen is buggy - // // this ends the connection in case it doesn't work - // tlsSocket.on('end', () => { - // tlsSocket.end(); - // }); - // await send(networkUtils.pingBuffer); - // const punchInterval = setInterval(async () => { - // await send(networkUtils.pingBuffer); - // }, 1000); - // await remoteReadyP; - // clearInterval(punchInterval); - // } - // ); - // const handleMessage = async (data: Buffer) => { - // const msg = networkUtils.unserializeNetworkMessage(data); - // if (msg.type === 'ping') { - // await send(networkUtils.pongBuffer); - // } else if (msg.type === 'pong') { - // resolveRemoteReadyP(); - // } - // }; - // utpSocket.on('message', handleMessage); - // const send = async (data: Buffer) => { - // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - // }; - // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - // await utpSocketListen(0, '127.0.0.1'); - // const utpSocketHost = utpSocket.address().address; - // const utpSocketPort = utpSocket.address().port; - // const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); - // const clientSocket = await connect( - // fwdProxy.proxyHost, - // fwdProxy.proxyPort, - // authTokenEncoded, - // `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( - // serverNodeId, - // )}`, - // ); - // await expect(remoteReadyP).resolves.toBeUndefined(); - // await expect(remoteSecureP).resolves.toBeUndefined(); - // expect(clientSocket).toBeInstanceOf(net.Socket); - // expect(clientSocket.remoteAddress).toBe(fwdProxy.proxyHost); - // expect(clientSocket.remotePort).toBe(fwdProxy.proxyPort); - // const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); - // clientSocket.on('close', () => { - // resolveLocalClosedP(); - // }); - // await fwdProxy.closeConnection( - // utpSocketHost as Host, - // utpSocketPort as Port, - // ); - // await expect(localClosedP).resolves.toBeUndefined(); - // await expect(remoteClosedP).resolves.toBeUndefined(); - // utpSocket.off('message', handleMessage); - // utpSocket.close(); - // utpSocket.unref(); - // await fwdProxy.stop(); - // }); - // test('stopping the proxy with open connections', async () => { - // const serverKeyPair = await keysUtils.generateKeyPair(4096); - // const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - // const serverCert = 
keysUtils.generateCertificate( - // serverKeyPair.publicKey, - // serverKeyPair.privateKey, - // serverKeyPair.privateKey, - // 86400, - // ); - // const serverCertPem = keysUtils.certToPem(serverCert); - // const serverNodeId = networkUtils.certNodeId(serverCert); - // const egressHost = fwdProxy.egressHost; - // const egressPort = fwdProxy.egressPort; - // const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); - // const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = - // promise(); - // const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = - // promise(); - // const utpSocket = UTP.createServer( - // { - // allowHalfOpen: false, - // }, - // async (utpConn) => { - // const tlsSocket = new tls.TLSSocket(utpConn, { - // key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - // cert: Buffer.from(serverCertPem, 'ascii'), - // isServer: true, - // requestCert: true, - // rejectUnauthorized: false, - // }); - // tlsSocket.on('secure', () => { - // resolveRemoteSecureP(); - // }); - // tlsSocket.on('close', () => { - // resolveRemoteClosedP(); - // }); - // // AllowHalfOpen is buggy - // // this ends the connection in case it doesn't work - // tlsSocket.on('end', () => { - // tlsSocket.end(); - // }); - // await send(networkUtils.pingBuffer); - // const punchInterval = setInterval(async () => { - // await send(networkUtils.pingBuffer); - // }, 1000); - // await remoteReadyP; - // clearInterval(punchInterval); - // } - // ); - // const handleMessage = async (data: Buffer) => { - // const msg = networkUtils.unserializeNetworkMessage(data); - // if (msg.type === 'ping') { - // await send(networkUtils.pongBuffer); - // } else if (msg.type === 'pong') { - // resolveRemoteReadyP(); - // } - // }; - // utpSocket.on('message', handleMessage); - // const send = async (data: Buffer) => { - // const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); - // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - // }; - // const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - // await utpSocketListen(0, '127.0.0.1'); - // const utpSocketHost = utpSocket.address().address; - // const utpSocketPort = utpSocket.address().port; - // expect(fwdProxy.connectionCount).toBe(0); - // await fwdProxy.openConnection( - // serverNodeId, - // utpSocketHost as Host, - // utpSocketPort as Port, - // ); - // await expect(remoteReadyP).resolves.toBeUndefined(); - // await expect(remoteSecureP).resolves.toBeUndefined(); - // expect(fwdProxy.connectionCount).toBe(1); - // await fwdProxy.stop(); - // expect(fwdProxy.connectionCount).toBe(0); - // utpSocket.off('message', handleMessage); - // utpSocket.close(); - // utpSocket.unref(); - // await expect(remoteClosedP).resolves.toBeUndefined(); - // }); - // test('open connection to multiple servers', async () => { - // // First server keys - // const serverKeyPair1 = await keysUtils.generateKeyPair(4096); - // const serverKeyPairPem1 = keysUtils.keyPairToPem(serverKeyPair1); - // const serverCert1 = keysUtils.generateCertificate( - // serverKeyPair1.publicKey, - // serverKeyPair1.privateKey, - // serverKeyPair1.privateKey, - // 86400, - // ); - // const serverCertPem1 = keysUtils.certToPem(serverCert1); - // const serverNodeId1 = networkUtils.certNodeId(serverCert1); - // // Second server keys - // const serverKeyPair2 = await keysUtils.generateKeyPair(4096); - // const serverKeyPairPem2 = keysUtils.keyPairToPem(serverKeyPair2); - // const serverCert2 = keysUtils.generateCertificate( - // 
serverKeyPair2.publicKey, - // serverKeyPair2.privateKey, - // serverKeyPair2.privateKey, - // 86400, - // ); - // const serverCertPem2 = keysUtils.certToPem(serverCert2); - // const serverNodeId2 = networkUtils.certNodeId(serverCert2); - // const egressHost = fwdProxy.egressHost; - // const egressPort = fwdProxy.egressPort; - // // First signals - // const { p: remoteReadyP1, resolveP: resolveRemoteReadyP1 } = - // promise(); - // const { p: remoteClosedP1, resolveP: resolveRemoteClosedP1 } = - // promise(); - // // Second signals - // const { p: remoteReadyP2, resolveP: resolveRemoteReadyP2 } = - // promise(); - // const { p: remoteClosedP2, resolveP: resolveRemoteClosedP2 } = - // promise(); - // const utpSocket1 = UTP.createServer( - // { - // allowHalfOpen: false, - // }, - // async (utpConn) => { - // const tlsSocket = new tls.TLSSocket(utpConn, { - // key: Buffer.from(serverKeyPairPem1.privateKey, 'ascii'), - // cert: Buffer.from(serverCertPem1, 'ascii'), - // isServer: true, - // requestCert: true, - // rejectUnauthorized: false, - // }); - // tlsSocket.on('close', () => { - // resolveRemoteClosedP1(); - // }); - // // AllowHalfOpen is buggy - // // this ends the connection in case it doesn't work - // tlsSocket.on('end', () => { - // tlsSocket.end(); - // }); - // await send1(networkUtils.pingBuffer); - // const punchInterval = setInterval(async () => { - // await send1(networkUtils.pingBuffer); - // }, 1000); - // await remoteReadyP1; - // clearInterval(punchInterval); - // } - // ); - // const handleMessage1 = async (data: Buffer) => { - // const msg = networkUtils.unserializeNetworkMessage(data); - // if (msg.type === 'ping') { - // await send1(networkUtils.pongBuffer); - // } else if (msg.type === 'pong') { - // resolveRemoteReadyP1(); - // } - // }; - // utpSocket1.on('message', handleMessage1); - // const send1 = async (data: Buffer) => { - // const utpSocketSend = promisify(utpSocket1.send).bind(utpSocket1); - // await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - // }; - // const utpSocketListen1 = promisify(utpSocket1.listen).bind(utpSocket1); - // await utpSocketListen1(0, '127.0.0.1'); - // const utpSocketHost1 = utpSocket1.address().address; - // const utpSocketPort1 = utpSocket1.address().port; - // const utpSocket2 = UTP.createServer( - // { - // allowHalfOpen: false, - // }, - // async (utpConn) => { - // const tlsSocket = new tls.TLSSocket(utpConn, { - // key: Buffer.from(serverKeyPairPem2.privateKey, 'ascii'), - // cert: Buffer.from(serverCertPem2, 'ascii'), - // isServer: true, - // requestCert: true, - // rejectUnauthorized: false, - // }); - // tlsSocket.on('close', () => { - // resolveRemoteClosedP2(); - // }); - // // AllowHalfOpen is buggy - // // this ends the connection in case it doesn't work - // tlsSocket.on('end', () => { - // tlsSocket.end(); - // }); - // await send2(networkUtils.pingBuffer); - // const punchInterval = setInterval(async () => { - // await send2(networkUtils.pingBuffer); - // }, 2000); - // await remoteReadyP2; - // clearInterval(punchInterval); - // } - // ); - // const handleMessage2 = async (data: Buffer) => { - // const msg = networkUtils.unserializeNetworkMessage(data); - // if (msg.type === 'ping') { - // await send2(networkUtils.pongBuffer); - // } else if (msg.type === 'pong') { - // resolveRemoteReadyP2(); - // } - // }; - // utpSocket2.on('message', handleMessage2); - // const send2 = async (data: Buffer) => { - // const utpSocketSend = promisify(utpSocket2.send).bind(utpSocket2); - // await 
utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); - // }; - // const utpSocketListen2 = promisify(utpSocket2.listen).bind(utpSocket2); - // await utpSocketListen2(0, '127.0.0.1'); - // const utpSocketHost2 = utpSocket2.address().address; - // const utpSocketPort2 = utpSocket2.address().port; - // expect(fwdProxy.connectionCount).toBe(0); - // await fwdProxy.openConnection( - // serverNodeId1, - // utpSocketHost1 as Host, - // utpSocketPort1 as Port, - // ); - // await fwdProxy.openConnection( - // serverNodeId2, - // utpSocketHost2 as Host, - // utpSocketPort2 as Port, - // ); - // expect(fwdProxy.connectionCount).toBe(2); - // await expect(remoteReadyP1).resolves.toBeUndefined(); - // await expect(remoteReadyP2).resolves.toBeUndefined(); - // await fwdProxy.closeConnection( - // utpSocketHost1 as Host, - // utpSocketPort1 as Port, - // ); - // await fwdProxy.closeConnection( - // utpSocketHost2 as Host, - // utpSocketPort2 as Port, - // ); - // expect(fwdProxy.connectionCount).toBe(0); - // await expect(remoteClosedP1).resolves.toBeUndefined(); - // await expect(remoteClosedP2).resolves.toBeUndefined(); - // utpSocket1.off('message', handleMessage1); - // utpSocket1.close(); - // utpSocket1.unref(); - // utpSocket2.off('message', handleMessage2); - // utpSocket2.close(); - // utpSocket2.unref(); - // await fwdProxy.stop(); - // }); }); diff --git a/tests/network/ReverseProxy.test.ts b/tests/network/ReverseProxy.test.ts index f319d66b4..0f62c724f 100644 --- a/tests/network/ReverseProxy.test.ts +++ b/tests/network/ReverseProxy.test.ts @@ -1,5 +1,4 @@ import type { AddressInfo } from 'net'; - import type { Host, Port } from '@/network/types'; import type { KeyPairPem } from '@/keys/types'; import net from 'net'; @@ -12,55 +11,72 @@ import { errors as networkErrors, } from '@/network'; import * as keysUtils from '@/keys/utils'; -import { promisify, promise, timerStart, timerStop, sleep } from '@/utils'; - -describe('ReverseProxy', () => { - const logger = new Logger('ReverseProxy Test', LogLevel.WARN, [ - new StreamHandler(), - ]); - let keyPairPem: KeyPairPem, certPem: string; +import { promisify, promise, timerStart, timerStop, poll, sleep } from '@/utils'; +import * as testUtils from '../utils'; - // Helper functions - function server(end: boolean = false) { - const { p: serverConnP, resolveP: resolveServerConnP } = promise(); - const { p: serverConnClosedP, resolveP: resolveServerConnClosedP } = - promise(); - const server = net.createServer((conn) => { - resolveServerConnP(); - conn.once('close', () => { - resolveServerConnClosedP(); - }); - if (end) { - conn.end(); +/** + * Mock TCP server + * This is the server that the ReverseProxy will be proxying to + */ +function tcpServer(end: boolean = false) { + const { p: serverConnP, resolveP: resolveServerConnP } = promise(); + const { p: serverConnEndP, resolveP: resolveServerConnEndP } = promise(); + const { p: serverConnClosedP, resolveP: resolveServerConnClosedP } = + promise(); + const server = net.createServer({ + allowHalfOpen: false + }, (conn) => { + resolveServerConnP(); + conn.on('end', () => { + resolveServerConnEndP(); + conn.end(); + conn.destroy(); + }); + conn.once('close', () => { + resolveServerConnClosedP(); + }); + if (end) { + conn.removeAllListeners('end'); + conn.on('end', () => { + resolveServerConnEndP(); conn.destroy(); - } - }); - const serverClose = promisify(server.close).bind(server); - const serverListen = promisify(server.listen).bind(server); - const serverHost = () => { - return 
(server.address() as AddressInfo).address as Host; - }; - const serverPort = () => { - return (server.address() as AddressInfo).port as Port; - }; - return { - serverListen, - serverClose, - serverConnP, - serverConnClosedP, - serverHost, - serverPort, - }; - } + }); + conn.end(); + } + }); + const serverClose = promisify(server.close).bind(server); + const serverListen = promisify(server.listen).bind(server); + const serverHost = () => { + return (server.address() as AddressInfo).address as Host; + }; + const serverPort = () => { + return (server.address() as AddressInfo).port as Port; + }; + return { + serverListen, + serverClose, + serverConnP, + serverConnEndP, + serverConnClosedP, + serverHost, + serverPort, + }; +} +describe(ReverseProxy.name, () => { + const logger = new Logger(`${ReverseProxy.name} test`, LogLevel.DEBUG, [ + new StreamHandler(), + ]); + let keyPairPem: KeyPairPem + let certPem: string; beforeAll(async () => { - const keyPair = await keysUtils.generateKeyPair(4096); - keyPairPem = keysUtils.keyPairToPem(keyPair); + const globalKeyPair = await testUtils.setupGlobalKeypair(); + keyPairPem = keysUtils.keyPairToPem(globalKeyPair); const cert = keysUtils.generateCertificate( - keyPair.publicKey, - keyPair.privateKey, - keyPair.privateKey, - 86400, + globalKeyPair.publicKey, + globalKeyPair.privateKey, + globalKeyPair.privateKey, + 86400 ); certPem = keysUtils.certToPem(cert); }); @@ -68,7 +84,6 @@ describe('ReverseProxy', () => { const revProxy = new ReverseProxy({ logger: logger, }); - // Should be a noop await revProxy.stop(); await revProxy.start({ @@ -80,19 +95,13 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - await revProxy.stop(); - expect(() => { - revProxy.ingressHost; - }).toThrow(networkErrors.ErrorReverseProxyNotStarted); - expect(() => { - revProxy.getConnectionInfoByProxy('::1' as Host, 1 as Port); - }).toThrow(networkErrors.ErrorReverseProxyNotStarted); - }); - test('starting and stopping the reverse proxy', async () => { - const revProxy = new ReverseProxy({ - logger: logger, - }); - // Starting the rev proxy doesn't start a connection to the server + expect(typeof revProxy.getServerHost()).toBe('string'); + expect(typeof revProxy.getServerPort()).toBe('number'); + expect(revProxy.getServerPort()).toBeGreaterThan(0); + expect(typeof revProxy.getIngressHost()).toBe('string'); + expect(typeof revProxy.getIngressPort()).toBe('number'); + expect(revProxy.getIngressPort()).toBeGreaterThan(0); + // Should be a noop (already started) await revProxy.start({ serverHost: '::1' as Host, serverPort: 1 as Port, @@ -102,15 +111,14 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - expect(typeof revProxy.ingressHost).toBe('string'); - expect(typeof revProxy.ingressPort).toBe('number'); - expect(revProxy.ingressPort).toBeGreaterThan(0); - expect(typeof revProxy.serverHost).toBe('string'); - expect(typeof revProxy.serverPort).toBe('number'); - expect(revProxy.connectionCount).toBe(0); - expect(revProxy.serverHost).toBe('::1'); - expect(revProxy.serverPort).toBe(1); await revProxy.stop(); + expect(() => { + revProxy.getIngressHost(); + }).toThrow(networkErrors.ErrorReverseProxyNotRunning); + expect(() => { + revProxy.getConnectionInfoByProxy('::1' as Host, 1 as Port); + }).toThrow(networkErrors.ErrorReverseProxyNotRunning); + // Start it again await revProxy.start({ serverHost: '::1' as Host, serverPort: 1 as Port, @@ -120,12 +128,12 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - 
expect(revProxy.ingressHost).toBe('127.0.0.1'); + expect(revProxy.getServerHost()).toBe('::1'); await revProxy.stop(); }); test('open connection to port 0 fails', async () => { const revProxy = new ReverseProxy({ - logger: logger, + logger: logger.getChild('ReverseProxy port 0'), }); const { serverListen, @@ -134,7 +142,7 @@ describe('ReverseProxy', () => { serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -164,7 +172,7 @@ describe('ReverseProxy', () => { serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -207,7 +215,7 @@ describe('ReverseProxy', () => { serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -218,8 +226,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const utpSocket = UTP(); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -236,7 +244,7 @@ describe('ReverseProxy', () => { await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); await revProxy.closeConnection('127.0.0.1' as Host, utpSocketPort as Port); await expect(serverConnP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); @@ -257,7 +265,7 @@ describe('ReverseProxy', () => { serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -268,8 +276,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); // First client const utpSocket1 = UTP(); const handleMessage1 = async (data: Buffer) => { @@ -304,10 +312,10 @@ describe('ReverseProxy', () => { const utpSocketPort2 = utpSocket2.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort1 as Port); await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort2 as Port); - expect(revProxy.connectionCount).toBe(2); + expect(revProxy.getConnectionCount()).toBe(2); await revProxy.closeConnection('127.0.0.1' as Host, utpSocketPort1 as Port); await revProxy.closeConnection('127.0.0.1' as Host, utpSocketPort2 as Port); - expect(revProxy.connectionCount).toBe(0); + expect(revProxy.getConnectionCount()).toBe(0); await expect(serverConnP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); utpSocket1.off('message', handleMessage1); @@ -328,10 +336,11 @@ describe('ReverseProxy', () => { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(true); + } = tcpServer(true); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -342,8 +351,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = 
revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const utpSocket = UTP(); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -360,38 +369,39 @@ describe('ReverseProxy', () => { await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); await expect(serverConnP).resolves.toBeUndefined(); - // The server closed the connection + // The server receives the end confirmation for graceful exit + await expect(serverConnEndP).resolves.toBeUndefined(); + // The server is closed await expect(serverConnClosedP).resolves.toBeUndefined(); - // Wait for the end signal to be received - await sleep(2000); // The rev proxy won't have this connection - expect(revProxy.connectionCount).toBe(0); + expect(revProxy.getConnectionCount()).toBe(0); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); await revProxy.stop(); await serverClose(); }); - test('connect timeout due to hanging client', async () => { - // ConnConnectTime will affect ErrorConnectionComposeTimeout during compose - // connTimeoutTime will affect ErrorConnectionTimeout which is needed + test.only('connect timeout due to hanging client', async () => { + // `connConnectTime` will affect ErrorConnectionComposeTimeout + // `connKeepAliveTimeoutTime` will affect ErrorConnectionTimeout which is needed // because failing to connect to the open connection // doesn't automatically mean the connection is destroyed const revProxy = new ReverseProxy({ connConnectTime: 3000, - connTimeoutTime: 3000, + connKeepAliveTimeoutTime: 3000, logger: logger, }); const { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -402,8 +412,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const utpSocket = UTP(); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -420,8 +430,9 @@ describe('ReverseProxy', () => { await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); // This retries multiple times + // This will eventually fail and trigger a ErrorConnectionComposeTimeout const utpConn = utpSocket.connect(ingressPort, ingressHost); utpConn.setTimeout(2000, () => { utpConn.emit('error', new Error('TIMED OUT')); @@ -438,12 +449,36 @@ describe('ReverseProxy', () => { }); // The client connection times out await expect(utpConnErrorP).rejects.toThrow(/TIMED OUT/); + + console.log('000000'); + await utpConnClosedP; - // Wait for the open connection to timeout - await sleep(3000); - expect(revProxy.connectionCount).toBe(0); + + console.log('AAAA'); + await expect(serverConnP).resolves.toBeUndefined(); + + console.log('BBBB'); + + await expect(serverConnEndP).resolves.toBeUndefined(); + + console.log('CCCC'); + await 
expect(serverConnClosedP).resolves.toBeUndefined(); + + console.log('DDDDD'); + + // Connection count should reach 0 eventually + await expect(poll( + async () => { + return revProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100 + )).resolves.toBe(0); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); @@ -451,23 +486,23 @@ describe('ReverseProxy', () => { await serverClose(); }); test('connect fails due to missing client certificates', async () => { - // ConnConnectTime will affect ErrorConnectionComposeTimeout during compose - // connTimeoutTime will affect ErrorConnectionTimeout which is needed - // because failing to connect to the open connection + // `connKeepAliveTimeoutTime` will affect ErrorConnectionTimeout + // Note that failing to connect to the open connection // doesn't automatically mean the connection is destroyed + // reverse proxy keeps the connection alive until `connKeepAliveTimeoutTime` expires const revProxy = new ReverseProxy({ - connConnectTime: 3000, - connTimeoutTime: 3000, + connKeepAliveTimeoutTime: 2000, logger: logger, }); const { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -478,8 +513,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const utpSocket = UTP(); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -496,17 +531,12 @@ describe('ReverseProxy', () => { await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); const { p: tlsSocketClosedP, resolveP: resolveTlsSocketClosedP } = promise(); const utpConn = utpSocket.connect(ingressPort, ingressHost); - // This will propagate the error to tlsSocket - utpConn.setTimeout(2000, () => { - utpConn.emit('error', new Error('TIMED OUT')); - }); - // TLS socket without a certificate - // should also cause a timeout - // the secure event never occurs + // This will send an empty certificate chain + // Expect `ErrorCertChainEmpty` let secureConnection = false; const tlsSocket = tls.connect( { @@ -527,6 +557,7 @@ describe('ReverseProxy', () => { tlsSocket.destroy(); } else { tlsSocket.end(); + tlsSocket.destroy(); } }); tlsSocket.on('close', () => { @@ -537,11 +568,21 @@ describe('ReverseProxy', () => { // We won't receive an error because it will be closed expect(errored).toBe(false); expect(secureConnection).toBe(true); - // Wait for the open connection to timeout - await sleep(3000); - expect(revProxy.connectionCount).toBe(0); await expect(serverConnP).resolves.toBeUndefined(); + // Eventually `ErrorConnectionTimeout` occurs, and these will be gracefully closed + await expect(serverConnEndP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect(poll( + async () => { + return revProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100 + )).resolves.toBe(0); 
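+    // Note on the polling assertion above: based on its usage here, `poll` from
+    // `@/utils` appears to take an async producer, a success predicate over the
+    // produced result, and a retry interval in ms, resolving once the predicate
+    // returns true (the exact signature is assumed from this call site).
+    // Polling replaces the earlier fixed `sleep(3000)` wait, so the test no longer
+    // depends on the exact `connKeepAliveTimeoutTime` having elapsed.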
utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); @@ -549,7 +590,7 @@ describe('ReverseProxy', () => { await serverClose(); }); test('connect success', async () => { - const clientKeyPair = await keysUtils.generateKeyPair(4096); + const clientKeyPair = await keysUtils.generateKeyPair(1024); const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); const clientCert = keysUtils.generateCertificate( clientKeyPair.publicKey, @@ -562,10 +603,11 @@ describe('ReverseProxy', () => { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0, '127.0.0.1'); const revProxy = new ReverseProxy({ logger: logger, @@ -579,13 +621,13 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const { p: clientReadyP, resolveP: resolveClientReadyP } = promise(); const { p: clientSecureConnectP, resolveP: resolveClientSecureConnectP } = promise(); const { p: clientCloseP, resolveP: resolveClientCloseP } = promise(); - const utpSocket = UTP({ allowHalfOpen: false }); + const utpSocket = UTP({ allowHalfOpen: true }); const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -614,45 +656,59 @@ describe('ReverseProxy', () => { resolveClientSecureConnectP(); }, ); + let tlsSocketEnded = false; tlsSocket.on('end', () => { - logger.info('GOT THE END EVENT'); + tlsSocketEnded = true; if (utpConn.destroyed) { tlsSocket.destroy(); } else { tlsSocket.end(); + tlsSocket.destroy(); } }); tlsSocket.on('close', () => { resolveClientCloseP(); }); await send(networkUtils.pingBuffer); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); await clientReadyP; await clientSecureConnectP; await serverConnP; await revProxy.closeConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(0); + expect(revProxy.getConnectionCount()).toBe(0); await clientCloseP; + await serverConnEndP; await serverConnClosedP; + expect(tlsSocketEnded).toBe(true); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); await revProxy.stop(); await serverClose(); }); - test('stopping the proxy with open connections', async () => { - const revProxy = new ReverseProxy({ - logger: logger, - }); + test.only('stopping the proxy with open connections', async () => { + const clientKeyPair = await keysUtils.generateKeyPair(1024); + const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); + const clientCert = keysUtils.generateCertificate( + clientKeyPair.publicKey, + clientKeyPair.privateKey, + clientKeyPair.privateKey, + 86400, + ); + const clientCertPem = keysUtils.certToPem(clientCert); const { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(); - await serverListen(0); + } = tcpServer(); + await serverListen(0, '127.0.0.1'); + const revProxy = new ReverseProxy({ + logger: logger, + }); await revProxy.start({ serverHost: serverHost(), serverPort: serverPort(), @@ -662,12 +718,18 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; - const utpSocket = UTP(); + 
const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); + const { p: clientReadyP, resolveP: resolveClientReadyP } = promise(); + const { p: clientSecureConnectP, resolveP: resolveClientSecureConnectP } = + promise(); + const { p: clientCloseP, resolveP: resolveClientCloseP } = promise(); + const utpSocket = UTP({ allowHalfOpen: true }); + const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { + resolveClientReadyP(); await send(networkUtils.pongBuffer); } }; @@ -676,15 +738,60 @@ describe('ReverseProxy', () => { const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); await utpSocketSend(data, 0, data.byteLength, ingressPort, ingressHost); }; - const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + const utpConn = utpSocket.connect(ingressPort, ingressHost); + const tlsSocket = tls.connect( + { + key: Buffer.from(clientKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(clientCertPem, 'ascii'), + socket: utpConn, + rejectUnauthorized: false, + }, + () => { + resolveClientSecureConnectP(); + }, + ); + let tlsSocketEnded = false; + tlsSocket.on('end', () => { + tlsSocketEnded = true; + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', () => { + resolveClientCloseP(); + }); + await send(networkUtils.pingBuffer); + expect(revProxy.getConnectionCount()).toBe(1); + await clientReadyP; + await clientSecureConnectP; + await serverConnP; + + // await sleep(3000); + + console.log('BEFORE STOP'); + + // Stopping with 1 active connection (not just opened) await revProxy.stop(); - expect(revProxy.connectionCount).toBe(0); - await expect(serverConnP).resolves.toBeUndefined(); + + console.log('AFTER STOP'); + + + expect(revProxy.getConnectionCount()).toBe(0); + await clientCloseP; + + console.log('AAAAAAAAA'); + + await expect(serverConnEndP).resolves.toBeUndefined(); + console.log('BBBBBBBB'); await expect(serverConnClosedP).resolves.toBeUndefined(); + console.log('CCCCCCCCC'); + expect(tlsSocketEnded).toBe(true); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); diff --git a/tests/network/index.test.ts b/tests/network/index.test.ts index 9d94542cb..b4e57005f 100644 --- a/tests/network/index.test.ts +++ b/tests/network/index.test.ts @@ -5,39 +5,53 @@ import { utils as keysUtils } from '@/keys'; import { ForwardProxy, ReverseProxy, utils as networkUtils } from '@/network'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import { openTestServer, closeTestServer, GRPCClientTest } from '../grpc/utils'; +import { sleep } from '@/utils'; describe('network index', () => { const logger = new Logger('Network Test', LogLevel.WARN, [ new StreamHandler(), ]); - test('integration of forward and reverse proxy', async () => { + const authenticate = async (_metaClient, metaServer = new grpc.Metadata()) => + metaServer; + let clientKeyPairPem; + let clientCertPem; + let clientNodeId; + let serverKeyPairPem; + let serverCertPem; + let serverNodeId; + beforeAll(async () => { // Client keys const clientKeyPair = await keysUtils.generateKeyPair(1024); - const clientKeyPairPem = 
keysUtils.keyPairToPem(clientKeyPair); + clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); const clientCert = keysUtils.generateCertificate( clientKeyPair.publicKey, clientKeyPair.privateKey, clientKeyPair.privateKey, 12332432423, ); - const clientCertPem = keysUtils.certToPem(clientCert); - const clientNodeId = networkUtils.certNodeId(clientCert); + clientCertPem = keysUtils.certToPem(clientCert); + clientNodeId = networkUtils.certNodeId(clientCert); // Server keys const serverKeyPair = await keysUtils.generateKeyPair(1024); - const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); const serverCert = keysUtils.generateCertificate( serverKeyPair.publicKey, serverKeyPair.privateKey, serverKeyPair.privateKey, 12332432423, ); - const serverCertPem = keysUtils.certToPem(serverCert); - const serverNodeId = networkUtils.certNodeId(serverCert); - const authenticate = async (metaClient, metaServer = new grpc.Metadata()) => - metaServer; - const [server, serverPort] = await openTestServer(authenticate, logger); - const revProxy = new ReverseProxy({ - logger, + serverCertPem = keysUtils.certToPem(serverCert); + serverNodeId = networkUtils.certNodeId(serverCert); + }); + let server; + let revProxy; + let fwdProxy; + let client; + beforeEach(async () => { + let serverPort; + [server, serverPort] = await openTestServer(authenticate, logger); + revProxy = new ReverseProxy({ + logger: logger.getChild('ReverseProxy integration'), }); await revProxy.start({ serverHost: '127.0.0.1' as Host, @@ -49,9 +63,9 @@ describe('network index', () => { certChainPem: serverCertPem, }, }); - const fwdProxy = new ForwardProxy({ + fwdProxy = new ForwardProxy({ authToken: 'abc', - logger, + logger: logger.getChild('ForwardProxy integration'), }); await fwdProxy.start({ tlsConfig: { @@ -63,10 +77,10 @@ describe('network index', () => { egressHost: '127.0.0.1' as Host, egressPort: 0 as Port, }); - const client = await GRPCClientTest.createGRPCClientTest({ + client = await GRPCClientTest.createGRPCClientTest({ nodeId: serverNodeId, - host: revProxy.ingressHost, - port: revProxy.ingressPort, + host: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), proxyConfig: { host: fwdProxy.getProxyHost(), port: fwdProxy.getProxyPort(), @@ -74,6 +88,16 @@ describe('network index', () => { }, logger, }); + }); + afterEach(async () => { + // All calls here are idempotent + // they will work even when they are already shutdown + await client.destroy(); + await fwdProxy.stop(); + await revProxy.stop(); + await closeTestServer(server); + }); + test('grpc integration with unary and stream calls', async () => { const m = new utilsPB.EchoMessage(); const challenge = 'Hello!'; m.setChallenge(challenge); @@ -106,7 +130,7 @@ describe('network index', () => { } // Ensure that the connection count is the same expect(fwdProxy.getConnectionCount()).toBe(1); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); expect( fwdProxy.getConnectionInfoByIngress(client.host, client.port), ).toEqual( @@ -114,8 +138,8 @@ describe('network index', () => { nodeId: serverNodeId, egressHost: fwdProxy.getEgressHost(), egressPort: fwdProxy.getEgressPort(), - ingressHost: revProxy.ingressHost, - ingressPort: revProxy.ingressPort, + ingressHost: revProxy.getIngressHost(), + ingressPort: revProxy.getIngressPort(), }), ); expect( @@ -128,13 +152,42 @@ describe('network index', () => { nodeId: clientNodeId, egressHost: fwdProxy.getEgressHost(), 
egressPort: fwdProxy.getEgressPort(), - ingressHost: revProxy.ingressHost, - ingressPort: revProxy.ingressPort, + ingressHost: revProxy.getIngressHost(), + ingressPort: revProxy.getIngressPort(), }), ); + }); + test('client initiates end', async () => { + // Wait for network to settle + await sleep(100); + // GRPC client end simultaneously triggers the server to end the connection + // This is because the GRPC send ending frames at HTTP2-level await client.destroy(); + // Wait for network to settle + await sleep(100); + }); + test('server initiates end', async () => { + // Wait for network to settle + await sleep(100); + // Closing the GRPC server will automatically change the state of the client + // However because the GRPCClient has not integrated state changes of the underlying channel + // Then the GRPCClient won't be in a destroyed state until we explicitly destroy it + await closeTestServer(server); + // Wait for network to settle + await sleep(100); + }); + test('forward initiates end', async () => { + // Wait for network to settle + await sleep(100); await fwdProxy.stop(); + // Wait for network to settle + await sleep(100); + }); + test('reverse initiates end', async () => { + // Wait for network to settle + await sleep(100); await revProxy.stop(); - await closeTestServer(server); + // Wait for network to settle + await sleep(100); }); }); From 22ae1965448e25991e7a8b6ea63d72dceb284980 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Mon, 27 Dec 2021 19:57:39 +1100 Subject: [PATCH 14/28] Integrating new js-async-init with locking start & stop & destroy, blocking ready decorators, and adjustments to partial starts This requires swapping to explicit partial close due to js-async-init locks Network proxies is updated to make use of the new js-async-init to address race conditions --- package-lock.json | 17 ++++++-- package.json | 2 +- src/PolykeyAgent.ts | 19 ++++++++- src/client/service/agentStop.ts | 3 +- src/network/ConnectionForward.ts | 35 +++++++++------ src/network/ConnectionReverse.ts | 68 ++++++++++++++---------------- src/network/ForwardProxy.ts | 4 +- src/network/ReverseProxy.ts | 21 +++------ src/nodes/NodeManager.ts | 2 +- src/vaults/VaultManager.ts | 2 +- test-destroy.ts | 50 ---------------------- test-end-destroy-client.ts | 44 ------------------- test-end-destroy.ts | 44 ------------------- tests/network/ReverseProxy.test.ts | 42 ++++-------------- 14 files changed, 106 insertions(+), 247 deletions(-) delete mode 100644 test-destroy.ts delete mode 100644 test-end-destroy-client.ts delete mode 100644 test-end-destroy.ts diff --git a/package-lock.json b/package-lock.json index 9bb5f7d93..986ca41ec 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1574,11 +1574,22 @@ } }, "@matrixai/async-init": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@matrixai/async-init/-/async-init-1.2.0.tgz", - "integrity": "sha512-JM8bEvE9v5woWS2FohgWi66CV3cCD/j1cQvNPIBxAiKCoVPlJC/8geROinx3DGO5Wj7jTXkfzI9Ldu0tf8aPbg==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@matrixai/async-init/-/async-init-1.5.0.tgz", + "integrity": "sha512-a9bUjHufSnYskWKSXT+cgz1Wwi8FhVZddKeMB4x6RG/jhN2dgPJIYb+Ru8PmU97fmE7nHwXtznvPvLa1Rvp98Q==", "requires": { + "async-mutex": "^0.3.2", "ts-custom-error": "^3.2.0" + }, + "dependencies": { + "async-mutex": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/async-mutex/-/async-mutex-0.3.2.tgz", + "integrity": "sha512-HuTK7E7MT7jZEh1P9GtRW9+aTWiDWWi9InbZ5hjxrnRa39KS4BW04+xLBhYNS2aXhHUIKZSw3gj4Pn1pj+qGAA==", + 
"requires": { + "tslib": "^2.3.1" + } + } } }, "@matrixai/db": { diff --git a/package.json b/package.json index e47fd1c84..ed7d51016 100644 --- a/package.json +++ b/package.json @@ -71,7 +71,7 @@ }, "dependencies": { "@grpc/grpc-js": "1.3.7", - "@matrixai/async-init": "^1.2.0", + "@matrixai/async-init": "^1.5.0", "@matrixai/db": "^1.1.2", "@matrixai/id": "^2.1.0", "@matrixai/logger": "^2.1.0", diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index f841f28cc..80311a0a5 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -542,7 +542,24 @@ class PolykeyAgent { this.logger.info(`Started ${this.constructor.name}`); } catch (e) { this.logger.warn(`Failed Starting ${this.constructor.name}`); - await this.stop(); + await this.status?.beginStop({ pid: process.pid }); + await this.sessionManager?.stop(); + await this.notificationsManager?.stop(); + await this.vaultManager?.stop(); + await this.discovery?.destroy(); + await this.nodeManager?.stop(); + await this.revProxy?.stop(); + await this.fwdProxy?.stop(); + await this.grpcServerAgent?.stop(); + await this.grpcServerClient?.stop(); + await this.gestaltGraph?.stop(); + await this.acl?.stop(); + await this.sigchain?.stop(); + await this.identitiesManager?.stop(); + await this.db?.stop(); + await this.keyManager?.stop(); + await this.schema?.stop(); + await this.status?.stop({}); throw e; } } diff --git a/src/client/service/agentStop.ts b/src/client/service/agentStop.ts index ac030aa98..a1a67d69d 100644 --- a/src/client/service/agentStop.ts +++ b/src/client/service/agentStop.ts @@ -1,6 +1,7 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type PolykeyAgent from '../../PolykeyAgent'; +import { running } from '@matrixai/async-init'; import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; @@ -16,7 +17,7 @@ function agentStop ({ callback: grpc.sendUnaryData, ): Promise => { const response = new utilsPB.EmptyMessage(); - if (!pkAgent.running) { + if (!pkAgent[running]) { callback(null, response); return; } diff --git a/src/network/ConnectionForward.ts b/src/network/ConnectionForward.ts index b3d92c65a..66d2a6969 100644 --- a/src/network/ConnectionForward.ts +++ b/src/network/ConnectionForward.ts @@ -159,12 +159,13 @@ class ConnectionForward extends Connection { ...(timer != null ? 
[timer.timerP] : []), ]); } catch (e) { - // Destroy the socket before calling stop - // The stop will try to do a graceful end - // if the socket is not already destroyed - // However at this point the socket is not actually established - this.tlsSocket.destroy(); - await this.stop(); + // Clean up partial start + // TLSSocket isn't established yet, so it is destroyed + if (!this.tlsSocket.destroyed) { + this.tlsSocket.end(); + this.tlsSocket.destroy(); + } + this.utpSocket.off('message', this.handleMessage); throw new networkErrors.ErrorConnectionStart(e.message, { code: e.code, errno: e.errno, @@ -176,17 +177,26 @@ class ConnectionForward extends Connection { this.tlsSocket.on('error', this.handleError); this.tlsSocket.off('error', handleStartError); if (timer?.timedOut) { - // Destroy the socket - // At this point the socket is not actually established - this.tlsSocket.destroy(); - await this.stop(); + // Clean up partial start + // TLSSocket isn't established yet, so it is destroyed + if (!this.tlsSocket.destroyed) { + this.tlsSocket.end(); + this.tlsSocket.destroy(); + } + this.utpSocket.off('message', this.handleMessage); throw new networkErrors.ErrorConnectionStartTimeout(); } const serverCertChain = networkUtils.getCertificateChain(this.tlsSocket); try { networkUtils.verifyServerCertificateChain(this.nodeId, serverCertChain); } catch (e) { - await this.stop(); + // Clean up partial start + this.utpSocket.off('message', this.handleMessage); + // TLSSocket is established, and is ended gracefully + this.logger.debug('Sends tlsSocket ending'); + // Graceful exit has its own end handler + this.tlsSocket.removeAllListeners('end'); + await this.endGracefully(this.tlsSocket, this.endTime); throw e; } await this.startKeepAliveInterval(); @@ -196,9 +206,6 @@ class ConnectionForward extends Connection { this.logger.info('Started Connection Forward'); } - /** - * Repeated invocations are noops - */ public async stop(): Promise { this.logger.info('Stopping Connection Forward'); this._composed = false; diff --git a/src/network/ConnectionReverse.ts b/src/network/ConnectionReverse.ts index 60b071a9f..fb138983b 100644 --- a/src/network/ConnectionReverse.ts +++ b/src/network/ConnectionReverse.ts @@ -1,4 +1,5 @@ import type { Socket, AddressInfo } from 'net'; +import type { TLSSocket } from 'tls'; import type UTPConnection from 'utp-native/lib/connection'; import type { Host, Port, Address, NetworkMessage } from './types'; import type { NodeId } from '../nodes/types'; @@ -27,7 +28,7 @@ class ConnectionReverse extends Connection { protected connections: ConnectionsReverse; protected serverSocket: Socket; - protected tlsSocket?: Socket; + protected tlsSocket?: TLSSocket; protected proxyHost: Host; protected proxyPort: Port; protected proxyAddress: Address; @@ -152,12 +153,10 @@ class ConnectionReverse extends Connection { ...(timer != null ? 
[timer.timerP] : []), ]); } catch (e) { - // Destroy the socket before calling stop - // The stop will try to do a graceful end - // if the socket is not already destroyed - // However at this point the socket is not actually established + // Clean up partial start + // Socket isn't established yet, so it is destroyed this.serverSocket.destroy(); - await this.stop(); + this.utpSocket.off('message', this.handleMessage); throw new networkErrors.ErrorConnectionStart(e.message, { code: e.code, errno: e.errno, @@ -169,7 +168,10 @@ class ConnectionReverse extends Connection { this.serverSocket.on('error', this.handleError); this.serverSocket.off('error', handleStartError); if (timer?.timedOut) { - await this.stop(); + // Clean up partial start + // Socket isn't established yet, so it is destroyed + this.serverSocket.destroy(); + this.utpSocket.off('message', this.handleMessage); throw new networkErrors.ErrorConnectionStartTimeout(); } this.connections.egress.set(this.address, this); @@ -207,10 +209,7 @@ class ConnectionReverse extends Connection { this.logger.info('Stopped Connection Reverse'); } - /** - * Repeated invocations are noops - */ - @ready(new networkErrors.ErrorConnectionNotRunning()) + @ready(new networkErrors.ErrorConnectionNotRunning(), true) public async compose(utpConn: UTPConnection, timer?: Timer): Promise { try { if (this._composed) { @@ -222,6 +221,9 @@ class ConnectionReverse extends Connection { const { p: secureP, resolveP: resolveSecureP } = promise(); // Promise for compose errors const { p: errorP, rejectP: rejectErrorP } = promise(); + const handleComposeError = (e) => { + rejectErrorP(e); + }; const tlsSocket = new tls.TLSSocket(utpConn, { key: Buffer.from(this.tlsConfig.keyPrivatePem, 'ascii'), cert: Buffer.from(this.tlsConfig.certChainPem, 'ascii'), @@ -232,9 +234,6 @@ class ConnectionReverse extends Connection { tlsSocket.once('secure', () => { resolveSecureP(); }); - const handleComposeError = (e) => { - rejectErrorP(e); - }; tlsSocket.once('error', handleComposeError); try { await Promise.race([ @@ -243,7 +242,7 @@ class ConnectionReverse extends Connection { ...(timer != null ? 
[timer.timerP] : []), ]); } catch (e) { - // Hard close the tls socket + // Clean up partial compose if (!tlsSocket.destroyed) { tlsSocket.end(); tlsSocket.destroy(); @@ -259,15 +258,8 @@ class ConnectionReverse extends Connection { await this.stop(); }); tlsSocket.off('error', handleComposeError); - // TODO:, this location is problematic - // IF stop is called in the middle of composition - // It feels like we need to LOCK the operations - // So that if you're composing, you cannot stop - // And when stopping you cannot compose - // At this point, graceful exit can be done for the tls socket - this.tlsSocket = tlsSocket; if (timer?.timedOut) { - // Hard close the tls socket + // Clean up partial compose if (!tlsSocket.destroyed) { tlsSocket.end(); tlsSocket.destroy(); @@ -278,33 +270,33 @@ class ConnectionReverse extends Connection { try { networkUtils.verifyClientCertificateChain(clientCertChain); } catch (e) { - // Hard close the tls socket + // Clean up partial compose if (!tlsSocket.destroyed) { tlsSocket.end(); tlsSocket.destroy(); } throw e; } - - - tlsSocket.on('end', async () => { + // The TLSSocket is now established + this.tlsSocket = tlsSocket; + this.tlsSocket.on('end', async () => { this.logger.debug('Receives tlsSocket ending'); if (utpConn.destroyed) { - tlsSocket.destroy(); + this.tlsSocket!.destroy(); this.logger.debug('Destroyed tlsSocket'); } else { this.logger.debug('Responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); + this.tlsSocket!.end(); + this.tlsSocket!.destroy(); this.logger.debug('Responded tlsSocket ending'); } await this.stop(); }); - tlsSocket.on('close', async () => { + this.tlsSocket.on('close', async () => { await this.stop(); }); - tlsSocket.pipe(this.serverSocket, { end: false }); - this.serverSocket.pipe(tlsSocket, { end: false }); + this.tlsSocket.pipe(this.serverSocket, { end: false }); + this.serverSocket.pipe(this.tlsSocket, { end: false }); this.clientCertChain = clientCertChain; this.logger.info('Composed Connection Reverse'); } catch (e) { @@ -323,6 +315,7 @@ class ConnectionReverse extends Connection { return this.proxyPort; } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getClientCertificates(): Array { if (!this._composed) { throw new networkErrors.ErrorConnectionNotComposed(); @@ -330,6 +323,7 @@ class ConnectionReverse extends Connection { return this.clientCertChain.map((crt) => keysUtils.certCopy(crt)); } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getClientNodeIds(): Array { if (!this._composed) { throw new networkErrors.ErrorConnectionNotComposed(); @@ -339,13 +333,15 @@ class ConnectionReverse extends Connection { protected startKeepAliveTimeout() { this.timeout = setTimeout(async () => { - // This is more precisely an error for reverse - // However it may not yet be established const e = new networkErrors.ErrorConnectionTimeout(); + // If the TLSSocket is established, emit the error so the + // tlsSocket error handler handles it + // This is not emitted on serverSocket in order maintain + // symmetry with ConnectionForward behaviour if (this.tlsSocket != null && !this.tlsSocket.destroyed) { this.tlsSocket.emit('error', e); } else { - // The composition has not occurred yet + // Otherwise the composition has not occurred yet // This means we have timed out waiting for a composition this.logger.warn(`Reverse Error: ${e.toString()}`); await this.stop(); diff --git a/src/network/ForwardProxy.ts b/src/network/ForwardProxy.ts index 4d10f8b2b..00a0d5b98 100644 --- 
a/src/network/ForwardProxy.ts +++ b/src/network/ForwardProxy.ts @@ -218,7 +218,7 @@ class ForwardProxy { * It will only stop the timer if using the default timer * Set timer to `null` explicitly to wait forever */ - @ready(new networkErrors.ErrorForwardProxyNotRunning()) + @ready(new networkErrors.ErrorForwardProxyNotRunning(), true) public async openConnection( nodeId: NodeId, ingressHost: Host, @@ -247,7 +247,7 @@ class ForwardProxy { } } - @ready(new networkErrors.ErrorForwardProxyNotRunning()) + @ready(new networkErrors.ErrorForwardProxyNotRunning(), true) public async closeConnection( ingressHost: Host, ingressPort: Port, diff --git a/src/network/ReverseProxy.ts b/src/network/ReverseProxy.ts index c00c9b05d..5ecdf90c0 100644 --- a/src/network/ReverseProxy.ts +++ b/src/network/ReverseProxy.ts @@ -196,7 +196,7 @@ class ReverseProxy { this.tlsConfig = tlsConfig; } - @ready(new networkErrors.ErrorReverseProxyNotRunning()) + @ready(new networkErrors.ErrorReverseProxyNotRunning(), true) public async openConnection( egressHost: Host, egressPort: Port, @@ -224,7 +224,7 @@ class ReverseProxy { } } - @ready(new networkErrors.ErrorReverseProxyNotRunning()) + @ready(new networkErrors.ErrorReverseProxyNotRunning(), true) public async closeConnection( egressHost: Host, egressPort: Port, @@ -262,13 +262,6 @@ class ReverseProxy { } const release = await lock.acquire(); try { - const handleConnectionError = (e) => { - this.logger.warn( - `Failed connection from ${egressAddress} - ${e.toString()}`, - ); - utpConn.destroy(); - }; - utpConn.on('error', handleConnectionError); this.logger.info(`Handling connection from ${egressAddress}`); const timer = timerStart(this.connConnectTime); try { @@ -283,16 +276,14 @@ class ReverseProxy { throw e; } if (!utpConn.destroyed) { - utpConn.emit('error', e); - } else { - this.logger.warn( - `Failed connection from ${egressAddress} - ${e.toString()}`, - ); + utpConn.destroy(); } + this.logger.warn( + `Failed connection from ${egressAddress} - ${e.toString()}`, + ); } finally { timerStop(timer); } - utpConn.off('error', handleConnectionError); this.logger.info(`Handled connection from ${egressAddress}`); } finally { release(); diff --git a/src/nodes/NodeManager.ts b/src/nodes/NodeManager.ts index 55f56f6e8..9c643f046 100644 --- a/src/nodes/NodeManager.ts +++ b/src/nodes/NodeManager.ts @@ -140,7 +140,7 @@ class NodeManager { this.logger.info(`Started ${this.constructor.name}`); } catch (e) { this.logger.warn(`Failed Starting ${this.constructor.name}`); - await this.stop(); + await this.nodeGraph.stop(); throw e; } } diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 9ed2f3869..dcd979393 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -167,7 +167,7 @@ class VaultManager { this.logger.info(`Started ${this.constructor.name}`); } catch (e) { this.logger.warn(`Failed Starting ${this.constructor.name}`); - await this.stop(); + await this.efs.stop(); throw e; } } diff --git a/test-destroy.ts b/test-destroy.ts deleted file mode 100644 index e074e7dab..000000000 --- a/test-destroy.ts +++ /dev/null @@ -1,50 +0,0 @@ -import net from 'net'; -import { utils as keysUtils } from './src/keys'; -import { utils as networkUtils } from './src/network'; - -async function main () { - - const clientKeyPair = await keysUtils.generateKeyPair(1024); - const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); - const clientCert = keysUtils.generateCertificate( - clientKeyPair.publicKey, - clientKeyPair.privateKey, - 
clientKeyPair.privateKey, - 12332432423, - ); - const clientCertPem = keysUtils.certToPem(clientCert); - const clientNodeId = networkUtils.certNodeId(clientCert); - - let socket; - const p = new Promise((resolve) => { - socket = net.connect( - { - port: 80, - host: '142.250.66.206', - allowHalfOpen: false - }, - () => { - resolve(); - } - ); - socket.on('close', () => { - console.log('CLOSE EVENT EMITTED'); - }); - }); - - const p2 = new Promise((resolve) => { - socket.on('end', () => { - resolve(); - }); - }); - socket.end(); - await p2; - - console.log('allow half open', socket.allowHalfOpen); - console.log('ready state', socket.readyState); - console.log('destroyed', socket.destroyed); - - -} - -main(); diff --git a/test-end-destroy-client.ts b/test-end-destroy-client.ts deleted file mode 100644 index 46ada616e..000000000 --- a/test-end-destroy-client.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { utils as keysUtils } from './src/keys'; -import net from 'net'; -import tls from 'tls'; - -async function main () { - - const clientKeyPair = await keysUtils.generateKeyPair(1024); - const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); - const clientCert = keysUtils.generateCertificate( - clientKeyPair.publicKey, - clientKeyPair.privateKey, - clientKeyPair.privateKey, - 86400, - ); - const clientCertPem = keysUtils.certToPem(clientCert); - - const socket = net.createConnection({ - port: 55555, - host: '127.0.0.1', - allowHalfOpen: true - }, () => { - - const tlsSocket = tls.connect( - { - key: Buffer.from(clientKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(clientCertPem, 'ascii'), - socket: socket, - rejectUnauthorized: false, - }, - () => { - - tlsSocket.on('end', () => { - console.log('RECEIVED END AFTER SENDING end'); - }); - - console.log('SENDING END'); - tlsSocket.end(); - - }, - ); - }); -} - -main(); diff --git a/test-end-destroy.ts b/test-end-destroy.ts deleted file mode 100644 index 2d38217ed..000000000 --- a/test-end-destroy.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { utils as keysUtils } from './src/keys'; -import net from 'net'; -import tls from 'tls'; - -async function main () { - const serverKeyPair = await keysUtils.generateKeyPair(1024); - const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); - const serverCert = keysUtils.generateCertificate( - serverKeyPair.publicKey, - serverKeyPair.privateKey, - serverKeyPair.privateKey, - 86400, - ); - const serverCertPem = keysUtils.certToPem(serverCert); - const server = net.createServer({ allowHalfOpen: true }, (c) => { - console.log('received connection'); - const tlsSocket = new tls.TLSSocket(c, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.once('secure', () => { - console.log('established secure conn'); - }); - tlsSocket.on('end', async () => { - console.log('received end'); - tlsSocket.end(() => { - console.log('HELLO WORLD'); - }); - console.log('destroying'); - tlsSocket.destroy(); - }); - tlsSocket.on('close', () => { - console.log('destroyed'); - }); - }); - server.listen(55555, () => { - console.log('server bound'); - }); -} - -main(); diff --git a/tests/network/ReverseProxy.test.ts b/tests/network/ReverseProxy.test.ts index 0f62c724f..d3172ac29 100644 --- a/tests/network/ReverseProxy.test.ts +++ b/tests/network/ReverseProxy.test.ts @@ -64,7 +64,7 @@ function tcpServer(end: boolean = false) { } describe(ReverseProxy.name, () => { - const logger = new 
Logger(`${ReverseProxy.name} test`, LogLevel.DEBUG, [ + const logger = new Logger(`${ReverseProxy.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); let keyPairPem: KeyPairPem @@ -383,11 +383,14 @@ describe(ReverseProxy.name, () => { await revProxy.stop(); await serverClose(); }); - test.only('connect timeout due to hanging client', async () => { + test('connect timeout due to hanging client', async () => { // `connConnectTime` will affect ErrorConnectionComposeTimeout // `connKeepAliveTimeoutTime` will affect ErrorConnectionTimeout which is needed - // because failing to connect to the open connection - // doesn't automatically mean the connection is destroyed + // This should trigger both ErrorConnectionComposeTimeout and ErrorConnectionTimeout + // ErrorConnectionComposeTimeout results in a failed composition + // ErrorConnectionTimeout results in stopping the connection + // Failing to connect to the open connection doesn't + // automatically mean the connection is destroyed const revProxy = new ReverseProxy({ connConnectTime: 3000, connKeepAliveTimeoutTime: 3000, @@ -449,25 +452,10 @@ describe(ReverseProxy.name, () => { }); // The client connection times out await expect(utpConnErrorP).rejects.toThrow(/TIMED OUT/); - - console.log('000000'); - await utpConnClosedP; - - console.log('AAAA'); - await expect(serverConnP).resolves.toBeUndefined(); - - console.log('BBBB'); - await expect(serverConnEndP).resolves.toBeUndefined(); - - console.log('CCCC'); - await expect(serverConnClosedP).resolves.toBeUndefined(); - - console.log('DDDDD'); - // Connection count should reach 0 eventually await expect(poll( async () => { @@ -686,7 +674,7 @@ describe(ReverseProxy.name, () => { await revProxy.stop(); await serverClose(); }); - test.only('stopping the proxy with open connections', async () => { + test('stopping the proxy with open connections', async () => { const clientKeyPair = await keysUtils.generateKeyPair(1024); const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); const clientCert = keysUtils.generateCertificate( @@ -771,26 +759,12 @@ describe(ReverseProxy.name, () => { await clientReadyP; await clientSecureConnectP; await serverConnP; - - // await sleep(3000); - - console.log('BEFORE STOP'); - // Stopping with 1 active connection (not just opened) await revProxy.stop(); - - console.log('AFTER STOP'); - - expect(revProxy.getConnectionCount()).toBe(0); await clientCloseP; - - console.log('AAAAAAAAA'); - await expect(serverConnEndP).resolves.toBeUndefined(); - console.log('BBBBBBBB'); await expect(serverConnClosedP).resolves.toBeUndefined(); - console.log('CCCCCCCCC'); expect(tlsSocketEnded).toBe(true); utpSocket.off('message', handleMessage); utpSocket.close(); From e582ec1fbf80f5246e107f4045f442d2c252d528 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Wed, 29 Dec 2021 12:40:48 +1100 Subject: [PATCH 15/28] Make getLatestClaimId protected method as it is used before Sigchain is ready --- src/sigchain/Sigchain.ts | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/src/sigchain/Sigchain.ts b/src/sigchain/Sigchain.ts index 0d179082e..04677fd1a 100644 --- a/src/sigchain/Sigchain.ts +++ b/src/sigchain/Sigchain.ts @@ -423,21 +423,6 @@ class Sigchain { }); } - @ready(new sigchainErrors.ErrorSigchainNotRunning()) - public async getLatestClaimId(): Promise { - return await this._transaction(async () => { - let latestId: ClaimId | undefined; - const keyStream = this.sigchainClaimsDb.createKeyStream({ - limit: 1, - reverse: true, - }); - for 
await (const o of keyStream) { - latestId = o as any as ClaimId; - } - return latestId; - }); - } - @ready(new sigchainErrors.ErrorSigchainNotRunning()) public async getSeqMap(): Promise> { const map: Record = {}; @@ -462,6 +447,20 @@ class Sigchain { ); }); } + + protected async getLatestClaimId(): Promise { + return await this._transaction(async () => { + let latestId: ClaimId | undefined; + const keyStream = this.sigchainClaimsDb.createKeyStream({ + limit: 1, + reverse: true, + }); + for await (const o of keyStream) { + latestId = o as any as ClaimId; + } + return latestId; + }); + } } export default Sigchain; From a16f27c48c9a59841ef78772b4a7934b2d36efb8 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Wed, 29 Dec 2021 12:52:12 +1100 Subject: [PATCH 16/28] Sigchain.addClaim now returns ClaimId as well --- src/client/service/identitiesClaim.ts | 2 +- src/sigchain/Sigchain.ts | 7 ++++--- tests/bin/identities/identities.test.ts | 2 +- tests/discovery/Discovery.test.ts | 6 +++--- tests/sigchain/Sigchain.test.ts | 12 +++--------- 5 files changed, 12 insertions(+), 17 deletions(-) diff --git a/src/client/service/identitiesClaim.ts b/src/client/service/identitiesClaim.ts index 1304b6774..3b02d5b64 100644 --- a/src/client/service/identitiesClaim.ts +++ b/src/client/service/identitiesClaim.ts @@ -44,7 +44,7 @@ function identitiesClaim({ throw new identitiesErrors.ErrorProviderUnauthenticated(); } // Create identity claim on our node - const claim = await sigchain.addClaim({ + const [, claim] = await sigchain.addClaim({ type: 'identity', node: nodeManager.getNodeId(), provider: providerId, diff --git a/src/sigchain/Sigchain.ts b/src/sigchain/Sigchain.ts index 04677fd1a..779e8b0ab 100644 --- a/src/sigchain/Sigchain.ts +++ b/src/sigchain/Sigchain.ts @@ -220,7 +220,7 @@ class Sigchain { * Appends a claim (of any type) to the sigchain. 
*/ @ready(new sigchainErrors.ErrorSigchainNotRunning()) - public async addClaim(claimData: ClaimData): Promise { + public async addClaim(claimData: ClaimData): Promise<[ClaimId, ClaimEncoded]> { return await this._transaction(async () => { const prevSequenceNumber = await this.getSequenceNumber(); const newSequenceNumber = prevSequenceNumber + 1; @@ -232,11 +232,12 @@ class Sigchain { }); // Add the claim to the sigchain database, and update the sequence number + const claimId = this.generateClaimId(); const ops: Array = [ { type: 'put', domain: this.sigchainClaimsDbDomain, - key: idUtils.toBuffer(this.generateClaimId()), + key: idUtils.toBuffer(claimId), value: claim, }, { @@ -247,7 +248,7 @@ class Sigchain { }, ]; await this.db.batch(ops); - return claim; + return [claimId, claim]; }); } diff --git a/tests/bin/identities/identities.test.ts b/tests/bin/identities/identities.test.ts index d6d47c2e7..0244fe853 100644 --- a/tests/bin/identities/identities.test.ts +++ b/tests/bin/identities/identities.test.ts @@ -702,7 +702,7 @@ describe('CLI Identities', () => { provider: testProvider.id, identity: identityId, }; - const claimEncoded = await nodeB.sigchain.addClaim(claimIdentToB); + const [,claimEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claim = claimsUtils.decodeClaim(claimEncoded); await testProvider.publishClaim(identityId, claim); }, global.polykeyStartupTimeout * 2); diff --git a/tests/discovery/Discovery.test.ts b/tests/discovery/Discovery.test.ts index c4ea85b48..a679d6add 100644 --- a/tests/discovery/Discovery.test.ts +++ b/tests/discovery/Discovery.test.ts @@ -134,7 +134,7 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityId, }; - const claimEncoded = await nodeB.sigchain.addClaim(claimIdentToB); + const [,claimEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claim = await claimsUtils.decodeClaim(claimEncoded); await testProvider.publishClaim(identityId, claim); }, global.polykeyStartupTimeout * 3); @@ -264,7 +264,7 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityIdB, }; - const claimBEncoded = await nodeD.sigchain.addClaim(claimIdentToD); + const [,claimBEncoded] = await nodeD.sigchain.addClaim(claimIdentToD); const claimB = claimsUtils.decodeClaim(claimBEncoded); await testProvider.publishClaim(identityIdB, claimB); @@ -299,7 +299,7 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityIdA, }; - const claimAEncoded = await nodeB.sigchain.addClaim(claimIdentToB); + const [,claimAEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claimA = claimsUtils.decodeClaim(claimAEncoded); await testProvider.publishClaim(identityIdA, claimA); diff --git a/tests/sigchain/Sigchain.test.ts b/tests/sigchain/Sigchain.test.ts index 69f690670..06f63f21b 100644 --- a/tests/sigchain/Sigchain.test.ts +++ b/tests/sigchain/Sigchain.test.ts @@ -85,9 +85,6 @@ describe('Sigchain', () => { await expect(async () => { await sigchain.getSequenceNumber(); }).rejects.toThrow(sigchainErrors.ErrorSigchainNotRunning); - await expect(async () => { - await sigchain.getLatestClaimId(); - }).rejects.toThrow(sigchainErrors.ErrorSigchainNotRunning); }); test('async start initialises the sequence number', async () => { const sigchain = await Sigchain.createSigchain({ keyManager, db, logger }); @@ -102,9 +99,8 @@ describe('Sigchain', () => { node1: srcNodeId, node2: 'NodeId2' as NodeId, }; - await sigchain.addClaim(cryptolink); + const [claimId] = await sigchain.addClaim(cryptolink); - const claimId 
= await sigchain.getLatestClaimId(); expect(claimId).toBeTruthy(); const claim = await sigchain.getClaim(claimId!); @@ -147,16 +143,14 @@ describe('Sigchain', () => { node1: srcNodeId, node2: 'NodeId2' as NodeId, }; - await sigchain.addClaim(cryptolink); - const claimId1 = await sigchain.getLatestClaimId(); + const [claimId1] = await sigchain.addClaim(cryptolink); const cryptolink2: ClaimData = { type: 'node', node1: srcNodeId, node2: 'NodeId3' as NodeId, }; - await sigchain.addClaim(cryptolink2); - const claimId2 = await sigchain.getLatestClaimId(); + const [claimId2] = await sigchain.addClaim(cryptolink2); const claim1 = await sigchain.getClaim(claimId1!); const claim2 = await sigchain.getClaim(claimId2!); From 4d4a2eb6e6e87fcf174cf6a4f0a1cea441afa196 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Wed, 29 Dec 2021 14:24:31 +1100 Subject: [PATCH 17/28] Integrating WorkerManager into `pk agent start` with `--workers` The `--workers` option does not work in tests atm Also changed seed nodes keyword to `` Fixed tests according to new @matrixai/async-init integration Removed `setupRemoteKeynode` in favour of `PolykeyAgent` Replaced `addRemoteDetails` with `nodesConnect` in `testsNodesUtils` Tests involving status is not robust due to polling and lack of file locking Removed useless dummy mainnet and testnet nodes --- jest.config.js | 3 +- package-lock.json | 20 +- package.json | 7 +- src/bin/agent/CommandStart.ts | 19 +- src/bin/polykey-agent.ts | 14 +- src/bin/types.ts | 1 + src/bin/utils/options.ts | 8 + src/bin/utils/parsers.ts | 12 +- src/client/service/agentStop.ts | 5 +- src/config.ts | 16 +- src/sessions/Session.ts | 2 - src/status/Status.ts | 2 +- src/workers/index.ts | 2 +- tests/bin/agent/start.test.ts | 310 +++++++----------- tests/bin/agent/status.test.ts | 10 +- tests/bin/agent/stop.test.ts | 26 +- tests/bin/identities/identities.test.ts | 42 ++- tests/bin/nodes/claim.test.ts | 20 +- tests/bin/nodes/find.test.ts | 31 +- tests/bin/nodes/ping.test.ts | 28 +- tests/client/rpcNodes.test.ts | 41 ++- tests/client/rpcNotifications.test.ts | 43 ++- tests/client/service/agentStop.test.ts | 15 +- tests/discovery/Discovery.test.ts | 134 +++++--- tests/gestalts/GestaltGraph.test.ts | 9 +- tests/global.d.ts | 1 - tests/globalSetup.ts | 1 - tests/identities/IdentitiesManager.test.ts | 9 +- tests/index.test.ts | 34 +- tests/keys/KeyManager.test.ts | 17 +- tests/nodes/NodeConnection.test.ts | 19 +- tests/nodes/NodeGraph.test.ts | 11 +- tests/nodes/NodeManager.test.ts | 150 ++++++--- tests/nodes/utils.ts | 19 +- .../NotificationsManager.test.ts | 19 +- tests/sessions/Session.test.ts | 6 - tests/sessions/SessionManager.test.ts | 6 +- tests/utils.ts | 79 +---- tests/vaults/VaultManager.test.ts | 33 +- 39 files changed, 709 insertions(+), 515 deletions(-) diff --git a/jest.config.js b/jest.config.js index 9febd9bab..fb0524bfc 100644 --- a/jest.config.js +++ b/jest.config.js @@ -21,11 +21,10 @@ const globals = { projectDir: __dirname, // Absolute directory to the test root testDir: path.join(__dirname, 'tests'), + // Default global data directory dataDir: fs.mkdtempSync( path.join(os.tmpdir(), 'polykey-test-global-'), ), - // Default global password for global agent or global key - password: 'password', // Default asynchronous test timeout defaultTimeout: 20000, polykeyStartupTimeout: 30000, diff --git a/package-lock.json b/package-lock.json index 986ca41ec..bf05be89d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1574,9 +1574,9 @@ } }, "@matrixai/async-init": { - "version": "1.5.0", - 
"resolved": "https://registry.npmjs.org/@matrixai/async-init/-/async-init-1.5.0.tgz", - "integrity": "sha512-a9bUjHufSnYskWKSXT+cgz1Wwi8FhVZddKeMB4x6RG/jhN2dgPJIYb+Ru8PmU97fmE7nHwXtznvPvLa1Rvp98Q==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@matrixai/async-init/-/async-init-1.6.0.tgz", + "integrity": "sha512-I24u6McZnSH2yX1l5e2H3O/Lu8IVb2fM/sVbDeRYrzejV2XLv/9g/goz2fglSrXgJ877BBFJNW2GMxVzvvyA5A==", "requires": { "async-mutex": "^0.3.2", "ts-custom-error": "^3.2.0" @@ -1634,11 +1634,11 @@ "integrity": "sha512-UmLuXi2PJ03v0Scfl57217RPnjEZDRLlpfdIjIwCfju+kofnhhCI9P7OZu3/FgW147vbvSzWCrrtpwJiLROUUA==" }, "@matrixai/workers": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@matrixai/workers/-/workers-1.2.3.tgz", - "integrity": "sha512-IRhUy25BnjjFn1d96Q7ZtGkqaWR8GgK70QHeoVy/WDzC0dfWLzlxPwu0D5t1/CilXYKOOnHgbC3EareSnbSorQ==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@matrixai/workers/-/workers-1.2.5.tgz", + "integrity": "sha512-ikI4K6RGKQbG68it7TXJJ5wX2csW+WpokUehTnz5r66d7o6FC3PkojE46LPLCDSwk3NVCGoQ743OZS2nuA8SRA==", "requires": { - "@matrixai/logger": "^2.0.1", + "@matrixai/logger": "^2.1.0", "threads": "^1.6.5", "ts-custom-error": "^3.2.0" } @@ -5823,6 +5823,12 @@ "integrity": "sha512-ZZUKRlEBizutngoO4KngzN30YoeAYP3nnwimk4cpi9WqLxQUf6SlAPK5p1D9usEpxDS3Uif2MIez3Bq0vGYR+g==", "dev": true }, + "jest-mock-props": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/jest-mock-props/-/jest-mock-props-1.9.0.tgz", + "integrity": "sha512-8IlIiZRvovnRuvqcvWZyDv4CyhrUGTbEW/1eKurHr2JY4VhIWQIPlbpt9lqL2nxdGnco+OcgpPBwGYCEeDb2+A==", + "dev": true + }, "jest-pnp-resolver": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz", diff --git a/package.json b/package.json index ed7d51016..d560c3458 100644 --- a/package.json +++ b/package.json @@ -71,11 +71,11 @@ }, "dependencies": { "@grpc/grpc-js": "1.3.7", - "@matrixai/async-init": "^1.5.0", + "@matrixai/async-init": "^1.6.0", "@matrixai/db": "^1.1.2", "@matrixai/id": "^2.1.0", "@matrixai/logger": "^2.1.0", - "@matrixai/workers": "^1.2.3", + "@matrixai/workers": "^1.2.5", "ajv": "^7.0.4", "async-mutex": "^0.2.4", "bip39": "^3.0.3", @@ -104,7 +104,6 @@ "uuid": "^8.3.0" }, "devDependencies": { - "node-gyp-build": "4.2.3", "@babel/preset-env": "^7.13.10", "@types/cross-spawn": "^6.0.2", "@types/google-protobuf": "^3.7.4", @@ -126,8 +125,10 @@ "grpc_tools_node_protoc_ts": "^5.1.3", "jest": "^26.6.3", "jest-mock-process": "^1.4.1", + "jest-mock-props": "^1.9.0", "mocked-env": "^1.3.5", "nexpect": "^0.6.0", + "node-gyp-build": "4.2.3", "pkg": "5.3.0", "prettier": "^2.2.1", "ts-jest": "^26.4.4", diff --git a/src/bin/agent/CommandStart.ts b/src/bin/agent/CommandStart.ts index df69d403c..eb9f6f0ec 100644 --- a/src/bin/agent/CommandStart.ts +++ b/src/bin/agent/CommandStart.ts @@ -2,6 +2,7 @@ import type { StdioOptions } from 'child_process'; import type { AgentChildProcessInput, AgentChildProcessOutput } from '../types'; import type PolykeyAgent from '../../PolykeyAgent'; import type { RecoveryCode } from '../../keys/types'; +import type { PolykeyWorkerManagerInterface } from '../../workers/types'; import path from 'path'; import child_process from 'child_process'; import process from 'process'; @@ -26,6 +27,7 @@ class CommandStart extends CommandPolykey { this.addOption(binOptions.connTimeoutTime); this.addOption(binOptions.seedNodes); this.addOption(binOptions.network); + this.addOption(binOptions.workers); 
this.addOption(binOptions.background); this.addOption(binOptions.backgroundOutFile); this.addOption(binOptions.backgroundErrFile); @@ -36,6 +38,7 @@ class CommandStart extends CommandPolykey { options.clientPort = options.clientPort ?? config.defaults.networkConfig.clientPort; const { default: PolykeyAgent } = await import('../../PolykeyAgent'); + const { WorkerManager, utils: workersUtils } = await import('../../workers'); let password: string | undefined; if (options.fresh) { // If fresh, then get a new password @@ -165,6 +168,7 @@ class CommandStart extends CommandPolykey { }); const messageIn: AgentChildProcessInput = { logLevel: this.logger.getEffectiveLevel(), + workers: options.workers, agentConfig, }; agentProcess.send(messageIn, (e) => { @@ -180,15 +184,26 @@ class CommandStart extends CommandPolykey { // Change process name to polykey-agent process.title = 'polykey-agent'; // eslint-disable-next-line prefer-const - let pkAgent: PolykeyAgent | undefined; + let pkAgent: PolykeyAgent; + // eslint-disable-next-line prefer-const + let workerManager: PolykeyWorkerManagerInterface; this.exitHandlers.handlers.push(async () => { - if (pkAgent != null) await pkAgent.stop(); + pkAgent?.unsetWorkerManager(); + await workerManager?.destroy(); + await pkAgent?.stop(); }); pkAgent = await PolykeyAgent.createPolykeyAgent({ fs: this.fs, logger: this.logger.getChild(PolykeyAgent.name), ...agentConfig, }); + if (options.workers !== 0) { + workerManager = await workersUtils.createWorkerManager({ + cores: options.workers, + logger: this.logger.getChild(WorkerManager.name), + }); + pkAgent.setWorkerManager(workerManager); + } recoveryCodeOut = pkAgent.keyManager.getRecoveryCode(); } // Recovery code is only available if it was newly generated diff --git a/src/bin/polykey-agent.ts b/src/bin/polykey-agent.ts index 7e06e3a6f..3689bd201 100644 --- a/src/bin/polykey-agent.ts +++ b/src/bin/polykey-agent.ts @@ -5,6 +5,7 @@ * @module */ import type { AgentChildProcessInput, AgentChildProcessOutput } from './types'; +import type { PolykeyWorkerManagerInterface } from '../workers/types'; import fs from 'fs'; import process from 'process'; /** @@ -21,6 +22,7 @@ process.removeAllListeners('SIGTERM'); import Logger, { StreamHandler } from '@matrixai/logger'; import * as binUtils from './utils'; import PolykeyAgent from '../PolykeyAgent'; +import { WorkerManager, utils as workersUtils } from '../workers'; import ErrorPolykey from '../ErrorPolykey'; import { promisify, promise } from '../utils'; @@ -42,8 +44,11 @@ async function main(_argv = process.argv): Promise { const messageIn = await messageInP; logger.setLevel(messageIn.logLevel); let pkAgent: PolykeyAgent; + let workerManager: PolykeyWorkerManagerInterface; exitHandlers.handlers.push(async () => { - if (pkAgent != null) await pkAgent.stop(); + pkAgent?.unsetWorkerManager(); + await workerManager?.destroy(); + await pkAgent?.stop(); }); try { pkAgent = await PolykeyAgent.createPolykeyAgent({ @@ -51,6 +56,13 @@ async function main(_argv = process.argv): Promise { logger: logger.getChild(PolykeyAgent.name), ...messageIn.agentConfig, }); + if (messageIn.workers !== 0) { + workerManager = await workersUtils.createWorkerManager({ + cores: messageIn.workers, + logger: logger.getChild(WorkerManager.name), + }); + pkAgent.setWorkerManager(workerManager); + } } catch (e) { if (e instanceof ErrorPolykey) { process.stderr.write( diff --git a/src/bin/types.ts b/src/bin/types.ts index 79c281887..a842f9033 100644 --- a/src/bin/types.ts +++ b/src/bin/types.ts @@ -11,6 +11,7 
@@ import type { Host, Port } from '../network/types'; */ type AgentChildProcessInput = { logLevel: LogLevel; + workers?: number; agentConfig: { password: string; nodePath?: string; diff --git a/src/bin/utils/options.ts b/src/bin/utils/options.ts index ea95d2d9e..1f4047a07 100644 --- a/src/bin/utils/options.ts +++ b/src/bin/utils/options.ts @@ -141,6 +141,13 @@ const network = new commander.Option( .env('PK_NETWORK') .default(config.defaults.network.mainnet); +const workers = new commander.Option( + '-w --workers ', + 'Number of workers to use, defaults to number of cores with `all`, 0 means no multi-threading' +) + .argParser(binParsers.parseCoreCount) + .default(undefined); + export { nodePath, format, @@ -161,4 +168,5 @@ export { rootKeyPairBits, seedNodes, network, + workers, }; diff --git a/src/bin/utils/parsers.ts b/src/bin/utils/parsers.ts index 3778d8000..83ca45876 100644 --- a/src/bin/utils/parsers.ts +++ b/src/bin/utils/parsers.ts @@ -15,6 +15,13 @@ function parseNumber(v: string): number { return num; } +function parseCoreCount(v: string): number | undefined { + if (v === 'all') { + return undefined; + } + return parseNumber(v); +} + function parseSecretPath( secretPath: string, ): [string, string, string | undefined] { @@ -92,7 +99,7 @@ function getDefaultSeedNodes(network: string): NodeMapping { * Seed nodes expected to be of form 'nodeId1@host:port;nodeId2@host:port;...' * By default, any specified seed nodes (in CLI option, or environment variable) * will overwrite the default nodes in src/config.ts. - * Special flag '' in the content indicates that the default seed + * Special flag `` indicates that the default seed * nodes should be added to the starting seed nodes instead of being overwritten. */ function parseSeedNodes(rawSeedNodes: string): [NodeMapping, boolean] { @@ -105,7 +112,7 @@ function parseSeedNodes(rawSeedNodes: string): [NodeMapping, boolean] { // Empty string will occur if there's an extraneous ';' (e.g. at end of env) if (rawSeedNode === '') continue; // Append the default seed nodes if we encounter the special flag - if (rawSeedNode === '') { + if (rawSeedNode === '') { defaults = true; continue; } @@ -148,6 +155,7 @@ function parseNetwork(network: string): NodeMapping { export { parseNumber, + parseCoreCount, parseSecretPath, parseGestaltId, getDefaultSeedNodes, diff --git a/src/client/service/agentStop.ts b/src/client/service/agentStop.ts index a1a67d69d..2e4fca322 100644 --- a/src/client/service/agentStop.ts +++ b/src/client/service/agentStop.ts @@ -1,7 +1,7 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type PolykeyAgent from '../../PolykeyAgent'; -import { running } from '@matrixai/async-init'; +import { status, running } from '@matrixai/async-init'; import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; @@ -17,7 +17,8 @@ function agentStop ({ callback: grpc.sendUnaryData, ): Promise => { const response = new utilsPB.EmptyMessage(); - if (!pkAgent[running]) { + // If not running or in stopping status, then respond successfully + if (!pkAgent[running] || pkAgent[status] === 'stopping') { callback(null, response); return; } diff --git a/src/config.ts b/src/config.ts index fa3bf0a81..14391d6e9 100644 --- a/src/config.ts +++ b/src/config.ts @@ -92,20 +92,10 @@ const config = { connConnectTime: 20000, connTimeoutTime: 20000, }, - // Note: this is not used by the `PolykeyAgent`, that is defaulting to `{}`. 
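The hard-coded mainnet and testnet entries are removed just below; the mapping keeps its shape of node ID to address, so seed nodes now have to be supplied explicitly, either on the CLI via `--seed-nodes`/`PK_SEED_NODES` or, in the tests later in this patch, by mocking `config.defaults.network` with `jest-mock-props`. For reference, a populated entry has this shape (the node ID and address below are placeholders, not real seed nodes):

    network: {
      mainnet: {
        // 'v...someNodeId...': { host: 'seed.example.com', port: 1314 },
      },
      testnet: {},
    },
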
+ // This is not used by the `PolykeyAgent` with defaults to `{}` network: { - mainnet: { - v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug: { - host: 'testnet.polykey.io', - port: 1314, - }, - }, - testnet: { - v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug: { - host: '127.0.0.3', - port: 1314, - }, - }, + mainnet: { }, + testnet: { }, }, }, }; diff --git a/src/sessions/Session.ts b/src/sessions/Session.ts index c4f53091a..8685158e0 100644 --- a/src/sessions/Session.ts +++ b/src/sessions/Session.ts @@ -94,7 +94,6 @@ class Session { this.logger.info(`Destroyed ${this.constructor.name}`); } - @ready(new sessionErrors.ErrorSessionNotRunning()) public async readToken(): Promise { let sessionTokenFile; try { @@ -126,7 +125,6 @@ class Session { } } - @ready(new sessionErrors.ErrorSessionNotRunning()) public async writeToken(sessionToken: SessionToken): Promise { let sessionTokenFile; try { diff --git a/src/status/Status.ts b/src/status/Status.ts index 96d7aa1a9..7b46a6a48 100644 --- a/src/status/Status.ts +++ b/src/status/Status.ts @@ -164,7 +164,7 @@ class Status { if (statusInfo?.status === status) return true; return false; }, - 250, + 50, timeout, ); if (statusInfo == null) { diff --git a/src/workers/index.ts b/src/workers/index.ts index a6cf25ab7..7ff9ca454 100644 --- a/src/workers/index.ts +++ b/src/workers/index.ts @@ -1,5 +1,5 @@ +export { WorkerManager } from '@matrixai/workers'; export { default as polykeyWorker } from './polykeyWorkerModule'; export * as utils from './utils'; - export type { PolykeyWorkerModule } from './polykeyWorkerModule'; export type { PolykeyWorkerManagerInterface } from './types'; diff --git a/tests/bin/agent/start.test.ts b/tests/bin/agent/start.test.ts index 8af379c8f..e37da7a9f 100644 --- a/tests/bin/agent/start.test.ts +++ b/tests/bin/agent/start.test.ts @@ -3,12 +3,14 @@ import os from 'os'; import path from 'path'; import fs from 'fs'; import readline from 'readline'; +import * as jestMockProps from 'jest-mock-props'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { PolykeyAgent } from '@'; import { Status, errors as statusErrors } from '@/status'; import config from '@/config'; -import * as nodesUtils from '@/nodes/utils'; import * as testBinUtils from '../utils'; import * as testUtils from '../../utils'; +import { sleep } from '@/utils'; describe('start', () => { const logger = new Logger('start test', LogLevel.WARN, [new StreamHandler()]); @@ -36,6 +38,8 @@ describe('start', () => { path.join(dataDir, 'polykey'), '--root-key-pair-bits', '1024', + '--workers', + '0', '--verbose', ], { @@ -90,6 +94,8 @@ describe('start', () => { path.join(dataDir, 'out.log'), '--background-err-file', path.join(dataDir, 'err.log'), + '--workers', + '0', '--verbose', ], { @@ -153,7 +159,7 @@ describe('start', () => { // One of these processes is blocked const [agentProcess1, agentProcess2] = await Promise.all([ testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -162,7 +168,7 @@ describe('start', () => { logger.getChild('agentProcess1'), ), testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -227,7 +233,7 @@ describe('start', () => { // One of these 
processes is blocked const [agentProcess, bootstrapProcess] = await Promise.all([ testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -299,7 +305,7 @@ describe('start', () => { async () => { const password = 'abc123'; const agentProcess1 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -319,7 +325,7 @@ describe('start', () => { expect(exitCode1).toBe(null); expect(signal1).toBe('SIGHUP'); const agentProcess2 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -350,7 +356,7 @@ describe('start', () => { async () => { const password = 'password'; const agentProcess1 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -384,6 +390,8 @@ describe('start', () => { 'start', '--root-key-pair-bits', '1024', + '--workers', + '0', '--fresh', '--verbose', ], @@ -437,6 +445,8 @@ describe('start', () => { path.join(dataDir, 'polykey'), '--root-key-pair-bits', '1024', + '--workers', + '0', '--verbose', ], { @@ -466,6 +476,8 @@ describe('start', () => { recoveryCodePath, '--root-key-pair-bits', '2048', + '--workers', + '0', '--verbose', ], { @@ -483,7 +495,7 @@ describe('start', () => { await testBinUtils.processExit(agentProcess2); // Check that the password has changed const agentProcess3 = await testBinUtils.pkSpawn( - ['agent', 'start', '--verbose'], + ['agent', 'start', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password2, @@ -504,7 +516,7 @@ describe('start', () => { recursive: true, }); const agentProcess4 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password2, @@ -542,6 +554,8 @@ describe('start', () => { 'start', '--root-key-pair-bits', '1024', + '--workers', + '0', '--client-host', clientHost, '--client-port', @@ -571,244 +585,160 @@ describe('start', () => { }, global.defaultTimeout * 2, ); - describe('seed nodes', () => { - const connTimeoutTime = 500; - const seedNodeHost = '127.0.0.1'; - const dummySeed1Id = nodesUtils.makeNodeId( - 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', - ); - const dummySeed1Host = '128.0.0.1'; - const dummySeed1Port = 1314; - const dummySeed2Id = nodesUtils.makeNodeId( - 'vrcacp9vsb4ht25hds6s4lpp2abfaso0mptcfnh499n35vfcn2gkg', - ); - const dummySeed2Host = '128.0.0.1'; - const dummySeed2Port = 1314; - let globalAgentDir; + describe('start with global agent', () => { + let globalAgentStatus; let globalAgentClose; - let seedNodeId; - let seedNodePort; + let agentDataDir; + let agent: PolykeyAgent; + let seedNodeId1; + let seedNodeHost1; + let seedNodePort1; + let seedNodeId2; + let seedNodeHost2; + let seedNodePort2; 
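These seed node tests lean on the shared `testUtils.setupGlobalAgent` helper plus one extra locally created agent. The helper itself sits outside this excerpt; going only by how it is used below, its contract looks roughly like this (a sketch of the assumed shape inferred from usage, not the helper's actual code):

    // tests/utils.ts (assumed shape, inferred from usage below)
    const { globalAgentStatus, globalAgentClose } = await testUtils.setupGlobalAgent(logger);
    globalAgentStatus.data.nodeId;       // used as the first seed node's ID
    globalAgentStatus.data.ingressHost;  // ...and its ingress host
    globalAgentStatus.data.ingressPort;  // ...and its ingress port
    await globalAgentClose();            // called once in afterAll to tear it down
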
beforeAll(async () => { ({ - globalAgentDir, + globalAgentStatus, globalAgentClose } = await testUtils.setupGlobalAgent(logger)); - const status = new Status({ - statusPath: path.join(globalAgentDir, config.defaults.statusBase), - fs, + // Additional seed node + agentDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + agent = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(agentDataDir, 'agent'), + keysConfig: { + rootKeyPairBits: 1024 + }, logger, }); - const statusInfo = await status.waitFor('LIVE'); - // Get the dynamic seed node components - seedNodeId = statusInfo.data.nodeId; - seedNodePort = statusInfo.data.ingressPort; - }, global.maxTimeout); + seedNodeId1 = globalAgentStatus.data.nodeId; + seedNodeHost1 = globalAgentStatus.data.ingressHost; + seedNodePort1 = globalAgentStatus.data.ingressPort; + seedNodeId2 = agent.keyManager.getNodeId(); + seedNodeHost2 = agent.grpcServerAgent.host; + seedNodePort2 = agent.grpcServerAgent.port; + }, globalThis.maxTimeout); afterAll(async () => { + await agent.stop(); await globalAgentClose(); + await fs.promises.rm(agentDataDir, { + force: true, + recursive: true, + }); }); - test( - 'start with seed nodes as argument', + 'start with seed nodes option', async () => { const password = 'abc123'; - const passwordPath = path.join(dataDir, 'password'); - await fs.promises.writeFile(passwordPath, password); const nodePath = path.join(dataDir, 'polykey'); - + const statusPath = path.join(nodePath, 'status.json'); + const status = new Status({ + statusPath, + fs, + logger, + }); + const mockedConfigDefaultsNetwork = jestMockProps.spyOnProp( + config.defaults, + 'network' + ).mockValue({ + mainnet: { + [seedNodeId2]: { + host: seedNodeHost2, + port: seedNodePort2 + } + }, + testnet: {} + }); await testBinUtils.pkStdio( [ 'agent', 'start', - '--node-path', - nodePath, - '--password-file', - passwordPath, '--root-key-pair-bits', '1024', + '--workers', + '0', '--seed-nodes', - `${seedNodeId}@${seedNodeHost}:${seedNodePort};${dummySeed1Id}@${dummySeed1Host}:${dummySeed1Port}`, - '--connection-timeout', - connTimeoutTime.toString(), + `${seedNodeId1}@${seedNodeHost1}:${seedNodePort1};`, + '--network', + 'mainnet', '--verbose', ], { - PK_SEED_NODES: `${dummySeed2Id}@${dummySeed2Host}:${dummySeed2Port}`, + PK_NODE_PATH: nodePath, + PK_PASSWORD: password, }, dataDir, ); - const statusPath = path.join(nodePath, 'status.json'); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('LIVE', 2000); - - // Check the seed nodes have been added to the node graph - const foundSeedNode = await testBinUtils.pkStdio([ - 'nodes', - 'find', - seedNodeId, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundSeedNode.exitCode).toBe(0); - expect(foundSeedNode.stdout).toContain( - `Found node at ${seedNodeHost}:${seedNodePort}`, - ); - const foundDummy1 = await testBinUtils.pkStdio([ - 'nodes', - 'find', - dummySeed1Id, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundDummy1.exitCode).toBe(0); - expect(foundDummy1.stdout).toContain( - `Found node at ${dummySeed1Host}:${dummySeed1Port}`, - ); - // Check the seed node in the environment variable was superseded by the - // ones provided as CLI arguments - const notFoundDummy2 = await testBinUtils.pkStdio([ - 'nodes', - 'find', - dummySeed2Id, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); 
- expect(notFoundDummy2.exitCode).toBe(1); - expect(notFoundDummy2.stdout).toContain( - `Failed to find node ${dummySeed2Id}`, - ); await testBinUtils.pkStdio( [ 'agent', 'stop', - '--node-path', - nodePath, - '--password-file', - passwordPath, ], - undefined, + { + PK_NODE_PATH: nodePath, + PK_PASSWORD: password, + }, dataDir, ); - await status.waitFor('DEAD', 5000); + mockedConfigDefaultsNetwork.mockRestore(); + await status.waitFor('DEAD'); }, global.defaultTimeout * 2, ); - - test( - 'start with seed nodes from environment variable and config file', - async () => { + test('start with seed nodes environment variable', async () => { const password = 'abc123'; - const passwordPath = path.join(dataDir, 'password'); - await fs.promises.writeFile(passwordPath, password); const nodePath = path.join(dataDir, 'polykey'); - + const statusPath = path.join(nodePath, 'status.json'); + const status = new Status({ + statusPath, + fs, + logger, + }); + const mockedConfigDefaultsNetwork = jestMockProps.spyOnProp( + config.defaults, + 'network' + ).mockValue({ + mainnet: { }, + testnet: { + [seedNodeId2]: { + host: seedNodeHost2, + port: seedNodePort2 + } + } + }); await testBinUtils.pkStdio( [ 'agent', 'start', - '--node-path', - nodePath, - '--password-file', - passwordPath, '--root-key-pair-bits', '1024', - '--connection-timeout', - connTimeoutTime.toString(), + '--workers', + '0', '--verbose', ], { - PK_SEED_NODES: - `${seedNodeId}@${seedNodeHost}:${seedNodePort};` + - `${dummySeed1Id}@${dummySeed1Host}:${dummySeed1Port};` + - ``, + PK_NODE_PATH: nodePath, + PK_PASSWORD: password, + PK_SEED_NODES: `;${seedNodeId1}@${seedNodeHost1}:${seedNodePort1}`, + PK_NETWORK: 'testnet' }, dataDir, ); - const statusPath = path.join(nodePath, 'status.json'); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('LIVE', 2000); - - // Check the seed nodes have been added to the node graph - const foundSeedNode = await testBinUtils.pkStdio([ - 'nodes', - 'find', - seedNodeId, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundSeedNode.exitCode).toBe(0); - expect(foundSeedNode.stdout).toContain( - `Found node at ${seedNodeHost}:${seedNodePort}`, - ); - const foundDummy1 = await testBinUtils.pkStdio([ - 'nodes', - 'find', - dummySeed1Id, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundDummy1.exitCode).toBe(0); - expect(foundDummy1.stdout).toContain( - `Found node at ${dummySeed1Host}:${dummySeed1Port}`, - ); - // Check the seed node/s in config file were added from the flag - for (const configId in config.defaults.network.mainnet) { - const address = config.defaults.network.mainnet[configId]; - expect(address.host).toBeDefined(); - expect(address.port).toBeDefined(); - const foundConfig = await testBinUtils.pkStdio([ - 'nodes', - 'find', - configId, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundConfig.exitCode).toBe(0); - expect(foundConfig.stdout).toContain( - `Found node at ${address.host}:${address.port}`, - ); - } - await testBinUtils.pkStdio( [ 'agent', 'stop', - '--node-path', - nodePath, - '--password-file', - passwordPath, ], - undefined, + { + PK_NODE_PATH: nodePath, + PK_PASSWORD: password, + }, dataDir, ); - await status.waitFor('DEAD', 5000); - }, - global.defaultTimeout * 2, - ); + mockedConfigDefaultsNetwork.mockRestore(); + await status.waitFor('DEAD'); + }, global.defaultTimeout * 2); }); }); diff --git 
a/tests/bin/agent/status.test.ts b/tests/bin/agent/status.test.ts index 8cf706ed5..af37810c7 100644 --- a/tests/bin/agent/status.test.ts +++ b/tests/bin/agent/status.test.ts @@ -35,7 +35,7 @@ describe('status', () => { logger, }); const agentProcess = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -60,8 +60,10 @@ describe('status', () => { pid: agentProcess.pid, }); await status.waitFor('LIVE'); + const agentProcessExit = testBinUtils.processExit(agentProcess); agentProcess.kill('SIGTERM'); - await status.waitFor('STOPPING'); + // Cannot wait for STOPPING because waitFor polling may miss the transition + await status.waitFor('DEAD'); ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json'], { @@ -72,11 +74,11 @@ describe('status', () => { )); expect(exitCode).toBe(0); // If the command was slow, it may have become DEAD already + // If it is DEAD, then pid property will be `undefined` expect(JSON.parse(stdout)).toMatchObject({ status: expect.stringMatching(/STOPPING|DEAD/), - pid: agentProcess.pid, }); - await testBinUtils.processExit(agentProcess); + await agentProcessExit; ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json'], { diff --git a/tests/bin/agent/stop.test.ts b/tests/bin/agent/stop.test.ts index 889b64590..4ae96b641 100644 --- a/tests/bin/agent/stop.test.ts +++ b/tests/bin/agent/stop.test.ts @@ -34,6 +34,8 @@ describe('stop', () => { // 1024 is the smallest size and is faster to start '--root-key-pair-bits', '1024', + '--workers', + '0', ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), @@ -77,6 +79,8 @@ describe('stop', () => { // 1024 is the smallest size and is faster to start '--root-key-pair-bits', '1024', + '--workers', + '0', ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), @@ -85,6 +89,7 @@ describe('stop', () => { dataDir, ); expect(exitCode).toBe(0); + await status.waitFor('LIVE'); // Simultaneous calls to stop must use pkExec const [agentStop1, agentStop2] = await Promise.all([ testBinUtils.pkExec( @@ -102,7 +107,10 @@ describe('stop', () => { dataDir, ), ]); - await status.waitFor('STOPPING'); + // Cannot await for STOPPING + // It's not reliable until file watching is implemented + // So just 1 ms delay until sending another stop command + await sleep(1); const agentStop3 = await testBinUtils.pkStdio( ['agent', 'stop', '--node-path', path.join(dataDir, 'polykey')], { @@ -118,8 +126,16 @@ describe('stop', () => { }, dataDir, ); - expect(agentStop1.exitCode).toBe(0); - expect(agentStop2.exitCode).toBe(0); + // If the GRPC server gets closed after the GRPC connection is established + // then it's possible that one of these exit codes is 1 + if (agentStop1.exitCode === 1) { + expect(agentStop2.exitCode).toBe(0); + } else if (agentStop2.exitCode === 1) { + expect(agentStop1.exitCode).toBe(0); + } else { + expect(agentStop1.exitCode).toBe(0); + expect(agentStop2.exitCode).toBe(0); + } expect(agentStop3.exitCode).toBe(0); expect(agentStop4.exitCode).toBe(0); }, @@ -141,6 +157,8 @@ describe('stop', () => { // 1024 is the smallest size and is faster to start '--root-key-pair-bits', '1024', + '--workers', + '0', '--verbose', ], { @@ -187,6 +205,8 @@ describe('stop', () => { // 1024 is the smallest size and is faster to start '--root-key-pair-bits', '1024', + '--workers', + '0', ], { PK_NODE_PATH: 
path.join(dataDir, 'polykey'), diff --git a/tests/bin/identities/identities.test.ts b/tests/bin/identities/identities.test.ts index 0244fe853..11947b918 100644 --- a/tests/bin/identities/identities.test.ts +++ b/tests/bin/identities/identities.test.ts @@ -14,7 +14,7 @@ import { makeNodeId } from '@/nodes/utils'; import * as claimsUtils from '@/claims/utils'; import * as identitiesUtils from '@/identities/utils'; import * as testBinUtils from '../utils'; -import * as testUtils from '../../utils'; +import * as testNodesUtils from '../../nodes/utils'; import TestProvider from '../../identities/TestProvider'; jest.mock('@/keys/utils', () => ({ @@ -656,25 +656,42 @@ describe('CLI Identities', () => { }); }); describe('commandDiscoverGestalts', () => { + let rootDataDir; // Test variables let nodeB: PolykeyAgent; let nodeC: PolykeyAgent; // Let testProvider: TestProvider; let identityId: IdentityId; - beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); // Setup the remote gestalt state here // Setting up remote nodes. - nodeB = await testUtils.setupRemoteKeynode({ logger }); - nodeC = await testUtils.setupRemoteKeynode({ logger }); + nodeB = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeB'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); + nodeC = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeC'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); // Forming links // B->C // Adding connection details. - await testUtils.addRemoteDetails(polykeyAgent, nodeB); - await testUtils.addRemoteDetails(nodeB, polykeyAgent); - await testUtils.addRemoteDetails(nodeB, nodeC); - await testUtils.addRemoteDetails(nodeC, nodeB); + await testNodesUtils.nodesConnect(polykeyAgent, nodeB); + await testNodesUtils.nodesConnect(nodeB, polykeyAgent); + await testNodesUtils.nodesConnect(nodeB, nodeC); + await testNodesUtils.nodesConnect(nodeC, nodeB); // Adding sigchain details. const claimBtoC: ClaimLinkNode = { type: 'node', @@ -707,12 +724,15 @@ describe('CLI Identities', () => { await testProvider.publishClaim(identityId, claim); }, global.polykeyStartupTimeout * 2); afterAll(async () => { - // Clean up the remote gestalt state here. - await testUtils.cleanupRemoteKeynode(nodeB); - await testUtils.cleanupRemoteKeynode(nodeC); + await nodeC.stop(); + await nodeB.stop(); // Unclaim identity testProvider.links = {}; testProvider.linkIdCounter = 0; + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); }); afterEach(async () => { // Clean the local nodes gestalt graph here. 
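The identities, claim, find and ping tests now wire agents together with `testNodesUtils.nodesConnect` instead of the removed `testUtils.addRemoteDetails`. The helper lives in `tests/nodes/utils.ts`, which is not shown in this excerpt; a minimal sketch of what such a helper can look like, assuming it only records the remote agent's ingress address in the local node graph the same way the claim test does explicitly with `nodeManager.setNode`:

    // tests/nodes/utils.ts (sketch only, not the actual implementation)
    import type PolykeyAgent from '@/PolykeyAgent';

    async function nodesConnect(localNode: PolykeyAgent, remoteNode: PolykeyAgent) {
      // Tell the local node how to reach the remote node's reverse proxy
      await localNode.nodeManager.setNode(remoteNode.nodeManager.getNodeId(), {
        host: remoteNode.revProxy.getIngressHost(),
        port: remoteNode.revProxy.getIngressPort(),
      });
    }

    export { nodesConnect };
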
diff --git a/tests/bin/nodes/claim.test.ts b/tests/bin/nodes/claim.test.ts index a0dd54331..e2f40d815 100644 --- a/tests/bin/nodes/claim.test.ts +++ b/tests/bin/nodes/claim.test.ts @@ -6,6 +6,7 @@ import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import * as testBinUtils from '../utils'; +import * as testNodesUtils from '../../nodes/utils'; import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ @@ -19,6 +20,7 @@ describe('claim', () => { const logger = new Logger('claim test', LogLevel.WARN, [ new StreamHandler(), ]); + let rootDataDir: string; let dataDir: string; let nodePath: string; let passwordFile: string; @@ -36,6 +38,9 @@ describe('claim', () => { } beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -49,13 +54,18 @@ describe('claim', () => { }); keynodeId = polykeyAgent.nodeManager.getNodeId(); // Setting up a remote keynode - remoteOnline = await testUtils.setupRemoteKeynode({ + remoteOnline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOnline'), + keysConfig: { + rootKeyPairBits: 2048 + }, logger, }); remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); - await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOnline); await remoteOnline.nodeManager.setNode(keynodeId, { host: polykeyAgent.revProxy.getIngressHost(), @@ -91,7 +101,11 @@ describe('claim', () => { afterAll(async () => { await polykeyAgent.stop(); await polykeyAgent.destroy(); - await testUtils.cleanupRemoteKeynode(remoteOnline); + await remoteOnline.stop(); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); await fs.promises.rm(dataDir, { force: true, recursive: true, diff --git a/tests/bin/nodes/find.test.ts b/tests/bin/nodes/find.test.ts index 2c346576c..0a594caba 100644 --- a/tests/bin/nodes/find.test.ts +++ b/tests/bin/nodes/find.test.ts @@ -7,6 +7,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import * as nodesUtils from '@/nodes/utils'; import * as testBinUtils from '../utils'; +import * as testNodesUtils from '../../nodes/utils'; import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ @@ -20,6 +21,7 @@ describe('find', () => { const logger = new Logger('find test', LogLevel.WARN, [ new StreamHandler(), ]); + let rootDataDir: string; let dataDir: string; let nodePath: string; let passwordFile: string; @@ -42,6 +44,9 @@ describe('find', () => { } beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -56,22 +61,32 @@ describe('find', () => { keynodeId = polykeyAgent.nodeManager.getNodeId(); // Setting up a remote keynode - remoteOnline = await testUtils.setupRemoteKeynode({ + remoteOnline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOnline'), + keysConfig: { + rootKeyPairBits: 2048 + }, logger, }); remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); remoteOnlineHost = 
remoteOnline.revProxy.getIngressHost(); remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); - await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOnline); // Setting up an offline remote keynode - remoteOffline = await testUtils.setupRemoteKeynode({ + remoteOffline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOffline'), + keysConfig: { + rootKeyPairBits: 2048 + }, logger, }); remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); remoteOfflineHost = remoteOffline.revProxy.getIngressHost(); remoteOfflinePort = remoteOffline.revProxy.getIngressPort(); - await testUtils.addRemoteDetails(polykeyAgent, remoteOffline); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOffline); await remoteOffline.stop(); // Authorize session @@ -84,12 +99,16 @@ describe('find', () => { afterAll(async () => { await polykeyAgent.stop(); await polykeyAgent.destroy(); - await testUtils.cleanupRemoteKeynode(remoteOnline); - await testUtils.cleanupRemoteKeynode(remoteOffline); + await remoteOnline.stop(); + await remoteOffline.stop(); await fs.promises.rm(dataDir, { force: true, recursive: true, }); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); }); test('find an online node', async () => { diff --git a/tests/bin/nodes/ping.test.ts b/tests/bin/nodes/ping.test.ts index 1c094a1d2..08b00fc91 100644 --- a/tests/bin/nodes/ping.test.ts +++ b/tests/bin/nodes/ping.test.ts @@ -7,6 +7,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import * as nodesUtils from '@/nodes/utils'; import * as testBinUtils from '../utils'; +import * as testNodesUtils from '../../nodes/utils'; import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ @@ -20,6 +21,7 @@ describe('ping', () => { const logger = new Logger('ping test', LogLevel.WARN, [ new StreamHandler(), ]); + let rootDataDir: string; let dataDir: string; let nodePath: string; let passwordFile: string; @@ -56,22 +58,32 @@ describe('ping', () => { keynodeId = polykeyAgent.nodeManager.getNodeId(); // Setting up a remote keynode - remoteOnline = await testUtils.setupRemoteKeynode({ + remoteOnline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOnline'), + keysConfig: { + rootKeyPairBits: 2048 + }, logger, }); remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); - await testUtils.addRemoteDetails(polykeyAgent, remoteOnline); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOnline); // Setting up an offline remote keynode - remoteOffline = await testUtils.setupRemoteKeynode({ + remoteOffline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOffline'), + keysConfig: { + rootKeyPairBits: 2048 + }, logger, }); remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); remoteOfflineHost = remoteOffline.revProxy.getIngressHost(); remoteOfflinePort = remoteOffline.revProxy.getIngressPort(); - await testUtils.addRemoteDetails(polykeyAgent, remoteOffline); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOffline); await remoteOffline.stop(); // Authorize session @@ -84,12 +96,16 @@ describe('ping', () => { afterAll(async () => { await polykeyAgent.stop(); await polykeyAgent.destroy(); 
- await testUtils.cleanupRemoteKeynode(remoteOnline); - await testUtils.cleanupRemoteKeynode(remoteOffline); + await remoteOnline.stop(); + await remoteOffline.stop(); await fs.promises.rm(dataDir, { force: true, recursive: true, }); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); }); test( 'fail when pinging an offline node', diff --git a/tests/client/rpcNodes.test.ts b/tests/client/rpcNodes.test.ts index c0f9e33ac..28dea0a44 100644 --- a/tests/client/rpcNodes.test.ts +++ b/tests/client/rpcNodes.test.ts @@ -20,7 +20,7 @@ import { makeNodeId } from '@/nodes/utils'; import config from '@/config'; import { Status } from '@/status'; import * as testUtils from './utils'; -import * as testKeynodeUtils from '../utils'; +import * as testNodesUtils from '../nodes/utils'; import { sleep } from '@/utils'; jest.mock('@/keys/utils', () => ({ @@ -38,6 +38,7 @@ describe('Client service', () => { let server: grpc.Server; let port: number; + let rootDataDir: string; let dataDir: string; let pkAgent: PolykeyAgent; @@ -58,6 +59,10 @@ describe('Client service', () => { ); beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -97,9 +102,15 @@ describe('Client service', () => { client = await testUtils.openSimpleClientClient(port); - polykeyServer = await testKeynodeUtils.setupRemoteKeynode({ - logger: logger, + polykeyServer = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'polykeyServer'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, }); + await pkAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { gestalt: { notify: null, @@ -114,7 +125,7 @@ describe('Client service', () => { }); }, global.polykeyStartupTimeout); afterAll(async () => { - await testKeynodeUtils.cleanupRemoteKeynode(polykeyServer); + await polykeyServer.stop(); await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); @@ -122,6 +133,10 @@ describe('Client service', () => { await pkAgent.stop(); await pkAgent.destroy(); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); await fs.promises.rm(dataDir, { force: true, recursive: true, @@ -169,7 +184,7 @@ describe('Client service', () => { 'should ping a node (online + offline)', async () => { const serverNodeId = polykeyServer.nodeManager.getNodeId(); - await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); await polykeyServer.stop(); const statusPath = path.join(polykeyServer.nodePath, config.defaults.statusBase); const status = new Status({ @@ -193,7 +208,7 @@ describe('Client service', () => { await polykeyServer.start({ password: 'password' }); await status.waitFor('LIVE', 10000); // Update the details (changed because we started again) - await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); const res2 = await nodesPing(nodeMessage, callCredentials); expect(res2.getSuccess()).toEqual(true); // Case 3: pre-existing connection no longer active, so offline @@ -229,7 +244,7 @@ describe('Client service', () => { test( 'should find a node (contacts remote node)', async () => { - await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); // Case 2: node can be found on the remote node const 
nodeId = nodeId1; const nodeAddress: NodeAddress = { @@ -254,7 +269,7 @@ describe('Client service', () => { test( 'should fail to find a node (contacts remote node)', async () => { - await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); // Case 3: node exhausts all contacts and cannot find node const nodeId = nodeId1; // Add a single dummy node to the server node graph database @@ -278,7 +293,7 @@ describe('Client service', () => { global.failedConnectionTimeout * 2, ); test('should send a gestalt invite (no existing invitation)', async () => { - await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); // Node Claim Case 1: No invitations have been received const nodesClaim = grpcUtils.promisifyUnaryCall( client, @@ -292,8 +307,8 @@ describe('Client service', () => { expect(res.getSuccess()).not.toBeTruthy(); }); test('should send a gestalt invite (existing invitation)', async () => { - await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); - await testKeynodeUtils.addRemoteDetails(polykeyServer, pkAgent); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(polykeyServer, pkAgent); // Node Claim Case 2: Already received an invite; force invite await polykeyServer.notificationsManager.sendNotification( nodeManager.getNodeId(), @@ -325,8 +340,8 @@ describe('Client service', () => { }, vaults: {}, }); - await testKeynodeUtils.addRemoteDetails(pkAgent, polykeyServer); - await testKeynodeUtils.addRemoteDetails(polykeyServer, pkAgent); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(polykeyServer, pkAgent); // Node Claim Case 3: Already received an invite; claim node await polykeyServer.notificationsManager.sendNotification( nodeManager.getNodeId(), diff --git a/tests/client/rpcNotifications.test.ts b/tests/client/rpcNotifications.test.ts index 9fba16876..039f84fe3 100644 --- a/tests/client/rpcNotifications.test.ts +++ b/tests/client/rpcNotifications.test.ts @@ -16,7 +16,7 @@ import { ForwardProxy } from '@/network'; import * as grpcUtils from '@/grpc/utils'; import * as vaultsUtils from '@/vaults/utils'; import * as testUtils from './utils'; -import * as testKeynodeUtils from '../utils'; +import * as testNodesUtils from '../nodes/utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -101,12 +101,33 @@ describe('Notifications client service', () => { }); describe('Notifications RPC', () => { + let receiverDataDir: string; + let senderDataDir: string; let receiver: PolykeyAgent; let sender: PolykeyAgent; beforeAll(async () => { - receiver = await testKeynodeUtils.setupRemoteKeynode({ logger }); - sender = await testKeynodeUtils.setupRemoteKeynode({ logger }); - + receiverDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + senderDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + receiver = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: receiverDataDir, + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); + sender = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: senderDataDir, + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); await sender.nodeManager.setNode(node1.id, { host: pkAgent.revProxy.getIngressHost(), port: pkAgent.revProxy.getIngressPort(), @@ -125,8 +146,16 @@ 
describe('Notifications client service', () => { }); }, global.polykeyStartupTimeout * 2); afterAll(async () => { - await testKeynodeUtils.cleanupRemoteKeynode(receiver); - await testKeynodeUtils.cleanupRemoteKeynode(sender); + await sender.stop(); + await receiver.stop(); + await fs.promises.rm(senderDataDir, { + force: true, + recursive: true, + }); + await fs.promises.rm(receiverDataDir, { + force: true, + recursive: true, + }); }); afterEach(async () => { await receiver.notificationsManager.clearNotifications(); @@ -135,7 +164,7 @@ describe('Notifications client service', () => { }); test('should send notifications.', async () => { // Set up a remote node receiver and add its details to agent - await testKeynodeUtils.addRemoteDetails(pkAgent, receiver); + await testNodesUtils.nodesConnect(pkAgent, receiver); const notificationsSend = grpcUtils.promisifyUnaryCall( diff --git a/tests/client/service/agentStop.test.ts b/tests/client/service/agentStop.test.ts index d3f4dd9dc..ef0931e21 100644 --- a/tests/client/service/agentStop.test.ts +++ b/tests/client/service/agentStop.test.ts @@ -4,6 +4,7 @@ import fs from 'fs'; import path from 'path'; import os from 'os'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { running } from '@matrixai/async-init'; import { PolykeyAgent } from '@'; import { utils as keysUtils } from '@/keys'; import { GRPCServer } from '@/grpc'; @@ -100,13 +101,13 @@ describe('agentStop', () => { expect(response).toBeInstanceOf(utilsPB.EmptyMessage); // While the `agentStop` is asynchronous // There is a synchronous switch to `running` - expect(pkAgent.running).toBe(false); + expect(pkAgent[running]).toBe(false); // It may already be stopping expect(await status.readStatus()).toMatchObject({ status: expect.stringMatching(/LIVE|STOPPING|DEAD/) }); await status.waitFor('DEAD'); - expect(pkAgent.running).toBe(false); + expect(pkAgent[running]).toBe(false); }); test('stops the agent with token', async () => { const token = await pkAgent.sessionManager.createToken(); @@ -124,13 +125,13 @@ describe('agentStop', () => { expect(response).toBeInstanceOf(utilsPB.EmptyMessage); // While the `agentStop` is asynchronous // There is a synchronous switch to `running` - expect(pkAgent.running).toBe(false); + expect(pkAgent[running]).toBe(false); // It may already be stopping expect(await status.readStatus()).toMatchObject({ status: expect.stringMatching(/LIVE|STOPPING|DEAD/) }); await status.waitFor('DEAD'); - expect(pkAgent.running).toBe(false); + expect(pkAgent[running]).toBe(false); }); test('cannot stop the agent if not authenticated', async () => { const statusPath = path.join(nodePath, config.defaults.statusBase); @@ -145,21 +146,21 @@ describe('agentStop', () => { request, ); }).rejects.toThrow(clientErrors.ErrorClientAuthMissing); - expect(pkAgent.running).toBe(true); + expect(pkAgent[running]).toBe(true); await expect(async () => { await grpcClient.agentStop( request, clientUtils.encodeAuthFromPassword('wrong password') ); }).rejects.toThrow(clientErrors.ErrorClientAuthDenied); - expect(pkAgent.running).toBe(true); + expect(pkAgent[running]).toBe(true); await expect(async () => { await grpcClient.agentStop( request, clientUtils.encodeAuthFromSession('wrong token' as SessionToken) ); }).rejects.toThrow(clientErrors.ErrorClientAuthDenied); - expect(pkAgent.running).toBe(true); + expect(pkAgent[running]).toBe(true); expect(await status.readStatus()).toMatchObject({ status: 'LIVE' }); diff --git a/tests/discovery/Discovery.test.ts 
b/tests/discovery/Discovery.test.ts index a679d6add..7064e56e8 100644 --- a/tests/discovery/Discovery.test.ts +++ b/tests/discovery/Discovery.test.ts @@ -4,16 +4,12 @@ import fs from 'fs'; import path from 'path'; import os from 'os'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { Discovery } from '@/discovery'; -import PolykeyAgent from '@/PolykeyAgent'; -import * as discoveryErrors from '@/discovery/errors'; -import * as claimsUtils from '@/claims/utils'; +import { destroyed } from '@matrixai/async-init'; +import { PolykeyAgent } from '@'; +import { utils as claimsUtils } from '@/claims'; +import { Discovery, errors as discoveryErrors } from '@/discovery'; +import * as testNodesUtils from '../nodes/utils'; import TestProvider from '../identities/TestProvider'; -import { - addRemoteDetails, - cleanupRemoteKeynode, - setupRemoteKeynode, -} from '../utils'; // Mocks. jest.mock('@/keys/utils', () => ({ @@ -74,16 +70,17 @@ describe('Discovery', () => { 'Starts and stops', async () => { // Not started. - expect(discovery.destroyed).toBeFalsy(); + expect(discovery[destroyed]).toBeFalsy(); // Starting. await discovery.destroy(); - expect(discovery.destroyed).toBeTruthy(); + expect(discovery[destroyed]).toBeTruthy(); }, global.polykeyStartupTimeout, ); }); describe('Discovery process', () => { + let rootDataDir; // Nodes should form the chain A->B->C let nodeA: PolykeyAgent; let nodeB: PolykeyAgent; @@ -92,18 +89,41 @@ describe('Discovery', () => { let identityId: IdentityId; beforeAll(async () => { - // Setting up remote nodes. - nodeA = await setupRemoteKeynode({ logger }); - nodeB = await setupRemoteKeynode({ logger }); - nodeC = await setupRemoteKeynode({ logger }); - + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + // Setting up remote nodes + nodeA = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeA'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); + nodeB = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeB'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); + nodeC = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeC'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); // Forming links // A->B->C // Adding connection details. - await addRemoteDetails(nodeA, nodeB); - await addRemoteDetails(nodeB, nodeA); - await addRemoteDetails(nodeB, nodeC); - await addRemoteDetails(nodeC, nodeB); + await testNodesUtils.nodesConnect(nodeA, nodeB); + await testNodesUtils.nodesConnect(nodeB, nodeA); + await testNodesUtils.nodesConnect(nodeB, nodeC); + await testNodesUtils.nodesConnect(nodeC, nodeB); // Adding sigchain details. 
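The `agentStop` and Discovery test changes above also stop reading lifecycle state from plain properties (`pkAgent.running`, `discovery.destroyed`) and instead use the `running` and `destroyed` symbols exported by `@matrixai/async-init`, which is where the decorated classes now keep that state. A minimal sketch of the access pattern follows; the helper names are hypothetical and `expect` is Jest's global, as in the suites being modified.

```ts
import { running, destroyed } from '@matrixai/async-init';
import type { PolykeyAgent } from '@';
import type { Discovery } from '@/discovery';

// The switch away from "running" is synchronous even though the full agent
// shutdown is asynchronous, so this can be asserted immediately after
// requesting a stop.
function expectStopped(pkAgent: PolykeyAgent): void {
  expect(pkAgent[running]).toBe(false);
}

// Destruction is likewise observable through the exported symbol.
function expectDestroyed(discovery: Discovery): void {
  expect(discovery[destroyed]).toBeTruthy();
}
```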
const claimBtoC: ClaimLinkNode = { type: 'node', @@ -139,11 +159,15 @@ describe('Discovery', () => { await testProvider.publishClaim(identityId, claim); }, global.polykeyStartupTimeout * 3); afterAll(async () => { - await cleanupRemoteKeynode(nodeA); - await cleanupRemoteKeynode(nodeB); - await cleanupRemoteKeynode(nodeC); + await nodeC.stop(); + await nodeB.stop(); + await nodeA.stop(); testProvider.links = {}; testProvider.linkIdCounter = 0; + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); }); beforeEach(async () => { await nodeA.gestaltGraph.clearDB(); @@ -196,6 +220,7 @@ describe('Discovery', () => { }); }); describe('End-to-end discovery between two gestalts', () => { + let rootDataDir; // Gestalt 1 let nodeA: PolykeyAgent; let nodeB: PolykeyAgent; @@ -207,19 +232,50 @@ describe('Discovery', () => { let testProvider: TestProvider; beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); // Setting up remote nodes. - nodeA = await setupRemoteKeynode({ logger }); - nodeB = await setupRemoteKeynode({ logger }); - nodeC = await setupRemoteKeynode({ logger }); - nodeD = await setupRemoteKeynode({ logger }); + nodeA = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeA'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); + nodeB = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeB'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); + nodeC = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeC'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); + nodeD = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeD'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); // Adding connection details - await addRemoteDetails(nodeA, nodeB); - await addRemoteDetails(nodeA, nodeD); - await addRemoteDetails(nodeB, nodeA); - await addRemoteDetails(nodeC, nodeB); - await addRemoteDetails(nodeC, nodeD); - await addRemoteDetails(nodeD, nodeC); + await testNodesUtils.nodesConnect(nodeA, nodeB); + await testNodesUtils.nodesConnect(nodeA, nodeD); + await testNodesUtils.nodesConnect(nodeB, nodeA); + await testNodesUtils.nodesConnect(nodeC, nodeB); + await testNodesUtils.nodesConnect(nodeC, nodeD); + await testNodesUtils.nodesConnect(nodeD, nodeC); // Setting up identity provider testProvider = new TestProvider(); @@ -238,10 +294,14 @@ describe('Discovery', () => { identityIdB = (await gen2.next()).value as IdentityId; }, global.polykeyStartupTimeout * 4); afterAll(async () => { - await cleanupRemoteKeynode(nodeA); - await cleanupRemoteKeynode(nodeB); - await cleanupRemoteKeynode(nodeC); - await cleanupRemoteKeynode(nodeD); + await nodeD.stop(); + await nodeC.stop(); + await nodeB.stop(); + await nodeA.stop(); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); }); afterEach(async () => { await nodeA.gestaltGraph.clearDB(); diff --git a/tests/gestalts/GestaltGraph.test.ts b/tests/gestalts/GestaltGraph.test.ts index 74739e2e9..f14e6f6aa 100644 --- a/tests/gestalts/GestaltGraph.test.ts +++ b/tests/gestalts/GestaltGraph.test.ts @@ -22,7 +22,6 @@ import { } from '@/gestalts'; import { ACL } from '@/acl'; import * as keysUtils from '@/keys/utils'; -import { makeCrypto } from '../utils'; describe('GestaltGraph', () => { const logger = new 
Logger('GestaltGraph Test', LogLevel.WARN, [ @@ -49,7 +48,13 @@ describe('GestaltGraph', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(await keysUtils.generateKey()), + crypto: { + key: await keysUtils.generateKey(), + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + } }); acl = await ACL.createACL({ db, logger }); diff --git a/tests/global.d.ts b/tests/global.d.ts index 228cfb6fc..174edcc78 100644 --- a/tests/global.d.ts +++ b/tests/global.d.ts @@ -5,7 +5,6 @@ declare var projectDir: string; declare var testDir: string; declare var dataDir: string; -declare var password: string; declare var defaultTimeout: number; declare var polykeyStartupTimeout: number; declare var failedConnectionTimeout: number; diff --git a/tests/globalSetup.ts b/tests/globalSetup.ts index cfea609d8..fde412205 100644 --- a/tests/globalSetup.ts +++ b/tests/globalSetup.ts @@ -1,5 +1,4 @@ /* eslint-disable no-console */ -import fs from 'fs'; import process from 'process'; /** diff --git a/tests/identities/IdentitiesManager.test.ts b/tests/identities/IdentitiesManager.test.ts index cbc2aeb04..61c6ec73a 100644 --- a/tests/identities/IdentitiesManager.test.ts +++ b/tests/identities/IdentitiesManager.test.ts @@ -17,7 +17,6 @@ import { IdentitiesManager, providers } from '@/identities'; import * as identitiesErrors from '@/identities/errors'; import * as keysUtils from '@/keys/utils'; import TestProvider from './TestProvider'; -import { makeCrypto } from '../utils'; describe('IdentitiesManager', () => { const logger = new Logger('IdentitiesManager Test', LogLevel.WARN, [ @@ -33,7 +32,13 @@ describe('IdentitiesManager', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(await keysUtils.generateKey()), + crypto: { + key: await keysUtils.generateKey(), + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + } }); }); afterEach(async () => { diff --git a/tests/index.test.ts b/tests/index.test.ts index 952f089a8..b8be30718 100644 --- a/tests/index.test.ts +++ b/tests/index.test.ts @@ -1,33 +1,9 @@ -import fs from 'fs'; -import path from 'path'; -import os from 'os'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { PolykeyAgent } from '@'; - -// Mocks. 
-jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import * as polykey from '@'; describe('index', () => { - const logger = new Logger('index test', LogLevel.WARN, [new StreamHandler()]); - let dataDir; - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - }); - test('construction of Polykey', async () => { - const password = 'password'; - const pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir, - logger, - }); - expect(pk).toBeInstanceOf(PolykeyAgent); - await pk.stop(); - await pk.destroy(); + test('exports PolykeyAgent, PolykeyClient and errors', async () => { + expect('PolykeyAgent' in polykey).toBe(true); + expect('PolykeyClient' in polykey).toBe(true); + expect('errors' in polykey).toBe(true); }); }); diff --git a/tests/keys/KeyManager.test.ts b/tests/keys/KeyManager.test.ts index 248682135..7dafd65c4 100644 --- a/tests/keys/KeyManager.test.ts +++ b/tests/keys/KeyManager.test.ts @@ -11,7 +11,6 @@ import * as keysErrors from '@/keys/errors'; import * as workersUtils from '@/workers/utils'; import * as keysUtils from '@/keys/utils'; import { sleep } from '@/utils'; -import { makeCrypto } from '../utils'; describe('KeyManager', () => { const password = 'password'; @@ -305,7 +304,13 @@ describe('KeyManager', () => { const db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); const rootKeyPair1 = keyManager.getRootKeyPair(); const rootCert1 = keyManager.getRootCert(); @@ -348,7 +353,13 @@ describe('KeyManager', () => { const db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); const rootKeyPair1 = keyManager.getRootKeyPair(); const rootCert1 = keyManager.getRootCert(); diff --git a/tests/nodes/NodeConnection.test.ts b/tests/nodes/NodeConnection.test.ts index 3407119a0..02cca39c8 100644 --- a/tests/nodes/NodeConnection.test.ts +++ b/tests/nodes/NodeConnection.test.ts @@ -9,7 +9,7 @@ import { DB } from '@matrixai/db'; import { ForwardProxy, ReverseProxy } from '@/network'; import { NodeConnection, NodeManager } from '@/nodes'; import { VaultManager } from '@/vaults'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import GRPCServer from '@/grpc/GRPCServer'; import { AgentServiceService, createAgentService } from '@/agent'; import { ACL } from '@/acl'; @@ -23,7 +23,6 @@ import * as networkErrors from '@/network/errors'; import { makeNodeId } from '@/nodes/utils'; import { poll } from '@/utils'; import * as nodesTestUtils from './utils'; -import { makeCrypto } from '../utils'; // Mocks. 
jest.mock('@/keys/utils', () => ({ @@ -134,7 +133,13 @@ describe('NodeConnection', () => { dbPath: serverDbPath, fs: fs, logger: logger, - crypto: makeCrypto(serverKeyManager.dbKey), + crypto: { + key: serverKeyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + } }); serverACL = await ACL.createACL({ db: serverDb, @@ -402,8 +407,8 @@ describe('NodeConnection', () => { await conn.stop(); await serverRevProxy.closeConnection( - clientFwdProxy.egressHost, - clientFwdProxy.egressPort, + clientFwdProxy.getEgressHost(), + clientFwdProxy.getEgressPort(), ); await conn.destroy(); }); @@ -461,8 +466,8 @@ describe('NodeConnection', () => { await conn.stop(); await serverRevProxy.closeConnection( - clientFwdProxy.egressHost, - clientFwdProxy.egressPort, + clientFwdProxy.getEgressHost(), + clientFwdProxy.getEgressPort(), ); await conn.destroy(); }); diff --git a/tests/nodes/NodeGraph.test.ts b/tests/nodes/NodeGraph.test.ts index 71c8ff1de..9d97eecf9 100644 --- a/tests/nodes/NodeGraph.test.ts +++ b/tests/nodes/NodeGraph.test.ts @@ -7,13 +7,12 @@ import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; import { NodeManager, errors as nodesErrors } from '@/nodes'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import { ForwardProxy, ReverseProxy } from '@/network'; import * as nodesUtils from '@/nodes/utils'; import { Sigchain } from '@/sigchain'; import { makeNodeId } from '@/nodes/utils'; import * as nodesTestUtils from './utils'; -import { makeCrypto } from '../utils'; // Mocks. jest.mock('@/keys/utils', () => ({ @@ -130,7 +129,13 @@ describe('NodeGraph', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + } }); sigchain = await Sigchain.createSigchain({ keyManager: keyManager, diff --git a/tests/nodes/NodeManager.test.ts b/tests/nodes/NodeManager.test.ts index 060bb6fc0..fb1a3673e 100644 --- a/tests/nodes/NodeManager.test.ts +++ b/tests/nodes/NodeManager.test.ts @@ -2,24 +2,19 @@ import type { ClaimIdString } from '@/claims/types'; import type { CertificatePem, KeyPairPem, PublicKeyPem } from '@/keys/types'; import type { Host, Port } from '@/network/types'; import type { NodeId, NodeAddress } from '@/nodes/types'; -import type { PolykeyAgent } from '@'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; - +import { PolykeyAgent } from '@'; import { DB } from '@matrixai/db'; -import { KeyManager } from '@/keys'; -import { NodeManager } from '@/nodes'; +import { KeyManager, utils as keysUtils } from '@/keys'; +import { NodeManager, errors as nodesErrors } from '@/nodes'; import { ForwardProxy, ReverseProxy } from '@/network'; import { Sigchain } from '@/sigchain'; -import { sleep } from '@/utils'; -import * as nodesErrors from '@/nodes/errors'; -import * as claimsUtils from '@/claims/utils'; +import { utils as claimsUtils } from '@/claims'; import { makeNodeId } from '@/nodes/utils'; -import * as keysUtils from '@/keys/utils'; -import { makeCrypto } from '../utils'; -import * as testUtils from '../utils'; +import { sleep } from '@/utils'; // Mocks. 
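Another change repeated throughout these suites (GestaltGraph, IdentitiesManager, KeyManager, NodeConnection, NodeGraph, NodeManager, NotificationsManager, VaultManager) is that the removed `makeCrypto` helper is inlined wherever a test database is created: `DB.createDB` now receives the key and the encrypt/decrypt ops directly. A minimal sketch of that shape is below; the `createTestDB` wrapper is hypothetical, and suites that already have a `KeyManager` pass `keyManager.dbKey` instead of generating a fresh key.

```ts
import type Logger from '@matrixai/logger';
import { DB } from '@matrixai/db';
import * as keysUtils from '@/keys/utils';

async function createTestDB(dbPath: string, logger: Logger): Promise<DB> {
  return DB.createDB({
    dbPath,
    logger,
    crypto: {
      // A fresh symmetric key for the test database; most suites use
      // keyManager.dbKey here instead
      key: await keysUtils.generateKey(),
      ops: {
        encrypt: keysUtils.encryptWithKey,
        decrypt: keysUtils.decryptWithKey,
      },
    },
  });
}
```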
jest.mock('@/keys/utils', () => ({ @@ -98,7 +93,13 @@ describe('NodeManager', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); sigchain = await Sigchain.createSigchain({ keyManager, db, logger }); @@ -144,22 +145,39 @@ describe('NodeManager', () => { // await expect(nodeManager.writeToken()).rejects.toThrow(nodesErrors.ErrorNodeManagerNotRunning); }); describe('getConnectionToNode', () => { + let targetDataDir: string; let target: PolykeyAgent; let targetNodeId: NodeId; let targetNodeAddress: NodeAddress; beforeAll(async () => { - target = await testUtils.setupRemoteKeynode({ - logger: logger, + targetDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + target = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: targetDataDir, + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, }); }, global.polykeyStartupTimeout); + afterAll(async () => { + await target.stop(); + await fs.promises.rm(targetDataDir, { + force: true, + recursive: true, + }); + }); + beforeEach(async () => { await target.start({ password: 'password' }); targetNodeId = target.keyManager.getNodeId(); targetNodeAddress = { - host: target.revProxy.ingressHost, - port: target.revProxy.ingressPort, + host: target.revProxy.getIngressHost(), + port: target.revProxy.getIngressPort(), }; await nodeManager.setNode(targetNodeId, targetNodeAddress); }); @@ -169,10 +187,6 @@ describe('NodeManager', () => { await target.stop(); }); - afterAll(async () => { - await testUtils.cleanupRemoteKeynode(target); - }); - test('creates new connection to node', async () => { // @ts-ignore get connection + lock from protected NodeConnectionMap const initialConnLock = nodeManager.connections.get(targetNodeId); @@ -254,13 +268,18 @@ describe('NodeManager', () => { test( 'pings node', async () => { - const server = await testUtils.setupRemoteKeynode({ + const server = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(dataDir, 'server'), + keysConfig: { + rootKeyPairBits: 2048 + }, logger: logger, }); const serverNodeId = server.nodeManager.getNodeId(); let serverNodeAddress: NodeAddress = { - host: server.revProxy.ingressHost, - port: server.revProxy.ingressPort, + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), }; await nodeManager.setNode(serverNodeId, serverNodeAddress); @@ -274,8 +293,8 @@ describe('NodeManager', () => { await server.start({ password: 'password' }); // Update the node address (only changes because we start and stop) serverNodeAddress = { - host: server.revProxy.ingressHost, - port: server.revProxy.ingressPort, + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), }; await nodeManager.setNode(serverNodeId, serverNodeAddress); // Check if active @@ -292,8 +311,6 @@ describe('NodeManager', () => { // Case 3: pre-existing connection no longer active, so offline const active3 = await nodeManager.pingNode(serverNodeId); expect(active3).toBe(false); - - await testUtils.cleanupRemoteKeynode(server); }, global.failedConnectionTimeout * 2, ); // Ping needs to timeout (takes 20 seconds + setup + pulldown) @@ -319,16 +336,26 @@ describe('NodeManager', () => { host: '127.0.0.1' as Host, port: 11111 as Port, }; - const server = await testUtils.setupRemoteKeynode({ logger: logger }); + + const server = await 
PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(dataDir, 'server'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger: logger, + }); + + await nodeManager.setNode(server.nodeManager.getNodeId(), { - host: server.revProxy.ingressHost, - port: server.revProxy.ingressPort, + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), } as NodeAddress); await server.nodeManager.setNode(nodeId, nodeAddress); const foundAddress2 = await nodeManager.findNode(nodeId); expect(foundAddress2).toStrictEqual(nodeAddress); - await testUtils.cleanupRemoteKeynode(server); + await server.stop(); }, global.polykeyStartupTimeout, ); @@ -337,10 +364,17 @@ describe('NodeManager', () => { async () => { // Case 3: node exhausts all contacts and cannot find node const nodeId = nodeId1; - const server = await testUtils.setupRemoteKeynode({ logger: logger }); + const server = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(dataDir, 'server'), + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, + }); await nodeManager.setNode(server.nodeManager.getNodeId(), { - host: server.revProxy.ingressHost, - port: server.revProxy.ingressPort, + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), } as NodeAddress); // Add a dummy node to the server node graph database // Server will not be able to connect to this node (the only node in its @@ -353,8 +387,7 @@ describe('NodeManager', () => { await expect(() => nodeManager.findNode(nodeId)).rejects.toThrowError( nodesErrors.ErrorNodeGraphNodeNotFound, ); - - await testUtils.cleanupRemoteKeynode(server); + await server.stop(); }, global.failedConnectionTimeout * 2, ); @@ -380,34 +413,53 @@ describe('NodeManager', () => { // We're unable to mock the actions of the server, but we can ensure the // state on each side is as expected. 
+ let xDataDir: string; let x: PolykeyAgent; let xNodeId: NodeId; let xNodeAddress: NodeAddress; let xPublicKey: PublicKeyPem; + let yDataDir: string; let y: PolykeyAgent; let yNodeId: NodeId; let yNodeAddress: NodeAddress; let yPublicKey: PublicKeyPem; beforeAll(async () => { - x = await testUtils.setupRemoteKeynode({ - logger: logger, + xDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + x = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: xDataDir, + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, }); + xNodeId = x.nodeManager.getNodeId(); xNodeAddress = { - host: x.revProxy.ingressHost, - port: x.revProxy.ingressPort, + host: x.revProxy.getIngressHost(), + port: x.revProxy.getIngressPort(), }; xPublicKey = x.keyManager.getRootKeyPairPem().publicKey; - y = await testUtils.setupRemoteKeynode({ - logger: logger, + yDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-') + ); + y = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: xDataDir, + keysConfig: { + rootKeyPairBits: 2048 + }, + logger, }); yNodeId = y.nodeManager.getNodeId(); yNodeAddress = { - host: y.revProxy.ingressHost, - port: y.revProxy.ingressPort, + host: y.revProxy.getIngressHost(), + port: y.revProxy.getIngressPort(), }; yPublicKey = y.keyManager.getRootKeyPairPem().publicKey; @@ -415,8 +467,16 @@ describe('NodeManager', () => { await y.nodeManager.setNode(xNodeId, xNodeAddress); }, global.polykeyStartupTimeout * 2); afterAll(async () => { - await testUtils.cleanupRemoteKeynode(x); - await testUtils.cleanupRemoteKeynode(y); + await y.stop(); + await x.stop(); + await fs.promises.rm(yDataDir, { + force: true, + recursive: true, + }); + await fs.promises.rm(xDataDir, { + force: true, + recursive: true, + }); }); // Make sure to remove any side-effects after each test diff --git a/tests/nodes/utils.ts b/tests/nodes/utils.ts index da824bae8..ff79be3d0 100644 --- a/tests/nodes/utils.ts +++ b/tests/nodes/utils.ts @@ -1,5 +1,6 @@ -import type { NodeId } from '@/nodes/types'; +import type { NodeId, NodeAddress } from '@/nodes/types'; +import { PolykeyAgent } from '@'; import * as nodesUtils from '@/nodes/utils'; import { makeNodeId } from '@/nodes/utils'; import { fromMultibase } from '@/GenericIdTypes'; @@ -82,4 +83,18 @@ function bigIntToBuffer(number: BigInt) { return u8; } -export { generateNodeIdForBucket, incrementNodeId }; +async function nodesConnect( + localNode: PolykeyAgent, + remoteNode: PolykeyAgent, +) { + // Add remote node's details to local node + await localNode.nodeManager.setNode( + remoteNode.nodeManager.getNodeId(), + { + host: remoteNode.revProxy.getIngressHost(), + port: remoteNode.revProxy.getIngressPort(), + } as NodeAddress + ); +} + +export { generateNodeIdForBucket, incrementNodeId, nodesConnect }; diff --git a/tests/notifications/NotificationsManager.test.ts b/tests/notifications/NotificationsManager.test.ts index 4077082a6..258b41319 100644 --- a/tests/notifications/NotificationsManager.test.ts +++ b/tests/notifications/NotificationsManager.test.ts @@ -12,7 +12,7 @@ import { DB } from '@matrixai/db'; import { ACL } from '@/acl'; import { Sigchain } from '@/sigchain'; import { GRPCServer } from '@/grpc'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import { VaultManager } from '@/vaults'; import { GestaltGraph } from '@/gestalts'; import { NodeManager } from '@/nodes'; @@ -22,7 +22,6 @@ import { AgentServiceService, createAgentService 
} from '@/agent'; import * as networkUtils from '@/network/utils'; import { generateVaultId } from '@/vaults/utils'; -import { makeCrypto } from '../utils'; // Mocks. jest.mock('@/keys/utils', () => ({ @@ -130,7 +129,13 @@ describe('NotificationsManager', () => { dbPath: receiverDbPath, fs: fs, logger: logger, - crypto: makeCrypto(receiverKeyManager.dbKey), + crypto: { + key: receiverKeyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); receiverACL = await ACL.createACL({ db: receiverDb, @@ -222,7 +227,13 @@ describe('NotificationsManager', () => { dbPath: senderDbPath, fs, logger, - crypto: makeCrypto(senderKeyManager.dbKey), + crypto: { + key: senderKeyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); senderACL = await ACL.createACL({ db: senderDb, logger }); senderSigchain = await Sigchain.createSigchain({ diff --git a/tests/sessions/Session.test.ts b/tests/sessions/Session.test.ts index b853eba35..f79456b6a 100644 --- a/tests/sessions/Session.test.ts +++ b/tests/sessions/Session.test.ts @@ -37,12 +37,6 @@ describe('Session', () => { await expect(session.start()).rejects.toThrow( sessionErrors.ErrorSessionDestroyed, ); - await expect(session.readToken()).rejects.toThrow( - sessionErrors.ErrorSessionNotRunning, - ); - await expect(session.writeToken('abc' as SessionToken)).rejects.toThrow( - sessionErrors.ErrorSessionNotRunning, - ); }); test('creating session', async () => { const session1 = await Session.createSession({ diff --git a/tests/sessions/SessionManager.test.ts b/tests/sessions/SessionManager.test.ts index d8688da85..31461996b 100644 --- a/tests/sessions/SessionManager.test.ts +++ b/tests/sessions/SessionManager.test.ts @@ -3,10 +3,8 @@ import os from 'os'; import path from 'path'; import { DB } from '@matrixai/db'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { KeyManager } from '@/keys'; -import SessionManager from '@/sessions/SessionManager'; -import * as sessionsErrors from '@/sessions/errors'; -import * as keysUtils from '@/keys/utils'; +import { KeyManager, utils as keysUtils } from '@/keys'; +import { SessionManager, errors as sessionsErrors } from '@/sessions'; import { sleep } from '@/utils'; import * as testUtils from '../utils'; diff --git a/tests/utils.ts b/tests/utils.ts index 5b4742ee9..6d4ca7dca 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -1,6 +1,4 @@ -import type { NodeAddress } from '@/nodes/types'; import type { StatusLive } from '@/status/types'; -import os from 'os'; import path from 'path'; import fs from 'fs'; import lock from 'fd-lock'; @@ -20,7 +18,7 @@ import config from '@/config'; async function setupGlobalKeypair() { const globalKeyPairDir = path.join(globalThis.dataDir, 'keypair'); const globalKeyPairLock = await fs.promises.open( - path.join(global.dataDir, 'keypair.lock'), + path.join(globalThis.dataDir, 'keypair.lock'), fs.constants.O_WRONLY | fs.constants.O_CREAT, ); while (!lock(globalKeyPairLock.fd)) { @@ -72,7 +70,7 @@ async function setupGlobalKeypair() { * Uses a references directory as a reference count * Uses fd-lock to serialise access * This means all test modules using this will be serialised - * Any beforeAll must use global.maxTimeout + * Any beforeAll must use globalThis.maxTimeout * Tips for usage: * * Do not restart this global agent * * Ensure client-side side-effects are removed at the end of each test @@ -93,7 +91,7 @@ async function setupGlobalAgent( // Plus 1 to the 
reference count await fs.promises.writeFile(path.join(globalAgentDir, 'references', pid), ''); const globalAgentLock = await fs.promises.open( - path.join(global.dataDir, 'agent.lock'), + path.join(globalThis.dataDir, 'agent.lock'), fs.constants.O_WRONLY | fs.constants.O_CREAT, ); while (!lock(globalAgentLock.fd)) { @@ -112,7 +110,7 @@ async function setupGlobalAgent( password: globalAgentPassword, nodePath: globalAgentDir, keysConfig: { - rootKeyPairBits: 1024 + rootKeyPairBits: 2048 }, seedNodes: {}, // explicitly no seed nodes on startup logger, @@ -172,76 +170,7 @@ async function setupGlobalAgent( }; } -function makeCrypto(dbKey: Buffer) { - return { - key: dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }; -} - -/** - * Helper function to create a remote keynode to contact. - * It will append a directory to options.baseDir or create it's own temp directory if not specified. - * For multiple nodes, specify a unique number. - */ -async function setupRemoteKeynode({ - logger, - dataDir, -}: { - logger: Logger; - dataDir?: string; -}): Promise { - // Create and start the keynode + its temp directory - let nodeDir: string; - if (dataDir) { - // Add the directory. - nodeDir = path.join(dataDir, `remoteNode`); - await fs.promises.mkdir(nodeDir, { recursive: true }); - } else { - nodeDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-remote-'), - ); - } - const agent = await PolykeyAgent.createPolykeyAgent({ - password: 'password', - nodePath: nodeDir, - logger: logger, - }); - return agent; -} - -/** - * Helper function to stop a created remote keynode, and remove its temporary - * directory. - */ -async function cleanupRemoteKeynode(node: PolykeyAgent): Promise { - await node.stop(); - await node.destroy(); - await fs.promises.rm(node.nodePath, { - force: true, - recursive: true, - }); -} - -async function addRemoteDetails( - localNode: PolykeyAgent, - remoteNode: PolykeyAgent, -) { - // Add remote node's details to local node - await localNode.nodeManager.setNode(remoteNode.nodeManager.getNodeId(), { - host: remoteNode.revProxy.getIngressHost(), - port: remoteNode.revProxy.getIngressPort(), - } as NodeAddress); -} - export { setupGlobalKeypair, setupGlobalAgent, - makeCrypto, - setupRemoteKeynode, - cleanupRemoteKeynode, - addRemoteDetails, }; diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index f7db6f8a1..c2ed4ecf9 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -10,7 +10,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; import { utils as idUtils } from '@matrixai/id'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import { NodeManager } from '@/nodes'; import { Sigchain } from '@/sigchain'; import { VaultManager, vaultOps } from '@/vaults'; @@ -24,7 +24,6 @@ import { NotificationsManager } from '@/notifications'; import { errors as vaultErrors } from '@/vaults'; import { utils as vaultUtils } from '@/vaults'; import { makeVaultId } from '@/vaults/utils'; -import { makeCrypto } from '../utils'; // Mocks. 
 jest.mock('@/keys/utils', () => ({
@@ -107,7 +106,13 @@ describe('VaultManager', () => {
     db = await DB.createDB({
       dbPath: dbPath,
       logger: logger,
-      crypto: makeCrypto(keyManager.dbKey),
+      crypto: {
+        key: keyManager.dbKey,
+        ops: {
+          encrypt: keysUtils.encryptWithKey,
+          decrypt: keysUtils.decryptWithKey,
+        },
+      },
     });

     sigchain = await Sigchain.createSigchain({
@@ -590,7 +595,13 @@ describe('VaultManager', () => {
       targetDb = await DB.createDB({
         dbPath: path.join(targetDataDir, 'db'),
         logger: logger,
-        crypto: makeCrypto(keyManager.dbKey),
+        crypto: {
+          key: keyManager.dbKey,
+          ops: {
+            encrypt: keysUtils.encryptWithKey,
+            decrypt: keysUtils.decryptWithKey,
+          },
+        },
       });
       targetSigchain = await Sigchain.createSigchain({
         keyManager: targetKeyManager,
@@ -681,7 +692,13 @@ describe('VaultManager', () => {
       altDb = await DB.createDB({
         dbPath: path.join(altDataDir, 'db'),
         logger: logger,
-        crypto: makeCrypto(keyManager.dbKey),
+        crypto: {
+          key: keyManager.dbKey,
+          ops: {
+            encrypt: keysUtils.encryptWithKey,
+            decrypt: keysUtils.decryptWithKey,
+          },
+        },
       });
       altSigchain = await Sigchain.createSigchain({
         keyManager: altKeyManager,
@@ -763,10 +780,10 @@ describe('VaultManager', () => {
     await revProxy.closeConnection(altHost, altPort);
     await revProxy.closeConnection(sourceHost, sourcePort);
     await altRevProxy.closeConnection(sourceHost, sourcePort);
-    await fwdProxy.closeConnection(fwdProxy.egressHost, fwdProxy.egressPort);
+    await fwdProxy.closeConnection(fwdProxy.getEgressHost(), fwdProxy.getEgressPort());
     await altFwdProxy.closeConnection(
-      altFwdProxy.egressHost,
-      altFwdProxy.egressPort,
+      altFwdProxy.getEgressHost(),
+      altFwdProxy.getEgressPort(),
     );
     await revProxy.stop();
     await altRevProxy.stop();

From fcc261af1cef4cc81d76852879c4fb7f00d62a7f Mon Sep 17 00:00:00 2001
From: Roger Qiu
Date: Wed, 5 Jan 2022 14:34:21 +1100
Subject: [PATCH 18/28] Added section on packaging cross-platform executables

---
 README.md | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 435046cbe..04e68a90f 100644
--- a/README.md
+++ b/README.md
@@ -67,7 +67,7 @@ Once you update the `src/proto/schemas` files, run this to update the `src/proto
 npm run proto-generate
 ```

-### Executing Commands
+### Calling Commands

 When calling commands in development, use this style:

@@ -85,7 +85,7 @@ npm run docs

 See the docs at: https://matrixai.github.io/Polykey/

-### Publishing
+### Publishing to NPM

 ```sh
 # npm login
 npm version patch # or minor or major
 npm run build
 git push
 git push --tags
 ```
+### Packaging Cross-Platform Executables
+
+We use `pkg` to package the source code into executables.
+
+This requires a specific version of `pkg` and also `node-gyp-build`.
+
+Configuration for `pkg` is done in:
+
+* `package.json` - Pins `pkg` and `node-gyp-build`, and configures assets and scripts.
+* `utils.nix` - Pins `pkg` for Nix usage +* `release.nix` - Build expressions for executables + ## Deployment ### Deploying to AWS ECS: From 49fcafdd727c3db7196e257851be593cd609d40e Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Wed, 5 Jan 2022 14:51:03 +1100 Subject: [PATCH 19/28] Lintfixed --- src/agent/service/index.ts | 18 +- src/agent/service/nodesChainDataGet.ts | 10 +- .../service/nodesClosestLocalNodesGet.ts | 4 +- src/agent/service/nodesCrossSignClaim.ts | 10 +- .../service/nodesHolePunchMessageSend.ts | 9 +- src/agent/service/vaultsGitInfoGet.ts | 8 +- src/agent/service/vaultsGitPackGet.ts | 6 +- src/agent/service/vaultsPermissionsCheck.ts | 2 +- src/agent/service/vaultsScan.ts | 4 +- src/bin/agent/CommandStart.ts | 4 +- src/bin/utils/options.ts | 2 +- src/client/index.ts | 5 +- src/client/service/agentLockAll.ts | 2 +- src/client/service/agentStatus.ts | 2 +- src/client/service/agentStop.ts | 2 +- src/client/service/agentUnlock.ts | 6 +- .../service/gestaltsActionsGetByIdentity.ts | 2 +- .../service/gestaltsActionsGetByNode.ts | 2 +- .../service/gestaltsActionsSetByIdentity.ts | 2 +- .../service/gestaltsActionsSetByNode.ts | 2 +- .../service/gestaltsActionsUnsetByIdentity.ts | 2 +- .../service/gestaltsActionsUnsetByNode.ts | 2 +- .../service/gestaltsDiscoveryByIdentity.ts | 2 +- src/client/service/gestaltsDiscoveryByNode.ts | 2 +- .../service/gestaltsGestaltGetByIdentity.ts | 2 +- .../service/gestaltsGestaltGetByNode.ts | 2 +- src/client/service/gestaltsGestaltList.ts | 2 +- src/client/service/identitiesClaim.ts | 5 +- .../service/identitiesInfoGetConnected.ts | 5 +- src/client/service/identitiesProvidersList.ts | 3 +- src/client/service/identitiesTokenDelete.ts | 4 +- src/client/service/identitiesTokenGet.ts | 2 +- src/client/service/identitiesTokenPut.ts | 3 +- src/client/service/index.ts | 59 +- src/client/service/keysCertsChainGet.ts | 2 +- src/client/service/keysCertsGet.ts | 2 +- src/client/service/keysKeyPairRenew.ts | 2 +- src/client/service/keysKeyPairReset.ts | 2 +- src/client/service/keysKeyPairRoot.ts | 2 +- src/client/service/keysVerify.ts | 2 +- src/client/service/nodesAdd.ts | 6 +- src/client/service/nodesClaim.ts | 6 +- src/client/service/nodesFind.ts | 5 +- src/client/service/nodesPing.ts | 4 +- src/client/service/notificationsClear.ts | 2 +- src/client/service/notificationsRead.ts | 2 +- src/client/service/notificationsSend.ts | 4 +- src/client/service/vaultsClone.ts | 8 +- src/client/service/vaultsCreate.ts | 7 +- src/client/service/vaultsDelete.ts | 11 +- src/client/service/vaultsList.ts | 7 +- src/client/service/vaultsLog.ts | 3 - src/client/service/vaultsPermissions.ts | 8 +- src/client/service/vaultsPermissionsSet.ts | 2 +- src/client/service/vaultsPermissionsUnset.ts | 2 +- src/client/service/vaultsPull.ts | 8 +- src/client/service/vaultsRename.ts | 7 +- src/client/service/vaultsScan.ts | 2 +- src/client/service/vaultsSecretsDelete.ts | 7 +- src/client/service/vaultsSecretsEdit.ts | 7 +- src/client/service/vaultsSecretsGet.ts | 7 +- src/client/service/vaultsSecretsList.ts | 14 +- src/client/service/vaultsSecretsMkdir.ts | 7 +- src/client/service/vaultsSecretsNew.ts | 7 +- src/client/service/vaultsSecretsNewDir.ts | 9 +- src/client/service/vaultsSecretsRename.ts | 7 +- src/client/service/vaultsSecretsStat.ts | 9 +- src/client/types.ts | 4 +- src/config.ts | 4 +- src/schema/Schema.ts | 5 +- src/sessions/Session.ts | 5 +- src/sigchain/Sigchain.ts | 4 +- tests/PolykeyAgent.test.ts | 66 +- tests/acl/ACL.test.ts | 2 +- tests/agent/GRPCClientAgent.test.ts | 2 
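Returning to the packaging section added to the README above: it names where `pkg` is configured but not how a build is invoked. Purely as an illustration (the Node version in the target triples, the output paths, and the idea of driving `pkg` from a script are assumptions, not the repository's actual release setup, which goes through `release.nix`), a cross-platform build could be driven like this:

```ts
import { execFileSync } from 'child_process';

// Hypothetical build script: invoke the pinned pkg CLI once per platform.
// pkg bundles a Node runtime with the compiled JS; native addons are the
// part that needs node-gyp-build support.
const targets = ['node16-linux-x64', 'node16-win-x64', 'node16-macos-x64'];

for (const target of targets) {
  execFileSync(
    'npx',
    ['pkg', '.', '--targets', target, '--output', `builds/polykey-${target}`],
    { stdio: 'inherit' },
  );
}
```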
+-
 tests/bin/agent/lock.test.ts               |   12 +-
 tests/bin/agent/lockall.test.ts            |   17 +-
 tests/bin/agent/start.test.ts              |  145 +-
 tests/bin/agent/status.test.ts             |   17 +-
 tests/bin/agent/unlock.test.ts             |   16 +-
 tests/bin/identities/identities.test.ts    |   12 +-
 tests/bin/keys/keys.test.ts                |    6 +-
 tests/bin/nodes/add.test.ts                |    6 +-
 tests/bin/nodes/claim.test.ts              |   13 +-
 tests/bin/nodes/find.test.ts               |   15 +-
 tests/bin/nodes/ping.test.ts               |   21 +-
 tests/bin/sessions.test.ts                 |   12 +-
 tests/bin/utils.ts                         |    4 -
 tests/bootstrap/utils.test.ts              |   44 +-
 tests/client/rpcNodes.test.ts              |    9 +-
 tests/client/rpcNotifications.test.ts      |    7 +-
 tests/client/service/agentStop.test.ts     |   32 +-
 tests/discovery/Discovery.test.ts          |   20 +-
 tests/gestalts/GestaltGraph.test.ts        |    2 +-
 tests/global.d.ts                          |    1 +
 tests/identities/IdentitiesManager.test.ts |    2 +-
 tests/network/ForwardProxy.test.ts         | 1294 +++++++++--------
 tests/network/ReverseProxy.test.ts         |   90 +-
 tests/network/index.test.ts                |    2 +-
 tests/nodes/NodeConnection.test.ts         |    2 +-
 tests/nodes/NodeGraph.test.ts              |    2 +-
 tests/nodes/NodeManager.test.ts            |   17 +-
 tests/nodes/utils.ts                       |   18 +-
 tests/sigchain/Sigchain.test.ts            |    2 +-
 tests/utils.ts                             |   35 +-
 tests/vaults/VaultManager.test.ts          |    5 +-
 106 files changed, 1130 insertions(+), 1219 deletions(-)

diff --git a/src/agent/service/index.ts b/src/agent/service/index.ts
index dbe5b3826..43af3c005 100644
--- a/src/agent/service/index.ts
+++ b/src/agent/service/index.ts
@@ -17,17 +17,15 @@ import vaultsPermissionsCheck from './vaultsPermissionsCheck';
 import vaultsScan from './vaultsScan';
 import { AgentServiceService } from '../../proto/js/polykey/v1/agent_service_grpc_pb';
 
-function createService (
-  container: {
-    keyManager: KeyManager;
-    vaultManager: VaultManager;
-    nodeManager: NodeManager;
-    notificationsManager: NotificationsManager;
-    sigchain: Sigchain;
-  }
-) {
+function createService(container: {
+  keyManager: KeyManager;
+  vaultManager: VaultManager;
+  nodeManager: NodeManager;
+  notificationsManager: NotificationsManager;
+  sigchain: Sigchain;
+}) {
   const container_ = {
-    ...container
+    ...container,
   };
   const service: IAgentServiceServer = {
     echo: echo(container_),
diff --git a/src/agent/service/nodesChainDataGet.ts b/src/agent/service/nodesChainDataGet.ts
index d161f9a01..0f4c201c7 100644
--- a/src/agent/service/nodesChainDataGet.ts
+++ b/src/agent/service/nodesChainDataGet.ts
@@ -1,18 +1,14 @@
 import type * as grpc from '@grpc/grpc-js';
-import type { ClaimIdString, } from '../../claims/types';
+import type { ClaimIdString } from '../../claims/types';
 import type { NodeManager } from '../../nodes';
+import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 import { utils as grpcUtils } from '../../grpc';
 import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
-import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 
 /**
  * Retrieves the ChainDataEncoded of this node.
  */
-function nodesChainDataGet({
-  nodeManager,
-}: {
-  nodeManager: NodeManager;
-}) {
+function nodesChainDataGet({ nodeManager }: { nodeManager: NodeManager }) {
   return async (
     call: grpc.ServerUnaryCall,
     callback: grpc.sendUnaryData,
diff --git a/src/agent/service/nodesClosestLocalNodesGet.ts b/src/agent/service/nodesClosestLocalNodesGet.ts
index 06bbde0d1..5ef5fc071 100644
--- a/src/agent/service/nodesClosestLocalNodesGet.ts
+++ b/src/agent/service/nodesClosestLocalNodesGet.ts
@@ -21,9 +21,7 @@ function nodesClosestLocalNodesGet({
     try {
       const targetNodeId = nodesUtils.makeNodeId(call.request.getNodeId());
       // Get all local nodes that are closest to the target node from the request
-      const closestNodes = await nodeManager.getClosestLocalNodes(
-        targetNodeId,
-      );
+      const closestNodes = await nodeManager.getClosestLocalNodes(targetNodeId);
       for (const node of closestNodes) {
         const addressMessage = new nodesPB.Address();
         addressMessage.setHost(node.address.host);
diff --git a/src/agent/service/nodesCrossSignClaim.ts b/src/agent/service/nodesCrossSignClaim.ts
index 81ade4628..55857e140 100644
--- a/src/agent/service/nodesCrossSignClaim.ts
+++ b/src/agent/service/nodesCrossSignClaim.ts
@@ -1,14 +1,11 @@
 import type * as grpc from '@grpc/grpc-js';
-import type {
-  ClaimEncoded,
-  ClaimIntermediary,
-} from '../../claims/types';
+import type { ClaimEncoded, ClaimIntermediary } from '../../claims/types';
 import type { NodeManager } from '../../nodes';
 import type { Sigchain } from '../../sigchain';
 import type { KeyManager } from '../../keys';
+import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
 import { utils as grpcUtils } from '../../grpc';
 import { utils as claimsUtils, errors as claimsErrors } from '../../claims';
-import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
 
 function nodesCrossSignClaim({
   keyManager,
@@ -33,8 +30,7 @@ function nodesCrossSignClaim({
         throw new claimsErrors.ErrorEmptyStream();
       }
       const receivedMessage = readStatus.value;
-      const intermediaryClaimMessage =
-        receivedMessage.getSinglySignedClaim();
+      const intermediaryClaimMessage = receivedMessage.getSinglySignedClaim();
       if (!intermediaryClaimMessage) {
         throw new claimsErrors.ErrorUndefinedSinglySignedClaim();
       }
diff --git a/src/agent/service/nodesHolePunchMessageSend.ts b/src/agent/service/nodesHolePunchMessageSend.ts
index 3aea995ad..d1a1ea8aa 100644
--- a/src/agent/service/nodesHolePunchMessageSend.ts
+++ b/src/agent/service/nodesHolePunchMessageSend.ts
@@ -1,9 +1,9 @@
 import type * as grpc from '@grpc/grpc-js';
 import type { NodeManager } from '../../nodes';
+import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
 import { utils as networkUtils } from '../../network';
 import { utils as grpcUtils } from '../../grpc';
 import { utils as nodesUtils } from '../../nodes';
-import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
 import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 
 function nodesHolePunchMessageSend({
@@ -21,7 +21,8 @@ function nodesHolePunchMessageSend({
       // If so, then we want to make this node start sending hole punching packets
       // back to the source node.
       if (
-        nodeManager.getNodeId() === nodesUtils.makeNodeId(call.request.getTargetId())
+        nodeManager.getNodeId() ===
+        nodesUtils.makeNodeId(call.request.getTargetId())
       ) {
         const [host, port] = networkUtils.parseAddress(
           call.request.getEgressAddress(),
@@ -30,7 +31,9 @@
         // Otherwise, find if node in table
         // If so, ask the nodeManager to relay to the node
       } else if (
-        await nodeManager.knowsNode(nodesUtils.makeNodeId(call.request.getSrcId()))
+        await nodeManager.knowsNode(
+          nodesUtils.makeNodeId(call.request.getSrcId()),
+        )
       ) {
         await nodeManager.relayHolePunchMessage(call.request);
       }
diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts
index cca0e063d..8ee13efed 100644
--- a/src/agent/service/vaultsGitInfoGet.ts
+++ b/src/agent/service/vaultsGitInfoGet.ts
@@ -2,15 +2,11 @@ import type { VaultName } from '../../vaults/types';
 import type { VaultManager } from '../../vaults';
 import * as grpc from '@grpc/grpc-js';
 import { utils as idUtils } from '@matrixai/id';
-import { utils as grpcUtils, errors as grpcErrors } from '../../grpc';
+import { utils as grpcUtils } from '../../grpc';
 import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults';
 import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
 
-function vaultsGitInfoGet({
-  vaultManager,
-}: {
-  vaultManager: VaultManager;
-}) {
+function vaultsGitInfoGet({ vaultManager }: { vaultManager: VaultManager }) {
   return async (
     call: grpc.ServerWritableStream,
   ): Promise => {
diff --git a/src/agent/service/vaultsGitPackGet.ts b/src/agent/service/vaultsGitPackGet.ts
index 6da05bccb..8590fcd29 100644
--- a/src/agent/service/vaultsGitPackGet.ts
+++ b/src/agent/service/vaultsGitPackGet.ts
@@ -6,11 +6,7 @@ import { errors as grpcErrors } from '../../grpc';
 import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults';
 import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
 
-function vaultsGitPackGet({
-  vaultManager,
-}: {
-  vaultManager: VaultManager;
-}) {
+function vaultsGitPackGet({ vaultManager }: { vaultManager: VaultManager }) {
   return async (
     call: grpc.ServerDuplexStream,
   ) => {
diff --git a/src/agent/service/vaultsPermissionsCheck.ts b/src/agent/service/vaultsPermissionsCheck.ts
index 4b47d8648..8b3046f06 100644
--- a/src/agent/service/vaultsPermissionsCheck.ts
+++ b/src/agent/service/vaultsPermissionsCheck.ts
@@ -1,6 +1,6 @@
 import type * as grpc from '@grpc/grpc-js';
+import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
 import { utils as grpcUtils } from '../../grpc';
-import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
 
 function vaultsPermissionsCheck(_) {
   return async (
diff --git a/src/agent/service/vaultsScan.ts b/src/agent/service/vaultsScan.ts
index 84d84124b..f7e618664 100644
--- a/src/agent/service/vaultsScan.ts
+++ b/src/agent/service/vaultsScan.ts
@@ -1,7 +1,7 @@
 import type * as grpc from '@grpc/grpc-js';
+import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
+import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
 import { utils as grpcUtils } from '../../grpc';
-import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';
-import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb';
 
 function vaultsScan(_) {
   return async (
diff --git a/src/bin/agent/CommandStart.ts b/src/bin/agent/CommandStart.ts
index eb9f6f0ec..5c71999d9 100644
--- a/src/bin/agent/CommandStart.ts
+++ b/src/bin/agent/CommandStart.ts
@@ -38,7 +38,9 @@ class CommandStart extends CommandPolykey {
       options.clientPort =
         options.clientPort ?? config.defaults.networkConfig.clientPort;
       const { default: PolykeyAgent } = await import('../../PolykeyAgent');
-      const { WorkerManager, utils: workersUtils } = await import('../../workers');
+      const { WorkerManager, utils: workersUtils } = await import(
+        '../../workers'
+      );
       let password: string | undefined;
       if (options.fresh) {
         // If fresh, then get a new password
diff --git a/src/bin/utils/options.ts b/src/bin/utils/options.ts
index 1f4047a07..2fffd0ecb 100644
--- a/src/bin/utils/options.ts
+++ b/src/bin/utils/options.ts
@@ -143,7 +143,7 @@ const network = new commander.Option(
 
 const workers = new commander.Option(
   '-w --workers ',
-  'Number of workers to use, defaults to number of cores with `all`, 0 means no multi-threading'
+  'Number of workers to use, defaults to number of cores with `all`, 0 means no multi-threading',
 )
   .argParser(binParsers.parseCoreCount)
   .default(undefined);
diff --git a/src/client/index.ts b/src/client/index.ts
index b2b36c024..d5959db3e 100644
--- a/src/client/index.ts
+++ b/src/client/index.ts
@@ -1,4 +1,7 @@
-export { default as createClientService, ClientServiceService } from './service';
+export {
+  default as createClientService,
+  ClientServiceService,
+} from './service';
 export { default as GRPCClientClient } from './GRPCClientClient';
 export * as errors from './errors';
 export * as utils from './utils';
diff --git a/src/client/service/agentLockAll.ts b/src/client/service/agentLockAll.ts
index 70ce95a87..3641e1f71 100644
--- a/src/client/service/agentLockAll.ts
+++ b/src/client/service/agentLockAll.ts
@@ -4,7 +4,7 @@ import type { SessionManager } from '../../sessions';
 import * as grpcUtils from '../../grpc/utils';
 import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 
-function agentLockAll ({
+function agentLockAll({
   sessionManager,
   authenticate,
 }: {
diff --git a/src/client/service/agentStatus.ts b/src/client/service/agentStatus.ts
index 2b5c91116..e71cf5a82 100644
--- a/src/client/service/agentStatus.ts
+++ b/src/client/service/agentStatus.ts
@@ -3,10 +3,10 @@ import type { Authenticate } from '../types';
 import type { KeyManager } from '../../keys';
 import type { GRPCServer } from '../../grpc';
 import type { ForwardProxy, ReverseProxy } from '../../network';
+import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 import process from 'process';
 import * as grpcUtils from '../../grpc/utils';
 import * as agentPB from '../../proto/js/polykey/v1/agent/agent_pb';
-import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 
 function agentStatus({
   authenticate,
diff --git a/src/client/service/agentStop.ts b/src/client/service/agentStop.ts
index 2e4fca322..94f3e0ff3 100644
--- a/src/client/service/agentStop.ts
+++ b/src/client/service/agentStop.ts
@@ -5,7 +5,7 @@ import { status, running } from '@matrixai/async-init';
 import * as grpcUtils from '../../grpc/utils';
 import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
 
-function agentStop ({
+function agentStop({
   authenticate,
   pkAgent,
 }: {
diff --git a/src/client/service/agentUnlock.ts b/src/client/service/agentUnlock.ts
index ca04af6e6..1ddef6f11 100644
--- a/src/client/service/agentUnlock.ts
+++ b/src/client/service/agentUnlock.ts
@@ -3,11 +3,7 @@ import type { Authenticate } from '../types';
 import * as grpcUtils from '../../grpc/utils';
 import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb';
-function agentUnlock ({ - authenticate, -}: { - authenticate: Authenticate; -}) { +function agentUnlock({ authenticate }: { authenticate: Authenticate }) { return async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, diff --git a/src/client/service/gestaltsActionsGetByIdentity.ts b/src/client/service/gestaltsActionsGetByIdentity.ts index f70bb2391..6c039cdec 100644 --- a/src/client/service/gestaltsActionsGetByIdentity.ts +++ b/src/client/service/gestaltsActionsGetByIdentity.ts @@ -2,8 +2,8 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import { utils as grpcUtils } from '../../grpc'; -import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; function gestaltsActionsGetByIdentity({ diff --git a/src/client/service/gestaltsActionsGetByNode.ts b/src/client/service/gestaltsActionsGetByNode.ts index 45a4e5190..9bc6d5e0b 100644 --- a/src/client/service/gestaltsActionsGetByNode.ts +++ b/src/client/service/gestaltsActionsGetByNode.ts @@ -1,9 +1,9 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as nodesUtils } from '../../nodes'; -import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; function gestaltsActionsGetByNode({ diff --git a/src/client/service/gestaltsActionsSetByIdentity.ts b/src/client/service/gestaltsActionsSetByIdentity.ts index d6cccfe94..976e2450f 100644 --- a/src/client/service/gestaltsActionsSetByIdentity.ts +++ b/src/client/service/gestaltsActionsSetByIdentity.ts @@ -2,10 +2,10 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as gestaltsUtils } from '../../gestalts'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; function gestaltsActionsSetByIdentity({ authenticate, diff --git a/src/client/service/gestaltsActionsSetByNode.ts b/src/client/service/gestaltsActionsSetByNode.ts index aec7b96bf..5c1303cdf 100644 --- a/src/client/service/gestaltsActionsSetByNode.ts +++ b/src/client/service/gestaltsActionsSetByNode.ts @@ -1,11 +1,11 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; +import type * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as nodesUtils } from '../../nodes'; import { utils as gestaltsUtils } from '../../gestalts'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as permissionsPB from 
'../../proto/js/polykey/v1/permissions/permissions_pb'; function gestaltsActionsSetByNode({ authenticate, diff --git a/src/client/service/gestaltsActionsUnsetByIdentity.ts b/src/client/service/gestaltsActionsUnsetByIdentity.ts index bf6b35c0a..7d6bedd4e 100644 --- a/src/client/service/gestaltsActionsUnsetByIdentity.ts +++ b/src/client/service/gestaltsActionsUnsetByIdentity.ts @@ -2,10 +2,10 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as gestaltsUtils } from '../../gestalts'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; function gestaltsActionsUnsetByIdentity({ authenticate, diff --git a/src/client/service/gestaltsActionsUnsetByNode.ts b/src/client/service/gestaltsActionsUnsetByNode.ts index 6c62590cb..1f9e3c297 100644 --- a/src/client/service/gestaltsActionsUnsetByNode.ts +++ b/src/client/service/gestaltsActionsUnsetByNode.ts @@ -1,11 +1,11 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; +import type * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as nodesUtils } from '../../nodes'; import { utils as gestaltsUtils } from '../../gestalts'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; function gestaltsActionsUnsetByNode({ authenticate, diff --git a/src/client/service/gestaltsDiscoveryByIdentity.ts b/src/client/service/gestaltsDiscoveryByIdentity.ts index 66de892ca..11c54fb89 100644 --- a/src/client/service/gestaltsDiscoveryByIdentity.ts +++ b/src/client/service/gestaltsDiscoveryByIdentity.ts @@ -2,9 +2,9 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { Discovery } from '../../discovery'; import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; function gestaltsDiscoveryByIdentity({ authenticate, diff --git a/src/client/service/gestaltsDiscoveryByNode.ts b/src/client/service/gestaltsDiscoveryByNode.ts index 3a43b7492..507215b8c 100644 --- a/src/client/service/gestaltsDiscoveryByNode.ts +++ b/src/client/service/gestaltsDiscoveryByNode.ts @@ -1,10 +1,10 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { Discovery } from '../../discovery'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as nodesUtils } from '../../nodes'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; function gestaltsDiscoveryByNode({ authenticate, diff --git 
a/src/client/service/gestaltsGestaltGetByIdentity.ts b/src/client/service/gestaltsGestaltGetByIdentity.ts index a82e26c9e..eaf2be7cb 100644 --- a/src/client/service/gestaltsGestaltGetByIdentity.ts +++ b/src/client/service/gestaltsGestaltGetByIdentity.ts @@ -2,8 +2,8 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import { utils as grpcUtils } from '../../grpc'; -import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; function gestaltsGestaltGetByIdentity({ diff --git a/src/client/service/gestaltsGestaltGetByNode.ts b/src/client/service/gestaltsGestaltGetByNode.ts index 38b6bf4fd..beeab3df3 100644 --- a/src/client/service/gestaltsGestaltGetByNode.ts +++ b/src/client/service/gestaltsGestaltGetByNode.ts @@ -1,10 +1,10 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as nodesUtils } from '../../nodes'; import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; -import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; function gestaltsGestaltGetByNode({ authenticate, diff --git a/src/client/service/gestaltsGestaltList.ts b/src/client/service/gestaltsGestaltList.ts index e97609215..e4daf338f 100644 --- a/src/client/service/gestaltsGestaltList.ts +++ b/src/client/service/gestaltsGestaltList.ts @@ -2,9 +2,9 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { GestaltGraph } from '../../gestalts'; import type { Gestalt } from '../../gestalts/types'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import { utils as grpcUtils } from '../../grpc'; import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function gestaltsGestaltList({ authenticate, diff --git a/src/client/service/identitiesClaim.ts b/src/client/service/identitiesClaim.ts index 3b02d5b64..ba5d8740d 100644 --- a/src/client/service/identitiesClaim.ts +++ b/src/client/service/identitiesClaim.ts @@ -4,11 +4,11 @@ import type { NodeManager } from '../../nodes'; import type { Sigchain } from '../../sigchain'; import type { IdentitiesManager } from '../../identities'; import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import * as clientErrors from '../errors'; import { utils as grpcUtils } from '../../grpc'; import { utils as claimsUtils } from '../../claims'; import { errors as identitiesErrors } from '../../identities'; -import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; /** @@ -36,8 +36,7 @@ function identitiesClaim({ // Check provider is authenticated const providerId = call.request.getProviderId() as ProviderId; const provider = identitiesManager.getProvider(providerId); - if (provider == null) - throw new 
clientErrors.ErrorClientInvalidProvider(); + if (provider == null) throw new clientErrors.ErrorClientInvalidProvider(); const identityId = call.request.getIdentityId() as IdentityId; const identities = await provider.getAuthIdentityIds(); if (!identities.includes(identityId)) { diff --git a/src/client/service/identitiesInfoGetConnected.ts b/src/client/service/identitiesInfoGetConnected.ts index faae81fe4..683f0702d 100644 --- a/src/client/service/identitiesInfoGetConnected.ts +++ b/src/client/service/identitiesInfoGetConnected.ts @@ -1,7 +1,7 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { IdentitiesManager } from '../../identities'; -import type { IdentityId, ProviderId, TokenData } from '../../identities/types'; +import type { IdentityId, ProviderId } from '../../identities/types'; import * as clientErrors from '../errors'; import { utils as grpcUtils } from '../../grpc'; import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; @@ -30,8 +30,7 @@ function identitiesInfoGetConnected({ .getProvider() ?.getIdentityId() as IdentityId; const provider = identitiesManager.getProvider(providerId); - if (provider == null) - throw new clientErrors.ErrorClientInvalidProvider(); + if (provider == null) throw new clientErrors.ErrorClientInvalidProvider(); const identities = provider.getConnectedIdentityDatas( identityId, diff --git a/src/client/service/identitiesProvidersList.ts b/src/client/service/identitiesProvidersList.ts index 0ccd1349e..62f883d65 100644 --- a/src/client/service/identitiesProvidersList.ts +++ b/src/client/service/identitiesProvidersList.ts @@ -1,9 +1,9 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { IdentitiesManager } from '../../identities'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import { utils as grpcUtils } from '../../grpc'; import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function identitiesProvidersList({ identitiesManager, @@ -33,4 +33,3 @@ function identitiesProvidersList({ } export default identitiesProvidersList; - diff --git a/src/client/service/identitiesTokenDelete.ts b/src/client/service/identitiesTokenDelete.ts index 3313a59cc..383f0d51d 100644 --- a/src/client/service/identitiesTokenDelete.ts +++ b/src/client/service/identitiesTokenDelete.ts @@ -1,9 +1,9 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { IdentitiesManager } from '../../identities'; -import type { IdentityId, ProviderId, TokenData } from '../../identities/types'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import { utils as grpcUtils } from '../../grpc'; -import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function identitiesTokenDelete({ diff --git a/src/client/service/identitiesTokenGet.ts b/src/client/service/identitiesTokenGet.ts index d99cd3c08..bb1f614d0 100644 --- a/src/client/service/identitiesTokenGet.ts +++ b/src/client/service/identitiesTokenGet.ts @@ -1,7 +1,7 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { IdentitiesManager } from '../../identities'; -import type { IdentityId, 
ProviderId, TokenData } from '../../identities/types'; +import type { IdentityId, ProviderId } from '../../identities/types'; import { utils as grpcUtils } from '../../grpc'; import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; diff --git a/src/client/service/identitiesTokenPut.ts b/src/client/service/identitiesTokenPut.ts index def2f0dfe..b447368b5 100644 --- a/src/client/service/identitiesTokenPut.ts +++ b/src/client/service/identitiesTokenPut.ts @@ -2,8 +2,8 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { IdentitiesManager } from '../../identities'; import type { IdentityId, ProviderId, TokenData } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import { utils as grpcUtils } from '../../grpc'; -import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; function identitiesTokenPut({ @@ -40,4 +40,3 @@ function identitiesTokenPut({ } export default identitiesTokenPut; - diff --git a/src/client/service/index.ts b/src/client/service/index.ts index b336fcea8..88a6ca861 100644 --- a/src/client/service/index.ts +++ b/src/client/service/index.ts @@ -1,4 +1,3 @@ -import Logger from '@matrixai/logger'; import type PolykeyAgent from '../../PolykeyAgent'; import type { KeyManager } from '../../keys'; import type { VaultManager } from '../../vaults'; @@ -13,6 +12,7 @@ import type { GRPCServer } from '../../grpc'; import type { ForwardProxy, ReverseProxy } from '../../network'; import type { IClientServiceServer } from '../../proto/js/polykey/v1/client_service_grpc_pb'; import type { FileSystem } from '../../types'; +import Logger from '@matrixai/logger'; import agentLockAll from './agentLockAll'; import agentStatus from './agentStatus'; import agentStop from './agentStop'; @@ -77,36 +77,31 @@ import vaultsSecretsStat from './vaultsSecretsStat'; import * as clientUtils from '../utils'; import { ClientServiceService } from '../../proto/js/polykey/v1/client_service_grpc_pb'; -function createService ( - { - keyManager, - sessionManager, - logger = new Logger(createService.name), - fs = require('fs'), - ...containerRest - }: { - pkAgent: PolykeyAgent; - keyManager: KeyManager; - vaultManager: VaultManager; - nodeManager: NodeManager; - identitiesManager: IdentitiesManager; - gestaltGraph: GestaltGraph; - sessionManager: SessionManager; - notificationsManager: NotificationsManager; - discovery: Discovery; - sigchain: Sigchain; - grpcServerClient: GRPCServer; - grpcServerAgent: GRPCServer; - fwdProxy: ForwardProxy; - revProxy: ReverseProxy; - logger?: Logger; - fs?: FileSystem; - } -) { - const authenticate = clientUtils.authenticator( - sessionManager, - keyManager - ); +function createService({ + keyManager, + sessionManager, + logger = new Logger(createService.name), + fs = require('fs'), + ...containerRest +}: { + pkAgent: PolykeyAgent; + keyManager: KeyManager; + vaultManager: VaultManager; + nodeManager: NodeManager; + identitiesManager: IdentitiesManager; + gestaltGraph: GestaltGraph; + sessionManager: SessionManager; + notificationsManager: NotificationsManager; + discovery: Discovery; + sigchain: Sigchain; + grpcServerClient: GRPCServer; + grpcServerAgent: GRPCServer; + fwdProxy: ForwardProxy; + revProxy: ReverseProxy; + logger?: Logger; + fs?: FileSystem; +}) { + const authenticate = clientUtils.authenticator(sessionManager, keyManager); const 
container = { ...containerRest, keyManager, @@ -115,7 +110,7 @@ function createService ( fs, authenticate, }; - const service: IClientServiceServer ={ + const service: IClientServiceServer = { agentLockAll: agentLockAll(container), agentStatus: agentStatus(container), agentStop: agentStop(container), diff --git a/src/client/service/keysCertsChainGet.ts b/src/client/service/keysCertsChainGet.ts index 6de9e12d2..830381136 100644 --- a/src/client/service/keysCertsChainGet.ts +++ b/src/client/service/keysCertsChainGet.ts @@ -1,8 +1,8 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { KeyManager } from '../../keys'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import { utils as grpcUtils } from '../../grpc'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; function keysCertsChainGet({ diff --git a/src/client/service/keysCertsGet.ts b/src/client/service/keysCertsGet.ts index fc9e438ae..d70bff9b4 100644 --- a/src/client/service/keysCertsGet.ts +++ b/src/client/service/keysCertsGet.ts @@ -1,8 +1,8 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { KeyManager } from '../../keys'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import { utils as grpcUtils } from '../../grpc'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; function keysCertsGet({ diff --git a/src/client/service/keysKeyPairRenew.ts b/src/client/service/keysKeyPairRenew.ts index 664e27637..ba32b7b2b 100644 --- a/src/client/service/keysKeyPairRenew.ts +++ b/src/client/service/keysKeyPairRenew.ts @@ -5,9 +5,9 @@ import type { NodeManager } from '../../nodes'; import type { GRPCServer } from '../../grpc'; import type { ForwardProxy, ReverseProxy } from '../../network'; import type { TLSConfig } from '../../network/types'; +import type * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; function keysKeyPairRenew({ keyManager, diff --git a/src/client/service/keysKeyPairReset.ts b/src/client/service/keysKeyPairReset.ts index ceb662f0a..fef7a2b7a 100644 --- a/src/client/service/keysKeyPairReset.ts +++ b/src/client/service/keysKeyPairReset.ts @@ -5,9 +5,9 @@ import type { NodeManager } from '../../nodes'; import type { GRPCServer } from '../../grpc'; import type { ForwardProxy, ReverseProxy } from '../../network'; import type { TLSConfig } from '../../network/types'; +import type * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; function keysKeyPairReset({ keyManager, diff --git a/src/client/service/keysKeyPairRoot.ts b/src/client/service/keysKeyPairRoot.ts index 7e785ec39..793c08e3c 100644 --- a/src/client/service/keysKeyPairRoot.ts +++ b/src/client/service/keysKeyPairRoot.ts @@ -1,8 +1,8 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { KeyManager } from '../../keys'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import { utils as grpcUtils 
} from '../../grpc'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; function keysKeyPairRoot({ diff --git a/src/client/service/keysVerify.ts b/src/client/service/keysVerify.ts index 70b858279..007ab1681 100644 --- a/src/client/service/keysVerify.ts +++ b/src/client/service/keysVerify.ts @@ -1,9 +1,9 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { KeyManager } from '../../keys'; +import type * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; function keysVerify({ keyManager, diff --git a/src/client/service/nodesAdd.ts b/src/client/service/nodesAdd.ts index 8d65c03ff..4560c0d9a 100644 --- a/src/client/service/nodesAdd.ts +++ b/src/client/service/nodesAdd.ts @@ -2,18 +2,18 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { NodeManager } from '../../nodes'; import type { NodeAddress } from '../../nodes/types'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import { utils as nodesUtils, errors as nodesErrors } from '../../nodes'; import { utils as grpcUtils } from '../../grpc'; import { utils as networkUtils } from '../../network'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; /** * Adds a node ID -> node address mapping into the buckets database. * This is an unrestricted add: no validity checks are made for the correctness * of the passed ID or host/port. */ -function nodesAdd ({ +function nodesAdd({ nodeManager, authenticate, }: { @@ -44,7 +44,7 @@ function nodesAdd ({ { host: call.request.getAddress()!.getHost(), port: call.request.getAddress()!.getPort(), - } as NodeAddress + } as NodeAddress, ); callback(null, response); return; diff --git a/src/client/service/nodesClaim.ts b/src/client/service/nodesClaim.ts index 58c47cbf1..7c13ad584 100644 --- a/src/client/service/nodesClaim.ts +++ b/src/client/service/nodesClaim.ts @@ -3,17 +3,17 @@ import type { Authenticate } from '../types'; import type { NodeManager } from '../../nodes'; import type { NotificationData } from '../../notifications/types'; import type { NotificationsManager } from '../../notifications'; -import { utils as nodesUtils, errors as nodesErrors } from '../../nodes'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as nodesUtils } from '../../nodes'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; /** * Checks whether there is an existing Gestalt Invitation from the other node. * If not, send an invitation, if so, create a cryptolink claim between the * other node and host node. 
*/ -function nodesClaim ({ +function nodesClaim({ nodeManager, notificationsManager, authenticate, diff --git a/src/client/service/nodesFind.ts b/src/client/service/nodesFind.ts index 2282fc350..070a904f8 100644 --- a/src/client/service/nodesFind.ts +++ b/src/client/service/nodesFind.ts @@ -1,8 +1,7 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { NodeManager } from '../../nodes'; -import type { NodeAddress } from '../../nodes/types'; -import { utils as nodesUtils, errors as nodesErrors } from '../../nodes'; +import { utils as nodesUtils } from '../../nodes'; import { utils as grpcUtils } from '../../grpc'; import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; @@ -11,7 +10,7 @@ import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; * keynodes in the wider Polykey network). * @throws ErrorNodeGraphNodeNotFound if node address cannot be found */ -function nodesFind ({ +function nodesFind({ nodeManager, authenticate, }: { diff --git a/src/client/service/nodesPing.ts b/src/client/service/nodesPing.ts index d0ae7ea02..e4da23b73 100644 --- a/src/client/service/nodesPing.ts +++ b/src/client/service/nodesPing.ts @@ -1,15 +1,15 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { NodeManager } from '../../nodes'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import { utils as nodesUtils } from '../../nodes'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; /** * Checks if a remote node is online. */ -function nodesPing ({ +function nodesPing({ nodeManager, authenticate, }: { diff --git a/src/client/service/notificationsClear.ts b/src/client/service/notificationsClear.ts index dfb289648..adb139294 100644 --- a/src/client/service/notificationsClear.ts +++ b/src/client/service/notificationsClear.ts @@ -4,7 +4,7 @@ import type { NotificationsManager } from '../../notifications'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function notificationsClear ({ +function notificationsClear({ notificationsManager, authenticate, }: { diff --git a/src/client/service/notificationsRead.ts b/src/client/service/notificationsRead.ts index 2673f1e6c..953e1e9a1 100644 --- a/src/client/service/notificationsRead.ts +++ b/src/client/service/notificationsRead.ts @@ -4,7 +4,7 @@ import type { NotificationsManager } from '../../notifications'; import { utils as grpcUtils } from '../../grpc'; import * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; -function notificationsRead ({ +function notificationsRead({ notificationsManager, authenticate, }: { diff --git a/src/client/service/notificationsSend.ts b/src/client/service/notificationsSend.ts index 9b2f35815..9992f7e73 100644 --- a/src/client/service/notificationsSend.ts +++ b/src/client/service/notificationsSend.ts @@ -1,11 +1,11 @@ import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; import type { NotificationsManager } from '../../notifications'; +import type * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; import { utils as grpcUtils } from '../../grpc'; -import { utils as nodesUtils } from '../../nodes'; +import { utils as nodesUtils } from '../../nodes'; import { utils as 
notificationsUtils } from '../../notifications'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; function notificationsSend({ notificationsManager, diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts index 294795fca..2a5e579d7 100644 --- a/src/client/service/vaultsClone.ts +++ b/src/client/service/vaultsClone.ts @@ -1,14 +1,10 @@ import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -function vaultsClone({ - authenticate -}: { - authenticate: Authenticate; -}) { +function vaultsClone({ authenticate }: { authenticate: Authenticate }) { return async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, diff --git a/src/client/service/vaultsCreate.ts b/src/client/service/vaultsCreate.ts index 18a2cdef3..6539c7083 100644 --- a/src/client/service/vaultsCreate.ts +++ b/src/client/service/vaultsCreate.ts @@ -1,21 +1,18 @@ import type { Authenticate } from '../types'; import type { Vault, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; -import type { FileSystem } from '../../types'; -import * as grpc from '@grpc/grpc-js'; +import type * as grpc from '@grpc/grpc-js'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as vaultsUtils } from '../../vaults'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsCreate({ vaultManager, authenticate, - fs, }: { vaultManager: VaultManager; authenticate: Authenticate; - fs: FileSystem; }) { return async ( call: grpc.ServerUnaryCall, diff --git a/src/client/service/vaultsDelete.ts b/src/client/service/vaultsDelete.ts index 96f783faf..34a6c692f 100644 --- a/src/client/service/vaultsDelete.ts +++ b/src/client/service/vaultsDelete.ts @@ -1,13 +1,12 @@ import type { Authenticate } from '../types'; -import type { Vault, VaultId, VaultName } from '../../vaults/types'; +import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; -import type { FileSystem } from '../../types'; -import * as grpc from '@grpc/grpc-js'; +import type * as grpc from '@grpc/grpc-js'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { errors as vaultsErrors, } from '../../vaults'; +import { errors as vaultsErrors } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function decodeVaultId(input: string): VaultId | undefined { return idUtils.fromMultibase(input) @@ -18,11 +17,9 @@ function decodeVaultId(input: string): VaultId | undefined { function vaultsDelete({ vaultManager, authenticate, - fs, }: { vaultManager: VaultManager; authenticate: Authenticate; - fs: FileSystem; }) { return async ( call: grpc.ServerUnaryCall, diff --git a/src/client/service/vaultsList.ts b/src/client/service/vaultsList.ts index a5c9d11c4..237aaedf1 
100644 --- a/src/client/service/vaultsList.ts +++ b/src/client/service/vaultsList.ts @@ -1,20 +1,17 @@ import type { Authenticate } from '../types'; import type { VaultManager } from '../../vaults'; -import type { FileSystem } from '../../types'; -import * as grpc from '@grpc/grpc-js'; +import type * as grpc from '@grpc/grpc-js'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import { utils as grpcUtils } from '../../grpc'; import { utils as vaultsUtils } from '../../vaults'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsList({ vaultManager, authenticate, - fs, }: { vaultManager: VaultManager; authenticate: Authenticate; - fs: FileSystem; }) { return async ( call: grpc.ServerWritableStream, diff --git a/src/client/service/vaultsLog.ts b/src/client/service/vaultsLog.ts index 4c38bef69..6678cd52b 100644 --- a/src/client/service/vaultsLog.ts +++ b/src/client/service/vaultsLog.ts @@ -1,7 +1,6 @@ import type { Authenticate } from '../types'; import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; -import type { FileSystem } from '../../types'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; @@ -17,11 +16,9 @@ function decodeVaultId(input: string): VaultId | undefined { function vaultsLog({ vaultManager, authenticate, - fs, }: { vaultManager: VaultManager; authenticate: Authenticate; - fs: FileSystem; }) { return async ( call: grpc.ServerWritableStream, diff --git a/src/client/service/vaultsPermissions.ts b/src/client/service/vaultsPermissions.ts index ceafdf1c4..2a43ade68 100644 --- a/src/client/service/vaultsPermissions.ts +++ b/src/client/service/vaultsPermissions.ts @@ -1,13 +1,9 @@ import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '../../grpc'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -function vaultsPermissions({ - authenticate, -}: { - authenticate: Authenticate; -}) { +function vaultsPermissions({ authenticate }: { authenticate: Authenticate }) { return async ( call: grpc.ServerWritableStream, ): Promise => { diff --git a/src/client/service/vaultsPermissionsSet.ts b/src/client/service/vaultsPermissionsSet.ts index 976b80d1a..adb31381d 100644 --- a/src/client/service/vaultsPermissionsSet.ts +++ b/src/client/service/vaultsPermissionsSet.ts @@ -1,8 +1,8 @@ import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsPermissionsSet({ authenticate, diff --git a/src/client/service/vaultsPermissionsUnset.ts b/src/client/service/vaultsPermissionsUnset.ts index d1fc23b9e..4840176d3 100644 --- a/src/client/service/vaultsPermissionsUnset.ts +++ b/src/client/service/vaultsPermissionsUnset.ts @@ -1,8 +1,8 @@ import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from 
'../../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsPermissionsUnset({ authenticate, diff --git a/src/client/service/vaultsPull.ts b/src/client/service/vaultsPull.ts index 81b43edf3..1e61964f9 100644 --- a/src/client/service/vaultsPull.ts +++ b/src/client/service/vaultsPull.ts @@ -1,14 +1,10 @@ import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '../../grpc'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -function vaultsPull({ - authenticate, -}: { - authenticate: Authenticate; -}) { +function vaultsPull({ authenticate }: { authenticate: Authenticate }) { return async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, diff --git a/src/client/service/vaultsRename.ts b/src/client/service/vaultsRename.ts index 69920a270..3c2bfd078 100644 --- a/src/client/service/vaultsRename.ts +++ b/src/client/service/vaultsRename.ts @@ -1,13 +1,10 @@ import type { Authenticate } from '../types'; -import type { Vault, VaultId, VaultName } from '../../vaults/types'; +import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - utils as vaultsUtils, - errors as vaultsErrors, -} from '../../vaults'; +import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function decodeVaultId(input: string): VaultId | undefined { diff --git a/src/client/service/vaultsScan.ts b/src/client/service/vaultsScan.ts index 226197cd6..98fba456e 100644 --- a/src/client/service/vaultsScan.ts +++ b/src/client/service/vaultsScan.ts @@ -1,7 +1,7 @@ import type { Authenticate } from '../types'; import type { VaultManager } from '../../vaults'; import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; -import * as grpc from '@grpc/grpc-js'; +import type * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '../../grpc'; import { utils as vaultsUtils } from '../../vaults'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; diff --git a/src/client/service/vaultsSecretsDelete.ts b/src/client/service/vaultsSecretsDelete.ts index 75e56083e..4a3a58fd1 100644 --- a/src/client/service/vaultsSecretsDelete.ts +++ b/src/client/service/vaultsSecretsDelete.ts @@ -1,15 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - vaultOps, - errors as vaultsErrors, -} from '../../vaults'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function decodeVaultId(input: string): VaultId | undefined { return idUtils.fromMultibase(input) diff --git a/src/client/service/vaultsSecretsEdit.ts 
b/src/client/service/vaultsSecretsEdit.ts index 91a5dfbb7..830232677 100644 --- a/src/client/service/vaultsSecretsEdit.ts +++ b/src/client/service/vaultsSecretsEdit.ts @@ -1,15 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - vaultOps, - errors as vaultsErrors, -} from '../../vaults'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function decodeVaultId(input: string): VaultId | undefined { return idUtils.fromMultibase(input) diff --git a/src/client/service/vaultsSecretsGet.ts b/src/client/service/vaultsSecretsGet.ts index 95bbf0a7d..f3c6b2094 100644 --- a/src/client/service/vaultsSecretsGet.ts +++ b/src/client/service/vaultsSecretsGet.ts @@ -1,14 +1,11 @@ import type { Authenticate } from '../types'; import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - vaultOps, - errors as vaultsErrors, -} from '../../vaults'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function decodeVaultId(input: string): VaultId | undefined { diff --git a/src/client/service/vaultsSecretsList.ts b/src/client/service/vaultsSecretsList.ts index fd16e0d8a..778947b8d 100644 --- a/src/client/service/vaultsSecretsList.ts +++ b/src/client/service/vaultsSecretsList.ts @@ -1,15 +1,11 @@ import type { Authenticate } from '../types'; -import type { Vault, VaultId, VaultName } from '../../vaults/types'; +import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; -import type { FileSystem } from '../../types'; -import * as grpc from '@grpc/grpc-js'; +import type * as grpc from '@grpc/grpc-js'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - vaultOps, - errors as vaultsErrors, -} from '../../vaults'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function decodeVaultId(input: string): VaultId | undefined { @@ -21,11 +17,9 @@ function decodeVaultId(input: string): VaultId | undefined { function vaultsSecretsList({ vaultManager, authenticate, - fs, }: { vaultManager: VaultManager; authenticate: Authenticate; - fs: FileSystem; }) { return async ( call: grpc.ServerWritableStream, diff --git a/src/client/service/vaultsSecretsMkdir.ts b/src/client/service/vaultsSecretsMkdir.ts index 2d9aa22a1..594640a66 100644 --- a/src/client/service/vaultsSecretsMkdir.ts +++ b/src/client/service/vaultsSecretsMkdir.ts @@ -1,15 +1,12 @@ import type { 
Authenticate } from '../types'; import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - vaultOps, - errors as vaultsErrors, -} from '../../vaults'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function decodeVaultId(input: string): VaultId | undefined { return idUtils.fromMultibase(input) diff --git a/src/client/service/vaultsSecretsNew.ts b/src/client/service/vaultsSecretsNew.ts index cb8d2c1e3..0352e0241 100644 --- a/src/client/service/vaultsSecretsNew.ts +++ b/src/client/service/vaultsSecretsNew.ts @@ -1,15 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - vaultOps, - errors as vaultsErrors, -} from '../../vaults'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function decodeVaultId(input: string): VaultId | undefined { return idUtils.fromMultibase(input) diff --git a/src/client/service/vaultsSecretsNewDir.ts b/src/client/service/vaultsSecretsNewDir.ts index fbe45597b..9b3804ddf 100644 --- a/src/client/service/vaultsSecretsNewDir.ts +++ b/src/client/service/vaultsSecretsNewDir.ts @@ -1,16 +1,13 @@ import type { Authenticate } from '../types'; -import type { Vault, VaultId, VaultName } from '../../vaults/types'; +import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; import type { FileSystem } from '../../types'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - vaultOps, - errors as vaultsErrors, -} from '../../vaults'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function decodeVaultId(input: string): VaultId | undefined { return idUtils.fromMultibase(input) diff --git a/src/client/service/vaultsSecretsRename.ts b/src/client/service/vaultsSecretsRename.ts index 6be7b6a91..dc3f98f2b 100644 --- a/src/client/service/vaultsSecretsRename.ts +++ b/src/client/service/vaultsSecretsRename.ts @@ -1,15 +1,12 @@ import type { Authenticate } from '../types'; import type { VaultId, VaultName } from '../../vaults/types'; import type { VaultManager } from '../../vaults'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; import { utils as idUtils } from '@matrixai/id'; import { utils as grpcUtils } from '../../grpc'; -import { - vaultOps, - errors as vaultsErrors, -} from '../../vaults'; +import { 
vaultOps, errors as vaultsErrors } from '../../vaults'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; function decodeVaultId(input: string): VaultId | undefined { return idUtils.fromMultibase(input) diff --git a/src/client/service/vaultsSecretsStat.ts b/src/client/service/vaultsSecretsStat.ts index 6fd5d136d..20b7308f3 100644 --- a/src/client/service/vaultsSecretsStat.ts +++ b/src/client/service/vaultsSecretsStat.ts @@ -1,14 +1,9 @@ +import type * as grpc from '@grpc/grpc-js'; import type { Authenticate } from '../types'; -import type { Vault } from '../../vaults/types'; -import * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '../../grpc'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -function vaultsSecretsStat({ - authenticate, -}: { - authenticate: Authenticate; -}) { +function vaultsSecretsStat({ authenticate }: { authenticate: Authenticate }) { return async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, diff --git a/src/client/types.ts b/src/client/types.ts index 61d5f0326..dc642800f 100644 --- a/src/client/types.ts +++ b/src/client/types.ts @@ -5,6 +5,4 @@ type Authenticate = ( metadataServer?: grpc.Metadata, ) => Promise; -export type { - Authenticate -}; +export type { Authenticate }; diff --git a/src/config.ts b/src/config.ts index 14391d6e9..db799104b 100644 --- a/src/config.ts +++ b/src/config.ts @@ -94,8 +94,8 @@ const config = { }, // This is not used by the `PolykeyAgent` with defaults to `{}` network: { - mainnet: { }, - testnet: { }, + mainnet: {}, + testnet: {}, }, }, }; diff --git a/src/schema/Schema.ts b/src/schema/Schema.ts index dbb2ba217..467139fa3 100644 --- a/src/schema/Schema.ts +++ b/src/schema/Schema.ts @@ -64,7 +64,10 @@ class Schema { }) { this.logger = logger ?? new Logger(this.constructor.name); this.statePath = statePath; - this.stateVersionPath = path.join(statePath, config.defaults.stateVersionBase); + this.stateVersionPath = path.join( + statePath, + config.defaults.stateVersionBase, + ); this.stateVersion = stateVersion; this.lock = lock; this.fs = fs; diff --git a/src/sessions/Session.ts b/src/sessions/Session.ts index 8685158e0..ff8c48c45 100644 --- a/src/sessions/Session.ts +++ b/src/sessions/Session.ts @@ -2,10 +2,7 @@ import type { SessionToken } from './types'; import type { FileSystem } from '../types'; import Logger from '@matrixai/logger'; -import { - CreateDestroyStartStop, - ready, -} from '@matrixai/async-init/dist/CreateDestroyStartStop'; +import { CreateDestroyStartStop } from '@matrixai/async-init/dist/CreateDestroyStartStop'; import lock from 'fd-lock'; import * as sessionErrors from './errors'; import * as utils from '../utils'; diff --git a/src/sigchain/Sigchain.ts b/src/sigchain/Sigchain.ts index 779e8b0ab..220c8932c 100644 --- a/src/sigchain/Sigchain.ts +++ b/src/sigchain/Sigchain.ts @@ -220,7 +220,9 @@ class Sigchain { * Appends a claim (of any type) to the sigchain. 
*/ @ready(new sigchainErrors.ErrorSigchainNotRunning()) - public async addClaim(claimData: ClaimData): Promise<[ClaimId, ClaimEncoded]> { + public async addClaim( + claimData: ClaimData, + ): Promise<[ClaimId, ClaimEncoded]> { return await this._transaction(async () => { const prevSequenceNumber = await this.getSequenceNumber(); const newSequenceNumber = prevSequenceNumber + 1; diff --git a/tests/PolykeyAgent.test.ts b/tests/PolykeyAgent.test.ts index 4693faec7..41de45e74 100644 --- a/tests/PolykeyAgent.test.ts +++ b/tests/PolykeyAgent.test.ts @@ -75,7 +75,7 @@ describe('PolykeyAgent', () => { expect(nodePathContents).toContain(config.defaults.statusBase); expect(nodePathContents).toContain(config.defaults.stateBase); let stateContents = await fs.promises.readdir( - path.join(nodePath, config.defaults.stateBase) + path.join(nodePath, config.defaults.stateBase), ); expect(stateContents).toContain(config.defaults.keysBase); expect(stateContents).toContain(config.defaults.dbBase); @@ -85,7 +85,7 @@ describe('PolykeyAgent', () => { expect(nodePathContents).toContain(config.defaults.statusBase); expect(nodePathContents).toContain(config.defaults.stateBase); stateContents = await fs.promises.readdir( - path.join(nodePath, config.defaults.stateBase) + path.join(nodePath, config.defaults.stateBase), ); expect(stateContents).toContain(config.defaults.keysBase); expect(stateContents).toContain(config.defaults.dbBase); @@ -110,26 +110,23 @@ describe('PolykeyAgent', () => { logger, }); await pkAgent.stop(); - expect(await status.readStatus()).toMatchObject({status: 'DEAD'}); + expect(await status.readStatus()).toMatchObject({ status: 'DEAD' }); await expect(pkAgent.start({ password })).resolves.not.toThrowError(); - expect(await status.readStatus()).toMatchObject({status: 'LIVE'}); + expect(await status.readStatus()).toMatchObject({ status: 'LIVE' }); await pkAgent.stop(); - expect(await status.readStatus()).toMatchObject({status: 'DEAD'}); - await expect(pkAgent.start({ password: 'wrong password' })).rejects.toThrowError( - errors.ErrorRootKeysParse - ); - expect(await status.readStatus()).toMatchObject({status: 'DEAD'}); + expect(await status.readStatus()).toMatchObject({ status: 'DEAD' }); + await expect( + pkAgent.start({ password: 'wrong password' }), + ).rejects.toThrowError(errors.ErrorRootKeysParse); + expect(await status.readStatus()).toMatchObject({ status: 'DEAD' }); await pkAgent.destroy(); - expect(await status.readStatus()).toMatchObject({status: 'DEAD'}); + expect(await status.readStatus()).toMatchObject({ status: 'DEAD' }); }); test('schema state version is maintained after start and stop', async () => { const nodePath = path.join(dataDir, 'polykey'); - const statePath = path.join( - nodePath, - config.defaults.stateBase, - ); + const statePath = path.join(nodePath, config.defaults.stateBase); const schema = new Schema({ - statePath + statePath, }); const pkAgent = await PolykeyAgent.createPolykeyAgent({ password, @@ -143,36 +140,37 @@ describe('PolykeyAgent', () => { }); test('cannot start during state version mismatch', async () => { const nodePath = path.join(dataDir, 'polykey'); - const statePath = path.join( - nodePath, - config.defaults.stateBase, - ); + const statePath = path.join(nodePath, config.defaults.stateBase); await fs.promises.mkdir(nodePath); let schema = await Schema.createSchema({ statePath, - stateVersion: config.stateVersion + 1 as StateVersion, + stateVersion: (config.stateVersion + 1) as StateVersion, logger, - fresh: true + fresh: true, }); - schema.stop(); - 
await expect(PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - })).rejects.toThrow(errors.ErrorSchemaVersionTooNew); + await schema.stop(); + await expect( + PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }), + ).rejects.toThrow(errors.ErrorSchemaVersionTooNew); // The 0 version will always be too old // Because we started our PK's state version as 1 schema = await Schema.createSchema({ statePath, stateVersion: 0 as StateVersion, logger, - fresh: true + fresh: true, }); - schema.stop(); - await expect(PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - })).rejects.toThrow(errors.ErrorSchemaVersionTooOld); + await schema.stop(); + await expect( + PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }), + ).rejects.toThrow(errors.ErrorSchemaVersionTooOld); }); }); diff --git a/tests/acl/ACL.test.ts b/tests/acl/ACL.test.ts index 70491de03..c40e11e53 100644 --- a/tests/acl/ACL.test.ts +++ b/tests/acl/ACL.test.ts @@ -37,7 +37,7 @@ describe(ACL.name, () => { encrypt: keysUtils.encryptWithKey, decrypt: keysUtils.decryptWithKey, }, - } + }, }); vaultId1 = vaultsUtils.makeVaultId(idUtils.fromString('vault1xxxxxxxxxx')); vaultId2 = vaultsUtils.makeVaultId(idUtils.fromString('vault2xxxxxxxxxx')); diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index 23cbfa487..a2a68c27c 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -7,8 +7,8 @@ import os from 'os'; import path from 'path'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { Mutex } from 'async-mutex'; -import { GRPCClientAgent } from '@/agent'; import { DB } from '@matrixai/db'; +import { GRPCClientAgent } from '@/agent'; import { KeyManager } from '@/keys'; import { NodeManager } from '@/nodes'; import { VaultManager } from '@/vaults'; diff --git a/tests/bin/agent/lock.test.ts b/tests/bin/agent/lock.test.ts index d1cd9a1cb..eb43b122b 100644 --- a/tests/bin/agent/lock.test.ts +++ b/tests/bin/agent/lock.test.ts @@ -18,11 +18,8 @@ describe('lock', () => { let globalAgentPassword; let globalAgentClose; beforeAll(async () => { - ({ - globalAgentDir, - globalAgentPassword, - globalAgentClose - } = await testUtils.setupGlobalAgent(logger)); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); }, globalThis.maxTimeout); afterAll(async () => { await globalAgentClose(); @@ -57,10 +54,7 @@ describe('lock', () => { ); expect(exitCode).toBe(0); const session = await Session.createSession({ - sessionTokenPath: path.join( - globalAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, }); diff --git a/tests/bin/agent/lockall.test.ts b/tests/bin/agent/lockall.test.ts index 8c307ba2b..3c93cc1f8 100644 --- a/tests/bin/agent/lockall.test.ts +++ b/tests/bin/agent/lockall.test.ts @@ -24,11 +24,8 @@ describe('lockall', () => { let globalAgentPassword; let globalAgentClose; beforeAll(async () => { - ({ - globalAgentDir, - globalAgentPassword, - globalAgentClose - } = await testUtils.setupGlobalAgent(logger)); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); }, globalThis.maxTimeout); afterAll(async () => { await globalAgentClose(); @@ -63,10 +60,7 @@ describe('lockall', () => { ); expect(exitCode).toBe(0); const session = await Session.createSession({ - sessionTokenPath: path.join( - globalAgentDir, - 
config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, }); @@ -116,10 +110,7 @@ describe('lockall', () => { globalAgentDir, ); const session = await Session.createSession({ - sessionTokenPath: path.join( - globalAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, }); diff --git a/tests/bin/agent/start.test.ts b/tests/bin/agent/start.test.ts index e37da7a9f..017ce7fd8 100644 --- a/tests/bin/agent/start.test.ts +++ b/tests/bin/agent/start.test.ts @@ -10,7 +10,6 @@ import { Status, errors as statusErrors } from '@/status'; import config from '@/config'; import * as testBinUtils from '../utils'; import * as testUtils from '../../utils'; -import { sleep } from '@/utils'; describe('start', () => { const logger = new Logger('start test', LogLevel.WARN, [new StreamHandler()]); @@ -159,7 +158,15 @@ describe('start', () => { // One of these processes is blocked const [agentProcess1, agentProcess2] = await Promise.all([ testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -168,7 +175,15 @@ describe('start', () => { logger.getChild('agentProcess1'), ), testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -233,7 +248,15 @@ describe('start', () => { // One of these processes is blocked const [agentProcess, bootstrapProcess] = await Promise.all([ testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -305,7 +328,15 @@ describe('start', () => { async () => { const password = 'abc123'; const agentProcess1 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -325,7 +356,15 @@ describe('start', () => { expect(exitCode1).toBe(null); expect(signal1).toBe('SIGHUP'); const agentProcess2 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -356,7 +395,15 @@ describe('start', () => { async () => { const password = 'password'; const agentProcess1 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -516,7 +563,15 @@ describe('start', () => { recursive: true, }); const agentProcess4 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], + 
[ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password2, @@ -597,10 +652,8 @@ describe('start', () => { let seedNodeHost2; let seedNodePort2; beforeAll(async () => { - ({ - globalAgentStatus, - globalAgentClose - } = await testUtils.setupGlobalAgent(logger)); + ({ globalAgentStatus, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); // Additional seed node agentDataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), @@ -609,7 +662,7 @@ describe('start', () => { password: 'password', nodePath: path.join(agentDataDir, 'agent'), keysConfig: { - rootKeyPairBits: 1024 + rootKeyPairBits: 1024, }, logger, }); @@ -639,18 +692,17 @@ describe('start', () => { fs, logger, }); - const mockedConfigDefaultsNetwork = jestMockProps.spyOnProp( - config.defaults, - 'network' - ).mockValue({ - mainnet: { - [seedNodeId2]: { - host: seedNodeHost2, - port: seedNodePort2 - } - }, - testnet: {} - }); + const mockedConfigDefaultsNetwork = jestMockProps + .spyOnProp(config.defaults, 'network') + .mockValue({ + mainnet: { + [seedNodeId2]: { + host: seedNodeHost2, + port: seedNodePort2, + }, + }, + testnet: {}, + }); await testBinUtils.pkStdio( [ 'agent', @@ -672,10 +724,7 @@ describe('start', () => { dataDir, ); await testBinUtils.pkStdio( - [ - 'agent', - 'stop', - ], + ['agent', 'stop'], { PK_NODE_PATH: nodePath, PK_PASSWORD: password, @@ -687,7 +736,9 @@ describe('start', () => { }, global.defaultTimeout * 2, ); - test('start with seed nodes environment variable', async () => { + test( + 'start with seed nodes environment variable', + async () => { const password = 'abc123'; const nodePath = path.join(dataDir, 'polykey'); const statusPath = path.join(nodePath, 'status.json'); @@ -696,18 +747,17 @@ describe('start', () => { fs, logger, }); - const mockedConfigDefaultsNetwork = jestMockProps.spyOnProp( - config.defaults, - 'network' - ).mockValue({ - mainnet: { }, - testnet: { - [seedNodeId2]: { - host: seedNodeHost2, - port: seedNodePort2 - } - } - }); + const mockedConfigDefaultsNetwork = jestMockProps + .spyOnProp(config.defaults, 'network') + .mockValue({ + mainnet: {}, + testnet: { + [seedNodeId2]: { + host: seedNodeHost2, + port: seedNodePort2, + }, + }, + }); await testBinUtils.pkStdio( [ 'agent', @@ -722,15 +772,12 @@ describe('start', () => { PK_NODE_PATH: nodePath, PK_PASSWORD: password, PK_SEED_NODES: `;${seedNodeId1}@${seedNodeHost1}:${seedNodePort1}`, - PK_NETWORK: 'testnet' + PK_NETWORK: 'testnet', }, dataDir, ); await testBinUtils.pkStdio( - [ - 'agent', - 'stop', - ], + ['agent', 'stop'], { PK_NODE_PATH: nodePath, PK_PASSWORD: password, @@ -739,6 +786,8 @@ describe('start', () => { ); mockedConfigDefaultsNetwork.mockRestore(); await status.waitFor('DEAD'); - }, global.defaultTimeout * 2); + }, + global.defaultTimeout * 2, + ); }); }); diff --git a/tests/bin/agent/status.test.ts b/tests/bin/agent/status.test.ts index af37810c7..a384da103 100644 --- a/tests/bin/agent/status.test.ts +++ b/tests/bin/agent/status.test.ts @@ -35,7 +35,15 @@ describe('status', () => { logger, }); const agentProcess = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--workers', '0', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -112,11 +120,8 @@ describe('status', () => { let globalAgentPassword; 
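These `beforeAll` hooks assign into variables declared with `let` in the enclosing `describe`, so the destructuring is an assignment expression rather than a declaration and must be wrapped in parentheses. Below is a minimal sketch of the same pattern; the `setupGlobalAgent` signature shown is a simplified assumption, not the project's actual helper.

// Sketch: destructuring assignment into pre-declared test fixtures.
let globalAgentDir: string;
let globalAgentClose: () => Promise<void>;

async function initFixtures(
  setupGlobalAgent: () => Promise<{
    globalAgentDir: string;
    globalAgentClose: () => Promise<void>;
  }>,
): Promise<void> {
  // Without the surrounding parentheses this line would parse as a block.
  ({ globalAgentDir, globalAgentClose } = await setupGlobalAgent());
}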
let globalAgentClose; beforeAll(async () => { - ({ - globalAgentDir, - globalAgentPassword, - globalAgentClose - } = await testUtils.setupGlobalAgent(logger)); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); }, globalThis.maxTimeout); afterAll(async () => { await globalAgentClose(); diff --git a/tests/bin/agent/unlock.test.ts b/tests/bin/agent/unlock.test.ts index 31bf58617..530fb0492 100644 --- a/tests/bin/agent/unlock.test.ts +++ b/tests/bin/agent/unlock.test.ts @@ -8,16 +8,15 @@ import * as testBinUtils from '../utils'; import * as testUtils from '../../utils'; describe('unlock', () => { - const logger = new Logger('unlock test', LogLevel.WARN, [new StreamHandler()]); + const logger = new Logger('unlock test', LogLevel.WARN, [ + new StreamHandler(), + ]); let globalAgentDir; let globalAgentPassword; let globalAgentClose; beforeAll(async () => { - ({ - globalAgentDir, - globalAgentPassword, - globalAgentClose - } = await testUtils.setupGlobalAgent(logger)); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); }, globalThis.maxTimeout); afterAll(async () => { await globalAgentClose(); @@ -37,10 +36,7 @@ describe('unlock', () => { test('unlock acquires session token', async () => { // Fresh session, to delete the token const session = await Session.createSession({ - sessionTokenPath: path.join( - globalAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, fresh: true, diff --git a/tests/bin/identities/identities.test.ts b/tests/bin/identities/identities.test.ts index 11947b918..fbfcad359 100644 --- a/tests/bin/identities/identities.test.ts +++ b/tests/bin/identities/identities.test.ts @@ -1,8 +1,4 @@ -import type { - IdentityId, - IdentityInfo, - ProviderId, -} from '@/identities/types'; +import type { IdentityId, IdentityInfo, ProviderId } from '@/identities/types'; import type { NodeInfo } from '@/nodes/types'; import type { ClaimLinkIdentity, ClaimLinkNode } from '@/claims/types'; import os from 'os'; @@ -672,7 +668,7 @@ describe('CLI Identities', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeB'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -680,7 +676,7 @@ describe('CLI Identities', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeC'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -719,7 +715,7 @@ describe('CLI Identities', () => { provider: testProvider.id, identity: identityId, }; - const [,claimEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); + const [, claimEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claim = claimsUtils.decodeClaim(claimEncoded); await testProvider.publishClaim(identityId, claim); }, global.polykeyStartupTimeout * 2); diff --git a/tests/bin/keys/keys.test.ts b/tests/bin/keys/keys.test.ts index e65cd75aa..89192eb97 100644 --- a/tests/bin/keys/keys.test.ts +++ b/tests/bin/keys/keys.test.ts @@ -73,7 +73,11 @@ describe('CLI keys', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const result2 = await testBinUtils.pkStdio([...command, '-pk'], {}, dataDir); + const result2 = await testBinUtils.pkStdio( + [...command, '-pk'], + {}, + dataDir, + ); expect(result2.exitCode).toBe(0); }); }); diff --git a/tests/bin/nodes/add.test.ts b/tests/bin/nodes/add.test.ts index e715e1481..b238c9479 
100644 --- a/tests/bin/nodes/add.test.ts +++ b/tests/bin/nodes/add.test.ts @@ -1,4 +1,3 @@ -import type { Host, Port } from '@/network/types'; import type { NodeId } from '@/nodes/types'; import os from 'os'; import path from 'path'; @@ -7,7 +6,6 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import * as nodesUtils from '@/nodes/utils'; import * as testBinUtils from '../utils'; -import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -17,9 +15,7 @@ jest.mock('@/keys/utils', () => ({ describe('add', () => { const password = 'password'; - const logger = new Logger('add test', LogLevel.WARN, [ - new StreamHandler(), - ]); + const logger = new Logger('add test', LogLevel.WARN, [new StreamHandler()]); let dataDir: string; let nodePath: string; let passwordFile: string; diff --git a/tests/bin/nodes/claim.test.ts b/tests/bin/nodes/claim.test.ts index e2f40d815..67f20bc53 100644 --- a/tests/bin/nodes/claim.test.ts +++ b/tests/bin/nodes/claim.test.ts @@ -1,4 +1,3 @@ -import type { Host, Port } from '@/network/types'; import type { NodeId } from '@/nodes/types'; import os from 'os'; import path from 'path'; @@ -7,7 +6,6 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import * as testBinUtils from '../utils'; import * as testNodesUtils from '../../nodes/utils'; -import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -17,9 +15,7 @@ jest.mock('@/keys/utils', () => ({ describe('claim', () => { const password = 'password'; - const logger = new Logger('claim test', LogLevel.WARN, [ - new StreamHandler(), - ]); + const logger = new Logger('claim test', LogLevel.WARN, [new StreamHandler()]); let rootDataDir: string; let dataDir: string; let nodePath: string; @@ -29,8 +25,6 @@ describe('claim', () => { let keynodeId: NodeId; let remoteOnlineNodeId: NodeId; - let remoteOnlineHost: Host; - let remoteOnlinePort: Port; // Helper functions function genCommands(options: Array) { @@ -58,13 +52,11 @@ describe('claim', () => { password: 'password', nodePath: path.join(rootDataDir, 'remoteOnline'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); - remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); - remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); await testNodesUtils.nodesConnect(polykeyAgent, remoteOnline); await remoteOnline.nodeManager.setNode(keynodeId, { @@ -144,7 +136,6 @@ describe('claim', () => { // Received an invitation, so will attempt to perform the claiming process const commands = genCommands(['claim', remoteOnlineNodeId]); const result = await testBinUtils.pkStdio(commands, {}, dataDir); - console.log('result', result.exitCode, result.stderr, result.stdout); expect(result.exitCode).toBe(0); // Succeeds. 
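Both `nodes` test files above keep the same `jest.mock('@/keys/utils', ...)` preamble while dropping unused imports. The pattern re-exports the real module and overrides only selected members. Here is a small self-contained sketch of that partial-mock technique; the module path and the overridden function are hypothetical names, since the actual override is not shown in this hunk.

// Sketch of a partial module mock in Jest: spread the real implementation,
// then replace one slow function with a fast deterministic fake.
// './slowCrypto' and generateKeyPair are hypothetical names.
jest.mock('./slowCrypto', () => ({
  ...jest.requireActual('./slowCrypto'),
  generateKeyPair: jest.fn(async () => ({
    publicKey: 'test-public-key',
    privateKey: 'test-private-key',
  })),
}));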
expect(result.stdout).toContain('cryptolink claim'); expect(result.stdout).toContain(remoteOnlineNodeId); diff --git a/tests/bin/nodes/find.test.ts b/tests/bin/nodes/find.test.ts index 0a594caba..7393a2fee 100644 --- a/tests/bin/nodes/find.test.ts +++ b/tests/bin/nodes/find.test.ts @@ -8,7 +8,6 @@ import PolykeyAgent from '@/PolykeyAgent'; import * as nodesUtils from '@/nodes/utils'; import * as testBinUtils from '../utils'; import * as testNodesUtils from '../../nodes/utils'; -import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -18,9 +17,7 @@ jest.mock('@/keys/utils', () => ({ describe('find', () => { const password = 'password'; - const logger = new Logger('find test', LogLevel.WARN, [ - new StreamHandler(), - ]); + const logger = new Logger('find test', LogLevel.WARN, [new StreamHandler()]); let rootDataDir: string; let dataDir: string; let nodePath: string; @@ -29,7 +26,6 @@ describe('find', () => { let remoteOnline: PolykeyAgent; let remoteOffline: PolykeyAgent; - let keynodeId: NodeId; let remoteOnlineNodeId: NodeId; let remoteOfflineNodeId: NodeId; @@ -58,14 +54,13 @@ describe('find', () => { nodePath: nodePath, logger: logger, }); - keynodeId = polykeyAgent.nodeManager.getNodeId(); // Setting up a remote keynode remoteOnline = await PolykeyAgent.createPolykeyAgent({ password: 'password', nodePath: path.join(rootDataDir, 'remoteOnline'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -79,7 +74,7 @@ describe('find', () => { password: 'password', nodePath: path.join(rootDataDir, 'remoteOffline'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -188,9 +183,7 @@ describe('find', () => { const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).toBe(1); expect(result2.stdout).toContain(`message`); - expect(result2.stdout).toContain( - `Failed to find node ${unknownNodeId}`, - ); + expect(result2.stdout).toContain(`Failed to find node ${unknownNodeId}`); expect(result2.stdout).toContain('id'); expect(result2.stdout).toContain(unknownNodeId); expect(result2.stdout).toContain('port'); diff --git a/tests/bin/nodes/ping.test.ts b/tests/bin/nodes/ping.test.ts index 08b00fc91..902d7e915 100644 --- a/tests/bin/nodes/ping.test.ts +++ b/tests/bin/nodes/ping.test.ts @@ -1,4 +1,3 @@ -import type { Host, Port } from '@/network/types'; import type { NodeId } from '@/nodes/types'; import os from 'os'; import path from 'path'; @@ -8,7 +7,6 @@ import PolykeyAgent from '@/PolykeyAgent'; import * as nodesUtils from '@/nodes/utils'; import * as testBinUtils from '../utils'; import * as testNodesUtils from '../../nodes/utils'; -import * as testUtils from '../../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -18,9 +16,7 @@ jest.mock('@/keys/utils', () => ({ describe('ping', () => { const password = 'password'; - const logger = new Logger('ping test', LogLevel.WARN, [ - new StreamHandler(), - ]); + const logger = new Logger('ping test', LogLevel.WARN, [new StreamHandler()]); let rootDataDir: string; let dataDir: string; let nodePath: string; @@ -29,15 +25,9 @@ describe('ping', () => { let remoteOnline: PolykeyAgent; let remoteOffline: PolykeyAgent; - let keynodeId: NodeId; let remoteOnlineNodeId: NodeId; let remoteOfflineNodeId: NodeId; - let remoteOnlineHost: Host; - let remoteOnlinePort: Port; - let remoteOfflineHost: Host; - let remoteOfflinePort: Port; - // Helper functions function genCommands(options: 
Array) { return ['nodes', ...options, '-np', nodePath]; @@ -55,20 +45,17 @@ describe('ping', () => { nodePath: nodePath, logger: logger, }); - keynodeId = polykeyAgent.nodeManager.getNodeId(); // Setting up a remote keynode remoteOnline = await PolykeyAgent.createPolykeyAgent({ password: 'password', nodePath: path.join(rootDataDir, 'remoteOnline'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); - remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); - remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); await testNodesUtils.nodesConnect(polykeyAgent, remoteOnline); // Setting up an offline remote keynode @@ -76,13 +63,11 @@ describe('ping', () => { password: 'password', nodePath: path.join(rootDataDir, 'remoteOffline'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); - remoteOfflineHost = remoteOffline.revProxy.getIngressHost(); - remoteOfflinePort = remoteOffline.revProxy.getIngressPort(); await testNodesUtils.nodesConnect(polykeyAgent, remoteOffline); await remoteOffline.stop(); diff --git a/tests/bin/sessions.test.ts b/tests/bin/sessions.test.ts index c870254c0..b688eb2ef 100644 --- a/tests/bin/sessions.test.ts +++ b/tests/bin/sessions.test.ts @@ -27,11 +27,8 @@ describe('sessions', () => { let globalAgentPassword; let globalAgentClose; beforeAll(async () => { - ({ - globalAgentDir, - globalAgentPassword, - globalAgentClose - } = await testUtils.setupGlobalAgent(logger)); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); }, globalThis.maxTimeout); afterAll(async () => { await globalAgentClose(); @@ -50,10 +47,7 @@ describe('sessions', () => { }); test('serial commands refresh the session token', async () => { const session = await Session.createSession({ - sessionTokenPath: path.join( - globalAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, }); diff --git a/tests/bin/utils.ts b/tests/bin/utils.ts index a5d74f300..47211b018 100644 --- a/tests/bin/utils.ts +++ b/tests/bin/utils.ts @@ -9,13 +9,9 @@ import readline from 'readline'; import * as mockProcess from 'jest-mock-process'; import mockedEnv from 'mocked-env'; import nexpect from 'nexpect'; -import lock from 'fd-lock'; import Logger from '@matrixai/logger'; import main from '@/bin/polykey'; import * as binUtils from '@/bin/utils'; -import { Status, errors as statusErrors } from '@/status'; -import config from '@/config'; -import { never, sleep } from '@/utils'; /** * Runs pk command functionally diff --git a/tests/bootstrap/utils.test.ts b/tests/bootstrap/utils.test.ts index 2ef7253b3..7c4adbe53 100644 --- a/tests/bootstrap/utils.test.ts +++ b/tests/bootstrap/utils.test.ts @@ -30,7 +30,9 @@ describe('bootstrap/utils', () => { }); let dataDir: string; beforeEach(async () => { - dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'polykey-test-')); + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); }); afterEach(async () => { await fs.promises.rm(dataDir, { @@ -45,7 +47,7 @@ describe('bootstrap/utils', () => { password, nodePath, fs, - logger + logger, }); expect(typeof recoveryCode).toBe('string'); expect( @@ -57,7 +59,7 @@ describe('bootstrap/utils', () => { expect(nodePathContents).toContain(config.defaults.statusBase); 
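The concurrent-bootstrapping test in the next hunk asserts that exactly one of two simultaneous `bootstrapState` calls succeeds while the other rejects with a status-lock error. A reduced sketch of that `Promise.allSettled` check follows, with a stand-in `LockedError` class assumed in place of `statusErrors.ErrorStatusLocked`.

// Sketch: verify that exactly one of two racing operations wins the lock.
class LockedError extends Error {}

async function exactlyOneSucceeds(
  a: Promise<string>,
  b: Promise<string>,
): Promise<boolean> {
  const [r1, r2] = await Promise.allSettled([a, b]);
  const oneLocked =
    (r1.status === 'rejected' && r1.reason instanceof LockedError) ||
    (r2.status === 'rejected' && r2.reason instanceof LockedError);
  const oneFulfilled =
    (r1.status === 'fulfilled' && typeof r1.value === 'string') ||
    (r2.status === 'fulfilled' && typeof r2.value === 'string');
  return oneLocked && oneFulfilled;
}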
expect(nodePathContents).toContain(config.defaults.stateBase); const stateContents = await fs.promises.readdir( - path.join(nodePath, config.defaults.stateBase) + path.join(nodePath, config.defaults.stateBase), ); expect(stateContents).toContain(config.defaults.keysBase); expect(stateContents).toContain(config.defaults.dbBase); @@ -71,7 +73,7 @@ describe('bootstrap/utils', () => { password, nodePath, fs, - logger + logger, }); expect(typeof recoveryCode).toBe('string'); expect( @@ -83,7 +85,7 @@ describe('bootstrap/utils', () => { expect(nodePathContents).toContain(config.defaults.statusBase); expect(nodePathContents).toContain(config.defaults.stateBase); const stateContents = await fs.promises.readdir( - path.join(nodePath, config.defaults.stateBase) + path.join(nodePath, config.defaults.stateBase), ); expect(stateContents).toContain(config.defaults.keysBase); expect(stateContents).toContain(config.defaults.dbBase); @@ -96,7 +98,7 @@ describe('bootstrap/utils', () => { await fs.promises.writeFile( path.join(nodePath1, 'random'), 'normal file', - 'utf-8' + 'utf-8', ); const password = 'password'; await expect( @@ -104,8 +106,8 @@ describe('bootstrap/utils', () => { password, nodePath: nodePath1, fs, - logger - }) + logger, + }), ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); // Hidden file const nodePath2 = path.join(dataDir, 'polykey2'); @@ -113,15 +115,15 @@ describe('bootstrap/utils', () => { await fs.promises.writeFile( path.join(nodePath2, '.random'), 'hidden file', - 'utf-8' + 'utf-8', ); await expect( bootstrapUtils.bootstrapState({ password, nodePath: nodePath2, fs, - logger - }) + logger, + }), ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); // Directory const nodePath3 = path.join(dataDir, 'polykey3'); @@ -132,8 +134,8 @@ describe('bootstrap/utils', () => { password, nodePath: nodePath3, fs, - logger - }) + logger, + }), ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); }); test('concurrent bootstrapping results in 1 success', async () => { @@ -144,26 +146,24 @@ describe('bootstrap/utils', () => { password, nodePath, fs, - logger + logger, }), bootstrapUtils.bootstrapState({ password, nodePath, fs, - logger + logger, }), ]); expect( (result1.status === 'rejected' && - result1.reason instanceof statusErrors.ErrorStatusLocked) - || - (result2.status === 'rejected' && - result2.reason instanceof statusErrors.ErrorStatusLocked) + result1.reason instanceof statusErrors.ErrorStatusLocked) || + (result2.status === 'rejected' && + result2.reason instanceof statusErrors.ErrorStatusLocked), ).toBe(true); expect( - (result1.status === 'fulfilled' && typeof result1.value === 'string') - || - (result2.status === 'fulfilled' && typeof result2.value === 'string') + (result1.status === 'fulfilled' && typeof result1.value === 'string') || + (result2.status === 'fulfilled' && typeof result2.value === 'string'), ).toBe(true); }); }); diff --git a/tests/client/rpcNodes.test.ts b/tests/client/rpcNodes.test.ts index 28dea0a44..1e19e425d 100644 --- a/tests/client/rpcNodes.test.ts +++ b/tests/client/rpcNodes.test.ts @@ -19,9 +19,9 @@ import * as nodesErrors from '@/nodes/errors'; import { makeNodeId } from '@/nodes/utils'; import config from '@/config'; import { Status } from '@/status'; +import { sleep } from '@/utils'; import * as testUtils from './utils'; import * as testNodesUtils from '../nodes/utils'; -import { sleep } from '@/utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -106,7 +106,7 @@ 
describe('Client service', () => { password: 'password', nodePath: path.join(rootDataDir, 'polykeyServer'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -186,7 +186,10 @@ describe('Client service', () => { const serverNodeId = polykeyServer.nodeManager.getNodeId(); await testNodesUtils.nodesConnect(pkAgent, polykeyServer); await polykeyServer.stop(); - const statusPath = path.join(polykeyServer.nodePath, config.defaults.statusBase); + const statusPath = path.join( + polykeyServer.nodePath, + config.defaults.statusBase, + ); const status = new Status({ statusPath, fs, diff --git a/tests/client/rpcNotifications.test.ts b/tests/client/rpcNotifications.test.ts index 039f84fe3..9d85038c9 100644 --- a/tests/client/rpcNotifications.test.ts +++ b/tests/client/rpcNotifications.test.ts @@ -116,7 +116,7 @@ describe('Notifications client service', () => { password: 'password', nodePath: receiverDataDir, keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -124,7 +124,7 @@ describe('Notifications client service', () => { password: 'password', nodePath: senderDataDir, keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -306,8 +306,7 @@ describe('Notifications client service', () => { await notificationsClear(emptyMessage, callCredentials); // Call read notifications to check there are none - const notifs = - await pkAgent.notificationsManager.readNotifications(); + const notifs = await pkAgent.notificationsManager.readNotifications(); expect(notifs).toEqual([]); }); }); diff --git a/tests/client/service/agentStop.test.ts b/tests/client/service/agentStop.test.ts index ef0931e21..a050b6dcd 100644 --- a/tests/client/service/agentStop.test.ts +++ b/tests/client/service/agentStop.test.ts @@ -13,7 +13,7 @@ import { GRPCClientClient, ClientServiceService, utils as clientUtils, - errors as clientErrors + errors as clientErrors, } from '@/client'; import agentStop from '@/client/service/agentStop'; import config from '@/config'; @@ -46,7 +46,9 @@ describe('agentStop', () => { let grpcServer: GRPCServer; let grpcClient: GRPCClientClient; beforeEach(async () => { - dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'polykey-test-')); + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); nodePath = path.join(dataDir, 'polykey'); // Note that by doing this, the agent the call is stopping is a separate agent pkAgent = await PolykeyAgent.createPolykeyAgent({ @@ -56,13 +58,13 @@ describe('agentStop', () => { }); const authenticate = clientUtils.authenticator( pkAgent.sessionManager, - pkAgent.keyManager + pkAgent.keyManager, ); const clientService = { agentStop: agentStop({ authenticate, pkAgent: pkAgent as unknown as PolykeyAgent, - }) + }), }; grpcServer = new GRPCServer({ logger }); await grpcServer.start({ @@ -74,7 +76,7 @@ describe('agentStop', () => { nodeId: pkAgent.keyManager.getNodeId(), host: '127.0.0.1' as Host, port: grpcServer.port, - logger + logger, }); }); afterEach(async () => { @@ -96,7 +98,7 @@ describe('agentStop', () => { const request = new utilsPB.EmptyMessage(); const response = await grpcClient.agentStop( request, - clientUtils.encodeAuthFromPassword(password) + clientUtils.encodeAuthFromPassword(password), ); expect(response).toBeInstanceOf(utilsPB.EmptyMessage); // While the `agentStop` is asynchronous @@ -104,7 +106,7 @@ describe('agentStop', () => { expect(pkAgent[running]).toBe(false); // It may already be stopping expect(await 
status.readStatus()).toMatchObject({ - status: expect.stringMatching(/LIVE|STOPPING|DEAD/) + status: expect.stringMatching(/LIVE|STOPPING|DEAD/), }); await status.waitFor('DEAD'); expect(pkAgent[running]).toBe(false); @@ -120,7 +122,7 @@ describe('agentStop', () => { const request = new utilsPB.EmptyMessage(); const response = await grpcClient.agentStop( request, - clientUtils.encodeAuthFromSession(token) + clientUtils.encodeAuthFromSession(token), ); expect(response).toBeInstanceOf(utilsPB.EmptyMessage); // While the `agentStop` is asynchronous @@ -128,7 +130,7 @@ describe('agentStop', () => { expect(pkAgent[running]).toBe(false); // It may already be stopping expect(await status.readStatus()).toMatchObject({ - status: expect.stringMatching(/LIVE|STOPPING|DEAD/) + status: expect.stringMatching(/LIVE|STOPPING|DEAD/), }); await status.waitFor('DEAD'); expect(pkAgent[running]).toBe(false); @@ -142,27 +144,25 @@ describe('agentStop', () => { }); const request = new utilsPB.EmptyMessage(); await expect(async () => { - await grpcClient.agentStop( - request, - ); + await grpcClient.agentStop(request); }).rejects.toThrow(clientErrors.ErrorClientAuthMissing); expect(pkAgent[running]).toBe(true); await expect(async () => { await grpcClient.agentStop( request, - clientUtils.encodeAuthFromPassword('wrong password') + clientUtils.encodeAuthFromPassword('wrong password'), ); }).rejects.toThrow(clientErrors.ErrorClientAuthDenied); expect(pkAgent[running]).toBe(true); await expect(async () => { await grpcClient.agentStop( request, - clientUtils.encodeAuthFromSession('wrong token' as SessionToken) + clientUtils.encodeAuthFromSession('wrong token' as SessionToken), ); }).rejects.toThrow(clientErrors.ErrorClientAuthDenied); expect(pkAgent[running]).toBe(true); expect(await status.readStatus()).toMatchObject({ - status: 'LIVE' + status: 'LIVE', }); }); -}) +}); diff --git a/tests/discovery/Discovery.test.ts b/tests/discovery/Discovery.test.ts index 7064e56e8..e60d77d3c 100644 --- a/tests/discovery/Discovery.test.ts +++ b/tests/discovery/Discovery.test.ts @@ -97,7 +97,7 @@ describe('Discovery', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeA'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -105,7 +105,7 @@ describe('Discovery', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeB'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -113,7 +113,7 @@ describe('Discovery', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeC'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -154,7 +154,7 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityId, }; - const [,claimEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); + const [, claimEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claim = await claimsUtils.decodeClaim(claimEncoded); await testProvider.publishClaim(identityId, claim); }, global.polykeyStartupTimeout * 3); @@ -240,7 +240,7 @@ describe('Discovery', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeA'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -248,7 +248,7 @@ describe('Discovery', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeB'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -256,7 +256,7 @@ describe('Discovery', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeC'), 
keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -264,7 +264,7 @@ describe('Discovery', () => { password: 'password', nodePath: path.join(rootDataDir, 'nodeD'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -324,7 +324,7 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityIdB, }; - const [,claimBEncoded] = await nodeD.sigchain.addClaim(claimIdentToD); + const [, claimBEncoded] = await nodeD.sigchain.addClaim(claimIdentToD); const claimB = claimsUtils.decodeClaim(claimBEncoded); await testProvider.publishClaim(identityIdB, claimB); @@ -359,7 +359,7 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityIdA, }; - const [,claimAEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); + const [, claimAEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claimA = claimsUtils.decodeClaim(claimAEncoded); await testProvider.publishClaim(identityIdA, claimA); diff --git a/tests/gestalts/GestaltGraph.test.ts b/tests/gestalts/GestaltGraph.test.ts index f14e6f6aa..b818c9842 100644 --- a/tests/gestalts/GestaltGraph.test.ts +++ b/tests/gestalts/GestaltGraph.test.ts @@ -54,7 +54,7 @@ describe('GestaltGraph', () => { encrypt: keysUtils.encryptWithKey, decrypt: keysUtils.decryptWithKey, }, - } + }, }); acl = await ACL.createACL({ db, logger }); diff --git a/tests/global.d.ts b/tests/global.d.ts index 174edcc78..8fe267fd3 100644 --- a/tests/global.d.ts +++ b/tests/global.d.ts @@ -1,3 +1,4 @@ +/* eslint-disable no-var */ /** * Follows the globals in jest.config.ts * @module diff --git a/tests/identities/IdentitiesManager.test.ts b/tests/identities/IdentitiesManager.test.ts index 61c6ec73a..497f6592c 100644 --- a/tests/identities/IdentitiesManager.test.ts +++ b/tests/identities/IdentitiesManager.test.ts @@ -38,7 +38,7 @@ describe('IdentitiesManager', () => { encrypt: keysUtils.encryptWithKey, decrypt: keysUtils.decryptWithKey, }, - } + }, }); }); afterEach(async () => { diff --git a/tests/network/ForwardProxy.test.ts b/tests/network/ForwardProxy.test.ts index 7f2d9d641..a8dd2fb5c 100644 --- a/tests/network/ForwardProxy.test.ts +++ b/tests/network/ForwardProxy.test.ts @@ -68,7 +68,7 @@ describe(ForwardProxy.name, () => { globalKeyPair.publicKey, globalKeyPair.privateKey, globalKeyPair.privateKey, - 86400 + 86400, ); certPem = keysUtils.certToPem(cert); }); @@ -181,7 +181,7 @@ describe(ForwardProxy.name, () => { fwdProxy.getProxyPort(), authToken, `127.0.0.1:0?nodeId=${encodeURIComponent('abc')}`, - ) + ), ).rejects.toThrow('502'); await fwdProxy.stop(); }); @@ -322,46 +322,44 @@ describe(ForwardProxy.name, () => { const tlsSocketEnd = jest.fn(); const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - // All TLS servers must have a certificate and associated key - // This is TLS socket is therefore dead on arrival by not providing - // any certificate nor key - const tlsSocket = new tls.TLSSocket(utpConn, { - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - // TLS socket will be closed as soon as error is emitted - // Therefore this will never be called - // However the socket is ended anyway automatically - tlsSocket.on('end', () => { - tlsSocketEnd(); - if (utpConn.destroyed) { - tlsSocket.destroy(); - } else { - tlsSocket.end(); - tlsSocket.destroy(); - } - }); - 
tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + // All TLS servers must have a certificate and associated key + // This is TLS socket is therefore dead on arrival by not providing + // any certificate nor key + const tlsSocket = new tls.TLSSocket(utpConn, { + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + // TLS socket will be closed as soon as error is emitted + // Therefore this will never be called + // However the socket is ended anyway automatically + tlsSocket.on('end', () => { + tlsSocketEnd(); + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -392,8 +390,11 @@ describe(ForwardProxy.name, () => { expect(utpConnError.mock.calls.length).toBe(0); // The TLS socket throw an error because there's no suitable signature algorithm expect(tlsSocketError.mock.calls.length).toBe(1); - // expect(tlsSocketError.mock.calls[0][0]).toBeInstanceOf(Error); - expect(tlsSocketError.mock.calls[0][0]).toHaveProperty('code', 'ERR_SSL_NO_SUITABLE_SIGNATURE_ALGORITHM'); + // Expect(tlsSocketError.mock.calls[0][0]).toBeInstanceOf(Error); + expect(tlsSocketError.mock.calls[0][0]).toHaveProperty( + 'code', + 'ERR_SSL_NO_SUITABLE_SIGNATURE_ALGORITHM', + ); // The TLS socket end event never was emitted expect(tlsSocketEnd.mock.calls.length).toBe(0); // The TLS socket close event is emitted with error @@ -425,46 +426,44 @@ describe(ForwardProxy.name, () => { const tlsSocketEnd = jest.fn(); const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - // All TLS servers must have a certificate and associated key - // This is TLS socket is therefore dead on arrival by not providing - // any certificate nor key - const tlsSocket = new tls.TLSSocket(utpConn, { - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - // TLS socket will be closed as soon as error is emitted - // Therefore this will never be called - // However the socket is ended anyway automatically - tlsSocket.on('end', () => { - tlsSocketEnd(); - if (utpConn.destroyed) { - tlsSocket.destroy(); - } else { - tlsSocket.end(); - tlsSocket.destroy(); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + // All TLS servers must have a certificate and associated key + // This is TLS socket is therefore dead on arrival by not providing + // any certificate nor key + const tlsSocket 
= new tls.TLSSocket(utpConn, { + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + // TLS socket will be closed as soon as error is emitted + // Therefore this will never be called + // However the socket is ended anyway automatically + tlsSocket.on('end', () => { + tlsSocketEnd(); + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -489,15 +488,20 @@ describe(ForwardProxy.name, () => { fwdProxy.getProxyHost(), fwdProxy.getProxyPort(), authToken, - `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent('somerandomnodeid')}`, - ) + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + 'somerandomnodeid', + )}`, + ), ).rejects.toThrow('502'); await expect(remoteClosedP).resolves.toBeUndefined(); expect(utpConnError.mock.calls.length).toBe(0); // The TLS socket throw an error because there's no suitable signature algorithm expect(tlsSocketError.mock.calls.length).toBe(1); - // expect(tlsSocketError.mock.calls[0][0]).toBeInstanceOf(Error); - expect(tlsSocketError.mock.calls[0][0]).toHaveProperty('code', 'ERR_SSL_NO_SUITABLE_SIGNATURE_ALGORITHM'); + // Expect(tlsSocketError.mock.calls[0][0]).toBeInstanceOf(Error); + expect(tlsSocketError.mock.calls[0][0]).toHaveProperty( + 'code', + 'ERR_SSL_NO_SUITABLE_SIGNATURE_ALGORITHM', + ); // The TLS socket end event never was emitted expect(tlsSocketEnd.mock.calls.length).toBe(0); // The TLS socket close event is emitted with error @@ -539,48 +543,46 @@ describe(ForwardProxy.name, () => { const tlsSocketClose = jest.fn(); // This UTP server will hold the connection let secured = false; - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - secured = true; - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () 
=> { + secured = true; + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -655,48 +657,46 @@ describe(ForwardProxy.name, () => { const tlsSocketClose = jest.fn(); // This UTP server will hold the connection let secured = false; - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - secured = true; - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + secured = true; + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -720,8 +720,10 @@ describe(ForwardProxy.name, () => { fwdProxy.getProxyHost(), fwdProxy.getProxyPort(), authToken, - 
`${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent('somerandomnodeid')}`, - ) + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + 'somerandomnodeid', + )}`, + ), ).rejects.toThrow('526'); await expect(remoteReadyP).resolves.toBeUndefined(); expect(secured).toBe(true); @@ -754,7 +756,9 @@ describe(ForwardProxy.name, () => { const serverNodeId = networkUtils.certNodeId(serverCert); const fwdProxy = new ForwardProxy({ authToken, - logger: logger.getChild('ForwardProxy open connection success - forward initiates end'), + logger: logger.getChild( + 'ForwardProxy open connection success - forward initiates end', + ), }); await fwdProxy.start({ tlsConfig: { @@ -774,49 +778,47 @@ describe(ForwardProxy.name, () => { const tlsSocketEnd = jest.fn(); const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - logger.debug('Reverse: responded tlsSocket ending'); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -879,7 +881,9 @@ describe(ForwardProxy.name, () => { const fwdProxy = new ForwardProxy({ authToken, connEndTime: 5000, - logger: logger.getChild('ForwardProxy open connection success - reverse initiates end'), + logger: logger.getChild( + 'ForwardProxy open connection success - reverse initiates end', + ), }); await 
fwdProxy.start({ tlsConfig: { @@ -901,50 +905,48 @@ describe(ForwardProxy.name, () => { // Will use this to simulate reverse initiating end let tlsSocket_: tls.TLSSocket; // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket_ = tlsSocket; - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - utpConn.end(); - tlsSocket.destroy(); - logger.debug('Reverse: responded tlsSocket ending'); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket_ = tlsSocket; + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + utpConn.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -979,7 +981,7 @@ describe(ForwardProxy.name, () => { expect(fwdProxy.getConnectionCount()).toBe(1); // Start the graceful ending of the tls socket logger.debug('Reverse: begins tlsSocket ending'); - const { p: endP, resolveP: resolveEndP, } = promise(); + const { p: endP, resolveP: resolveEndP } = promise(); tlsSocket_!.removeAllListeners('end'); tlsSocket_!.once('end', resolveEndP); tlsSocket_!.end(); @@ -989,16 +991,18 @@ describe(ForwardProxy.name, () => { logger.debug('Reverse: finishes tlsSocket ending'); await expect(remoteClosedP).resolves.toBeUndefined(); // Connection count should reach 0 eventually - await expect(poll( - async () => { - return fwdProxy.getConnectionCount(); - }, - (_, result) => { - if (result === 0) return true; - return false; - }, - 100 - )).resolves.toBe(0); + await expect( + poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result 
=== 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); expect(utpConnError.mock.calls.length).toBe(0); expect(tlsSocketError.mock.calls.length).toBe(0); // This time the reverse side initiates the end @@ -1024,7 +1028,9 @@ describe(ForwardProxy.name, () => { const serverNodeId = networkUtils.certNodeId(serverCert); const fwdProxy = new ForwardProxy({ authToken, - logger: logger.getChild('ForwardProxy HTTP CONNECT success - forward initiates end'), + logger: logger.getChild( + 'ForwardProxy HTTP CONNECT success - forward initiates end', + ), }); await fwdProxy.start({ tlsConfig: { @@ -1044,49 +1050,47 @@ describe(ForwardProxy.name, () => { const tlsSocketEnd = jest.fn(); const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - logger.debug('Reverse: responded tlsSocket ending'); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -1109,7 +1113,9 @@ describe(ForwardProxy.name, () => { fwdProxy.getProxyHost(), fwdProxy.getProxyPort(), authToken, - `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent(serverNodeId)}`, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, ); await expect(remoteReadyP).resolves.toBeUndefined(); await expect(remoteSecureP).resolves.toBeUndefined(); @@ 
-1168,7 +1174,9 @@ describe(ForwardProxy.name, () => { const serverNodeId = networkUtils.certNodeId(serverCert); const fwdProxy = new ForwardProxy({ authToken, - logger: logger.getChild('ForwardProxy HTTP CONNECT success - reverse initiates end'), + logger: logger.getChild( + 'ForwardProxy HTTP CONNECT success - reverse initiates end', + ), }); await fwdProxy.start({ tlsConfig: { @@ -1190,50 +1198,48 @@ describe(ForwardProxy.name, () => { // Will use this to simulate reverse initiating end let tlsSocket_: tls.TLSSocket; // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket_ = tlsSocket; - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - logger.debug('Reverse: responded tlsSocket ending'); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket_ = tlsSocket; + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -1256,7 +1262,9 @@ describe(ForwardProxy.name, () => { fwdProxy.getProxyHost(), fwdProxy.getProxyPort(), authToken, - `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent(serverNodeId)}`, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, ); await expect(remoteReadyP).resolves.toBeUndefined(); await expect(remoteSecureP).resolves.toBeUndefined(); @@ -1286,7 +1294,7 @@ describe(ForwardProxy.name, () => { expect(fwdProxy.getConnectionCount()).toBe(1); // Start the graceful ending of the tls socket 
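The UTP.createServer handlers in these hunks all follow the same pattern: wrap the raw uTP connection in a server-side tls.TLSSocket and react to its 'secure', 'error', 'end', and 'close' events. A minimal standalone sketch of that upgrade step, shown here over a plain net.Socket instead of a uTP connection and with placeholder PEM strings, looks like:

import net from 'net';
import tls from 'tls';

// Hypothetical PEMs; the tests derive these from generated keypairs
const serverKeyPem = '-----BEGIN PRIVATE KEY-----...';
const serverCertPem = '-----BEGIN CERTIFICATE-----...';

const server = net.createServer((conn) => {
  // Upgrade the accepted duplex socket into a server-side TLS socket,
  // mirroring how each uTP connection is wrapped in the tests
  const tlsSocket = new tls.TLSSocket(conn, {
    key: Buffer.from(serverKeyPem, 'ascii'),
    cert: Buffer.from(serverCertPem, 'ascii'),
    isServer: true,
    requestCert: true,
    rejectUnauthorized: false,
  });
  tlsSocket.on('secure', () => {
    // Handshake complete; data now flows encrypted over the same socket
  });
  tlsSocket.on('error', () => {
    tlsSocket.destroy();
  });
});

Because tls.TLSSocket accepts any duplex stream, the same wrapping works for the uTP connections used by the proxies here.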
logger.debug('Reverse: begins tlsSocket ending'); - const { p: endP, resolveP: resolveEndP, } = promise(); + const { p: endP, resolveP: resolveEndP } = promise(); tlsSocket_!.removeAllListeners('end'); tlsSocket_!.once('end', resolveEndP); tlsSocket_!.end(); @@ -1298,16 +1306,18 @@ describe(ForwardProxy.name, () => { expect(clientSocketEnd.mock.calls.length).toBe(1); await expect(remoteClosedP).resolves.toBeUndefined(); // Connection count should reach 0 eventually - await expect(poll( - async () => { - return fwdProxy.getConnectionCount(); - }, - (_, result) => { - if (result === 0) return true; - return false; - }, - 100 - )).resolves.toBe(0); + await expect( + poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); expect(utpConnError.mock.calls.length).toBe(0); expect(tlsSocketError.mock.calls.length).toBe(0); // This time the reverse side initiates the end @@ -1333,7 +1343,9 @@ describe(ForwardProxy.name, () => { const serverNodeId = networkUtils.certNodeId(serverCert); const fwdProxy = new ForwardProxy({ authToken, - logger: logger.getChild('ForwardProxy HTTP CONNECT success - client initiates end'), + logger: logger.getChild( + 'ForwardProxy HTTP CONNECT success - client initiates end', + ), }); await fwdProxy.start({ tlsConfig: { @@ -1353,49 +1365,47 @@ describe(ForwardProxy.name, () => { const tlsSocketEnd = jest.fn(); const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - logger.debug('Reverse: responded tlsSocket ending'); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const 
punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -1418,7 +1428,9 @@ describe(ForwardProxy.name, () => { fwdProxy.getProxyHost(), fwdProxy.getProxyPort(), authToken, - `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent(serverNodeId)}`, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, ); await expect(remoteReadyP).resolves.toBeUndefined(); await expect(remoteSecureP).resolves.toBeUndefined(); @@ -1436,7 +1448,7 @@ describe(ForwardProxy.name, () => { utpSocketPort as Port, ); expect(fwdProxy.getConnectionCount()).toBe(1); - const { p: endP, resolveP: resolveEndP, } = promise(); + const { p: endP, resolveP: resolveEndP } = promise(); // By default net sockets have `allowHalfOpen: false` // Here we override the behaviour by removing the end listener // And replacing it with our own, and remember to also force destroy @@ -1452,16 +1464,18 @@ describe(ForwardProxy.name, () => { await expect(localClosedP).resolves.toBeUndefined(); await expect(remoteClosedP).resolves.toBeUndefined(); // Connection count should reach 0 eventually - await expect(poll( - async () => { - return fwdProxy.getConnectionCount(); - }, - (_, result) => { - if (result === 0) return true; - return false; - }, - 100 - )).resolves.toBe(0); + await expect( + poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); expect(utpConnError.mock.calls.length).toBe(0); expect(tlsSocketError.mock.calls.length).toBe(0); expect(tlsSocketEnd.mock.calls.length).toBe(1); @@ -1505,48 +1519,46 @@ describe(ForwardProxy.name, () => { const tlsSocketEnd = jest.fn(); const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if 
(utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -1637,48 +1649,46 @@ describe(ForwardProxy.name, () => { const tlsSocketEnd = jest.fn(); const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -1754,48 +1764,46 @@ describe(ForwardProxy.name, () => { const tlsSocketEnd = jest.fn(); const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - utpConn.on('error', (e) => { - utpConnError(e); - }); - const tlsSocket = new 
tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('error', (e) => { - tlsSocketError(e); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - tlsSocketEnd(); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - } - }); - tlsSocket.on('close', (hadError) => { - tlsSocketClose(hadError); - resolveRemoteClosedP(); - }); + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -1819,7 +1827,9 @@ describe(ForwardProxy.name, () => { fwdProxy.getProxyHost(), fwdProxy.getProxyPort(), authToken, - `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent(serverNodeId)}`, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, ); await expect(remoteReadyP).resolves.toBeUndefined(); await expect(remoteSecureP).resolves.toBeUndefined(); @@ -1836,16 +1846,18 @@ describe(ForwardProxy.name, () => { await expect(localClosedP).resolves.toBeUndefined(); await expect(remoteClosedP).resolves.toBeUndefined(); // Connection count should reach 0 eventually - await expect(poll( - async () => { - return fwdProxy.getConnectionCount(); - }, - (_, result) => { - if (result === 0) return true; - return false; - }, - 100 - )).resolves.toBe(0); + await expect( + poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); expect(utpConnError.mock.calls.length).toBe(0); expect(tlsSocketError.mock.calls.length).toBe(0); expect(tlsSocketEnd.mock.calls.length).toBe(1); @@ -1884,40 +1896,38 @@ describe(ForwardProxy.name, () => { promise(); const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = promise(); - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 
'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - tlsSocket.on('end', () => { - logger.debug('Reverse: receives tlsSocket ending'); - if (utpConn.destroyed) { - logger.debug('Reverse: destroys tlsSocket'); - tlsSocket.destroy(); - } else { - logger.debug('Reverse: responds tlsSocket ending'); - tlsSocket.end(); - tlsSocket.destroy(); - } - }); + const utpSocket = UTP.createServer(async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP(); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -1996,34 +2006,32 @@ describe(ForwardProxy.name, () => { promise(); const { p: remoteClosedP2, resolveP: resolveRemoteClosedP2 } = promise(); - const utpSocket1 = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem1.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem1, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP1(); - }); - tlsSocket.on('end', () => { - if (utpConn.destroyed) { - tlsSocket.destroy(); - } else { - tlsSocket.end(); - tlsSocket.destroy(); - } - }); + const utpSocket1 = UTP.createServer(async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem1.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem1, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP1(); + }); + tlsSocket.on('end', () => { + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + await send1(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send1(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send1(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP1; - clearInterval(punchInterval); - } - ); + }, 1000); + await remoteReadyP1; + clearInterval(punchInterval); + }); const handleMessage1 = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -2041,34 +2049,32 @@ describe(ForwardProxy.name, () => { await utpSocketListen1(0, '127.0.0.1'); const utpSocketHost1 = utpSocket1.address().address; 
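The connection-count assertions above repeatedly sample fwdProxy.getConnectionCount() through the poll helper from @/utils. The exact signature of that helper is not shown in this patch; a rough sketch matching the call shape used here, poll(fn, condition, interval) with the condition receiving (error, result), could be (how errors are routed through the condition is an assumption):

async function poll<T>(
  fn: () => Promise<T>,
  condition: (error: unknown, result?: T) => boolean,
  interval: number = 100,
): Promise<T> {
  // Repeatedly invoke `fn` until `condition` accepts the outcome
  for (;;) {
    try {
      const result = await fn();
      if (condition(null, result)) return result;
    } catch (e) {
      // Assumed behaviour: rethrow only if the condition accepts the error
      if (condition(e)) throw e;
    }
    await new Promise((r) => setTimeout(r, interval));
  }
}

// Usage mirroring the tests:
// await poll(async () => fwdProxy.getConnectionCount(), (_, result) => result === 0, 100);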
const utpSocketPort1 = utpSocket1.address().port; - const utpSocket2 = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem2.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem2, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP2(); - }); - tlsSocket.on('end', () => { - if (utpConn.destroyed) { - tlsSocket.destroy(); - } else { - tlsSocket.end(); - tlsSocket.destroy(); - } - }); + const utpSocket2 = UTP.createServer(async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem2.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem2, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP2(); + }); + tlsSocket.on('end', () => { + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + await send2(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send2(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send2(networkUtils.pingBuffer); - }, 2000); - await remoteReadyP2; - clearInterval(punchInterval); - } - ); + }, 2000); + await remoteReadyP2; + clearInterval(punchInterval); + }); const handleMessage2 = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { diff --git a/tests/network/ReverseProxy.test.ts b/tests/network/ReverseProxy.test.ts index d3172ac29..8f0f3550c 100644 --- a/tests/network/ReverseProxy.test.ts +++ b/tests/network/ReverseProxy.test.ts @@ -11,7 +11,7 @@ import { errors as networkErrors, } from '@/network'; import * as keysUtils from '@/keys/utils'; -import { promisify, promise, timerStart, timerStop, poll, sleep } from '@/utils'; +import { promisify, promise, timerStart, timerStop, poll } from '@/utils'; import * as testUtils from '../utils'; /** @@ -20,30 +20,34 @@ import * as testUtils from '../utils'; */ function tcpServer(end: boolean = false) { const { p: serverConnP, resolveP: resolveServerConnP } = promise(); - const { p: serverConnEndP, resolveP: resolveServerConnEndP } = promise(); + const { p: serverConnEndP, resolveP: resolveServerConnEndP } = + promise(); const { p: serverConnClosedP, resolveP: resolveServerConnClosedP } = promise(); - const server = net.createServer({ - allowHalfOpen: false - }, (conn) => { - resolveServerConnP(); - conn.on('end', () => { - resolveServerConnEndP(); - conn.end(); - conn.destroy(); - }); - conn.once('close', () => { - resolveServerConnClosedP(); - }); - if (end) { - conn.removeAllListeners('end'); + const server = net.createServer( + { + allowHalfOpen: false, + }, + (conn) => { + resolveServerConnP(); conn.on('end', () => { resolveServerConnEndP(); + conn.end(); conn.destroy(); }); - conn.end(); - } - }); + conn.once('close', () => { + resolveServerConnClosedP(); + }); + if (end) { + conn.removeAllListeners('end'); + conn.on('end', () => { + resolveServerConnEndP(); + conn.destroy(); + }); + conn.end(); + } + }, + ); const serverClose = promisify(server.close).bind(server); const serverListen = promisify(server.listen).bind(server); const serverHost = () => { @@ -67,7 +71,7 @@ describe(ReverseProxy.name, () => { const logger = new Logger(`${ReverseProxy.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); - let keyPairPem: KeyPairPem + let keyPairPem: KeyPairPem; 
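The tcpServer helper and the proxy tests above lean on the deferred-promise helper promise() from @/utils, destructured as { p, resolveP }. A minimal sketch of such a helper follows; only p and resolveP appear in these diffs, so the rejectP field is an assumption:

function promise<T = void>(): {
  p: Promise<T>;
  resolveP: (value: T | PromiseLike<T>) => void;
  rejectP: (reason?: unknown) => void; // Assumed; not used in these hunks
} {
  let resolveP!: (value: T | PromiseLike<T>) => void;
  let rejectP!: (reason?: unknown) => void;
  const p = new Promise<T>((resolve, reject) => {
    resolveP = resolve;
    rejectP = reject;
  });
  return { p, resolveP, rejectP };
}

This lets a test create a promise up front, hand resolveP to an event handler (for example a 'close' listener), and await p later to synchronise on that event.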
let certPem: string; beforeAll(async () => { const globalKeyPair = await testUtils.setupGlobalKeypair(); @@ -76,7 +80,7 @@ describe(ReverseProxy.name, () => { globalKeyPair.publicKey, globalKeyPair.privateKey, globalKeyPair.privateKey, - 86400 + 86400, ); certPem = keysUtils.certToPem(cert); }); @@ -457,16 +461,18 @@ describe(ReverseProxy.name, () => { await expect(serverConnEndP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); // Connection count should reach 0 eventually - await expect(poll( - async () => { - return revProxy.getConnectionCount(); - }, - (_, result) => { - if (result === 0) return true; - return false; - }, - 100 - )).resolves.toBe(0); + await expect( + poll( + async () => { + return revProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); @@ -561,16 +567,18 @@ describe(ReverseProxy.name, () => { await expect(serverConnEndP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); // Connection count should reach 0 eventually - await expect(poll( - async () => { - return revProxy.getConnectionCount(); - }, - (_, result) => { - if (result === 0) return true; - return false; - }, - 100 - )).resolves.toBe(0); + await expect( + poll( + async () => { + return revProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); diff --git a/tests/network/index.test.ts b/tests/network/index.test.ts index b4e57005f..913e045a0 100644 --- a/tests/network/index.test.ts +++ b/tests/network/index.test.ts @@ -4,8 +4,8 @@ import grpc from '@grpc/grpc-js'; import { utils as keysUtils } from '@/keys'; import { ForwardProxy, ReverseProxy, utils as networkUtils } from '@/network'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; -import { openTestServer, closeTestServer, GRPCClientTest } from '../grpc/utils'; import { sleep } from '@/utils'; +import { openTestServer, closeTestServer, GRPCClientTest } from '../grpc/utils'; describe('network index', () => { const logger = new Logger('Network Test', LogLevel.WARN, [ diff --git a/tests/nodes/NodeConnection.test.ts b/tests/nodes/NodeConnection.test.ts index 02cca39c8..35aac2fb9 100644 --- a/tests/nodes/NodeConnection.test.ts +++ b/tests/nodes/NodeConnection.test.ts @@ -139,7 +139,7 @@ describe('NodeConnection', () => { encrypt: keysUtils.encryptWithKey, decrypt: keysUtils.decryptWithKey, }, - } + }, }); serverACL = await ACL.createACL({ db: serverDb, diff --git a/tests/nodes/NodeGraph.test.ts b/tests/nodes/NodeGraph.test.ts index 9d97eecf9..c9f9ac4ad 100644 --- a/tests/nodes/NodeGraph.test.ts +++ b/tests/nodes/NodeGraph.test.ts @@ -135,7 +135,7 @@ describe('NodeGraph', () => { encrypt: keysUtils.encryptWithKey, decrypt: keysUtils.decryptWithKey, }, - } + }, }); sigchain = await Sigchain.createSigchain({ keyManager: keyManager, diff --git a/tests/nodes/NodeManager.test.ts b/tests/nodes/NodeManager.test.ts index fb1a3673e..c91123f60 100644 --- a/tests/nodes/NodeManager.test.ts +++ b/tests/nodes/NodeManager.test.ts @@ -6,8 +6,8 @@ import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { PolykeyAgent } from '@'; import { DB } from '@matrixai/db'; +import { PolykeyAgent } 
from '@'; import { KeyManager, utils as keysUtils } from '@/keys'; import { NodeManager, errors as nodesErrors } from '@/nodes'; import { ForwardProxy, ReverseProxy } from '@/network'; @@ -158,7 +158,7 @@ describe('NodeManager', () => { password: 'password', nodePath: targetDataDir, keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -272,7 +272,7 @@ describe('NodeManager', () => { password: 'password', nodePath: path.join(dataDir, 'server'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger: logger, }); @@ -341,12 +341,11 @@ describe('NodeManager', () => { password: 'password', nodePath: path.join(dataDir, 'server'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger: logger, }); - await nodeManager.setNode(server.nodeManager.getNodeId(), { host: server.revProxy.getIngressHost(), port: server.revProxy.getIngressPort(), @@ -368,7 +367,7 @@ describe('NodeManager', () => { password: 'password', nodePath: path.join(dataDir, 'server'), keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -433,7 +432,7 @@ describe('NodeManager', () => { password: 'password', nodePath: xDataDir, keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); @@ -446,13 +445,13 @@ describe('NodeManager', () => { xPublicKey = x.keyManager.getRootKeyPairPem().publicKey; yDataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-') + path.join(os.tmpdir(), 'polykey-test-'), ); y = await PolykeyAgent.createPolykeyAgent({ password: 'password', nodePath: xDataDir, keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, logger, }); diff --git a/tests/nodes/utils.ts b/tests/nodes/utils.ts index ff79be3d0..960e844d3 100644 --- a/tests/nodes/utils.ts +++ b/tests/nodes/utils.ts @@ -1,6 +1,6 @@ import type { NodeId, NodeAddress } from '@/nodes/types'; -import { PolykeyAgent } from '@'; +import type { PolykeyAgent } from '@'; import * as nodesUtils from '@/nodes/utils'; import { makeNodeId } from '@/nodes/utils'; import { fromMultibase } from '@/GenericIdTypes'; @@ -83,18 +83,12 @@ function bigIntToBuffer(number: BigInt) { return u8; } -async function nodesConnect( - localNode: PolykeyAgent, - remoteNode: PolykeyAgent, -) { +async function nodesConnect(localNode: PolykeyAgent, remoteNode: PolykeyAgent) { // Add remote node's details to local node - await localNode.nodeManager.setNode( - remoteNode.nodeManager.getNodeId(), - { - host: remoteNode.revProxy.getIngressHost(), - port: remoteNode.revProxy.getIngressPort(), - } as NodeAddress - ); + await localNode.nodeManager.setNode(remoteNode.nodeManager.getNodeId(), { + host: remoteNode.revProxy.getIngressHost(), + port: remoteNode.revProxy.getIngressPort(), + } as NodeAddress); } export { generateNodeIdForBucket, incrementNodeId, nodesConnect }; diff --git a/tests/sigchain/Sigchain.test.ts b/tests/sigchain/Sigchain.test.ts index 06f63f21b..ab0f65330 100644 --- a/tests/sigchain/Sigchain.test.ts +++ b/tests/sigchain/Sigchain.test.ts @@ -56,7 +56,7 @@ describe('Sigchain', () => { encrypt: keysUtils.encryptWithKey, decrypt: keysUtils.decryptWithKey, }, - } + }, }); }); afterEach(async () => { diff --git a/tests/utils.ts b/tests/utils.ts index 6d4ca7dca..f6a04a0dd 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -31,7 +31,10 @@ async function setupGlobalKeypair() { // Return key pair if the directory exists if (e.code === 'EEXIST') { const globalKeyPairPem = { - publicKey: fs.readFileSync(path.join(globalKeyPairDir, 'root.pub'), 'utf-8'), 
+ publicKey: fs.readFileSync( + path.join(globalKeyPairDir, 'root.pub'), + 'utf-8', + ), privateKey: fs.readFileSync( path.join(globalKeyPairDir, 'root.key'), 'utf-8', @@ -56,7 +59,7 @@ async function setupGlobalKeypair() { ), ]); return globalKeyPair; - } finally { + } finally { // Unlock when we have returned the keypair lock.unlock(globalKeyPairLock.fd); await globalKeyPairLock.close(); @@ -79,7 +82,7 @@ async function setupGlobalKeypair() { async function setupGlobalAgent( logger: Logger = new Logger(setupGlobalAgent.name, LogLevel.WARN, [ new StreamHandler(), - ]) + ]), ) { const globalAgentPassword = 'password'; const globalAgentDir = path.join(globalThis.dataDir, 'agent'); @@ -98,21 +101,18 @@ async function setupGlobalAgent( await sleep(1000); } const status = new Status({ - statusPath: path.join( - globalAgentDir, - config.defaults.statusBase - ), + statusPath: path.join(globalAgentDir, config.defaults.statusBase), fs, }); let statusInfo = await status.readStatus(); - if (statusInfo == null || statusInfo.status === 'DEAD') { + if (statusInfo == null || statusInfo.status === 'DEAD') { await PolykeyAgent.createPolykeyAgent({ password: globalAgentPassword, nodePath: globalAgentDir, keysConfig: { - rootKeyPairBits: 2048 + rootKeyPairBits: 2048, }, - seedNodes: {}, // explicitly no seed nodes on startup + seedNodes: {}, // Explicitly no seed nodes on startup logger, }); statusInfo = await status.readStatus(); @@ -133,9 +133,7 @@ async function setupGlobalAgent( // 7. Because not all tests need the global agent // 8. Therefore setupGlobalAgent is lazy and executed by jest worker processes try { - await fs.promises.rm( - path.join(globalAgentDir, 'references', pid), - ); + await fs.promises.rm(path.join(globalAgentDir, 'references', pid)); // If the references directory is not empty // there are other processes still using the global agent try { @@ -148,13 +146,13 @@ async function setupGlobalAgent( } // Stopping may occur in a different jest worker process // therefore we cannot rely on pkAgent, but instead use GRPC - const statusInfo = await status.readStatus() as StatusLive; + const statusInfo = (await status.readStatus()) as StatusLive; const grpcClient = await GRPCClientClient.createGRPCClientClient({ nodeId: statusInfo.data.nodeId, host: statusInfo.data.clientHost, port: statusInfo.data.clientPort, tlsConfig: { keyPrivatePem: undefined, certChainPem: undefined }, - logger + logger, }); const emptyMessage = new utilsPB.EmptyMessage(); const meta = clientUtils.encodeAuthFromPassword(globalAgentPassword); @@ -166,11 +164,8 @@ async function setupGlobalAgent( lock.unlock(globalAgentLock.fd); await globalAgentLock.close(); } - } + }, }; } -export { - setupGlobalKeypair, - setupGlobalAgent, -}; +export { setupGlobalKeypair, setupGlobalAgent }; diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index c2ed4ecf9..5db37e4c4 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -780,7 +780,10 @@ describe('VaultManager', () => { await revProxy.closeConnection(altHost, altPort); await revProxy.closeConnection(sourceHost, sourcePort); await altRevProxy.closeConnection(sourceHost, sourcePort); - await fwdProxy.closeConnection(fwdProxy.getEgressHost(), fwdProxy.getEgressPort()); + await fwdProxy.closeConnection( + fwdProxy.getEgressHost(), + fwdProxy.getEgressPort(), + ); await altFwdProxy.closeConnection( altFwdProxy.getEgressHost(), altFwdProxy.getEgressPort(), From e33af840d272aa3a5bad30430c3828906e7a2cf7 Mon Sep 17 00:00:00 
2001 From: Roger Qiu Date: Thu, 6 Jan 2022 12:40:52 +1100 Subject: [PATCH 20/28] Integrate child dynamic pipelines for parallelising tests in CI/CD --- .gitlab-ci.yml | 206 +++++--------------------------------- scripts/test-pipelines.sh | 35 +++++++ 2 files changed, 58 insertions(+), 183 deletions(-) create mode 100755 scripts/test-pipelines.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index fb3674a00..3e1659fe8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,11 +1,9 @@ -default: - interruptible: true - variables: GIT_SUBMODULE_STRATEGY: recursive stages: - check + - test - build - quality - release @@ -21,191 +19,37 @@ stages: npm run lint; ' -.test 1/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/client; - ' - -.test 2/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/agent; - ' - -.test 3/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/bin; - ' - -.test 4/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/bootstrap; - ' - -.test 5/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/network; - ' - -.test 6/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/vaults; - ' - -.test 7/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/workers; - ' - -.test 8/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/grpc; - ' - -.test 9/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/http; - ' - -.test 10/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/GenericIdTypes.test.ts tests/index.test.ts tests/PolykeyAgent.test.ts tests/utils.test.ts; - ' - -.test 11/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/claims; - ' - 
-.test 12/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/gestalts; - ' - -.test 13/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/nodes; - ' - -.test 14/16: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner +.nix-dry: stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/git; - ' - -.test 15/16: image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: check - interruptible: true script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/keys; - ' + - nix-build -v -v --dry-run ./release.nix --attr application + - nix-build -v -v --dry-run ./release.nix --attr docker + - nix-build -v -v --dry-run ./release.nix --attr package.linux.x64.elf + - nix-build -v -v --dry-run ./release.nix --attr package.windows.x64.exe + - nix-build -v -v --dry-run ./release.nix --attr package.macos.x64.macho -.test 16/16: +test-generate: image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner stage: check interruptible: true script: + - mkdir -p ./tmp - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test -- tests/acl tests/discovery tests/lockfile tests/notifications tests/session tests/sigchain tests/identities; - ' + nix-shell -I nixpkgs=./pkgs.nix --packages bash --run ' + ./scripts/test-pipelines.sh > tmp/test-pipelines.yml + ' + artifacts: + paths: + - tmp/test-pipelines.yml -.nix-dry: - stage: check - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - script: - - nix-build -v -v --dry-run ./release.nix --attr application - - nix-build -v -v --dry-run ./release.nix --attr docker - - nix-build -v -v --dry-run ./release.nix --attr package.linux.x64.elf - - nix-build -v -v --dry-run ./release.nix --attr package.windows.x64.exe - - nix-build -v -v --dry-run ./release.nix --attr package.macos.x64.macho +test: + stage: test + trigger: + include: + - artifact: tmp/test-pipelines.yml + job: test-generate + strategy: depend nix: stage: build @@ -252,7 +96,6 @@ application run: nix-store --import | \ tail -1 \ )" - - echo password > testPassword - $build_application/bin/polykey only: - master @@ -282,7 +125,6 @@ linux run: dependencies: - nix script: - - echo password > testPassword - for f in ./builds/*-linux-*; do "$f"; done only: - qa-testing @@ -293,7 +135,6 @@ windows run: dependencies: - nix script: - - echo password > testPassword - Get-ChildItem -File ./builds/*-win32-* | ForEach {& $_.FullName} tags: - windows @@ -307,7 +148,6 @@ macos run: dependencies: - nix script: - - echo password > testPassword - for f in ./builds/*-macos-*; do "$f"; done only: - qa-testing @@ -315,7 +155,7 @@ macos run: tags: - shared-macos-amd64 -.packages: +packages: stage: release image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner dependencies: diff --git a/scripts/test-pipelines.sh b/scripts/test-pipelines.sh new file mode 100755 index 000000000..eb18e416c --- /dev/null +++ b/scripts/test-pipelines.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# All directories are 
accumulated + +for test in tests/*/; do +test="${test%\/}" +cat << EOF +test ${test##*/}: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: test + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm test -- ./$test; + ' +EOF +done + +# All tests in the tests index are accumulated + +tests=(tests/*.test.ts) +cat << EOF +test index: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: test + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm install; + npm test -- ./${tests[@]}; + ' +EOF From f1102897951180223ad80cb922c461e2a230a53a Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Fri, 7 Jan 2022 12:21:39 +1100 Subject: [PATCH 21/28] Integrating ts-node cache, npm cache, and using `npm ci` instead of `npm install` --- .gitlab-ci.yml | 21 +++++++++++++++++---- scripts/test-pipelines.sh | 27 ++++++++++++++++++++++----- 2 files changed, 39 insertions(+), 9 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 3e1659fe8..d8e460277 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,5 +1,18 @@ variables: - GIT_SUBMODULE_STRATEGY: recursive + GIT_SUBMODULE_STRATEGY: "recursive" + # Cache .npm + NPM_CONFIG_CACHE: "./tmp/npm" + # Prefer offline node module installation + NPM_CONFIG_PREFER_OFFLINE: "true" + # `ts-node` has its own cache + TS_CACHED_TRANSPILE_CACHE: "./tmp/ts-node-cache" + +# Cached directories shared between jobs & pipelines per-branch +cache: + key: $CI_COMMIT_REF_SLUG + paths: + - ./tmp/npm/ + - ./tmp/ts-node-cache/ stages: - check @@ -15,7 +28,7 @@ stages: script: - > nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; + npm ci; npm run lint; ' @@ -37,11 +50,11 @@ test-generate: - mkdir -p ./tmp - > nix-shell -I nixpkgs=./pkgs.nix --packages bash --run ' - ./scripts/test-pipelines.sh > tmp/test-pipelines.yml + ./scripts/test-pipelines.sh > ./tmp/test-pipelines.yml ' artifacts: paths: - - tmp/test-pipelines.yml + - ./tmp/test-pipelines.yml test: stage: test diff --git a/scripts/test-pipelines.sh b/scripts/test-pipelines.sh index eb18e416c..1016e4597 100755 --- a/scripts/test-pipelines.sh +++ b/scripts/test-pipelines.sh @@ -1,7 +1,25 @@ #!/usr/bin/env bash -# All directories are accumulated +# Quote the heredoc to prevent shell expansion +cat << "EOF" +variables: + GIT_SUBMODULE_STRATEGY: "recursive" + # Cache .npm + NPM_CONFIG_CACHE: "./tmp/npm" + # Prefer offline node module installation + NPM_CONFIG_PREFER_OFFLINE: "true" + # `ts-node` has its own cache + TS_CACHED_TRANSPILE_CACHE: "./tmp/ts-node-cache" +# Cached directories shared between jobs & pipelines per-branch +cache: + key: $CI_COMMIT_REF_SLUG + paths: + - ./tmp/npm/ + - ./tmp/ts-node-cache/ +EOF + +# Each top-level test directory has its own job for test in tests/*/; do test="${test%\/}" cat << EOF @@ -12,14 +30,13 @@ test ${test##*/}: script: - > nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; + npm ci; npm test -- ./$test; ' EOF done -# All tests in the tests index are accumulated - +# All top-level test files are accumulated into 1 job tests=(tests/*.test.ts) cat << EOF test index: @@ -29,7 +46,7 @@ test index: script: - > nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; + npm ci; npm test -- ./${tests[@]}; ' EOF From 6e48dbcee493621aa757f68ea49854c76900a6c5 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Fri, 7 Jan 2022 12:42:22 +1100 Subject: [PATCH 22/28] 
Integrating fd-lock to Status for serialising readStatus and writeStatus as well as file watching instead of polling for `waitFor` --- scripts/test-pipelines.sh | 52 ++-- src/PolykeyAgent.ts | 2 + src/bin/utils/processors.ts | 4 + src/bootstrap/utils.ts | 8 +- src/config.ts | 1 + src/sessions/Session.ts | 2 +- src/status/Status.ts | 146 ++++++--- src/status/errors.ts | 6 + tests/PolykeyAgent.test.ts | 2 + tests/bin/agent/start.test.ts | 46 ++- tests/bin/agent/status.test.ts | 13 + tests/bin/agent/stop.test.ts | 20 ++ tests/client/GRPCClientClient.test.ts | 1 + tests/client/rpcNodes.test.ts | 5 + tests/client/service/agentStop.test.ts | 6 + tests/status/Status.test.ts | 390 ++++++++++++++----------- tests/utils.ts | 1 + 17 files changed, 455 insertions(+), 250 deletions(-) diff --git a/scripts/test-pipelines.sh b/scripts/test-pipelines.sh index 1016e4597..31a3be192 100755 --- a/scripts/test-pipelines.sh +++ b/scripts/test-pipelines.sh @@ -19,11 +19,9 @@ cache: - ./tmp/ts-node-cache/ EOF -# Each top-level test directory has its own job -for test in tests/*/; do -test="${test%\/}" +# SPECIAL CASE cat << EOF -test ${test##*/}: +test binagent: image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner stage: test interruptible: true @@ -31,22 +29,38 @@ test ${test##*/}: - > nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' npm ci; - npm test -- ./$test; + npm test -- ./tests/bin/agent; ' EOF -done + +# # Each top-level test directory has its own job +# for test in tests/*/; do +# test="${test%\/}" +# cat << EOF +# test ${test##*/}: +# image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner +# stage: test +# interruptible: true +# script: +# - > +# nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' +# npm ci; +# npm test -- ./$test; +# ' +# EOF +# done # All top-level test files are accumulated into 1 job -tests=(tests/*.test.ts) -cat << EOF -test index: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: test - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm ci; - npm test -- ./${tests[@]}; - ' -EOF +# tests=(tests/*.test.ts) +# cat << EOF +# test index: +# image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner +# stage: test +# interruptible: true +# script: +# - > +# nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' +# npm ci; +# npm test -- ./${tests[@]}; +# ' +# EOF diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index 80311a0a5..00df9198d 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -146,6 +146,7 @@ class PolykeyAgent { }; await utils.mkdirExists(fs, nodePath); const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const statePath = path.join(nodePath, config.defaults.stateBase); const dbPath = path.join(statePath, config.defaults.dbBase); const keysPath = path.join(statePath, config.defaults.keysBase); @@ -155,6 +156,7 @@ class PolykeyAgent { status ?? 
new Status({ statusPath, + statusLockPath, fs: fs, logger: logger.getChild(Status.name), }); diff --git a/src/bin/utils/processors.ts b/src/bin/utils/processors.ts index 9d3353539..9bd561be7 100644 --- a/src/bin/utils/processors.ts +++ b/src/bin/utils/processors.ts @@ -203,8 +203,10 @@ async function processClientOptions( }> { if (nodeId == null || clientHost == null || clientPort == null) { const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const status = new Status({ statusPath, + statusLockPath, fs, logger: logger.getChild(Status.name), }); @@ -270,8 +272,10 @@ async function processClientStatus( }; } const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const status = new Status({ statusPath, + statusLockPath, fs, logger: logger.getChild(Status.name), }); diff --git a/src/bootstrap/utils.ts b/src/bootstrap/utils.ts index 5d09a11ee..3a7308349 100644 --- a/src/bootstrap/utils.ts +++ b/src/bootstrap/utils.ts @@ -58,20 +58,22 @@ async function bootstrapState({ await mkdirExists(fs, nodePath); // Setup node path and sub paths const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const statePath = path.join(nodePath, config.defaults.stateBase); const dbPath = path.join(statePath, config.defaults.dbBase); const keysPath = path.join(statePath, config.defaults.keysBase); const vaultsPath = path.join(statePath, config.defaults.vaultsBase); const status = new Status({ + statusPath, + statusLockPath, fs, logger, - statusPath, }); try { await status.start({ pid: process.pid }); if (!fresh) { - // Check the if number of directory entries is greater than 1 due to status.json - if ((await fs.promises.readdir(nodePath)).length > 1) { + // Check the if number of directory entries is greater than 1 due to status.json and status.lock + if ((await fs.promises.readdir(nodePath)).length > 2) { throw new bootstrapErrors.ErrorBootstrapExistingState(); } } diff --git a/src/config.ts b/src/config.ts index db799104b..ffcdf5418 100644 --- a/src/config.ts +++ b/src/config.ts @@ -56,6 +56,7 @@ const config = { defaults: { nodePath: getDefaultNodePath(), statusBase: 'status.json', + statusLockBase: 'status.lock', stateBase: 'state', stateVersionBase: 'version', dbBase: 'db', diff --git a/src/sessions/Session.ts b/src/sessions/Session.ts index ff8c48c45..47b5f8dbf 100644 --- a/src/sessions/Session.ts +++ b/src/sessions/Session.ts @@ -103,7 +103,7 @@ class Session { } const sessionTokenData = await sessionTokenFile.readFile('utf-8'); const sessionToken = sessionTokenData.trim(); - // WriteToken may create an empty session token file before it completes + // `writeToken` may create an empty session token file before it completes if (sessionToken === '') { return; } diff --git a/src/status/Status.ts b/src/status/Status.ts index 7b46a6a48..2b1dda4e4 100644 --- a/src/status/Status.ts +++ b/src/status/Status.ts @@ -11,56 +11,60 @@ import lock from 'fd-lock'; import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import * as statusErrors from './errors'; import * as statusUtils from './utils'; -import { poll } from '../utils'; +import { sleep, poll } from '../utils'; interface Status extends StartStop {} @StartStop() class Status { public readonly statusPath: string; + public readonly statusLockPath: string; protected logger: Logger; 
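The Status rework in this patch serialises status reads and writes with fd-lock advisory locks: a dedicated status.lock file guards the agent lifecycle, and readStatus/writeStatus take short-lived locks on status.json itself, retrying with a brief sleep because fd-lock is non-blocking. A small sketch of that acquire/release pattern, with an illustrative path and retry delay, is:

import fs from 'fs';
import lock from 'fd-lock';

async function withFileLock<T>(path: string, f: () => Promise<T>): Promise<T> {
  // O_CREAT so the lock file exists; O_WRONLY mirrors the usage in Status
  const handle = await fs.promises.open(
    path,
    fs.constants.O_WRONLY | fs.constants.O_CREAT,
  );
  // fd-lock returns false instead of blocking, so spin with a short sleep
  while (!lock(handle.fd)) {
    await new Promise((r) => setTimeout(r, 2));
  }
  try {
    return await f();
  } finally {
    lock.unlock(handle.fd);
    await handle.close();
  }
}

In Status itself the write path retries with a shorter sleep than the read path, which is what the "write-preferring locking" comment below refers to.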
protected fs: FileSystem; - protected statusFile: FileHandle; + protected statusLockFile: FileHandle; public constructor({ statusPath, + statusLockPath, fs = require('fs'), logger, }: { statusPath: string; + statusLockPath: string; fs?: FileSystem; logger?: Logger; }) { this.logger = logger ?? new Logger(this.constructor.name); this.statusPath = statusPath; + this.statusLockPath = statusLockPath; this.fs = fs; } public async start(data: StatusStarting['data']): Promise { this.logger.info(`Starting ${this.constructor.name}`); - const statusFile = await this.fs.promises.open( - this.statusPath, + const statusLockFile = await this.fs.promises.open( + this.statusLockPath, this.fs.constants.O_WRONLY | this.fs.constants.O_CREAT, ); - if (!lock(statusFile.fd)) { - await statusFile.close(); + if (!lock(statusLockFile.fd)) { + await statusLockFile.close(); throw new statusErrors.ErrorStatusLocked(); } - this.statusFile = statusFile; + this.statusLockFile = statusLockFile; try { await this.writeStatus({ status: 'STARTING', data, }); } catch (e) { - lock.unlock(this.statusFile.fd); - await this.statusFile.close(); + lock.unlock(this.statusLockFile.fd); + await this.statusLockFile.close(); throw e; } this.logger.info(`${this.constructor.name} is STARTING`); } - @ready(new statusErrors.ErrorStatusNotRunning()) + @ready(new statusErrors.ErrorStatusNotRunning(), true) public async finishStart(data: StatusLive['data']): Promise { this.logger.info(`Finish ${this.constructor.name} STARTING`); await this.writeStatus({ @@ -70,7 +74,7 @@ class Status { this.logger.info(`${this.constructor.name} is LIVE`); } - @ready(new statusErrors.ErrorStatusNotRunning()) + @ready(new statusErrors.ErrorStatusNotRunning(), true) public async beginStop(data: StatusStopping['data']): Promise { this.logger.info(`Begin ${this.constructor.name} STOPPING`); await this.writeStatus({ @@ -86,8 +90,9 @@ class Status { status: 'DEAD', data, }); - lock.unlock(this.statusFile.fd); - await this.statusFile.close(); + lock.unlock(this.statusLockFile.fd); + await this.statusLockFile.close(); + await this.fs.promises.rm(this.statusLockPath, { force: true }); this.logger.info(`${this.constructor.name} is DEAD`); } @@ -96,50 +101,97 @@ class Status { * This can be used without running Status */ public async readStatus(): Promise { - let statusData: string; + let statusFile; try { - statusData = await this.fs.promises.readFile(this.statusPath, 'utf-8'); - } catch (e) { - if (e.code === 'ENOENT') { + try { + statusFile = await this.fs.promises.open(this.statusPath, 'r'); + } catch (e) { + if (e.code === 'ENOENT') { + return; + } + throw new statusErrors.ErrorStatusRead(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); + } + while (!lock(statusFile.fd)) { + await sleep(2); + } + let statusData; + try { + statusData = (await statusFile.readFile('utf-8')).trim(); + } catch (e) { + throw new statusErrors.ErrorStatusRead(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); + } + // `writeStatus` may create an empty status file before it completes + if (statusData === '') { return; } - throw new statusErrors.ErrorStatusRead(e.message, { - errno: e.errno, - syscall: e.syscall, - code: e.code, - path: e.path, - }); - } - let statusInfo; - try { - statusInfo = JSON.parse(statusData); - } catch (e) { - throw new statusErrors.ErrorStatusParse('JSON parsing failed'); - } - if (!statusUtils.statusValidate(statusInfo)) { - throw new statusErrors.ErrorStatusParse('StatusInfo 
validation failed', { - errors: statusUtils.statusValidate.errors, - }); + let statusInfo; + try { + statusInfo = JSON.parse(statusData); + } catch (e) { + throw new statusErrors.ErrorStatusParse('JSON parsing failed'); + } + if (!statusUtils.statusValidate(statusInfo)) { + throw new statusErrors.ErrorStatusParse( + 'StatusInfo validation failed', + { + errors: statusUtils.statusValidate.errors, + }, + ); + } + return statusInfo as StatusInfo; + } finally { + if (statusFile != null) { + lock.unlock(statusFile.fd); + await statusFile.close(); + } } - return statusInfo as StatusInfo; } protected async writeStatus(statusInfo: StatusInfo): Promise { this.logger.info(`Writing Status file to ${this.statusPath}`); + let statusFile; try { - await this.statusFile.truncate(); - await this.statusFile.write( - JSON.stringify(statusInfo, undefined, 2) + '\n', - 0, - 'utf-8', + // Cannot use 'w', it truncates immediately + // should truncate only while holding the lock + statusFile = await this.fs.promises.open( + this.statusPath, + this.fs.constants.O_WRONLY | this.fs.constants.O_CREAT, ); - } catch (e) { - throw new statusErrors.ErrorStatusWrite(e.message, { - errno: e.errno, - syscall: e.syscall, - code: e.code, - path: e.path, - }); + while (!lock(statusFile.fd)) { + // Write sleep should be half of read sleep + // this ensures write-preferring locking + await sleep(1); + } + try { + await statusFile.truncate(); + await statusFile.write( + JSON.stringify(statusInfo, undefined, 2) + '\n', + 0, + 'utf-8', + ); + } catch (e) { + throw new statusErrors.ErrorStatusWrite(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); + } + } finally { + if (statusFile != null) { + lock.unlock(statusFile.fd); + await statusFile.close(); + } } } diff --git a/src/status/errors.ts b/src/status/errors.ts index 216958b42..7de8a51d3 100644 --- a/src/status/errors.ts +++ b/src/status/errors.ts @@ -24,6 +24,11 @@ class ErrorStatusParse extends ErrorStatus { exitCode = sysexits.CONFIG; } +class ErrorStatusTimeout extends ErrorStatus { + description = 'Poll timed out'; + exitCode = sysexits.TEMPFAIL; +} + export { ErrorStatus, ErrorStatusNotRunning, @@ -31,4 +36,5 @@ export { ErrorStatusRead, ErrorStatusWrite, ErrorStatusParse, + ErrorStatusTimeout, }; diff --git a/tests/PolykeyAgent.test.ts b/tests/PolykeyAgent.test.ts index 41de45e74..9423050ab 100644 --- a/tests/PolykeyAgent.test.ts +++ b/tests/PolykeyAgent.test.ts @@ -99,6 +99,7 @@ describe('PolykeyAgent', () => { test('start after stop', async () => { const nodePath = `${dataDir}/polykey`; const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath, @@ -106,6 +107,7 @@ describe('PolykeyAgent', () => { }); const status = new Status({ statusPath, + statusLockPath, fs, logger, }); diff --git a/tests/bin/agent/start.test.ts b/tests/bin/agent/start.test.ts index 017ce7fd8..e9870b204 100644 --- a/tests/bin/agent/start.test.ts +++ b/tests/bin/agent/start.test.ts @@ -66,6 +66,11 @@ describe('start', () => { // Check for graceful exit const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -138,6 +143,11 @@ describe('start', () => { expect(polykeyAgentOut).toHaveLength(0); const status = new Status({ statusPath: path.join(dataDir, 
'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -265,7 +275,7 @@ describe('start', () => { logger.getChild('agentProcess'), ), testBinUtils.pkSpawn( - ['bootstrap', '--root-key-pair-bits', '1024', '--verbose'], + ['bootstrap', '--fresh', '--root-key-pair-bits', '1024', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -374,6 +384,11 @@ describe('start', () => { ); const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -466,6 +481,11 @@ describe('start', () => { // Check for graceful exit const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -481,6 +501,11 @@ describe('start', () => { const password2 = 'new password'; const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -594,6 +619,11 @@ describe('start', () => { async () => { const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -686,9 +716,14 @@ describe('start', () => { async () => { const password = 'abc123'; const nodePath = path.join(dataDir, 'polykey'); - const statusPath = path.join(nodePath, 'status.json'); + const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join( + nodePath, + config.defaults.statusLockBase, + ); const status = new Status({ statusPath, + statusLockPath, fs, logger, }); @@ -741,9 +776,14 @@ describe('start', () => { async () => { const password = 'abc123'; const nodePath = path.join(dataDir, 'polykey'); - const statusPath = path.join(nodePath, 'status.json'); + const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join( + nodePath, + config.defaults.statusLockBase, + ); const status = new Status({ statusPath, + statusLockPath, fs, logger, }); diff --git a/tests/bin/agent/status.test.ts b/tests/bin/agent/status.test.ts index a384da103..247b05805 100644 --- a/tests/bin/agent/status.test.ts +++ b/tests/bin/agent/status.test.ts @@ -31,6 +31,11 @@ describe('status', () => { const password = 'abc123'; const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -129,6 +134,10 @@ describe('status', () => { test('status on LIVE agent', async () => { const status = new Status({ statusPath: path.join(globalAgentDir, config.defaults.statusBase), + statusLockPath: path.join( + globalAgentDir, + config.defaults.statusLockBase, + ), fs, logger, }); @@ -166,6 +175,10 @@ describe('status', () => { await fs.promises.writeFile(passwordPath, globalAgentPassword); const status = new Status({ statusPath: path.join(globalAgentDir, config.defaults.statusBase), + statusLockPath: path.join( + globalAgentDir, + config.defaults.statusLockBase, + ), fs, logger, }); diff --git a/tests/bin/agent/stop.test.ts b/tests/bin/agent/stop.test.ts index 
4ae96b641..2710fbc08 100644 --- a/tests/bin/agent/stop.test.ts +++ b/tests/bin/agent/stop.test.ts @@ -46,6 +46,11 @@ describe('stop', () => { expect(exitCode).toBe(0); const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -69,6 +74,11 @@ describe('stop', () => { await fs.promises.writeFile(passwordPath, password); const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -147,6 +157,11 @@ describe('stop', () => { const password = 'abc123'; const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -216,6 +231,11 @@ describe('stop', () => { ); const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); diff --git a/tests/client/GRPCClientClient.test.ts b/tests/client/GRPCClientClient.test.ts index b3a43ec7a..1bd989c18 100644 --- a/tests/client/GRPCClientClient.test.ts +++ b/tests/client/GRPCClientClient.test.ts @@ -104,6 +104,7 @@ describe(GRPCClientClient.name, () => { ); const status = new Status({ statusPath: path.join(nodePath, config.defaults.statusBase), + statusLockPath: path.join(nodePath, config.defaults.statusLockBase), fs, logger, }); diff --git a/tests/client/rpcNodes.test.ts b/tests/client/rpcNodes.test.ts index 1e19e425d..833892a13 100644 --- a/tests/client/rpcNodes.test.ts +++ b/tests/client/rpcNodes.test.ts @@ -190,8 +190,13 @@ describe('Client service', () => { polykeyServer.nodePath, config.defaults.statusBase, ); + const statusLockPath = path.join( + polykeyServer.nodePath, + config.defaults.statusLockBase, + ); const status = new Status({ statusPath, + statusLockPath, fs, logger, }); diff --git a/tests/client/service/agentStop.test.ts b/tests/client/service/agentStop.test.ts index a050b6dcd..c93b11e37 100644 --- a/tests/client/service/agentStop.test.ts +++ b/tests/client/service/agentStop.test.ts @@ -90,8 +90,10 @@ describe('agentStop', () => { }); test('stops the agent with password', async () => { const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const status = new Status({ statusPath, + statusLockPath, fs, logger, }); @@ -114,8 +116,10 @@ describe('agentStop', () => { test('stops the agent with token', async () => { const token = await pkAgent.sessionManager.createToken(); const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const status = new Status({ statusPath, + statusLockPath, fs, logger, }); @@ -137,8 +141,10 @@ describe('agentStop', () => { }); test('cannot stop the agent if not authenticated', async () => { const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const status = new Status({ statusPath, + statusLockPath, fs, logger, }); diff --git a/tests/status/Status.test.ts b/tests/status/Status.test.ts index 7b9e75c82..02c04ae15 100644 --- a/tests/status/Status.test.ts +++ 
b/tests/status/Status.test.ts @@ -5,56 +5,56 @@ import os from 'os'; import path from 'path'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import config from '@/config'; -import { sleep, errors as utilsErrors } from '@/utils'; import { Status, errors as statusErrors } from '@/status'; describe('Status', () => { const logger = new Logger(`${Status.name} Test`, LogLevel.WARN, [ new StreamHandler(), ]); - const waitForTimeout = 1000; let dataDir: string; - let status: Status; - let statusPath: string; beforeEach(async () => { dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'status-test-')); - statusPath = path.join(dataDir, config.defaults.statusBase); - status = new Status({ - statusPath, - fs: fs, - logger: logger, - }); }); - afterEach(async () => { - await status.stop({}); await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - - test('type correct', () => { - expect(status).toBeInstanceOf(Status); - }); - - test('starting and stopping with correct side effects', async () => { + test('status readiness', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + await status.start({ pid: 0 }); + // Should be a noop await status.start({ pid: 0 }); - await status.readStatus(); expect(fs.existsSync(status.statusPath)).toBe(true); - - await status.stop({ lol: 2 }); - await sleep(1000); + expect(fs.existsSync(status.statusLockPath)).toBe(true); + await status.stop({ foo: 'bar' }); expect(fs.existsSync(status.statusPath)).toBe(true); - const state = await status.readStatus(); - expect(state?.status).toEqual('DEAD'); + expect(fs.existsSync(status.statusLockPath)).toBe(false); + let statusInfo = await status.readStatus(); + expect(statusInfo?.status).toEqual('DEAD'); + await status.start({ pid: 0 }); + statusInfo = await status.readStatus(); + expect(statusInfo?.status).toEqual('STARTING'); + await status.stop({}); }); - - test('updating data and parsing it correctly', async () => { + test('status transitions', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); await status.start({ pid: 0 }); - const lock1 = await status.readStatus(); - expect(lock1?.data.pid).toBeDefined(); - + const statusInfo1 = await status.readStatus(); + expect(statusInfo1).toBeDefined(); + expect(statusInfo1!.status).toBe('STARTING'); + expect(statusInfo1!.data.pid).toBe(0); await status.finishStart({ pid: 0, nodeId: 'node' as NodeId, @@ -66,181 +66,217 @@ describe('Status', () => { grpcPort: 12345, anything: 'something', }); - - const lock2 = await status.readStatus(); - if (lock2) { - expect(lock2.data.pid).toBeDefined(); - expect(lock2.data.grpcHost).toBe('localhost'); - expect(lock2.data.grpcPort).toBe(12345); - expect(lock2.data.anything).toBe('something'); - } else { - throw new Error('Lock should exist'); - } - + const statusInfo2 = await status.readStatus(); + expect(statusInfo2).toBeDefined(); + expect(statusInfo2!.status).toBe('LIVE'); + expect(statusInfo2!.data.pid).toBeDefined(); + expect(statusInfo2!.data.grpcHost).toBe('localhost'); + expect(statusInfo2!.data.grpcPort).toBe(12345); + expect(statusInfo2!.data.anything).toBe('something'); + await status.beginStop({ + pid: 1, + }); + const statusInfo3 = await status.readStatus(); + 
expect(statusInfo3).toBeDefined(); + expect(statusInfo3!.status).toBe('STOPPING'); + expect(statusInfo3!.data.pid).toBe(1); await status.stop({}); + const statusInfo4 = await status.readStatus(); + expect(statusInfo4).toBeDefined(); + expect(statusInfo4!.status).toBe('DEAD'); }); - - test('Working fine when a status already exists', async () => { + test('start with existing statusPath or statusLockPath', async () => { await fs.promises.writeFile( - status.statusPath, - JSON.stringify({ pid: 66666 }), + path.join(dataDir, config.defaults.statusBase), + 'hello world', ); - await status.start({ pid: 0 }); - let lock; - lock = await status.readStatus(); - if (lock) { - expect(lock.data.pid).toBeDefined(); - } else { - throw new Error('Lock should exist'); - } - - await status.finishStart({ - pid: 0, - nodeId: 'node' as NodeId, - clientHost: '::1' as Host, - clientPort: 0 as Port, - ingressHost: '127.0.0.1' as Host, - ingressPort: 0 as Port, - grpcHost: 'localhost', - grpcPort: 12345, - anything: 'something', + await fs.promises.writeFile( + path.join(dataDir, config.defaults.statusLockBase), + 'hello world', + ); + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, }); - - lock = await status.readStatus(); - if (lock) { - expect(lock.data.pid).toBeDefined(); - expect(lock.data.grpcHost).toBe('localhost'); - expect(lock.data.grpcPort).toBe(12345); - expect(lock.data.anything).toBe('something'); - } else { - throw new Error('Lock should exist'); - } - + await status.start({ pid: 0 }); + const statusInfo = await status.readStatus(); + expect(statusInfo).toBeDefined(); + expect(statusInfo!.status).toBe('STARTING'); + expect(statusInfo!.data.pid).toBe(0); await status.stop({}); }); - test('A running status holds a lock', async () => { - // Make sure that the status is running - await status.start({ pid: 0 }); - - // Try to start a new status. - // Creation should succeed. + test('readStatus on non-existent status', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + expect(await status.readStatus()).toBeUndefined(); + }); + test('singleton running status', async () => { + const status1 = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); const status2 = new Status({ statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), fs: fs, logger: logger, }); - - // Should be able to read the lock info. - const info = await status2.readStatus(); - expect(info).toBeDefined(); - expect(info?.data.pid).toBeDefined(); - - // Should fail to start a new lock. - await expect(() => status2.start({ pid: 0 })).rejects.toThrow( - statusErrors.ErrorStatusLocked, - ); + await status1.start({ pid: 1 }); + await expect(async () => { + await status2.start({ pid: 2 }); + }).rejects.toThrow(statusErrors.ErrorStatusLocked); + // Status 2 can still read the status + const statusInfo = await status2.readStatus(); + expect(statusInfo).toBeDefined(); + expect(statusInfo!.data.pid).toBe(1); + await status1.stop({}); }); - test('Lockfile has multiple states.', async () => { - // Should be starting now. 
+ test('wait for transitions', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + let statusWaitFor = status.waitFor('STARTING'); await status.start({ pid: 0 }); - expect((await status.readStatus())?.status).toEqual('STARTING'); - - // Should be running. + const statusInfoStarting = await statusWaitFor; + expect(statusInfoStarting!.status).toBe('STARTING'); + statusWaitFor = status.waitFor('LIVE'); await status.finishStart({ clientHost: '' as Host, clientPort: 0 as Port, - nodeId: '' as NodeId, ingressHost: '127.0.0.1' as Host, ingressPort: 0 as Port, + nodeId: '' as NodeId, pid: 0, }); - expect((await status.readStatus())?.status).toEqual('LIVE'); - - // Should be stopping. + const statusInfoLive = await statusWaitFor; + expect(statusInfoLive!.status).toBe('LIVE'); + statusWaitFor = status.waitFor('STOPPING'); await status.beginStop({ pid: 0 }); - expect((await status.readStatus())?.status).toEqual('STOPPING'); - - // Should be removed now. + const statusInfoStopping = await statusWaitFor; + expect(statusInfoStopping!.status).toBe('STOPPING'); + statusWaitFor = status.waitFor('DEAD'); await status.stop({}); - expect((await status.readStatus())?.status).toEqual('DEAD'); - }); - test('Status can wait for its status to be LIVE if started.', async () => { - // We want to mimic the startup procedure. - const delayedStart = async () => { - await status.start({ pid: 0 }); - await sleep(500); - await status.finishStart({ - clientHost: '' as Host, - clientPort: 0 as Port, - ingressHost: '127.0.0.1' as Host, - ingressPort: 0 as Port, - nodeId: '' as NodeId, - pid: 0, - }); - }; - const prom = delayedStart(); - - const test = await status.waitFor('LIVE', waitForTimeout); - expect(test.status).toEqual('LIVE'); - await prom; - - // Checking that we throw an error when we can't wait for RUNNING. - const delayedStop = async () => { - await status.beginStop({ pid: 0 }); - await sleep(500); - await status.stop({}); - }; - const prom2 = delayedStop(); - const test2 = status.waitFor('LIVE', waitForTimeout); - await expect(async () => { - await test2; - }).rejects.toThrow(utilsErrors.ErrorUtilsPollTimeout); - await prom2; - - // Should throw if no file was found / unlocked. - const test3 = status.waitFor('LIVE', waitForTimeout); - await expect(async () => { - await test3; - }).rejects.toThrow(utilsErrors.ErrorUtilsPollTimeout); + const statusInfoDead = await statusWaitFor; + expect(statusInfoDead!.status).toBe('DEAD'); }); - test('Status can wait for its status to be DEAD if Stopping.', async () => { - // Should succeed if not started. - const test4 = await status.waitFor('DEAD', waitForTimeout); - expect(test4.status).toEqual('DEAD'); - - // Should throw an error when starting. - await status.start({ pid: 0 }); - const test = status.waitFor('LIVE', waitForTimeout); - await expect(async () => { - await test; - }).rejects.toThrow(utilsErrors.ErrorUtilsPollTimeout); - - // Should throw an error whens started. - await status.start({ pid: 0 }); - const test2 = status.waitFor('DEAD', waitForTimeout); - await expect(async () => { - await test2; - }).rejects.toThrow(utilsErrors.ErrorUtilsPollTimeout); - - // Should wait and succeed when stopping. 
- const delayedStart = async () => { - await status.beginStop({ pid: 0 }); - await sleep(500); - await status.stop({}); - }; - const prom2 = delayedStart(); - const test3 = await status.waitFor('DEAD', waitForTimeout); - expect(test3.status).toEqual('DEAD'); - await prom2; - }); - test('should throw an error when failing to parse.', async () => { - // Creating the status file. + test('parse error when statusPath is corrupted', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); await status.start({ pid: 0 }); - // Corrupting the status file. - await fs.promises.writeFile(statusPath, '{'); - // Should throw. + await fs.promises.writeFile(status.statusPath, '{'); await expect(() => status.readStatus()).rejects.toThrow( statusErrors.ErrorStatusParse, ); + await status.stop({}); + }); + test('status transitions are serialised', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + await status.start({ pid: 0 }); + // The key point here is that there are no parsing errors + // And that the status info is always defined + for (let i = 0; i < 100; i++) { + const [, statusInfo1, , , , statusInfo2] = await Promise.all([ + status.finishStart({ + clientHost: '' as Host, + clientPort: 0 as Port, + ingressHost: '127.0.0.1' as Host, + ingressPort: 3425 as Port, + nodeId: '' as NodeId, + pid: 0, + }), + status.readStatus(), + status.beginStop({ + pid: 4, + }), + status.finishStart({ + clientHost: '' as Host, + clientPort: 3445 as Port, + ingressHost: '127.0.0.1' as Host, + ingressPort: 0 as Port, + nodeId: '' as NodeId, + pid: 0, + }), + status.beginStop({ + pid: 2, + }), + status.readStatus(), + status.finishStart({ + clientHost: '' as Host, + clientPort: 0 as Port, + ingressHost: '127.0.0.1' as Host, + ingressPort: 0 as Port, + nodeId: '' as NodeId, + pid: 0, + }), + ]); + expect(statusInfo1).toBeDefined(); + expect(statusInfo2).toBeDefined(); + expect(['LIVE', 'STARTING', 'STOPPING']).toContainEqual( + statusInfo1!.status, + ); + expect(['LIVE', 'STARTING', 'STOPPING']).toContainEqual( + statusInfo2!.status, + ); + } + await status.stop({ pid: 0 }); + }); + test('wait for has at-least-once semantics', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + await status.start({ pid: 0 }); + // `waitFor` relies on filesystem watching + // It does not guarantee exactly-once semantics for status events + // In this case, it is possible that upon reacting to `LIVE` status + // When it reads the status, it has already changed to `STOPPING` + // Which means the `statusWaitFor` never resolves + const statusWaitFor = status.waitFor('LIVE', 1000); + const p1 = status.finishStart({ + clientHost: '' as Host, + clientPort: 0 as Port, + ingressHost: '127.0.0.1' as Host, + ingressPort: 0 as Port, + nodeId: '' as NodeId, + pid: 0, + }); + const p2 = status.beginStop({ pid: 1 }); + try { + const statusInfo = await statusWaitFor; + expect(statusInfo!.status).toBe('LIVE'); + logger.info('Succeeds waiting for LIVE'); + } catch (e) { + expect(e).toBeInstanceOf(statusErrors.ErrorStatusTimeout); + logger.info('Times out waiting for LIVE'); + } + await 
Promise.all([p1, p2]); + // The last promise to be resolved might be p1 and not p2 + const statusInfo = await status.readStatus(); + expect( + statusInfo!.status === 'LIVE' || statusInfo!.status === 'STOPPING', + ).toBe(true); + await status.stop({}); }); }); diff --git a/tests/utils.ts b/tests/utils.ts index f6a04a0dd..4eed7f203 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -102,6 +102,7 @@ async function setupGlobalAgent( } const status = new Status({ statusPath: path.join(globalAgentDir, config.defaults.statusBase), + statusLockPath: path.join(globalAgentDir, config.defaults.statusLockBase), fs, }); let statusInfo = await status.readStatus(); From 8029b57bcdb6a05726e98d39e8627afbdf1df034 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Sat, 8 Jan 2022 21:42:44 +1100 Subject: [PATCH 23/28] Split tests according to test directories recursively --- scripts/test-pipelines.sh | 77 +++++++++++++++++++++++---------------- 1 file changed, 45 insertions(+), 32 deletions(-) diff --git a/scripts/test-pipelines.sh b/scripts/test-pipelines.sh index 31a3be192..e82ff89f5 100755 --- a/scripts/test-pipelines.sh +++ b/scripts/test-pipelines.sh @@ -1,5 +1,8 @@ #!/usr/bin/env bash +shopt -s globstar +shopt -s nullglob + # Quote the heredoc to prevent shell expansion cat << "EOF" variables: @@ -19,25 +22,11 @@ cache: - ./tmp/ts-node-cache/ EOF -# SPECIAL CASE -cat << EOF -test binagent: - image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - stage: test - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm ci; - npm test -- ./tests/bin/agent; - ' -EOF +printf "\n" -# # Each top-level test directory has its own job -# for test in tests/*/; do -# test="${test%\/}" +# # SPECIAL CASE # cat << EOF -# test ${test##*/}: +# test binagent: # image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner # stage: test # interruptible: true @@ -45,22 +34,46 @@ EOF # - > # nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' # npm ci; -# npm test -- ./$test; +# npm test -- ./tests/bin/agent; # ' # EOF -# done + +# Each test directory has its own job +for test_dir in tests/**/*/; do + test_files=("$test_dir"*.test.ts) + if [ ${#test_files[@]} -eq 0 ]; then + continue + fi + # Remove trailing slash + test_dir="${test_dir%\/}" + # Remove `tests/` prefix + test_dir="${test_dir#*/}" + cat << EOF +test $test_dir: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: test + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm ci; + npm test -- ${test_files[@]}; + ' +EOF + printf "\n" +done # All top-level test files are accumulated into 1 job -# tests=(tests/*.test.ts) -# cat << EOF -# test index: -# image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner -# stage: test -# interruptible: true -# script: -# - > -# nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' -# npm ci; -# npm test -- ./${tests[@]}; -# ' -# EOF +test_files=(tests/*.test.ts) +cat << EOF +test index: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: test + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm ci; + npm test -- ${test_files[@]}; + ' +EOF From 8387c798a0f15903f11317c25d31eac2c5e0c908 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Sat, 8 Jan 2022 22:15:40 +1100 Subject: [PATCH 24/28] Set jest cache to `tmp/jest` and integrate it into CI/CD --- .gitlab-ci.yml | 2 ++ 
jest.config.js | 1 + scripts/test-pipelines.sh | 2 ++ 3 files changed, 5 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d8e460277..cd3fb2369 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -13,6 +13,8 @@ cache: paths: - ./tmp/npm/ - ./tmp/ts-node-cache/ + # `jest` cache is configured in jest.config.js + - ./tmp/jest/ stages: - check diff --git a/jest.config.js b/jest.config.js index fb0524bfc..46fe908e2 100644 --- a/jest.config.js +++ b/jest.config.js @@ -40,6 +40,7 @@ process.env['GLOBAL_DATA_DIR'] = globals.dataDir; module.exports = { testEnvironment: "node", + cacheDirectory: '/tmp/jest', verbose: true, roots: [ "/tests" diff --git a/scripts/test-pipelines.sh b/scripts/test-pipelines.sh index e82ff89f5..2e3293a1f 100755 --- a/scripts/test-pipelines.sh +++ b/scripts/test-pipelines.sh @@ -20,6 +20,8 @@ cache: paths: - ./tmp/npm/ - ./tmp/ts-node-cache/ + # `jest` cache is configured in jest.config.js + - ./tmp/jest/ EOF printf "\n" From 74bec1811c61b419166c33d41baa1c8b2823635b Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Sun, 9 Jan 2022 15:42:10 +1100 Subject: [PATCH 25/28] Fixing ts-node-cache in child pipeline by not inheriting variables from the parent pipeline --- .gitlab-ci.yml | 10 +++++++++- scripts/test-pipelines.sh | 4 +++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index cd3fb2369..f8bd013ce 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -5,7 +5,9 @@ variables: # Prefer offline node module installation NPM_CONFIG_PREFER_OFFLINE: "true" # `ts-node` has its own cache - TS_CACHED_TRANSPILE_CACHE: "./tmp/ts-node-cache" + # It must use an absolute path, otherwise ts-node calls will CWD + TS_CACHED_TRANSPILE_CACHE: "${CI_PROJECT_DIR}/tmp/ts-node-cache" + TS_CACHED_TRANSPILE_PORTABLE: "true" # Cached directories shared between jobs & pipelines per-branch cache: @@ -60,6 +62,12 @@ test-generate: test: stage: test + # Don't implicitly inherit top-level variables in child pipeline + # All inherited variables should be explicitly defined here + # Note that variables defined here override any variables defined in the child pipeline + # This causes a bug with $CI_PROJECT_DIR, which is expanded into an empty string + inherit: + variables: false trigger: include: - artifact: tmp/test-pipelines.yml diff --git a/scripts/test-pipelines.sh b/scripts/test-pipelines.sh index 2e3293a1f..323850fdd 100755 --- a/scripts/test-pipelines.sh +++ b/scripts/test-pipelines.sh @@ -12,7 +12,9 @@ variables: # Prefer offline node module installation NPM_CONFIG_PREFER_OFFLINE: "true" # `ts-node` has its own cache - TS_CACHED_TRANSPILE_CACHE: "./tmp/ts-node-cache" + # It must use an absolute path, otherwise ts-node calls will CWD + TS_CACHED_TRANSPILE_CACHE: "${CI_PROJECT_DIR}/tmp/ts-node-cache" + TS_CACHED_TRANSPILE_PORTABLE: "true" # Cached directories shared between jobs & pipelines per-branch cache: From 4f5013a91591977289471a515c63cdf415b814e0 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Sun, 9 Jan 2022 16:05:55 +1100 Subject: [PATCH 26/28] Status on remote live agent test needs node path during CI/CD testing --- tests/bin/agent/status.test.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/bin/agent/status.test.ts b/tests/bin/agent/status.test.ts index 247b05805..4181b1a16 100644 --- a/tests/bin/agent/status.test.ts +++ b/tests/bin/agent/status.test.ts @@ -183,9 +183,12 @@ describe('status', () => { logger, }); const statusInfo = (await status.readStatus())!; + // This still needs a `nodePath` because of session token 
path const { exitCode, stdout } = await testBinUtils.pkStdio([ 'agent', 'status', + '--node-path', + dataDir, '--password-file', passwordPath, '--node-id', From ed3427792e690c8118f674523604ae4f742764ba Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Sun, 9 Jan 2022 17:52:30 +1100 Subject: [PATCH 27/28] Use --public on pkg to ensure that pkg accepts GPL-3.0 source code --- release.nix | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/release.nix b/release.nix index aab9030e6..f4a5365f1 100644 --- a/release.nix +++ b/release.nix @@ -20,9 +20,9 @@ let pkg . \ --targets linux-${arch} \ --no-bytecode \ + --public \ --public-packages "*" \ - --output out\ - --verbose + --output out ''; installPhase = '' cp out $out @@ -46,6 +46,7 @@ let pkg . \ --targets win-${arch} \ --no-bytecode \ + --public \ --public-packages "*" \ --output out.exe ''; @@ -71,6 +72,7 @@ let pkg . \ --targets macos-${arch} \ --no-bytecode \ + --public \ --public-packages "*" \ --output out ''; From 760ae54446cd5ac55372e9d1bdb224fa37870b80 Mon Sep 17 00:00:00 2001 From: Roger Qiu Date: Sun, 9 Jan 2022 16:26:08 +1100 Subject: [PATCH 28/28] Removed qa-testing scaffolding for .gitlab-ci.yml --- .gitlab-ci.yml | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f8bd013ce..11a1eb825 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -25,7 +25,7 @@ stages: - quality - release -.lint: +lint: image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner stage: check interruptible: true @@ -36,7 +36,7 @@ stages: npm run lint; ' -.nix-dry: +nix-dry: stage: check image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner script: @@ -101,7 +101,6 @@ nix: --attr package.macos.x64.macho)" - cp -r $builds ./builds/ only: - - qa-testing - master artifacts: paths: @@ -122,7 +121,6 @@ application run: - $build_application/bin/polykey only: - master - - qa-testing docker run: stage: quality @@ -139,7 +137,6 @@ docker run: - image="$(docker load --input ./builds/*docker* | cut -d' ' -f3)" - docker run "$image" only: - - qa-testing - master linux run: @@ -150,7 +147,6 @@ linux run: script: - for f in ./builds/*-linux-*; do "$f"; done only: - - qa-testing - master windows run: @@ -162,7 +158,6 @@ windows run: tags: - windows only: - - qa-testing - master macos run: @@ -173,7 +168,6 @@ macos run: script: - for f in ./builds/*-macos-*; do "$f"; done only: - - qa-testing - master tags: - shared-macos-amd64
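
For reference, a minimal sketch of regenerating and inspecting the child test pipeline locally; this is illustrative only and not taken from the patch — it assumes the script is run from the project root with a bash that supports globstar, and it writes to the same path that the parent pipeline's trigger references via `artifact: tmp/test-pipelines.yml` (the actual `test-generate` job's script is not shown above, so its exact invocation is an assumption):

    # Generate the dynamic child pipeline YAML from the per-directory test jobs
    mkdir -p ./tmp
    ./scripts/test-pipelines.sh > ./tmp/test-pipelines.yml
    # Inspect the generated jobs (one per test directory, plus the `test index` job)
    less ./tmp/test-pipelines.yml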