diff --git a/.gitignore b/.gitignore index cf000140c..cd3216165 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ .env* !.env.example /result* +/builds # Logs logs diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a9cbf4640..11a1eb825 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,36 +1,44 @@ variables: - GIT_SUBMODULE_STRATEGY: recursive + GIT_SUBMODULE_STRATEGY: "recursive" + # Cache .npm + NPM_CONFIG_CACHE: "./tmp/npm" + # Prefer offline node module installation + NPM_CONFIG_PREFER_OFFLINE: "true" + # `ts-node` has its own cache + # It must use an absolute path, otherwise ts-node calls will CWD + TS_CACHED_TRANSPILE_CACHE: "${CI_PROJECT_DIR}/tmp/ts-node-cache" + TS_CACHED_TRANSPILE_PORTABLE: "true" + +# Cached directories shared between jobs & pipelines per-branch +cache: + key: $CI_COMMIT_REF_SLUG + paths: + - ./tmp/npm/ + - ./tmp/ts-node-cache/ + # `jest` cache is configured in jest.config.js + - ./tmp/jest/ stages: - check + - test - build + - quality - release -image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner - lint: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner stage: check interruptible: true script: - > nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; + npm ci; npm run lint; ' -test: - stage: check - interruptible: true - script: - - > - nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' - npm install; - npm run test; - ' - nix-dry: stage: check - interruptible: true + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner script: - nix-build -v -v --dry-run ./release.nix --attr application - nix-build -v -v --dry-run ./release.nix --attr docker @@ -38,33 +46,147 @@ nix-dry: - nix-build -v -v --dry-run ./release.nix --attr package.windows.x64.exe - nix-build -v -v --dry-run ./release.nix --attr package.macos.x64.macho +test-generate: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: check + 
interruptible: true + script: + - mkdir -p ./tmp + - > + nix-shell -I nixpkgs=./pkgs.nix --packages bash --run ' + ./scripts/test-pipelines.sh > ./tmp/test-pipelines.yml + ' + artifacts: + paths: + - ./tmp/test-pipelines.yml + +test: + stage: test + # Don't implicitly inherit top-level variables in child pipeline + # All inherited variables should be explicitly defined here + # Note that variables defined here override any variables defined in the child pipeline + # This causes a bug with $CI_PROJECT_DIR, which is expanded into an empty string + inherit: + variables: false + trigger: + include: + - artifact: tmp/test-pipelines.yml + job: test-generate + strategy: depend + nix: stage: build - interruptible: true + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner script: + - mkdir -p ./builds + # nix-specific application target + - > + build_application="$(nix-build \ + --max-jobs "$(nproc)" --cores "$(nproc)" \ + ./release.nix \ + --attr application \ + )" + - > + nix-store --export $( \ + nix-store --query --requisites "$build_application" \ + ) | gzip > ./builds/js-polykey.closure.gz + # non-nix targets - > - nix-build ./release.nix - --max-jobs $(nproc) - --attr application - --attr docker + builds="$(nix-build \ + --max-jobs "$(nproc)" --cores "$(nproc)" \ + ./release.nix \ + --attr docker \ + --attr package.linux.x64.elf \ + --attr package.windows.x64.exe \ + --attr package.macos.x64.macho)" + - cp -r $builds ./builds/ + only: + - master + artifacts: + paths: + - ./builds/ + +application run: + stage: quality + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + dependencies: + - nix + script: + - > + build_application="$( \ + gunzip -c ./builds/js-polykey.closure.gz | \ + nix-store --import | \ + tail -1 \ + )" + - $build_application/bin/polykey + only: + - master + +docker run: + stage: quality + image: docker:20.10.11 + dependencies: + - nix + services: + - docker:20.10.11-dind + variables: + 
DOCKER_TLS_CERTDIR: "/certs" + before_script: + - docker info + script: + - image="$(docker load --input ./builds/*docker* | cut -d' ' -f3)" + - docker run "$image" + only: + - master + +linux run: + stage: quality + image: ubuntu:latest + dependencies: + - nix + script: + - for f in ./builds/*-linux-*; do "$f"; done + only: + - master + +windows run: + stage: quality + dependencies: + - nix + script: + - Get-ChildItem -File ./builds/*-win32-* | ForEach {& $_.FullName} + tags: + - windows only: - master +macos run: + stage: quality + image: macos-11-xcode-12 + dependencies: + - nix + script: + - for f in ./builds/*-macos-*; do "$f"; done + only: + - master + tags: + - shared-macos-amd64 + packages: stage: release - interruptible: true + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + dependencies: + - nix script: - > nix-shell -I nixpkgs=./pkgs.nix --packages git gitAndTools.gh --run ' - builds="$(nix-build \ - --max-jobs $(nproc) --cores $(nproc) \ - ./release.nix \ - --attr package.linux.x64.elf \ - --attr package.windows.x64.exe \ - --attr package.macos.x64.macho)"; commit="$(git rev-parse --short HEAD)"; gh release \ - create "$commit" $builds \ + create "$commit" \ + builds/*.closure.gz \ + builds/*-linux-* \ + builds/*-win32-* \ + builds/*-macos-* \ --title "Build-$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ --prerelease \ --notes "" \ diff --git a/.npmignore b/.npmignore index afe09caa5..4a1857eea 100644 --- a/.npmignore +++ b/.npmignore @@ -13,3 +13,4 @@ /tmp /docs /benches +/builds diff --git a/README.md b/README.md index 435046cbe..04e68a90f 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ Once you update the `src/proto/schemas` files, run this to update the `src/proto npm run proto-generate ``` -### Executing Commands +### Calling Commands When calling commands in development, use this style: @@ -85,7 +85,7 @@ npm run docs See the docs at: https://matrixai.github.io/Polykey/ -### Publishing +### Publishing to NPM ```sh # npm 
login @@ -96,6 +96,18 @@ git push git push --tags ``` +### Packaging Cross-Platform Executables + +We use `pkg` to package the source code into executables. + +This requires a specific version of `pkg` and also `node-gyp-build`. + +Configuration for `pkg` is done in: + +* `package.json` - Pins `pkg` and `node-gyp-build`, and configures assets and scripts. +* `utils.nix` - Pins `pkg` for Nix usage +* `release.nix` - Build expressions for executables + ## Deployment ### Deploying to AWS ECS: diff --git a/default.nix b/default.nix index 478ceca03..79e7317a4 100644 --- a/default.nix +++ b/default.nix @@ -12,7 +12,9 @@ let } '' mkdir -p $out/lib/node_modules/${utils.node2nixDev.packageName} - # copy only the dist + # copy the package.json + cp ${utils.node2nixDev}/lib/node_modules/${utils.node2nixDev.packageName}/package.json $out/lib/node_modules/${utils.node2nixDev.packageName}/ + # copy the dist cp -r ${utils.node2nixDev}/lib/node_modules/${utils.node2nixDev.packageName}/dist $out/lib/node_modules/${utils.node2nixDev.packageName}/ # copy over the production dependencies if [ -d "${utils.node2nixProd}/lib/node_modules" ]; then diff --git a/jest.config.js b/jest.config.js index e372fda36..46fe908e2 100644 --- a/jest.config.js +++ b/jest.config.js @@ -1,3 +1,7 @@ +const os = require('os'); +const path = require('path'); +const fs = require('fs'); +const process = require('process'); const { pathsToModuleNameMapper } = require('ts-jest/utils'); const { compilerOptions } = require('./tsconfig'); @@ -10,8 +14,33 @@ const moduleNameMapper = pathsToModuleNameMapper( // https://github.com/panva/jose/discussions/105 moduleNameMapper['^jose/(.*)$'] = "/node_modules/jose/dist/node/cjs/$1"; +// Global variables that are shared across the jest worker pool +// These variables must be static and serialisable +const globals = { + // Absolute directory to the project root + projectDir: __dirname, + // Absolute directory to the test root + testDir: path.join(__dirname, 'tests'), + // 
Default global data directory + dataDir: fs.mkdtempSync( + path.join(os.tmpdir(), 'polykey-test-global-'), + ), + // Default asynchronous test timeout + defaultTimeout: 20000, + polykeyStartupTimeout: 30000, + failedConnectionTimeout: 50000, + // Timeouts rely on setTimeout which takes 32 bit numbers + maxTimeout: Math.pow(2, 31) - 1, +}; + +// The `globalSetup` and `globalTeardown` cannot access the `globals` +// They run in their own process context +// They can receive process environment +process.env['GLOBAL_DATA_DIR'] = globals.dataDir; + module.exports = { testEnvironment: "node", + cacheDirectory: '/tmp/jest', verbose: true, roots: [ "/tests" @@ -23,13 +52,21 @@ module.exports = { "^.+\\.tsx?$": "ts-jest", "^.+\\.jsx?$": "babel-jest" }, + globals, + // Global setup script executed once before all test files + globalSetup: "/tests/globalSetup.ts", + // Global teardown script executed once after all test files + globalTeardown: "/tests/globalTeardown.ts", + // Setup files are executed before each test file + // Can access globals setupFiles: [ "/tests/setup.ts" ], + // Setup files after env are executed before each test file + // after the jest test environment is installed + // Can access globals setupFilesAfterEnv: [ "/tests/setupAfterEnv.ts" ], - globalSetup: "/tests/globalSetup.ts", - globalTeardown: "/tests/globalTeardown.ts", moduleNameMapper: moduleNameMapper }; diff --git a/package-lock.json b/package-lock.json index 758918abc..bf05be89d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1574,23 +1574,34 @@ } }, "@matrixai/async-init": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@matrixai/async-init/-/async-init-1.2.0.tgz", - "integrity": "sha512-JM8bEvE9v5woWS2FohgWi66CV3cCD/j1cQvNPIBxAiKCoVPlJC/8geROinx3DGO5Wj7jTXkfzI9Ldu0tf8aPbg==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@matrixai/async-init/-/async-init-1.6.0.tgz", + "integrity": 
"sha512-I24u6McZnSH2yX1l5e2H3O/Lu8IVb2fM/sVbDeRYrzejV2XLv/9g/goz2fglSrXgJ877BBFJNW2GMxVzvvyA5A==", "requires": { + "async-mutex": "^0.3.2", "ts-custom-error": "^3.2.0" + }, + "dependencies": { + "async-mutex": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/async-mutex/-/async-mutex-0.3.2.tgz", + "integrity": "sha512-HuTK7E7MT7jZEh1P9GtRW9+aTWiDWWi9InbZ5hjxrnRa39KS4BW04+xLBhYNS2aXhHUIKZSw3gj4Pn1pj+qGAA==", + "requires": { + "tslib": "^2.3.1" + } + } } }, "@matrixai/db": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@matrixai/db/-/db-1.1.0.tgz", - "integrity": "sha512-qoNbSbGzhZwjq+vXQcJ3Lk/z9NMNDlvSKd6wWe80i5ZMuVM+rbE8PenlWC4EXX77i+pfnV8rV51kz+V5FPt3Vg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@matrixai/db/-/db-1.1.2.tgz", + "integrity": "sha512-wkVEEAJZaWS5Kbg6T/LcI6lS8AdWqszp8L1Dxmk7vwr1ihIkoIVQNSQ+FQryaFpor2eqh/wJaOKjDUpcHo+hEg==", "requires": { "@matrixai/logger": "^2.0.1", "@matrixai/workers": "^1.2.3", "abstract-leveldown": "^7.0.0", "async-mutex": "^0.3.1", - "level": "^6.0.1", + "level": "7.0.1", "levelup": "^5.0.1", "sublevel-prefixer": "^1.0.0", "subleveldown": "^5.0.1", @@ -1605,16 +1616,6 @@ "requires": { "tslib": "^2.3.1" } - }, - "level": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/level/-/level-6.0.1.tgz", - "integrity": "sha512-psRSqJZCsC/irNhfHzrVZbmPYXDcEYhA5TVNwr+V92jF44rbf86hqGp8fiT702FyiArScYIlPSBTDUASCVNSpw==", - "requires": { - "level-js": "^5.0.0", - "level-packager": "^5.1.0", - "leveldown": "^5.4.0" - } } } }, @@ -1633,11 +1634,11 @@ "integrity": "sha512-UmLuXi2PJ03v0Scfl57217RPnjEZDRLlpfdIjIwCfju+kofnhhCI9P7OZu3/FgW147vbvSzWCrrtpwJiLROUUA==" }, "@matrixai/workers": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@matrixai/workers/-/workers-1.2.3.tgz", - "integrity": "sha512-IRhUy25BnjjFn1d96Q7ZtGkqaWR8GgK70QHeoVy/WDzC0dfWLzlxPwu0D5t1/CilXYKOOnHgbC3EareSnbSorQ==", + "version": "1.2.5", + "resolved": 
"https://registry.npmjs.org/@matrixai/workers/-/workers-1.2.5.tgz", + "integrity": "sha512-ikI4K6RGKQbG68it7TXJJ5wX2csW+WpokUehTnz5r66d7o6FC3PkojE46LPLCDSwk3NVCGoQ743OZS2nuA8SRA==", "requires": { - "@matrixai/logger": "^2.0.1", + "@matrixai/logger": "^2.1.0", "threads": "^1.6.5", "ts-custom-error": "^3.2.0" } @@ -3136,48 +3137,12 @@ "dev": true }, "deferred-leveldown": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz", - "integrity": "sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-7.0.0.tgz", + "integrity": "sha512-QKN8NtuS3BC6m0B8vAnBls44tX1WXAFATUsJlruyAYbZpysWV3siH6o/i3g9DCHauzodksO60bdj5NazNbjCmg==", "requires": { - "abstract-leveldown": "~6.2.1", + "abstract-leveldown": "^7.2.0", "inherits": "^2.0.3" - }, - "dependencies": { - "abstract-leveldown": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", - "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", - "requires": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" - }, - "level-supports": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } - } } }, "define-properties": { @@ -3362,50 +3327,14 @@ "dev": true }, "encoding-down": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-6.3.0.tgz", - "integrity": "sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-7.1.0.tgz", + "integrity": "sha512-ky47X5jP84ryk5EQmvedQzELwVJPjCgXDQZGeb9F6r4PdChByCGHTBrVcF3h8ynKVJ1wVbkxTsDC8zBROPypgQ==", "requires": { - "abstract-leveldown": "^6.2.1", + "abstract-leveldown": "^7.2.0", "inherits": "^2.0.3", - "level-codec": "^9.0.0", - "level-errors": "^2.0.0" - }, - "dependencies": { - "abstract-leveldown": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.3.0.tgz", - "integrity": "sha512-TU5nlYgta8YrBMNpc9FwQzRbiXsj49gsALsXadbGHt9CROPzX5fB0rWDR5mtdpOOKa5XqRFpbj1QroPAoPzVjQ==", - "requires": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" - }, - "level-supports": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } - } + "level-codec": "^10.0.0", + "level-errors": "^3.0.0" } }, "encryptedfs": { @@ -5894,6 +5823,12 @@ "integrity": "sha512-ZZUKRlEBizutngoO4KngzN30YoeAYP3nnwimk4cpi9WqLxQUf6SlAPK5p1D9usEpxDS3Uif2MIez3Bq0vGYR+g==", "dev": true }, + "jest-mock-props": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/jest-mock-props/-/jest-mock-props-1.9.0.tgz", + "integrity": "sha512-8IlIiZRvovnRuvqcvWZyDv4CyhrUGTbEW/1eKurHr2JY4VhIWQIPlbpt9lqL2nxdGnco+OcgpPBwGYCEeDb2+A==", + "dev": true + }, "jest-pnp-resolver": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz", @@ -6504,93 +6439,56 @@ "integrity": "sha1-eZllXoZGwX8In90YfRUNMyTVRRM=", "dev": true }, + "level": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/level/-/level-7.0.1.tgz", + "integrity": "sha512-w3E64+ALx2eZf8RV5JL4kIcE0BFAvQscRYd1yU4YVqZN9RGTQxXSvH202xvK15yZwFFxRXe60f13LJjcJ//I4Q==", + "requires": { + "level-js": "^6.1.0", + "level-packager": "^6.0.1", + "leveldown": "^6.1.0" + } + }, "level-codec": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-9.0.2.tgz", - "integrity": "sha512-UyIwNb1lJBChJnGfjmO0OR+ezh2iVu1Kas3nvBS/BzGnx79dv6g7unpKIDNPMhfdTEGoc7mC8uAu51XEtX+FHQ==", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-10.0.0.tgz", + "integrity": "sha512-QW3VteVNAp6c/LuV6nDjg7XDXx9XHK4abmQarxZmlRSDyXYk20UdaJTSX6yzVvQ4i0JyWSB7jert0DsyD/kk6g==", "requires": { - "buffer": "^5.6.0" - }, - "dependencies": { - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - 
"requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - } + "buffer": "^6.0.3" } }, "level-concat-iterator": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-3.0.0.tgz", - "integrity": "sha512-UHGiIdj+uiFQorOrURRvJF3Ei0uHc89ciM/aRi0qsWDV2f0HXypeXUPhJKL6DsONgSR76Pc0AI4sKYEYYRn2Dg==" - }, - "level-errors": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", - "integrity": "sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-3.1.0.tgz", + "integrity": "sha512-BWRCMHBxbIqPxJ8vHOvKUsaO0v1sLYZtjN3K2iZJsRBYtp+ONsY6Jfi6hy9K3+zolgQRryhIn2NRZjZnWJ9NmQ==", "requires": { - "errno": "~0.1.1" + "catering": "^2.1.0" } }, + "level-errors": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-3.0.1.tgz", + "integrity": "sha512-tqTL2DxzPDzpwl0iV5+rBCv65HWbHp6eutluHNcVIftKZlQN//b6GEnZDM2CvGZvzGYMwyPtYppYnydBQd2SMQ==" + }, "level-iterator-stream": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz", - "integrity": "sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-5.0.0.tgz", + "integrity": "sha512-wnb1+o+CVFUDdiSMR/ZymE2prPs3cjVLlXuDeSq9Zb8o032XrabGEXcTCsBxprAtseO3qvFeGzh6406z9sOTRA==", "requires": { "inherits": "^2.0.4", - "readable-stream": "^3.4.0", - "xtend": "^4.0.2" + "readable-stream": "^3.4.0" } }, "level-js": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/level-js/-/level-js-5.0.2.tgz", - "integrity": "sha512-SnBIDo2pdO5VXh02ZmtAyPP6/+6YTJg2ibLtl9C34pWvmtMEmRTWpra+qO/hifkUtBTOtfx6S9vLDjBsBK4gRg==", + "version": 
"6.1.0", + "resolved": "https://registry.npmjs.org/level-js/-/level-js-6.1.0.tgz", + "integrity": "sha512-i7mPtkZm68aewfv0FnIUWvFUFfoyzIvVKnUmuQGrelEkP72vSPTaA1SGneWWoCV5KZJG4wlzbJLp1WxVNGuc6A==", "requires": { - "abstract-leveldown": "~6.2.3", - "buffer": "^5.5.0", + "abstract-leveldown": "^7.2.0", + "buffer": "^6.0.3", "inherits": "^2.0.3", - "ltgt": "^2.1.2" - }, - "dependencies": { - "abstract-leveldown": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", - "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", - "requires": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" - }, - "level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } - } + "ltgt": "^2.1.2", + "run-parallel-limit": "^1.1.0" } }, "level-option-wrap": { @@ -6602,84 +6500,33 @@ } }, "level-packager": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-5.1.1.tgz", - "integrity": 
"sha512-HMwMaQPlTC1IlcwT3+swhqf/NUO+ZhXVz6TY1zZIIZlIR0YSn8GtAAWmIvKjNY16ZkEg/JcpAuQskxsXqC0yOQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/level-packager/-/level-packager-6.0.1.tgz", + "integrity": "sha512-8Ezr0XM6hmAwqX9uu8IGzGNkWz/9doyPA8Oo9/D7qcMI6meJC+XhIbNYHukJhIn8OGdlzQs/JPcL9B8lA2F6EQ==", "requires": { - "encoding-down": "^6.3.0", - "levelup": "^4.3.2" - }, - "dependencies": { - "level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } - }, - "levelup": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/levelup/-/levelup-4.4.0.tgz", - "integrity": "sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ==", - "requires": { - "deferred-leveldown": "~5.3.0", - "level-errors": "~2.0.0", - "level-iterator-stream": "~4.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - } + "encoding-down": "^7.1.0", + "levelup": "^5.1.1" } }, "level-supports": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-2.0.2.tgz", - "integrity": "sha512-dU1W7OnntoCXeNfy9c93K7KEoGNsuP+zZLbUQrIbBzhdZ75U0h8GEcioqmJc1QpYVORyFfeR+kyFeNx2N4t7lg==" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-2.1.0.tgz", + "integrity": "sha512-E486g1NCjW5cF78KGPrMDRBYzPuueMZ6VBXHT6gC7A8UYWGiM14fGgp+s/L1oFfDWSPV/+SFkYCmZ0SiESkRKA==" }, "leveldown": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/leveldown/-/leveldown-5.6.0.tgz", - "integrity": "sha512-iB8O/7Db9lPaITU1aA2txU/cBEXAt4vWwKQRrrWuS6XDgbP4QZGj9BL2aNbwb002atoQ/lIotJkfyzz+ygQnUQ==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/leveldown/-/leveldown-6.1.0.tgz", + "integrity": 
"sha512-8C7oJDT44JXxh04aSSsfcMI8YiaGRhOFI9/pMEL7nWJLVsWajDPTRxsSHTM2WcTVY5nXM+SuRHzPPi0GbnDX+w==", "requires": { - "abstract-leveldown": "~6.2.1", + "abstract-leveldown": "^7.2.0", "napi-macros": "~2.0.0", - "node-gyp-build": "~4.1.0" + "node-gyp-build": "^4.3.0" }, "dependencies": { - "abstract-leveldown": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", - "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", - "requires": { - "buffer": "^5.5.0", - "immediate": "^3.2.3", - "level-concat-iterator": "~2.0.0", - "level-supports": "~1.0.0", - "xtend": "~4.0.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "level-concat-iterator": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", - "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" - }, - "level-supports": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", - "integrity": "sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg==", - "requires": { - "xtend": "^4.0.2" - } + "node-gyp-build": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.3.0.tgz", + "integrity": "sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==" } } }, @@ -6694,31 +6541,6 @@ "level-iterator-stream": "^5.0.0", "level-supports": "^2.0.1", "queue-microtask": "^1.2.3" - }, - "dependencies": { - "deferred-leveldown": { - "version": "7.0.0", - "resolved": 
"https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-7.0.0.tgz", - "integrity": "sha512-QKN8NtuS3BC6m0B8vAnBls44tX1WXAFATUsJlruyAYbZpysWV3siH6o/i3g9DCHauzodksO60bdj5NazNbjCmg==", - "requires": { - "abstract-leveldown": "^7.2.0", - "inherits": "^2.0.3" - } - }, - "level-errors": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-3.0.1.tgz", - "integrity": "sha512-tqTL2DxzPDzpwl0iV5+rBCv65HWbHp6eutluHNcVIftKZlQN//b6GEnZDM2CvGZvzGYMwyPtYppYnydBQd2SMQ==" - }, - "level-iterator-stream": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-5.0.0.tgz", - "integrity": "sha512-wnb1+o+CVFUDdiSMR/ZymE2prPs3cjVLlXuDeSq9Zb8o032XrabGEXcTCsBxprAtseO3qvFeGzh6406z9sOTRA==", - "requires": { - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - } } }, "leven": { @@ -7133,9 +6955,10 @@ "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" }, "node-gyp-build": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.1.1.tgz", - "integrity": "sha512-dSq1xmcPDKPZ2EED2S6zw/b9NKsqzXRE6dVr8TVQnI3FJOTteUMuqF3Qqs6LZg+mLGYJWqQzMbIjMtJqTv87nQ==" + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz", + "integrity": "sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg==", + "dev": true }, "node-int64": { "version": "0.4.0", @@ -8135,6 +7958,14 @@ "queue-microtask": "^1.2.2" } }, + "run-parallel-limit": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz", + "integrity": "sha512-jJA7irRNM91jaKc3Hcl1npHsFLOXOoTkPCUL1JEa1R82O2miplXXRaGdjW/KM/98YQWDhJLiSs793CnXfblJUw==", + "requires": { + "queue-microtask": "^1.2.2" + } + }, "safe-buffer": { "version": "5.2.1", "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -8855,11 +8686,71 @@ "ieee754": "^1.1.13" } }, + "deferred-leveldown": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz", + "integrity": "sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw==", + "requires": { + "abstract-leveldown": "~6.2.1", + "inherits": "^2.0.3" + }, + "dependencies": { + "abstract-leveldown": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz", + "integrity": "sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ==", + "requires": { + "buffer": "^5.5.0", + "immediate": "^3.2.3", + "level-concat-iterator": "~2.0.0", + "level-supports": "~1.0.0", + "xtend": "~4.0.0" + } + } + } + }, + "encoding-down": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/encoding-down/-/encoding-down-6.3.0.tgz", + "integrity": "sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw==", + "requires": { + "abstract-leveldown": "^6.2.1", + "inherits": "^2.0.3", + "level-codec": "^9.0.0", + "level-errors": "^2.0.0" + } + }, + "level-codec": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/level-codec/-/level-codec-9.0.2.tgz", + "integrity": "sha512-UyIwNb1lJBChJnGfjmO0OR+ezh2iVu1Kas3nvBS/BzGnx79dv6g7unpKIDNPMhfdTEGoc7mC8uAu51XEtX+FHQ==", + "requires": { + "buffer": "^5.6.0" + } + }, "level-concat-iterator": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz", "integrity": "sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw==" }, + "level-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/level-errors/-/level-errors-2.0.1.tgz", + "integrity": 
"sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw==", + "requires": { + "errno": "~0.1.1" + } + }, + "level-iterator-stream": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz", + "integrity": "sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q==", + "requires": { + "inherits": "^2.0.4", + "readable-stream": "^3.4.0", + "xtend": "^4.0.2" + } + }, "level-supports": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/level-supports/-/level-supports-1.0.1.tgz", diff --git a/package.json b/package.json index f17d64668..d560c3458 100644 --- a/package.json +++ b/package.json @@ -52,13 +52,15 @@ "assets": [ "node_modules/jose/**/*", "node_modules/utp-native/**/*", - "node_modules/leveldown/**/*" + "node_modules/leveldown/**/*", + "node_modules/fd-lock/**/*", + "dist/**/*.json" ], "scripts": "dist/workers/polykeyWorker.js" }, "scripts": { "build": "rm -r ./dist || true; tsc -p ./tsconfig.build.json", - "postbuild": "cp -fR src/proto dist", + "postbuild": "cp -fR src/proto dist; cp src/notifications/*.json dist/notifications/; cp src/claims/*.json dist/claims/; cp src/status/*.json dist/status/;", "ts-node": "ts-node --require tsconfig-paths/register", "test": "jest", "lint": "eslint '{src,tests}/**/*.{js,ts}'", @@ -69,11 +71,11 @@ }, "dependencies": { "@grpc/grpc-js": "1.3.7", - "@matrixai/async-init": "^1.2.0", - "@matrixai/db": "^1.1.0", + "@matrixai/async-init": "^1.6.0", + "@matrixai/db": "^1.1.2", "@matrixai/id": "^2.1.0", "@matrixai/logger": "^2.1.0", - "@matrixai/workers": "^1.2.3", + "@matrixai/workers": "^1.2.5", "ajv": "^7.0.4", "async-mutex": "^0.2.4", "bip39": "^3.0.3", @@ -123,8 +125,10 @@ "grpc_tools_node_protoc_ts": "^5.1.3", "jest": "^26.6.3", "jest-mock-process": "^1.4.1", + "jest-mock-props": "^1.9.0", "mocked-env": "^1.3.5", "nexpect": "^0.6.0", + "node-gyp-build": "4.2.3", "pkg": 
"5.3.0", "prettier": "^2.2.1", "ts-jest": "^26.4.4", diff --git a/release.nix b/release.nix index aab9030e6..f4a5365f1 100644 --- a/release.nix +++ b/release.nix @@ -20,9 +20,9 @@ let pkg . \ --targets linux-${arch} \ --no-bytecode \ + --public \ --public-packages "*" \ - --output out\ - --verbose + --output out ''; installPhase = '' cp out $out @@ -46,6 +46,7 @@ let pkg . \ --targets win-${arch} \ --no-bytecode \ + --public \ --public-packages "*" \ --output out.exe ''; @@ -71,6 +72,7 @@ let pkg . \ --targets macos-${arch} \ --no-bytecode \ + --public \ --public-packages "*" \ --output out ''; diff --git a/scripts/test-pipelines.sh b/scripts/test-pipelines.sh new file mode 100755 index 000000000..323850fdd --- /dev/null +++ b/scripts/test-pipelines.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash + +shopt -s globstar +shopt -s nullglob + +# Quote the heredoc to prevent shell expansion +cat << "EOF" +variables: + GIT_SUBMODULE_STRATEGY: "recursive" + # Cache .npm + NPM_CONFIG_CACHE: "./tmp/npm" + # Prefer offline node module installation + NPM_CONFIG_PREFER_OFFLINE: "true" + # `ts-node` has its own cache + # It must use an absolute path, otherwise ts-node calls will CWD + TS_CACHED_TRANSPILE_CACHE: "${CI_PROJECT_DIR}/tmp/ts-node-cache" + TS_CACHED_TRANSPILE_PORTABLE: "true" + +# Cached directories shared between jobs & pipelines per-branch +cache: + key: $CI_COMMIT_REF_SLUG + paths: + - ./tmp/npm/ + - ./tmp/ts-node-cache/ + # `jest` cache is configured in jest.config.js + - ./tmp/jest/ +EOF + +printf "\n" + +# # SPECIAL CASE +# cat << EOF +# test binagent: +# image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner +# stage: test +# interruptible: true +# script: +# - > +# nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' +# npm ci; +# npm test -- ./tests/bin/agent; +# ' +# EOF + +# Each test directory has its own job +for test_dir in tests/**/*/; do + test_files=("$test_dir"*.test.ts) + if [ ${#test_files[@]} -eq 0 ]; then + continue + fi + # 
Remove trailing slash + test_dir="${test_dir%\/}" + # Remove `tests/` prefix + test_dir="${test_dir#*/}" + cat << EOF +test $test_dir: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: test + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm ci; + npm test -- ${test_files[@]}; + ' +EOF + printf "\n" +done + +# All top-level test files are accumulated into 1 job +test_files=(tests/*.test.ts) +cat << EOF +test index: + image: registry.gitlab.com/matrixai/engineering/maintenance/gitlab-runner + stage: test + interruptible: true + script: + - > + nix-shell -I nixpkgs=./pkgs.nix --packages nodejs --run ' + npm ci; + npm test -- ${test_files[@]}; + ' +EOF diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index d32229ff6..00df9198d 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -146,163 +146,187 @@ class PolykeyAgent { }; await utils.mkdirExists(fs, nodePath); const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const statePath = path.join(nodePath, config.defaults.stateBase); const dbPath = path.join(statePath, config.defaults.dbBase); const keysPath = path.join(statePath, config.defaults.keysBase); const vaultsPath = path.join(statePath, config.defaults.vaultsBase); - status = - status ?? - new Status({ - statusPath, - fs: fs, - logger: logger.getChild(Status.name), - }); - // Start locking the status - await status.start({ pid: process.pid }); - schema = - schema ?? - (await Schema.createSchema({ - statePath, - fs, - logger: logger.getChild(Schema.name), - fresh, - })); - keyManager = - keyManager ?? - (await KeyManager.createKeyManager({ - ...keysConfig_, - keysPath, - password, - fs, - logger: logger.getChild(KeyManager.name), - fresh, - })); - db = - db ?? 
- (await DB.createDB({ - dbPath, - crypto: { - key: keyManager.dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, + try { + status = + status ?? + new Status({ + statusPath, + statusLockPath, + fs: fs, + logger: logger.getChild(Status.name), + }); + // Start locking the status + await status.start({ pid: process.pid }); + schema = + schema ?? + (await Schema.createSchema({ + statePath, + fs, + logger: logger.getChild(Schema.name), + fresh, + })); + keyManager = + keyManager ?? + (await KeyManager.createKeyManager({ + ...keysConfig_, + keysPath, + password, + fs, + logger: logger.getChild(KeyManager.name), + fresh, + })); + db = + db ?? + (await DB.createDB({ + dbPath, + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, }, - }, - fs, - logger: logger.getChild(DB.name), - fresh, - })); - identitiesManager = - identitiesManager ?? - (await IdentitiesManager.createIdentitiesManager({ - db, - logger: logger.getChild(IdentitiesManager.name), - fresh, - })); - // Registering providers - const githubProvider = new providers.GithubProvider({ - clientId: config.providers['github.com'].clientId, - logger: logger.getChild(providers.GithubProvider.name), - }); - identitiesManager.registerProvider(githubProvider); - sigchain = - sigchain ?? - (await Sigchain.createSigchain({ - keyManager, - db, - logger: logger.getChild(Sigchain.name), - fresh, - })); - acl = - acl ?? - (await ACL.createACL({ - db, - logger: logger.getChild(ACL.name), - fresh, - })); - gestaltGraph = - gestaltGraph ?? - (await GestaltGraph.createGestaltGraph({ - db, - acl, - logger: logger.getChild(GestaltGraph.name), - fresh, - })); - fwdProxy = - fwdProxy ?? - new ForwardProxy({ - ...forwardProxyConfig_, - logger: logger.getChild(ForwardProxy.name), + fs, + logger: logger.getChild(DB.name), + fresh, + })); + identitiesManager = + identitiesManager ?? 
+ (await IdentitiesManager.createIdentitiesManager({ + db, + logger: logger.getChild(IdentitiesManager.name), + fresh, + })); + // Registering providers + const githubProvider = new providers.GithubProvider({ + clientId: config.providers['github.com'].clientId, + logger: logger.getChild(providers.GithubProvider.name), }); - revProxy = - revProxy ?? - new ReverseProxy({ - ...reverseProxyConfig_, - logger: logger.getChild(ReverseProxy.name), - }); - nodeManager = - nodeManager ?? - (await NodeManager.createNodeManager({ - db, - seedNodes, - sigchain, - keyManager, - fwdProxy, - revProxy, - logger: logger.getChild(NodeManager.name), - fresh, - })); - discovery = - discovery ?? - (await Discovery.createDiscovery({ - gestaltGraph, - identitiesManager, - nodeManager, - logger: logger.getChild(Discovery.name), - })); - vaultManager = - vaultManager ?? - (await VaultManager.createVaultManager({ - vaultsKey: keyManager.vaultKey, - vaultsPath, - keyManager, - nodeManager, - gestaltGraph, - acl, - db, - fs, - logger: logger.getChild(VaultManager.name), - fresh, - })); - notificationsManager = - notificationsManager ?? - (await NotificationsManager.createNotificationsManager({ - acl, - db, - nodeManager, - keyManager, - logger: logger.getChild(NotificationsManager.name), - fresh, - })); - sessionManager = - sessionManager ?? - (await SessionManager.createSessionManager({ - db, - keyManager, - logger: logger.getChild(SessionManager.name), - fresh, - })); - grpcServerClient = - grpcServerClient ?? - new GRPCServer({ - logger: logger.getChild(GRPCServer.name + 'Client'), - }); - grpcServerAgent = - grpcServerAgent ?? - new GRPCServer({ - logger: logger.getChild(GRPCServer.name + 'Agent'), - }); - const polykeyAgent = new PolykeyAgent({ + identitiesManager.registerProvider(githubProvider); + sigchain = + sigchain ?? + (await Sigchain.createSigchain({ + keyManager, + db, + logger: logger.getChild(Sigchain.name), + fresh, + })); + acl = + acl ?? 
+ (await ACL.createACL({ + db, + logger: logger.getChild(ACL.name), + fresh, + })); + gestaltGraph = + gestaltGraph ?? + (await GestaltGraph.createGestaltGraph({ + db, + acl, + logger: logger.getChild(GestaltGraph.name), + fresh, + })); + fwdProxy = + fwdProxy ?? + new ForwardProxy({ + ...forwardProxyConfig_, + logger: logger.getChild(ForwardProxy.name), + }); + revProxy = + revProxy ?? + new ReverseProxy({ + ...reverseProxyConfig_, + logger: logger.getChild(ReverseProxy.name), + }); + nodeManager = + nodeManager ?? + (await NodeManager.createNodeManager({ + db, + seedNodes, + sigchain, + keyManager, + fwdProxy, + revProxy, + logger: logger.getChild(NodeManager.name), + fresh, + })); + // Discovery uses in-memory CreateDestroy pattern + // Therefore it should be destroyed during stop + discovery = + discovery ?? + (await Discovery.createDiscovery({ + gestaltGraph, + identitiesManager, + nodeManager, + logger: logger.getChild(Discovery.name), + })); + vaultManager = + vaultManager ?? + (await VaultManager.createVaultManager({ + vaultsKey: keyManager.vaultKey, + vaultsPath, + keyManager, + nodeManager, + gestaltGraph, + acl, + db, + fs, + logger: logger.getChild(VaultManager.name), + fresh, + })); + notificationsManager = + notificationsManager ?? + (await NotificationsManager.createNotificationsManager({ + acl, + db, + nodeManager, + keyManager, + logger: logger.getChild(NotificationsManager.name), + fresh, + })); + sessionManager = + sessionManager ?? + (await SessionManager.createSessionManager({ + db, + keyManager, + logger: logger.getChild(SessionManager.name), + fresh, + })); + grpcServerClient = + grpcServerClient ?? + new GRPCServer({ + logger: logger.getChild(GRPCServer.name + 'Client'), + }); + grpcServerAgent = + grpcServerAgent ?? 
+ new GRPCServer({ + logger: logger.getChild(GRPCServer.name + 'Agent'), + }); + } catch (e) { + logger.warn(`Failed Creating ${this.name}`); + await sessionManager?.stop(); + await notificationsManager?.stop(); + await vaultManager?.stop(); + await discovery?.destroy(); + await nodeManager?.stop(); + await revProxy?.stop(); + await fwdProxy?.stop(); + await gestaltGraph?.stop(); + await acl?.stop(); + await sigchain?.stop(); + await identitiesManager?.stop(); + await db?.stop(); + await keyManager?.stop(); + await schema?.stop(); + await status?.stop({}); + throw e; + } + const pkAgent = new PolykeyAgent({ nodePath, status, schema, @@ -324,13 +348,13 @@ class PolykeyAgent { fs, logger, }); - await polykeyAgent.start({ + await pkAgent.start({ password, networkConfig, fresh, }); logger.info(`Created ${this.name}`); - return polykeyAgent; + return pkAgent; } public readonly nodePath: string; @@ -429,100 +453,117 @@ class PolykeyAgent { networkConfig?: NetworkConfig; fresh?: boolean; }) { - this.logger.info(`Starting ${this.constructor.name}`); - const networkConfig_ = { - ...config.defaults.networkConfig, - ...utils.filterEmptyObject(networkConfig), - }; - await this.status.start({ pid: process.pid }); - await this.schema.start({ fresh }); - const agentService = createAgentService({ - keyManager: this.keyManager, - vaultManager: this.vaultManager, - nodeManager: this.nodeManager, - sigchain: this.sigchain, - notificationsManager: this.notificationsManager, - }); - const clientService = createClientService({ - polykeyAgent: this, - discovery: this.discovery, - gestaltGraph: this.gestaltGraph, - identitiesManager: this.identitiesManager, - keyManager: this.keyManager, - nodeManager: this.nodeManager, - notificationsManager: this.notificationsManager, - sessionManager: this.sessionManager, - vaultManager: this.vaultManager, - sigchain: this.sigchain, - grpcServerClient: this.grpcServerClient, - grpcServerAgent: this.grpcServerAgent, - fwdProxy: this.fwdProxy, - 
revProxy: this.revProxy, - fs: this.fs, - }); - - // Starting modules - await this.keyManager.start({ - password, - fresh, - }); - await this.db.start({ fresh }); - await this.identitiesManager.start({ fresh }); - await this.sigchain.start({ fresh }); - await this.acl.start({ fresh }); - await this.gestaltGraph.start({ fresh }); - - // GRPC Server - const tlsConfig = { - keyPrivatePem: this.keyManager.getRootKeyPairPem().privateKey, - certChainPem: await this.keyManager.getRootCertChainPem(), - }; - - // Client server - await this.grpcServerClient.start({ - services: [[ClientServiceService, clientService]], - host: networkConfig_.clientHost, - port: networkConfig_.clientPort, - tlsConfig, - }); - // Agent server - await this.grpcServerAgent.start({ - services: [[AgentServiceService, agentService]], - host: networkConfig_.agentHost, - port: networkConfig_.agentPort, - }); - await this.fwdProxy.start({ - proxyHost: networkConfig_.proxyHost, - proxyPort: networkConfig_.proxyPort, - egressHost: networkConfig_.egressHost, - egressPort: networkConfig_.egressPort, - tlsConfig, - }); - await this.revProxy.start({ - serverHost: this.grpcServerAgent.host, - serverPort: this.grpcServerAgent.port, - ingressHost: networkConfig_.ingressHost, - ingressPort: networkConfig_.ingressPort, - tlsConfig, - }); - - await this.nodeManager.start({ fresh }); - await this.nodeManager.getConnectionsToSeedNodes(); - await this.nodeManager.syncNodeGraph(); - await this.vaultManager.start({ fresh }); - await this.notificationsManager.start({ fresh }); - await this.sessionManager.start({ fresh }); - - await this.status.finishStart({ - pid: process.pid, - nodeId: this.keyManager.getNodeId(), - clientHost: this.grpcServerClient.host, - clientPort: this.grpcServerClient.port, - ingressHost: this.revProxy.ingressHost, - ingressPort: this.revProxy.ingressPort, - }); - - this.logger.info(`Started ${this.constructor.name}`); + try { + this.logger.info(`Starting ${this.constructor.name}`); + const 
networkConfig_ = { + ...config.defaults.networkConfig, + ...utils.filterEmptyObject(networkConfig), + }; + await this.status.start({ pid: process.pid }); + await this.schema.start({ fresh }); + const agentService = createAgentService({ + keyManager: this.keyManager, + vaultManager: this.vaultManager, + nodeManager: this.nodeManager, + sigchain: this.sigchain, + notificationsManager: this.notificationsManager, + }); + const clientService = createClientService({ + pkAgent: this, + discovery: this.discovery, + gestaltGraph: this.gestaltGraph, + identitiesManager: this.identitiesManager, + keyManager: this.keyManager, + nodeManager: this.nodeManager, + notificationsManager: this.notificationsManager, + sessionManager: this.sessionManager, + vaultManager: this.vaultManager, + sigchain: this.sigchain, + grpcServerClient: this.grpcServerClient, + grpcServerAgent: this.grpcServerAgent, + fwdProxy: this.fwdProxy, + revProxy: this.revProxy, + fs: this.fs, + }); + // Starting modules + await this.keyManager.start({ + password, + fresh, + }); + await this.db.start({ fresh }); + await this.identitiesManager.start({ fresh }); + await this.sigchain.start({ fresh }); + await this.acl.start({ fresh }); + await this.gestaltGraph.start({ fresh }); + // GRPC Server + const tlsConfig = { + keyPrivatePem: this.keyManager.getRootKeyPairPem().privateKey, + certChainPem: await this.keyManager.getRootCertChainPem(), + }; + // Client server + await this.grpcServerClient.start({ + services: [[ClientServiceService, clientService]], + host: networkConfig_.clientHost, + port: networkConfig_.clientPort, + tlsConfig, + }); + // Agent server + await this.grpcServerAgent.start({ + services: [[AgentServiceService, agentService]], + host: networkConfig_.agentHost, + port: networkConfig_.agentPort, + }); + await this.fwdProxy.start({ + proxyHost: networkConfig_.proxyHost, + proxyPort: networkConfig_.proxyPort, + egressHost: networkConfig_.egressHost, + egressPort: networkConfig_.egressPort, + 
tlsConfig, + }); + await this.revProxy.start({ + serverHost: this.grpcServerAgent.host, + serverPort: this.grpcServerAgent.port, + ingressHost: networkConfig_.ingressHost, + ingressPort: networkConfig_.ingressPort, + tlsConfig, + }); + await this.nodeManager.start({ fresh }); + await this.nodeManager.getConnectionsToSeedNodes(); + await this.nodeManager.syncNodeGraph(); + await this.vaultManager.start({ fresh }); + await this.notificationsManager.start({ fresh }); + await this.sessionManager.start({ fresh }); + await this.status.finishStart({ + pid: process.pid, + nodeId: this.keyManager.getNodeId(), + clientHost: this.grpcServerClient.host, + clientPort: this.grpcServerClient.port, + ingressHost: this.revProxy.getIngressHost(), + ingressPort: this.revProxy.getIngressPort(), + }); + this.logger.info(`Started ${this.constructor.name}`); + } catch (e) { + this.logger.warn(`Failed Starting ${this.constructor.name}`); + await this.status?.beginStop({ pid: process.pid }); + await this.sessionManager?.stop(); + await this.notificationsManager?.stop(); + await this.vaultManager?.stop(); + await this.discovery?.destroy(); + await this.nodeManager?.stop(); + await this.revProxy?.stop(); + await this.fwdProxy?.stop(); + await this.grpcServerAgent?.stop(); + await this.grpcServerClient?.stop(); + await this.gestaltGraph?.stop(); + await this.acl?.stop(); + await this.sigchain?.stop(); + await this.identitiesManager?.stop(); + await this.db?.stop(); + await this.keyManager?.stop(); + await this.schema?.stop(); + await this.status?.stop({}); + throw e; + } } /** @@ -534,6 +575,7 @@ class PolykeyAgent { await this.sessionManager.stop(); await this.notificationsManager.stop(); await this.vaultManager.stop(); + await this.discovery.destroy(); await this.nodeManager.stop(); await this.revProxy.stop(); await this.fwdProxy.stop(); @@ -556,7 +598,6 @@ class PolykeyAgent { await this.db.start(); await this.sessionManager.destroy(); await this.notificationsManager.destroy(); - await 
this.discovery.destroy(); await this.vaultManager.destroy(); await this.nodeManager.destroy(); await this.gestaltGraph.destroy(); diff --git a/src/PolykeyClient.ts b/src/PolykeyClient.ts index d39259bfe..b124feefa 100644 --- a/src/PolykeyClient.ts +++ b/src/PolykeyClient.ts @@ -62,8 +62,8 @@ class PolykeyClient { grpcClient ?? (await GRPCClientClient.createGRPCClientClient({ nodeId, - host: host, - port: port, + host, + port, tlsConfig: { keyPrivatePem: undefined, certChainPem: undefined }, session, timeout, diff --git a/src/agent/GRPCClientAgent.ts b/src/agent/GRPCClientAgent.ts index ce9a22c9a..19f63f3cb 100644 --- a/src/agent/GRPCClientAgent.ts +++ b/src/agent/GRPCClientAgent.ts @@ -1,6 +1,11 @@ -import type { Host, Port, ProxyConfig } from '../network/types'; +import type { ClientDuplexStream } from '@grpc/grpc-js'; +import type { ClientReadableStream } from '@grpc/grpc-js/build/src/call'; +import type { + AsyncGeneratorReadableStreamClient, + AsyncGeneratorDuplexStreamClient, +} from '../grpc/types'; import type { NodeId } from '../nodes/types'; -import type { TLSConfig } from '../network/types'; +import type { Host, Port, ProxyConfig, TLSConfig } from '../network/types'; import type * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; import type * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; import type * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; @@ -37,7 +42,6 @@ class GRPCClientAgent extends GRPCClient { timeout?: number; logger?: Logger; }): Promise { - logger.info(`Creating ${this.name}`); const { client, serverCertChain, flowCountInterceptor } = await super.createClient({ clientConstructor: AgentServiceClient, @@ -60,14 +64,11 @@ class GRPCClientAgent extends GRPCClient { flowCountInterceptor, logger, }); - logger.info(`Created ${this.name}`); return grpcClientAgent; } public async destroy() { - this.logger.info(`Destroying ${this.constructor.name}`); await super.destroy(); - this.logger.info(`Destroyed 
${this.constructor.name}`); } @ready(new agentErrors.ErrorAgentClientDestroyed()) @@ -79,7 +80,12 @@ class GRPCClientAgent extends GRPCClient { } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsGitInfoGet(...args) { + public vaultsGitInfoGet( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.PackChunk, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsGitInfoGet, @@ -87,12 +93,19 @@ class GRPCClientAgent extends GRPCClient { } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsGitPackGet(...args) { + public vaultsGitPackGet( + ...args + ): ClientDuplexStream { return this.client.vaultsGitPackGet(...args); } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsScan(...args) { + public vaultsScan( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.Vault, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsScan, @@ -140,15 +153,21 @@ class GRPCClientAgent extends GRPCClient { } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsPermisssionsCheck(...args) { + public vaultsPermissionsCheck(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsPermisssionsCheck, + this.client.vaultsPermissionsCheck, )(...args); } @ready(new agentErrors.ErrorAgentClientDestroyed()) - public nodesCrossSignClaim(...args) { + public nodesCrossSignClaim( + ...args + ): AsyncGeneratorDuplexStreamClient< + nodesPB.CrossSign, + nodesPB.CrossSign, + ClientDuplexStream + > { return grpcUtils.promisifyDuplexStreamCall< nodesPB.CrossSign, nodesPB.CrossSign diff --git a/src/agent/agentService.ts b/src/agent/agentService.ts deleted file mode 100644 index 97811084f..000000000 --- a/src/agent/agentService.ts +++ /dev/null @@ -1,471 +0,0 @@ -import type { - ClaimEncoded, - ClaimIntermediary, - ClaimIdString, -} from '../claims/types'; -import type { VaultName } from 
'../vaults/types'; - -import type { NodeManager } from '../nodes'; -import type { VaultManager } from '../vaults'; -import type { Sigchain } from '../sigchain'; -import type { KeyManager } from '../keys'; -import type { NotificationsManager } from '../notifications'; -import type { IAgentServiceServer } from '../proto/js/polykey/v1/agent_service_grpc_pb'; -import type * as notificationsPB from '../proto/js/polykey/v1/notifications/notifications_pb'; -import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { promisify } from '../utils'; -import * as networkUtils from '../network/utils'; -import { ErrorGRPC } from '../grpc/errors'; -import { AgentServiceService } from '../proto/js/polykey/v1/agent_service_grpc_pb'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import * as grpcUtils from '../grpc/utils'; -import { - utils as notificationsUtils, - errors as notificationsErrors, -} from '../notifications'; -import { errors as vaultsErrors } from '../vaults'; -import { utils as claimsUtils, errors as claimsErrors } from '../claims'; -import { makeVaultId, makeVaultIdPretty } from '../vaults/utils'; -import { makeNodeId } from '../nodes/utils'; - -/** - * Creates the client service for use with a GRPCServer - * @param domains An object representing all the domains / managers the agent server uses. 
- * @returns an IAgentServer object - */ -function createAgentService({ - keyManager, - vaultManager, - nodeManager, - notificationsManager, - sigchain, -}: { - keyManager: KeyManager; - vaultManager: VaultManager; - nodeManager: NodeManager; - sigchain: Sigchain; - notificationsManager: NotificationsManager; -}): IAgentServiceServer { - const agentService: IAgentServiceServer = { - echo: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EchoMessage(); - response.setChallenge(call.request.getChallenge()); - callback(null, response); - }, - vaultsGitInfoGet: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - const request = call.request; - const vaultNameOrId = request.getNameOrId(); - let vaultId, vaultName; - try { - vaultId = makeVaultId(idUtils.fromString(vaultNameOrId)); - await vaultManager.openVault(vaultId); - vaultName = await vaultManager.getVaultName(vaultId); - } catch (err) { - if (err instanceof vaultsErrors.ErrorVaultUndefined) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await vaultManager.openVault(vaultId); - vaultName = vaultNameOrId; - } else { - throw err; - } - } - // TODO: Check the permissions here - const meta = new grpc.Metadata(); - meta.set('vaultName', vaultName); - meta.set('vaultId', makeVaultIdPretty(vaultId)); - genWritable.stream.sendMetadata(meta); - const response = new vaultsPB.PackChunk(); - const responseGen = vaultManager.handleInfoRequest(vaultId); - for await (const byte of responseGen) { - if (byte !== null) { - response.setChunk(byte); - await genWritable.next(response); - } else { - await genWritable.next(null); - } - } - await genWritable.next(null); - }, - vaultsGitPackGet: async ( - call: grpc.ServerDuplexStream, - ) => { - const write = promisify(call.write).bind(call); - const clientBodyBuffers: Buffer[] = []; - call.on('data', (d) => { - 
clientBodyBuffers.push(d.getChunk_asU8()); - }); - - call.on('end', async () => { - const body = Buffer.concat(clientBodyBuffers); - const meta = call.metadata; - const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); - if (vaultNameOrId == null) - throw new ErrorGRPC('vault-name not in metadata.'); - let vaultId; - try { - vaultId = makeVaultId(vaultNameOrId); - await vaultManager.openVault(vaultId); - } catch (err) { - if ( - err instanceof vaultsErrors.ErrorVaultUndefined || - err instanceof SyntaxError - ) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await vaultManager.openVault(vaultId); - } else { - throw err; - } - } - // TODO: Check the permissions here - const response = new vaultsPB.PackChunk(); - const [sideBand, progressStream] = await vaultManager.handlePackRequest( - vaultId, - Buffer.from(body), - ); - response.setChunk(Buffer.from('0008NAK\n')); - await write(response); - const responseBuffers: Buffer[] = []; - await new Promise((resolve, reject) => { - sideBand.on('data', async (data: Buffer) => { - responseBuffers.push(data); - }); - sideBand.on('end', async () => { - response.setChunk(Buffer.concat(responseBuffers)); - await write(response); - resolve(); - }); - sideBand.on('error', (err) => { - reject(err); - }); - progressStream.write(Buffer.from('0014progress is at 50%\n')); - progressStream.end(); - }); - call.end(); - }); - }, - vaultsScan: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - // Const response = new vaultsPB.Vault(); - // const id = makeNodeId(call.request.getNodeId()); - try { - throw Error('Not implemented'); - // FIXME: handleVaultNamesRequest doesn't exist. 
- // const listResponse = vaultManager.handleVaultNamesRequest(id); - // let listResponse; - // for await (const vault of listResponse) { - // if (vault !== null) { - // response.setNameOrId(vault); - // await genWritable.next(response); - // } else { - // await genWritable.next(null); - // } - // } - // await genWritable.next(null); - } catch (err) { - await genWritable.throw(err); - } - }, - /** - * Retrieves the local nodes (i.e. from the current node) that are closest - * to some provided node ID. - * @param call call that encodes a nodeId representing the target search node. - * @param callback - */ - nodesClosestLocalNodesGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new nodesPB.NodeTable(); - try { - const targetNodeId = makeNodeId(call.request.getNodeId()); - // Get all local nodes that are closest to the target node from the request - const closestNodes = await nodeManager.getClosestLocalNodes( - targetNodeId, - ); - for (const node of closestNodes) { - const addressMessage = new nodesPB.Address(); - addressMessage.setHost(node.address.host); - addressMessage.setPort(node.address.port); - // Add the node to the response's map (mapping of node ID -> node address) - response.getNodeTableMap().set(node.id, addressMessage); - } - } catch (err) { - callback(grpcUtils.fromError(err), response); - } - callback(null, response); - }, - /** - * Retrieves all claims (of a specific type) of this node (within its sigchain). - * TODO: Currently not required. Will need to refactor once we filter on what - * claims we desire from the sigchain (e.g. in discoverGestalt). 
- */ - nodesClaimsGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new nodesPB.Claims(); - // Response.setClaimsList( - // await sigchain.getClaims(call.request.getClaimtype() as ClaimType) - // ); - callback(null, response); - }, - /** - * Retrieves the ChainDataEncoded of this node. - */ - nodesChainDataGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new nodesPB.ChainData(); - try { - const chainData = await nodeManager.getChainData(); - // Iterate through each claim in the chain, and serialize for transport - for (const c in chainData) { - const claimId = c as ClaimIdString; - const claim = chainData[claimId]; - const claimMessage = new nodesPB.AgentClaim(); - // Will always have a payload (never undefined) so cast as string - claimMessage.setPayload(claim.payload as string); - // Add the signatures - for (const signatureData of claim.signatures) { - const signature = new nodesPB.Signature(); - // Will always have a protected header (never undefined) so cast as string - signature.setProtected(signatureData.protected as string); - signature.setSignature(signatureData.signature); - claimMessage.getSignaturesList().push(signature); - } - // Add the serialized claim - response.getChainDataMap().set(claimId, claimMessage); - } - } catch (err) { - callback(grpcUtils.fromError(err), response); - } - callback(null, response); - }, - nodesHolePunchMessageSend: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - // Firstly, check if this node is the desired node - // If so, then we want to make this node start sending hole punching packets - // back to the source node. 
- if ( - nodeManager.getNodeId() === makeNodeId(call.request.getTargetId()) - ) { - const [host, port] = networkUtils.parseAddress( - call.request.getEgressAddress(), - ); - await nodeManager.openConnection(host, port); - // Otherwise, find if node in table - // If so, ask the nodeManager to relay to the node - } else if ( - await nodeManager.knowsNode(makeNodeId(call.request.getSrcId())) - ) { - await nodeManager.relayHolePunchMessage(call.request); - } - } catch (err) { - callback(grpcUtils.fromError(err), response); - } - callback(null, response); - }, - notificationsSend: async ( - call: grpc.ServerUnaryCall< - notificationsPB.AgentNotification, - utilsPB.EmptyMessage - >, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const jwt = call.request.getContent(); - const notification = await notificationsUtils.verifyAndDecodeNotif(jwt); - await notificationsManager.receiveNotification(notification); - } catch (err) { - if (err instanceof notificationsErrors.ErrorNotifications) { - callback(grpcUtils.fromError(err), response); - } else { - throw err; - } - } - callback(null, response); - }, - vaultsPermisssionsCheck: async ( - call: grpc.ServerUnaryCall< - vaultsPB.NodePermission, - vaultsPB.NodePermissionAllowed - >, - callback: grpc.sendUnaryData, - ): Promise => { - // Const response = new vaultsPB.NodePermissionAllowed(); - try { - // Const nodeId = makeNodeId(call.request.getNodeId()); - // const vaultId = makeVaultId(call.request.getVaultId()); - throw Error('Not Implemented'); - // FIXME: getVaultPermissions not implemented. 
- // const result = await vaultManager.getVaultPermissions(vaultId, nodeId); - // let result; - // if (result[nodeId] === undefined) { - // response.setPermission(false); - // } else if (result[nodeId]['pull'] === undefined) { - // response.setPermission(false); - // } else { - // response.setPermission(true); - // } - // callback(null, response); - } catch (err) { - callback(grpcUtils.fromError(err), null); - } - }, - nodesCrossSignClaim: async ( - call: grpc.ServerDuplexStream, - ) => { - // TODO: Move all "await genClaims.throw" to a final catch(). Wrap this - // entire thing in a try block. And re-throw whatever error is caught - const genClaims = grpcUtils.generatorDuplex(call); - try { - await sigchain.transaction(async (sigchain) => { - const readStatus = await genClaims.read(); - // If nothing to read, end and destroy - if (readStatus.done) { - throw new claimsErrors.ErrorEmptyStream(); - } - const receivedMessage = readStatus.value; - const intermediaryClaimMessage = - receivedMessage.getSinglySignedClaim(); - if (!intermediaryClaimMessage) { - throw new claimsErrors.ErrorUndefinedSinglySignedClaim(); - } - const intermediarySignature = intermediaryClaimMessage.getSignature(); - if (!intermediarySignature) { - throw new claimsErrors.ErrorUndefinedSignature(); - } - - // 3. 
X --> responds with double signing the Y signed claim, and also --> Y - // bundles it with its own signed claim (intermediate) - // Reconstruct the claim to verify its signature - const constructedIntermediaryClaim: ClaimIntermediary = { - payload: intermediaryClaimMessage.getPayload(), - signature: { - protected: intermediarySignature.getProtected(), - signature: intermediarySignature.getSignature(), - }, - }; - // Get the sender's node ID from the claim - const constructedEncodedClaim: ClaimEncoded = { - payload: intermediaryClaimMessage.getPayload(), - signatures: [ - { - protected: intermediarySignature.getProtected(), - signature: intermediarySignature.getSignature(), - }, - ], - }; - const decodedClaim = claimsUtils.decodeClaim(constructedEncodedClaim); - const payloadData = decodedClaim.payload.data; - if (payloadData.type !== 'node') { - throw new claimsErrors.ErrorNodesClaimType(); - } - // Verify the claim - const senderPublicKey = await nodeManager.getPublicKey( - payloadData.node1, - ); - const verified = await claimsUtils.verifyClaimSignature( - constructedEncodedClaim, - senderPublicKey, - ); - if (!verified) { - throw new claimsErrors.ErrorSinglySignedClaimVerificationFailed(); - } - // If verified, add your own signature to the received claim - const doublySignedClaim = await claimsUtils.signIntermediaryClaim({ - claim: constructedIntermediaryClaim, - privateKey: keyManager.getRootKeyPairPem().privateKey, - signeeNodeId: nodeManager.getNodeId(), - }); - // Then create your own intermediary node claim (from X -> Y) - const singlySignedClaim = await sigchain.createIntermediaryClaim({ - type: 'node', - node1: nodeManager.getNodeId(), - node2: payloadData.node1, - }); - // Should never be reached, but just for type safety - if (!doublySignedClaim.payload || !singlySignedClaim.payload) { - throw new claimsErrors.ErrorClaimsUndefinedClaimPayload(); - } - // Write both these claims to a message to send - const crossSignMessage = 
claimsUtils.createCrossSignMessage({ - singlySignedClaim, - doublySignedClaim, - }); - await genClaims.write(crossSignMessage); - // 4. We expect to receive our singly signed claim we sent to now be a - // doubly signed claim (signed by the other node). - const responseStatus = await genClaims.read(); - if (responseStatus.done) { - throw new claimsErrors.ErrorEmptyStream(); - } - const receivedResponse = responseStatus.value; - const receivedDoublySignedClaimMessage = - receivedResponse.getDoublySignedClaim(); - if (!receivedDoublySignedClaimMessage) { - throw new claimsErrors.ErrorUndefinedDoublySignedClaim(); - } - // Reconstruct the expected object from message - const constructedDoublySignedClaim: ClaimEncoded = { - payload: receivedDoublySignedClaimMessage.getPayload(), - signatures: receivedDoublySignedClaimMessage - .getSignaturesList() - .map((sMsg) => { - return { - protected: sMsg.getProtected(), - signature: sMsg.getSignature(), - }; - }), - }; - // Verify the doubly signed claim with both our public key, and the sender's - const verifiedDoubly = - (await claimsUtils.verifyClaimSignature( - constructedDoublySignedClaim, - keyManager.getRootKeyPairPem().publicKey, - )) && - (await claimsUtils.verifyClaimSignature( - constructedDoublySignedClaim, - senderPublicKey, - )); - if (!verifiedDoubly) { - await genClaims.throw( - new claimsErrors.ErrorDoublySignedClaimVerificationFailed(), - ); - } - // If verified, then we can safely add to our sigchain - await sigchain.addExistingClaim(constructedDoublySignedClaim); - // Close the stream - await genClaims.next(null); - }); - } catch (e) { - await genClaims.throw(e); - // TODO: Handle the exception on this server - throw e? 
- // throw e; - } - }, - }; - - return agentService; -} - -export default createAgentService; - -export { AgentServiceService }; diff --git a/src/agent/index.ts b/src/agent/index.ts index 8a9bebd20..f45d230fe 100644 --- a/src/agent/index.ts +++ b/src/agent/index.ts @@ -1,6 +1,3 @@ -export { - default as createAgentService, - AgentServiceService, -} from './agentService'; +export { default as createAgentService, AgentServiceService } from './service'; export { default as GRPCClientAgent } from './GRPCClientAgent'; export * as errors from './errors'; diff --git a/src/agent/service/echo.ts b/src/agent/service/echo.ts new file mode 100644 index 000000000..45b8f0279 --- /dev/null +++ b/src/agent/service/echo.ts @@ -0,0 +1,15 @@ +import type * as grpc from '@grpc/grpc-js'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function echo(_) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EchoMessage(); + response.setChallenge(call.request.getChallenge()); + callback(null, response); + }; +} + +export default echo; diff --git a/src/agent/service/index.ts b/src/agent/service/index.ts new file mode 100644 index 000000000..43af3c005 --- /dev/null +++ b/src/agent/service/index.ts @@ -0,0 +1,48 @@ +import type { KeyManager } from '../../keys'; +import type { VaultManager } from '../../vaults'; +import type { NodeManager } from '../../nodes'; +import type { NotificationsManager } from '../../notifications'; +import type { Sigchain } from '../../sigchain'; +import type { IAgentServiceServer } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; +import echo from './echo'; +import nodesChainDataGet from './nodesChainDataGet'; +import nodesClaimsGet from './nodesClaimsGet'; +import nodesClosestLocalNodesGet from './nodesClosestLocalNodesGet'; +import nodesCrossSignClaim from './nodesCrossSignClaim'; +import nodesHolePunchMessageSend from './nodesHolePunchMessageSend'; +import 
notificationsSend from './notificationsSend'; +import vaultsGitInfoGet from './vaultsGitInfoGet'; +import vaultsGitPackGet from './vaultsGitPackGet'; +import vaultsPermissionsCheck from './vaultsPermissionsCheck'; +import vaultsScan from './vaultsScan'; +import { AgentServiceService } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; + +function createService(container: { + keyManager: KeyManager; + vaultManager: VaultManager; + nodeManager: NodeManager; + notificationsManager: NotificationsManager; + sigchain: Sigchain; +}) { + const container_ = { + ...container, + }; + const service: IAgentServiceServer = { + echo: echo(container_), + nodesChainDataGet: nodesChainDataGet(container_), + nodesClaimsGet: nodesClaimsGet(container_), + nodesClosestLocalNodesGet: nodesClosestLocalNodesGet(container_), + nodesCrossSignClaim: nodesCrossSignClaim(container_), + nodesHolePunchMessageSend: nodesHolePunchMessageSend(container_), + notificationsSend: notificationsSend(container_), + vaultsGitInfoGet: vaultsGitInfoGet(container_), + vaultsGitPackGet: vaultsGitPackGet(container_), + vaultsPermissionsCheck: vaultsPermissionsCheck(container_), + vaultsScan: vaultsScan(container_), + }; + return service; +} + +export default createService; + +export { AgentServiceService }; diff --git a/src/agent/service/nodesChainDataGet.ts b/src/agent/service/nodesChainDataGet.ts new file mode 100644 index 000000000..0f4c201c7 --- /dev/null +++ b/src/agent/service/nodesChainDataGet.ts @@ -0,0 +1,44 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { ClaimIdString } from '../../claims/types'; +import type { NodeManager } from '../../nodes'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Retrieves the ChainDataEncoded of this node. 
+ */ +function nodesChainDataGet({ nodeManager }: { nodeManager: NodeManager }) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new nodesPB.ChainData(); + try { + const chainData = await nodeManager.getChainData(); + // Iterate through each claim in the chain, and serialize for transport + for (const c in chainData) { + const claimId = c as ClaimIdString; + const claim = chainData[claimId]; + const claimMessage = new nodesPB.AgentClaim(); + // Will always have a payload (never undefined) so cast as string + claimMessage.setPayload(claim.payload as string); + // Add the signatures + for (const signatureData of claim.signatures) { + const signature = new nodesPB.Signature(); + // Will always have a protected header (never undefined) so cast as string + signature.setProtected(signatureData.protected as string); + signature.setSignature(signatureData.signature); + claimMessage.getSignaturesList().push(signature); + } + // Add the serialized claim + response.getChainDataMap().set(claimId, claimMessage); + } + } catch (err) { + callback(grpcUtils.fromError(err), response); + } + callback(null, response); + }; +} + +export default nodesChainDataGet; diff --git a/src/agent/service/nodesClaimsGet.ts b/src/agent/service/nodesClaimsGet.ts new file mode 100644 index 000000000..920555388 --- /dev/null +++ b/src/agent/service/nodesClaimsGet.ts @@ -0,0 +1,22 @@ +import type * as grpc from '@grpc/grpc-js'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Retrieves all claims (of a specific type) of this node (within its sigchain). + * TODO: Currently not required. Will need to refactor once we filter on what + * claims we desire from the sigchain (e.g. in discoverGestalt). 
+ */ +function nodesClaimsGet(_) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new nodesPB.Claims(); + // Response.setClaimsList( + // await sigchain.getClaims(call.request.getClaimtype() as ClaimType) + // ); + callback(null, response); + }; +} + +export default nodesClaimsGet; diff --git a/src/agent/service/nodesClosestLocalNodesGet.ts b/src/agent/service/nodesClosestLocalNodesGet.ts new file mode 100644 index 000000000..5ef5fc071 --- /dev/null +++ b/src/agent/service/nodesClosestLocalNodesGet.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { NodeManager } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Retrieves the local nodes (i.e. from the current node) that are closest + * to some provided node ID. + */ +function nodesClosestLocalNodesGet({ + nodeManager, +}: { + nodeManager: NodeManager; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new nodesPB.NodeTable(); + try { + const targetNodeId = nodesUtils.makeNodeId(call.request.getNodeId()); + // Get all local nodes that are closest to the target node from the request + const closestNodes = await nodeManager.getClosestLocalNodes(targetNodeId); + for (const node of closestNodes) { + const addressMessage = new nodesPB.Address(); + addressMessage.setHost(node.address.host); + addressMessage.setPort(node.address.port); + // Add the node to the response's map (mapping of node ID -> node address) + response.getNodeTableMap().set(node.id, addressMessage); + } + } catch (err) { + callback(grpcUtils.fromError(err), response); + } + callback(null, response); + }; +} + +export default nodesClosestLocalNodesGet; diff --git a/src/agent/service/nodesCrossSignClaim.ts 
b/src/agent/service/nodesCrossSignClaim.ts new file mode 100644 index 000000000..55857e140 --- /dev/null +++ b/src/agent/service/nodesCrossSignClaim.ts @@ -0,0 +1,152 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { ClaimEncoded, ClaimIntermediary } from '../../claims/types'; +import type { NodeManager } from '../../nodes'; +import type { Sigchain } from '../../sigchain'; +import type { KeyManager } from '../../keys'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as claimsUtils, errors as claimsErrors } from '../../claims'; + +function nodesCrossSignClaim({ + keyManager, + nodeManager, + sigchain, +}: { + keyManager: KeyManager; + nodeManager: NodeManager; + sigchain: Sigchain; +}) { + return async ( + call: grpc.ServerDuplexStream, + ) => { + // TODO: Move all "await genClaims.throw" to a final catch(). Wrap this + // entire thing in a try block. And re-throw whatever error is caught + const genClaims = grpcUtils.generatorDuplex(call); + try { + await sigchain.transaction(async (sigchain) => { + const readStatus = await genClaims.read(); + // If nothing to read, end and destroy + if (readStatus.done) { + throw new claimsErrors.ErrorEmptyStream(); + } + const receivedMessage = readStatus.value; + const intermediaryClaimMessage = receivedMessage.getSinglySignedClaim(); + if (!intermediaryClaimMessage) { + throw new claimsErrors.ErrorUndefinedSinglySignedClaim(); + } + const intermediarySignature = intermediaryClaimMessage.getSignature(); + if (!intermediarySignature) { + throw new claimsErrors.ErrorUndefinedSignature(); + } + + // 3. 
X --> responds with double signing the Y signed claim, and also --> Y + // bundles it with its own signed claim (intermediate) + // Reconstruct the claim to verify its signature + const constructedIntermediaryClaim: ClaimIntermediary = { + payload: intermediaryClaimMessage.getPayload(), + signature: { + protected: intermediarySignature.getProtected(), + signature: intermediarySignature.getSignature(), + }, + }; + // Get the sender's node ID from the claim + const constructedEncodedClaim: ClaimEncoded = { + payload: intermediaryClaimMessage.getPayload(), + signatures: [ + { + protected: intermediarySignature.getProtected(), + signature: intermediarySignature.getSignature(), + }, + ], + }; + const decodedClaim = claimsUtils.decodeClaim(constructedEncodedClaim); + const payloadData = decodedClaim.payload.data; + if (payloadData.type !== 'node') { + throw new claimsErrors.ErrorNodesClaimType(); + } + // Verify the claim + const senderPublicKey = await nodeManager.getPublicKey( + payloadData.node1, + ); + const verified = await claimsUtils.verifyClaimSignature( + constructedEncodedClaim, + senderPublicKey, + ); + if (!verified) { + throw new claimsErrors.ErrorSinglySignedClaimVerificationFailed(); + } + // If verified, add your own signature to the received claim + const doublySignedClaim = await claimsUtils.signIntermediaryClaim({ + claim: constructedIntermediaryClaim, + privateKey: keyManager.getRootKeyPairPem().privateKey, + signeeNodeId: nodeManager.getNodeId(), + }); + // Then create your own intermediary node claim (from X -> Y) + const singlySignedClaim = await sigchain.createIntermediaryClaim({ + type: 'node', + node1: nodeManager.getNodeId(), + node2: payloadData.node1, + }); + // Should never be reached, but just for type safety + if (!doublySignedClaim.payload || !singlySignedClaim.payload) { + throw new claimsErrors.ErrorClaimsUndefinedClaimPayload(); + } + // Write both these claims to a message to send + const crossSignMessage = 
claimsUtils.createCrossSignMessage({ + singlySignedClaim, + doublySignedClaim, + }); + await genClaims.write(crossSignMessage); + // 4. We expect to receive our singly signed claim we sent to now be a + // doubly signed claim (signed by the other node). + const responseStatus = await genClaims.read(); + if (responseStatus.done) { + throw new claimsErrors.ErrorEmptyStream(); + } + const receivedResponse = responseStatus.value; + const receivedDoublySignedClaimMessage = + receivedResponse.getDoublySignedClaim(); + if (!receivedDoublySignedClaimMessage) { + throw new claimsErrors.ErrorUndefinedDoublySignedClaim(); + } + // Reconstruct the expected object from message + const constructedDoublySignedClaim: ClaimEncoded = { + payload: receivedDoublySignedClaimMessage.getPayload(), + signatures: receivedDoublySignedClaimMessage + .getSignaturesList() + .map((sMsg) => { + return { + protected: sMsg.getProtected(), + signature: sMsg.getSignature(), + }; + }), + }; + // Verify the doubly signed claim with both our public key, and the sender's + const verifiedDoubly = + (await claimsUtils.verifyClaimSignature( + constructedDoublySignedClaim, + keyManager.getRootKeyPairPem().publicKey, + )) && + (await claimsUtils.verifyClaimSignature( + constructedDoublySignedClaim, + senderPublicKey, + )); + if (!verifiedDoubly) { + await genClaims.throw( + new claimsErrors.ErrorDoublySignedClaimVerificationFailed(), + ); + } + // If verified, then we can safely add to our sigchain + await sigchain.addExistingClaim(constructedDoublySignedClaim); + // Close the stream + await genClaims.next(null); + }); + } catch (e) { + await genClaims.throw(e); + // TODO: Handle the exception on this server - throw e? 
+ // throw e; + } + }; +} + +export default nodesCrossSignClaim; diff --git a/src/agent/service/nodesHolePunchMessageSend.ts b/src/agent/service/nodesHolePunchMessageSend.ts new file mode 100644 index 000000000..d1a1ea8aa --- /dev/null +++ b/src/agent/service/nodesHolePunchMessageSend.ts @@ -0,0 +1,47 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { NodeManager } from '../../nodes'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as networkUtils } from '../../network'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function nodesHolePunchMessageSend({ + nodeManager, +}: { + nodeManager: NodeManager; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + // Firstly, check if this node is the desired node + // If so, then we want to make this node start sending hole punching packets + // back to the source node. 
+ if ( + nodeManager.getNodeId() === + nodesUtils.makeNodeId(call.request.getTargetId()) + ) { + const [host, port] = networkUtils.parseAddress( + call.request.getEgressAddress(), + ); + await nodeManager.openConnection(host, port); + // Otherwise, find if node in table + // If so, ask the nodeManager to relay to the node + } else if ( + await nodeManager.knowsNode( + nodesUtils.makeNodeId(call.request.getSrcId()), + ) + ) { + await nodeManager.relayHolePunchMessage(call.request); + } + } catch (err) { + callback(grpcUtils.fromError(err), response); + } + callback(null, response); + }; +} + +export default nodesHolePunchMessageSend; diff --git a/src/agent/service/notificationsSend.ts b/src/agent/service/notificationsSend.ts new file mode 100644 index 000000000..3631d8af9 --- /dev/null +++ b/src/agent/service/notificationsSend.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { NotificationsManager } from '../../notifications'; +import type * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { + utils as notificationsUtils, + errors as notificationsErrors, +} from '../../notifications'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function notificationsSend({ + notificationsManager, +}: { + notificationsManager: NotificationsManager; +}) { + return async ( + call: grpc.ServerUnaryCall< + notificationsPB.AgentNotification, + utilsPB.EmptyMessage + >, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const jwt = call.request.getContent(); + const notification = await notificationsUtils.verifyAndDecodeNotif(jwt); + await notificationsManager.receiveNotification(notification); + } catch (err) { + if (err instanceof notificationsErrors.ErrorNotifications) { + callback(grpcUtils.fromError(err), response); + } else { + throw err; + } + } + callback(null, response); + }; 
+} + +export default notificationsSend; diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts new file mode 100644 index 000000000..8ee13efed --- /dev/null +++ b/src/agent/service/vaultsGitInfoGet.ts @@ -0,0 +1,49 @@ +import type { VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsGitInfoGet({ vaultManager }: { vaultManager: VaultManager }) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + const request = call.request; + const vaultNameOrId = request.getNameOrId(); + let vaultId, vaultName; + try { + vaultId = vaultsUtils.makeVaultId(idUtils.fromString(vaultNameOrId)); + await vaultManager.openVault(vaultId); + vaultName = await vaultManager.getVaultName(vaultId); + } catch (err) { + if (err instanceof vaultsErrors.ErrorVaultUndefined) { + vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + await vaultManager.openVault(vaultId); + vaultName = vaultNameOrId; + } else { + throw err; + } + } + // TODO: Check the permissions here + const meta = new grpc.Metadata(); + meta.set('vaultName', vaultName); + meta.set('vaultId', vaultsUtils.makeVaultIdPretty(vaultId)); + genWritable.stream.sendMetadata(meta); + const response = new vaultsPB.PackChunk(); + const responseGen = vaultManager.handleInfoRequest(vaultId); + for await (const byte of responseGen) { + if (byte !== null) { + response.setChunk(byte); + await genWritable.next(response); + } else { + await genWritable.next(null); + } + } + await genWritable.next(null); + }; +} + +export default vaultsGitInfoGet; diff --git 
a/src/agent/service/vaultsGitPackGet.ts b/src/agent/service/vaultsGitPackGet.ts new file mode 100644 index 000000000..8590fcd29 --- /dev/null +++ b/src/agent/service/vaultsGitPackGet.ts @@ -0,0 +1,69 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import { promisify } from '../../utils'; +import { errors as grpcErrors } from '../../grpc'; +import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsGitPackGet({ vaultManager }: { vaultManager: VaultManager }) { + return async ( + call: grpc.ServerDuplexStream, + ) => { + const write = promisify(call.write).bind(call); + const clientBodyBuffers: Buffer[] = []; + call.on('data', (d) => { + clientBodyBuffers.push(d.getChunk_asU8()); + }); + + call.on('end', async () => { + const body = Buffer.concat(clientBodyBuffers); + const meta = call.metadata; + const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); + if (vaultNameOrId == null) + throw new grpcErrors.ErrorGRPC('vault-name not in metadata.'); + let vaultId; + try { + vaultId = vaultsUtils.makeVaultId(vaultNameOrId); + await vaultManager.openVault(vaultId); + } catch (err) { + if ( + err instanceof vaultsErrors.ErrorVaultUndefined || + err instanceof SyntaxError + ) { + vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + await vaultManager.openVault(vaultId); + } else { + throw err; + } + } + // TODO: Check the permissions here + const response = new vaultsPB.PackChunk(); + const [sideBand, progressStream] = await vaultManager.handlePackRequest( + vaultId, + Buffer.from(body), + ); + response.setChunk(Buffer.from('0008NAK\n')); + await write(response); + const responseBuffers: Buffer[] = []; + await new Promise((resolve, reject) => { + sideBand.on('data', async (data: Buffer) => { + responseBuffers.push(data); + }); + 
sideBand.on('end', async () => { + response.setChunk(Buffer.concat(responseBuffers)); + await write(response); + resolve(); + }); + sideBand.on('error', (err) => { + reject(err); + }); + progressStream.write(Buffer.from('0014progress is at 50%\n')); + progressStream.end(); + }); + call.end(); + }); + }; +} + +export default vaultsGitPackGet; diff --git a/src/agent/service/vaultsPermissionsCheck.ts b/src/agent/service/vaultsPermissionsCheck.ts new file mode 100644 index 000000000..8b3046f06 --- /dev/null +++ b/src/agent/service/vaultsPermissionsCheck.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import { utils as grpcUtils } from '../../grpc'; + +function vaultsPermissionsCheck(_) { + return async ( + call: grpc.ServerUnaryCall< + vaultsPB.NodePermission, + vaultsPB.NodePermissionAllowed + >, + callback: grpc.sendUnaryData, + ): Promise => { + // Const response = new vaultsPB.NodePermissionAllowed(); + try { + // Const nodeId = makeNodeId(call.request.getNodeId()); + // const vaultId = makeVaultId(call.request.getVaultId()); + throw Error('Not Implemented'); + // FIXME: getVaultPermissions not implemented. 
+ // const result = await vaultManager.getVaultPermissions(vaultId, nodeId); + // let result; + // if (result[nodeId] === undefined) { + // response.setPermission(false); + // } else if (result[nodeId]['pull'] === undefined) { + // response.setPermission(false); + // } else { + // response.setPermission(true); + // } + // callback(null, response); + } catch (err) { + callback(grpcUtils.fromError(err), null); + } + }; +} + +export default vaultsPermissionsCheck; diff --git a/src/agent/service/vaultsScan.ts b/src/agent/service/vaultsScan.ts new file mode 100644 index 000000000..f7e618664 --- /dev/null +++ b/src/agent/service/vaultsScan.ts @@ -0,0 +1,33 @@ +import type * as grpc from '@grpc/grpc-js'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as grpcUtils } from '../../grpc'; + +function vaultsScan(_) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + // Const response = new vaultsPB.Vault(); + // const id = makeNodeId(call.request.getNodeId()); + try { + throw Error('Not implemented'); + // FIXME: handleVaultNamesRequest doesn't exist. 
+ // const listResponse = vaultManager.handleVaultNamesRequest(id); + // let listResponse; + // for await (const vault of listResponse) { + // if (vault !== null) { + // response.setNameOrId(vault); + // await genWritable.next(response); + // } else { + // await genWritable.next(null); + // } + // } + // await genWritable.next(null); + } catch (err) { + await genWritable.throw(err); + } + }; +} + +export default vaultsScan; diff --git a/src/bin/agent/CommandLockAll.ts b/src/bin/agent/CommandLockAll.ts index faa154072..865438286 100644 --- a/src/bin/agent/CommandLockAll.ts +++ b/src/bin/agent/CommandLockAll.ts @@ -53,7 +53,7 @@ class CommandLockAll extends CommandPolykey { }); const emptyMessage = new utilsPB.EmptyMessage(); await binUtils.retryAuthentication( - (auth) => pkClient.grpcClient.sessionsLockAll(emptyMessage, auth), + (auth) => pkClient.grpcClient.agentLockAll(emptyMessage, auth), meta, ); // Destroy local session diff --git a/src/bin/agent/CommandStart.ts b/src/bin/agent/CommandStart.ts index df69d403c..5c71999d9 100644 --- a/src/bin/agent/CommandStart.ts +++ b/src/bin/agent/CommandStart.ts @@ -2,6 +2,7 @@ import type { StdioOptions } from 'child_process'; import type { AgentChildProcessInput, AgentChildProcessOutput } from '../types'; import type PolykeyAgent from '../../PolykeyAgent'; import type { RecoveryCode } from '../../keys/types'; +import type { PolykeyWorkerManagerInterface } from '../../workers/types'; import path from 'path'; import child_process from 'child_process'; import process from 'process'; @@ -26,6 +27,7 @@ class CommandStart extends CommandPolykey { this.addOption(binOptions.connTimeoutTime); this.addOption(binOptions.seedNodes); this.addOption(binOptions.network); + this.addOption(binOptions.workers); this.addOption(binOptions.background); this.addOption(binOptions.backgroundOutFile); this.addOption(binOptions.backgroundErrFile); @@ -36,6 +38,9 @@ class CommandStart extends CommandPolykey { options.clientPort = options.clientPort 
?? config.defaults.networkConfig.clientPort; const { default: PolykeyAgent } = await import('../../PolykeyAgent'); + const { WorkerManager, utils: workersUtils } = await import( + '../../workers' + ); let password: string | undefined; if (options.fresh) { // If fresh, then get a new password @@ -165,6 +170,7 @@ class CommandStart extends CommandPolykey { }); const messageIn: AgentChildProcessInput = { logLevel: this.logger.getEffectiveLevel(), + workers: options.workers, agentConfig, }; agentProcess.send(messageIn, (e) => { @@ -180,15 +186,26 @@ class CommandStart extends CommandPolykey { // Change process name to polykey-agent process.title = 'polykey-agent'; // eslint-disable-next-line prefer-const - let pkAgent: PolykeyAgent | undefined; + let pkAgent: PolykeyAgent; + // eslint-disable-next-line prefer-const + let workerManager: PolykeyWorkerManagerInterface; this.exitHandlers.handlers.push(async () => { - if (pkAgent != null) await pkAgent.stop(); + pkAgent?.unsetWorkerManager(); + await workerManager?.destroy(); + await pkAgent?.stop(); }); pkAgent = await PolykeyAgent.createPolykeyAgent({ fs: this.fs, logger: this.logger.getChild(PolykeyAgent.name), ...agentConfig, }); + if (options.workers !== 0) { + workerManager = await workersUtils.createWorkerManager({ + cores: options.workers, + logger: this.logger.getChild(WorkerManager.name), + }); + pkAgent.setWorkerManager(workerManager); + } recoveryCodeOut = pkAgent.keyManager.getRecoveryCode(); } // Recovery code is only available if it was newly generated diff --git a/src/bin/agent/CommandUnlock.ts b/src/bin/agent/CommandUnlock.ts index 3215253a5..b8804d9e6 100644 --- a/src/bin/agent/CommandUnlock.ts +++ b/src/bin/agent/CommandUnlock.ts @@ -41,7 +41,7 @@ class CommandUnlock extends CommandPolykey { }); const emptyMessage = new utilsPB.EmptyMessage(); await binUtils.retryAuthentication( - (auth) => pkClient.grpcClient.sessionsUnlock(emptyMessage, auth), + (auth) => pkClient.grpcClient.agentUnlock(emptyMessage, 
auth), meta, ); } finally { diff --git a/src/bin/polykey-agent.ts b/src/bin/polykey-agent.ts index 7e06e3a6f..3689bd201 100644 --- a/src/bin/polykey-agent.ts +++ b/src/bin/polykey-agent.ts @@ -5,6 +5,7 @@ * @module */ import type { AgentChildProcessInput, AgentChildProcessOutput } from './types'; +import type { PolykeyWorkerManagerInterface } from '../workers/types'; import fs from 'fs'; import process from 'process'; /** @@ -21,6 +22,7 @@ process.removeAllListeners('SIGTERM'); import Logger, { StreamHandler } from '@matrixai/logger'; import * as binUtils from './utils'; import PolykeyAgent from '../PolykeyAgent'; +import { WorkerManager, utils as workersUtils } from '../workers'; import ErrorPolykey from '../ErrorPolykey'; import { promisify, promise } from '../utils'; @@ -42,8 +44,11 @@ async function main(_argv = process.argv): Promise { const messageIn = await messageInP; logger.setLevel(messageIn.logLevel); let pkAgent: PolykeyAgent; + let workerManager: PolykeyWorkerManagerInterface; exitHandlers.handlers.push(async () => { - if (pkAgent != null) await pkAgent.stop(); + pkAgent?.unsetWorkerManager(); + await workerManager?.destroy(); + await pkAgent?.stop(); }); try { pkAgent = await PolykeyAgent.createPolykeyAgent({ @@ -51,6 +56,13 @@ async function main(_argv = process.argv): Promise { logger: logger.getChild(PolykeyAgent.name), ...messageIn.agentConfig, }); + if (messageIn.workers !== 0) { + workerManager = await workersUtils.createWorkerManager({ + cores: messageIn.workers, + logger: logger.getChild(WorkerManager.name), + }); + pkAgent.setWorkerManager(workerManager); + } } catch (e) { if (e instanceof ErrorPolykey) { process.stderr.write( diff --git a/src/bin/types.ts b/src/bin/types.ts index 79c281887..a842f9033 100644 --- a/src/bin/types.ts +++ b/src/bin/types.ts @@ -11,6 +11,7 @@ import type { Host, Port } from '../network/types'; */ type AgentChildProcessInput = { logLevel: LogLevel; + workers?: number; agentConfig: { password: string; nodePath?: 
string; diff --git a/src/bin/utils/options.ts b/src/bin/utils/options.ts index ea95d2d9e..2fffd0ecb 100644 --- a/src/bin/utils/options.ts +++ b/src/bin/utils/options.ts @@ -141,6 +141,13 @@ const network = new commander.Option( .env('PK_NETWORK') .default(config.defaults.network.mainnet); +const workers = new commander.Option( + '-w --workers ', + 'Number of workers to use, defaults to number of cores with `all`, 0 means no multi-threading', +) + .argParser(binParsers.parseCoreCount) + .default(undefined); + export { nodePath, format, @@ -161,4 +168,5 @@ export { rootKeyPairBits, seedNodes, network, + workers, }; diff --git a/src/bin/utils/parsers.ts b/src/bin/utils/parsers.ts index 3778d8000..83ca45876 100644 --- a/src/bin/utils/parsers.ts +++ b/src/bin/utils/parsers.ts @@ -15,6 +15,13 @@ function parseNumber(v: string): number { return num; } +function parseCoreCount(v: string): number | undefined { + if (v === 'all') { + return undefined; + } + return parseNumber(v); +} + function parseSecretPath( secretPath: string, ): [string, string, string | undefined] { @@ -92,7 +99,7 @@ function getDefaultSeedNodes(network: string): NodeMapping { * Seed nodes expected to be of form 'nodeId1@host:port;nodeId2@host:port;...' * By default, any specified seed nodes (in CLI option, or environment variable) * will overwrite the default nodes in src/config.ts. - * Special flag '' in the content indicates that the default seed + * Special flag `` indicates that the default seed * nodes should be added to the starting seed nodes instead of being overwritten. */ function parseSeedNodes(rawSeedNodes: string): [NodeMapping, boolean] { @@ -105,7 +112,7 @@ function parseSeedNodes(rawSeedNodes: string): [NodeMapping, boolean] { // Empty string will occur if there's an extraneous ';' (e.g. 
at end of env) if (rawSeedNode === '') continue; // Append the default seed nodes if we encounter the special flag - if (rawSeedNode === '') { + if (rawSeedNode === '') { defaults = true; continue; } @@ -148,6 +155,7 @@ function parseNetwork(network: string): NodeMapping { export { parseNumber, + parseCoreCount, parseSecretPath, parseGestaltId, getDefaultSeedNodes, diff --git a/src/bin/utils/processors.ts b/src/bin/utils/processors.ts index 9d3353539..9bd561be7 100644 --- a/src/bin/utils/processors.ts +++ b/src/bin/utils/processors.ts @@ -203,8 +203,10 @@ async function processClientOptions( }> { if (nodeId == null || clientHost == null || clientPort == null) { const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const status = new Status({ statusPath, + statusLockPath, fs, logger: logger.getChild(Status.name), }); @@ -270,8 +272,10 @@ async function processClientStatus( }; } const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const status = new Status({ statusPath, + statusLockPath, fs, logger: logger.getChild(Status.name), }); diff --git a/src/bootstrap/utils.ts b/src/bootstrap/utils.ts index 5d09a11ee..3a7308349 100644 --- a/src/bootstrap/utils.ts +++ b/src/bootstrap/utils.ts @@ -58,20 +58,22 @@ async function bootstrapState({ await mkdirExists(fs, nodePath); // Setup node path and sub paths const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); const statePath = path.join(nodePath, config.defaults.stateBase); const dbPath = path.join(statePath, config.defaults.dbBase); const keysPath = path.join(statePath, config.defaults.keysBase); const vaultsPath = path.join(statePath, config.defaults.vaultsBase); const status = new Status({ + statusPath, + statusLockPath, fs, logger, - statusPath, }); 
try { await status.start({ pid: process.pid }); if (!fresh) { - // Check the if number of directory entries is greater than 1 due to status.json - if ((await fs.promises.readdir(nodePath)).length > 1) { + // Check the if number of directory entries is greater than 1 due to status.json and status.lock + if ((await fs.promises.readdir(nodePath)).length > 2) { throw new bootstrapErrors.ErrorBootstrapExistingState(); } } diff --git a/src/client/GRPCClientClient.ts b/src/client/GRPCClientClient.ts index 19437c188..7f7d36008 100644 --- a/src/client/GRPCClientClient.ts +++ b/src/client/GRPCClientClient.ts @@ -1,8 +1,9 @@ import type { Interceptor } from '@grpc/grpc-js'; +import type { ClientReadableStream } from '@grpc/grpc-js/build/src/call'; +import type { AsyncGeneratorReadableStreamClient } from '../grpc/types'; import type { Session } from '../sessions'; import type { NodeId } from '../nodes/types'; import type { Host, Port, TLSConfig, ProxyConfig } from '../network/types'; - import type * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; import type * as agentPB from '../proto/js/polykey/v1/agent/agent_pb'; import type * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; @@ -48,7 +49,6 @@ class GRPCClientClient extends GRPCClient { timeout?: number; logger?: Logger; }): Promise { - logger.info(`Creating ${this.name}`); const interceptors: Array = []; if (session != null) { interceptors.push(clientUtils.sessionInterceptor(session)); @@ -76,14 +76,11 @@ class GRPCClientClient extends GRPCClient { flowCountInterceptor, logger, }); - logger.info(`Created ${this.name}`); return grpcClientClient; } public async destroy() { - this.logger.info(`Destroying ${this.constructor.name}`); await super.destroy(); - this.logger.info(`Destroyed ${this.constructor.name}`); } @ready(new clientErrors.ErrorClientClientDestroyed()) @@ -103,23 +100,28 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public 
sessionsUnlock(...args) { + public agentUnlock(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.sessionsUnlock, + this.client.agentUnlock, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public sessionsLockAll(...args) { + public agentLockAll(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.sessionsLockAll, + this.client.agentLockAll, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsList(...args) { + public vaultsList( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.List, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsList, @@ -167,7 +169,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsScan(...args) { + public vaultsScan( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.List, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsScan, @@ -191,7 +198,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultPermissions(...args) { + public vaultPermissions( + ...args + ): AsyncGeneratorReadableStreamClient< + vaultsPB.Permission, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsPermissions, @@ -199,7 +211,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsSecretsList(...args) { + public vaultsSecretsList( + ...args + ): AsyncGeneratorReadableStreamClient< + secretsPB.Secret, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsSecretsList, @@ -279,7 +296,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsLog(...args) { + public vaultsLog( + ...args 
+ ): AsyncGeneratorReadableStreamClient< + vaultsPB.LogEntry, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.vaultsLog, @@ -359,7 +381,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public keysCertsChainGet(...args) { + public keysCertsChainGet( + ...args + ): AsyncGeneratorReadableStreamClient< + keysPB.Certificate, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.keysCertsChainGet, @@ -367,7 +394,12 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public gestaltsGestaltList(...args) { + public gestaltsGestaltList( + ...args + ): AsyncGeneratorReadableStreamClient< + gestaltsPB.Gestalt, + ClientReadableStream + > { return grpcUtils.promisifyReadableStreamCall( this.client, this.client.gestaltsGestaltList, diff --git a/src/client/clientService.ts b/src/client/clientService.ts deleted file mode 100644 index db7d1a549..000000000 --- a/src/client/clientService.ts +++ /dev/null @@ -1,144 +0,0 @@ -import type PolykeyAgent from '../PolykeyAgent'; -import type { KeyManager } from '../keys'; -import type { VaultManager } from '../vaults'; -import type { NodeManager } from '../nodes'; -import type { IdentitiesManager } from '../identities'; -import type { GestaltGraph } from '../gestalts'; -import type { SessionManager } from '../sessions'; -import type { NotificationsManager } from '../notifications'; -import type { Discovery } from '../discovery'; -import type { Sigchain } from '../sigchain'; -import type { GRPCServer } from '../grpc'; -import type { ForwardProxy, ReverseProxy } from '../network'; -import type { FileSystem } from '../types'; - -import type * as grpc from '@grpc/grpc-js'; -import type { IClientServiceServer } from '../proto/js/polykey/v1/client_service_grpc_pb'; -import createStatusRPC from './rpcStatus'; -import createSessionsRPC from 
'./rpcSessions'; -import createVaultRPC from './rpcVaults'; -import createKeysRPC from './rpcKeys'; -import createNodesRPC from './rpcNodes'; -import createGestaltRPC from './rpcGestalts'; -import createIdentitiesRPC from './rpcIdentities'; -import createNotificationsRPC from './rpcNotifications'; -import * as clientUtils from './utils'; -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import { ClientServiceService } from '../proto/js/polykey/v1/client_service_grpc_pb'; - -/** - * Creates the client service for use with a GRPCServer - * @param domains An object representing all the domains / managers the client server uses. - * @returns an IClientServer object - */ -function createClientService({ - polykeyAgent, - keyManager, - vaultManager, - nodeManager, - identitiesManager, - gestaltGraph, - sessionManager, - notificationsManager, - discovery, - sigchain, - grpcServerClient, - grpcServerAgent, - fwdProxy, - revProxy, - fs, -}: { - polykeyAgent: PolykeyAgent; - keyManager: KeyManager; - vaultManager: VaultManager; - nodeManager: NodeManager; - identitiesManager: IdentitiesManager; - gestaltGraph: GestaltGraph; - sessionManager: SessionManager; - notificationsManager: NotificationsManager; - discovery: Discovery; - sigchain: Sigchain; - grpcServerClient: GRPCServer; - grpcServerAgent: GRPCServer; - fwdProxy: ForwardProxy; - revProxy: ReverseProxy; - fs: FileSystem; -}) { - const authenticate = clientUtils.authenticator(sessionManager, keyManager); - const clientService: IClientServiceServer = { - ...createStatusRPC({ - authenticate, - keyManager, - grpcServerClient, - grpcServerAgent, - fwdProxy, - revProxy, - }), - ...createSessionsRPC({ - authenticate, - sessionManager, - }), - ...createVaultRPC({ - vaultManager, - authenticate, - fs, - }), - ...createKeysRPC({ - keyManager, - nodeManager, - authenticate, - fwdProxy, - revProxy, - grpcServerClient, - }), - ...createIdentitiesRPC({ - 
identitiesManager, - sigchain, - nodeManager, - authenticate, - }), - ...createGestaltRPC({ - gestaltGraph, - authenticate, - discovery, - }), - ...createNodesRPC({ - nodeManager, - notificationsManager, - authenticate, - }), - ...createNotificationsRPC({ - notificationsManager, - authenticate, - }), - agentStop: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - if (!polykeyAgent.running) { - callback(null, response); - return; - } - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Respond first to close the GRPC connection - callback(null, response); - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - // Stop is called after GRPC resources are cleared - await polykeyAgent.stop(); - return; - }, - }; - - return clientService; -} - -export default createClientService; - -export { ClientServiceService }; diff --git a/src/client/index.ts b/src/client/index.ts index 44ff1f832..d5959db3e 100644 --- a/src/client/index.ts +++ b/src/client/index.ts @@ -1,12 +1,8 @@ export { default as createClientService, ClientServiceService, -} from './clientService'; +} from './service'; export { default as GRPCClientClient } from './GRPCClientClient'; export * as errors from './errors'; export * as utils from './utils'; - -/** - * This allows us to create a MetaData() object without explicitly importing `@grpc/grpc-js`. 
- */ -export { Metadata } from '@grpc/grpc-js'; +export * as types from './types'; diff --git a/src/client/rpcGestalts.ts b/src/client/rpcGestalts.ts deleted file mode 100644 index ab3a21933..000000000 --- a/src/client/rpcGestalts.ts +++ /dev/null @@ -1,295 +0,0 @@ -import type { Discovery } from '../discovery'; -import type { GestaltGraph } from '../gestalts'; -import type { Gestalt } from '../gestalts/types'; -import type { IdentityId, ProviderId } from '../identities/types'; - -import type * as grpc from '@grpc/grpc-js'; -import type * as clientUtils from './utils'; -import type * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import type * as identitiesPB from '../proto/js/polykey/v1/identities/identities_pb'; - -import { makeGestaltAction } from '../gestalts/utils'; - -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as gestaltsPB from '../proto/js/polykey/v1/gestalts/gestalts_pb'; -import * as permissionsPB from '../proto/js/polykey/v1/permissions/permissions_pb'; -import { makeNodeId } from '../nodes/utils'; - -const createGestaltsRPC = ({ - gestaltGraph, - authenticate, - discovery, -}: { - gestaltGraph: GestaltGraph; - authenticate: clientUtils.Authenticate; - discovery: Discovery; -}) => { - return { - gestaltsGestaltGetByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new gestaltsPB.Graph(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const gestalt = await gestaltGraph.getGestaltByNode( - makeNodeId(call.request.getNodeId()), - ); - if (gestalt != null) { - response.setGestaltGraph(JSON.stringify(gestalt)); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsGestaltGetByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const 
response = new gestaltsPB.Graph(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const gestalt = await gestaltGraph.getGestaltByIdentity( - call.request.getProviderId() as ProviderId, - call.request.getIdentityId() as IdentityId, - ); - if (gestalt != null) { - response.setGestaltGraph(JSON.stringify(gestalt)); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsGestaltList: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - let gestaltMessage: gestaltsPB.Gestalt; - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const certs: Array = await gestaltGraph.getGestalts(); - for (const cert of certs) { - gestaltMessage = new gestaltsPB.Gestalt(); - gestaltMessage.setName(JSON.stringify(cert)); - await genWritable.next(gestaltMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - gestaltsDiscoveryByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Constructing identity info. - const gen = discovery.discoverGestaltByNode( - makeNodeId(info.getNodeId()), - ); - for await (const _ of gen) { - // Empty - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsDiscoveryByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Constructing identity info. 
- const gen = discovery.discoverGestaltByIdentity( - info.getProviderId() as ProviderId, - info.getIdentityId() as IdentityId, - ); - for await (const _ of gen) { - // Empty - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsGetByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new permissionsPB.Actions(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const result = await gestaltGraph.getGestaltActionsByNode( - makeNodeId(info.getNodeId()), - ); - if (result == null) { - // Node doesn't exist, so no permissions. might throw error instead TBD. - response.setActionList([]); - } else { - // Contains permission - const actions = Object.keys(result); - response.setActionList(actions); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsGetByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new permissionsPB.Actions(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const providerId = info.getProviderId() as ProviderId; - const identityId = info.getIdentityId() as IdentityId; - const result = await gestaltGraph.getGestaltActionsByIdentity( - providerId, - identityId, - ); - if (result == null) { - // Node doesn't exist, so no permissions. might throw error instead TBD. 
- response.setActionList([]); - } else { - // Contains permission - const actions = Object.keys(result); - response.setActionList(actions); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsSetByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Setting the action. - const action = makeGestaltAction(info.getAction()); - const nodeId = makeNodeId(info.getNode()?.getNodeId()); - await gestaltGraph.setGestaltActionByNode(nodeId, action); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsSetByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Setting the action. - const action = makeGestaltAction(info.getAction()); - const providerId = info.getIdentity()?.getProviderId() as ProviderId; - const identityId = info.getIdentity()?.getIdentityId() as IdentityId; - await gestaltGraph.setGestaltActionByIdentity( - providerId, - identityId, - action, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsUnsetByNode: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Setting the action. 
- const action = makeGestaltAction(info.getAction()); - const nodeId = makeNodeId(info.getNode()?.getNodeId()); - await gestaltGraph.unsetGestaltActionByNode(nodeId, action); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - gestaltsActionsUnsetByIdentity: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const info = call.request; - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Setting the action. - const action = makeGestaltAction(info.getAction()); - const providerId = info.getIdentity()?.getProviderId() as ProviderId; - const identityId = info.getIdentity()?.getIdentityId() as IdentityId; - await gestaltGraph.unsetGestaltActionByIdentity( - providerId, - identityId, - action, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createGestaltsRPC; diff --git a/src/client/rpcIdentities.ts b/src/client/rpcIdentities.ts deleted file mode 100644 index aa13785fb..000000000 --- a/src/client/rpcIdentities.ts +++ /dev/null @@ -1,269 +0,0 @@ -import type * as utils from './utils'; -import type { NodeManager } from '../nodes'; -import type { Sigchain } from '../sigchain'; -import type { IdentitiesManager } from '../identities'; -import type { IdentityId, ProviderId, TokenData } from '../identities/types'; -import type * as grpc from '@grpc/grpc-js'; -import * as clientErrors from './errors'; -import * as claimsUtils from '../claims/utils'; -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as identitiesPB from '../proto/js/polykey/v1/identities/identities_pb'; -import * as identitiesErrors from '../identities/errors'; -import { never } from '../utils'; - -const createIdentitiesRPC = ({ - 
identitiesManager, - sigchain, - nodeManager, - authenticate, -}: { - identitiesManager: IdentitiesManager; - sigchain: Sigchain; - nodeManager: NodeManager; - authenticate: utils.Authenticate; -}) => { - return { - identitiesAuthenticate: async ( - call: grpc.ServerWritableStream< - identitiesPB.Provider, - identitiesPB.AuthenticationProcess - >, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - const provider = identitiesManager.getProvider( - call.request.getProviderId() as ProviderId, - ); - if (provider == null) { - throw new clientErrors.ErrorClientInvalidProvider(); - } - const authFlow = provider.authenticate(); - let authFlowResult = await authFlow.next(); - if (authFlowResult.done) { - never(); - } - const authProcess = new identitiesPB.AuthenticationProcess(); - const authRequest = new identitiesPB.AuthenticationRequest(); - authRequest.setUrl(authFlowResult.value.url); - const map = authRequest.getDataMap(); - for (const [k, v] of Object.entries(authFlowResult.value.data)) { - map.set(k, v); - } - authProcess.setRequest(authRequest); - await genWritable.next(authProcess); - authFlowResult = await authFlow.next(); - if (!authFlowResult.done) { - never(); - } - const authResponse = new identitiesPB.AuthenticationResponse(); - authResponse.setIdentityId(authFlowResult.value); - authProcess.setResponse(authResponse); - await genWritable.next(authProcess); - await genWritable.next(null); - return; - } catch (e) { - await genWritable.throw(e); - return; - } - }, - identitiesTokenPut: async ( - call: grpc.ServerUnaryCall< - identitiesPB.TokenSpecific, - utilsPB.EmptyMessage - >, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const provider = call.request.getProvider(); - await 
identitiesManager.putToken( - provider?.getProviderId() as ProviderId, - provider?.getIdentityId() as IdentityId, - { accessToken: call.request.getToken() } as TokenData, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - identitiesTokenGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new identitiesPB.Token(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const tokens = await identitiesManager.getToken( - call.request.getProviderId() as ProviderId, - call.request.getIdentityId() as IdentityId, - ); - response.setToken(JSON.stringify(tokens)); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - identitiesTokenDelete: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - await identitiesManager.delToken( - call.request.getProviderId() as ProviderId, - call.request.getIdentityId() as IdentityId, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - identitiesProvidersList: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new identitiesPB.Provider(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const providers = identitiesManager.getProviders(); - response.setProviderId(JSON.stringify(Object.keys(providers))); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - identitiesInfoGetConnected: async ( - call: grpc.ServerWritableStream< - identitiesPB.ProviderSearch, - identitiesPB.Info - >, - ): 
Promise => { - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const providerId = call.request - .getProvider() - ?.getProviderId() as ProviderId; - const identityId = call.request - .getProvider() - ?.getIdentityId() as IdentityId; - const provider = identitiesManager.getProvider(providerId); - if (provider == null) - throw new clientErrors.ErrorClientInvalidProvider(); - - const identities = provider.getConnectedIdentityDatas( - identityId, - call.request.getSearchTermList(), - ); - - for await (const identity of identities) { - const identityInfoMessage = new identitiesPB.Info(); - const providerMessage = new identitiesPB.Provider(); - providerMessage.setProviderId(identity.providerId); - providerMessage.setIdentityId(identity.identityId); - identityInfoMessage.setProvider(providerMessage); - identityInfoMessage.setName(identity.name ?? ''); - identityInfoMessage.setEmail(identity.email ?? ''); - identityInfoMessage.setUrl(identity.url ?? ''); - await genWritable.next(identityInfoMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - /** - * Gets the first identityId of the local keynode. - */ - identitiesInfoGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new identitiesPB.Provider(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Get's an identity out of all identities. 
- const providerId = call.request.getProviderId() as ProviderId; - const provider = identitiesManager.getProvider(providerId); - if (provider !== undefined) { - const identities = await provider.getAuthIdentityIds(); - response.setProviderId(providerId); - if (identities.length !== 0) { - response.setIdentityId(identities[0]); - } - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - /** - * Augments the keynode with a new identity. - */ - identitiesClaim: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Check provider is authenticated - const providerId = call.request.getProviderId() as ProviderId; - const provider = identitiesManager.getProvider(providerId); - if (provider == null) - throw new clientErrors.ErrorClientInvalidProvider(); - const identityId = call.request.getIdentityId() as IdentityId; - const identities = await provider.getAuthIdentityIds(); - if (!identities.includes(identityId)) { - throw new identitiesErrors.ErrorProviderUnauthenticated(); - } - // Create identity claim on our node - const claim = await sigchain.addClaim({ - type: 'identity', - node: nodeManager.getNodeId(), - provider: providerId, - identity: identityId, - }); - // Publish claim on identity - const claimDecoded = claimsUtils.decodeClaim(claim); - await provider.publishClaim(identityId, claimDecoded); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createIdentitiesRPC; diff --git a/src/client/rpcKeys.ts b/src/client/rpcKeys.ts deleted file mode 100644 index 3363dd326..000000000 --- a/src/client/rpcKeys.ts +++ /dev/null @@ -1,249 +0,0 @@ -import type { KeyManager } from '../keys'; -import type { NodeManager } from '../nodes'; -import 
type { ForwardProxy, ReverseProxy } from '../network'; -import type { TLSConfig } from '../network/types'; -import type { GRPCServer } from '../grpc'; - -import type * as grpc from '@grpc/grpc-js'; -import type * as utils from './utils'; -import type * as sessionsPB from '../proto/js/polykey/v1/sessions/sessions_pb'; -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as keysPB from '../proto/js/polykey/v1/keys/keys_pb'; - -const createKeysRPC = ({ - keyManager, - nodeManager, - authenticate, - fwdProxy, - revProxy, - grpcServerClient, -}: { - keyManager: KeyManager; - nodeManager: NodeManager; - authenticate: utils.Authenticate; - fwdProxy: ForwardProxy; - revProxy: ReverseProxy; - grpcServerClient: GRPCServer; -}) => { - return { - keysKeyPairRoot: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.KeyPair(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const keyPair = keyManager.getRootKeyPairPem(); - response.setPublic(keyPair.publicKey); - response.setPrivate(keyPair.privateKey); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysKeyPairReset: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - // Lock the nodeManager - because we need to do a database refresh too - await nodeManager.transaction(async (nodeManager) => { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - await keyManager.resetRootKeyPair(call.request.getName()); - // Reset the TLS config with new keypair + certificate - const tlsConfig: TLSConfig = { - keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, - certChainPem: await keyManager.getRootCertChainPem(), - }; - fwdProxy.setTLSConfig(tlsConfig); - 
revProxy.setTLSConfig(tlsConfig); - grpcServerClient.setTLSConfig(tlsConfig); - // Finally, refresh the node buckets - await nodeManager.refreshBuckets(); - }); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysKeyPairRenew: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - // Lock the nodeManager - because we need to do a database refresh too - await nodeManager.transaction(async (nodeManager) => { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - await keyManager.renewRootKeyPair(call.request.getName()); - // Reset the TLS config with new keypair + certificate - const tlsConfig: TLSConfig = { - keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, - certChainPem: await keyManager.getRootCertChainPem(), - }; - fwdProxy.setTLSConfig(tlsConfig); - revProxy.setTLSConfig(tlsConfig); - grpcServerClient.setTLSConfig(tlsConfig); - // Finally, refresh the node buckets - await nodeManager.refreshBuckets(); - }); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysEncrypt: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.Crypto(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const data = await keyManager.encryptWithRootKeyPair( - Buffer.from(call.request.getData(), 'binary'), - ); - response.setData(data.toString('binary')); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysDecrypt: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.Crypto(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - 
const data = await keyManager.decryptWithRootKeyPair( - Buffer.from(call.request.getData(), 'binary'), - ); - response.setData(data.toString('binary')); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysSign: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.Crypto(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const signature = await keyManager.signWithRootKeyPair( - Buffer.from(call.request.getData(), 'binary'), - ); - response.setSignature(signature.toString('binary')); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysVerify: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const status = await keyManager.verifyWithRootKeyPair( - Buffer.from(call.request.getData(), 'binary'), - Buffer.from(call.request.getSignature(), 'binary'), - ); - response.setSuccess(status); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysPasswordChange: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - await keyManager.changePassword(call.request.getPassword()); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysCertsGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new keysPB.Certificate(); - try { - const metadata = await authenticate(call.metadata); - 
call.sendMetadata(metadata); - - const cert = keyManager.getRootCertPem(); - response.setCert(cert); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - keysCertsChainGet: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const certs: Array = await keyManager.getRootCertChainPems(); - let certMessage: keysPB.Certificate; - for (const cert of certs) { - certMessage = new keysPB.Certificate(); - certMessage.setCert(cert); - await genWritable.next(certMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - }; -}; - -export default createKeysRPC; diff --git a/src/client/rpcNodes.ts b/src/client/rpcNodes.ts deleted file mode 100644 index 34458e0ab..000000000 --- a/src/client/rpcNodes.ts +++ /dev/null @@ -1,155 +0,0 @@ -import type { NodeManager } from '../nodes'; -import type { NodeAddress } from '../nodes/types'; -import type { NotificationData } from '../notifications/types'; -import type { NotificationsManager } from '../notifications'; - -import type * as grpc from '@grpc/grpc-js'; -import type * as utils from '../client/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import { utils as nodesUtils, errors as nodesErrors } from '../nodes'; -import * as grpcUtils from '../grpc/utils'; -import * as networkUtils from '../network/utils'; - -const createNodesRPC = ({ - nodeManager, - authenticate, - notificationsManager, -}: { - nodeManager: NodeManager; - authenticate: utils.Authenticate; - notificationsManager: NotificationsManager; -}) => { - return { - /** - * Adds a node ID -> node address mapping into the buckets database. 
- * This is an unrestricted add: no validity checks are made for the correctness - * of the passed ID or host/port. - */ - nodesAdd: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Validate the passed node ID and host - const validNodeId = nodesUtils.isNodeId(call.request.getNodeId()); - if (!validNodeId) { - throw new nodesErrors.ErrorInvalidNodeId(); - } - const validHost = networkUtils.isValidHost( - call.request.getAddress()!.getHost(), - ); - if (!validHost) { - throw new nodesErrors.ErrorInvalidHost(); - } - await nodeManager.setNode( - nodesUtils.makeNodeId(call.request.getNodeId()), - { - host: call.request.getAddress()!.getHost(), - port: call.request.getAddress()!.getPort(), - } as NodeAddress, - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - /** - * Checks if a remote node is online. - */ - nodesPing: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const status = await nodeManager.pingNode( - nodesUtils.makeNodeId(call.request.getNodeId()), - ); - response.setSuccess(status); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - /** - * Checks whether there is an existing Gestalt Invitation from the other node. - * If not, send an invitation, if so, create a cryptolink claim between the - * other node and host node. 
- */ - nodesClaim: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const remoteNodeId = nodesUtils.makeNodeId(call.request.getNodeId()); - const gestaltInvite = await notificationsManager.findGestaltInvite( - remoteNodeId, - ); - - // Check first whether there is an existing gestalt invite from the remote node - // or if we want to force an invitation rather than a claim - if (gestaltInvite === undefined || call.request.getForceInvite()) { - const data = { - type: 'GestaltInvite', - } as NotificationData; - await notificationsManager.sendNotification(remoteNodeId, data); - response.setSuccess(false); - } else { - // There is an existing invitation, and we want to claim the node - await nodeManager.claimNode(remoteNodeId); - response.setSuccess(true); - } - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - /** - * Attempts to get the node address of a provided node ID (by contacting - * keynodes in the wider Polykey network). 
- * @throws ErrorNodeGraphNodeNotFound if node address cannot be found - */ - nodesFind: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new nodesPB.NodeAddress(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeId = nodesUtils.makeNodeId(call.request.getNodeId()); - const address = await nodeManager.findNode(nodeId); - response - .setNodeId(nodeId) - .setAddress( - new nodesPB.Address().setHost(address.host).setPort(address.port), - ); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createNodesRPC; diff --git a/src/client/rpcNotifications.ts b/src/client/rpcNotifications.ts deleted file mode 100644 index 62507d444..000000000 --- a/src/client/rpcNotifications.ts +++ /dev/null @@ -1,123 +0,0 @@ -import type { NotificationsManager } from '../notifications'; - -import type * as grpc from '@grpc/grpc-js'; -import type * as utils from './utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as notificationsPB from '../proto/js/polykey/v1/notifications/notifications_pb'; -import * as grpcUtils from '../grpc/utils'; -import * as notificationsUtils from '../notifications/utils'; -import { makeNodeId } from '../nodes/utils'; - -const createNotificationsRPC = ({ - notificationsManager, - authenticate, -}: { - notificationsManager: NotificationsManager; - authenticate: utils.Authenticate; -}) => { - return { - notificationsSend: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const receivingId = makeNodeId(call.request.getReceiverId()); - const data = { - type: 'General', - message: call.request.getData()?.getMessage(), - }; - const validatedData = - 
notificationsUtils.validateGeneralNotification(data); - await notificationsManager.sendNotification(receivingId, validatedData); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - notificationsRead: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new notificationsPB.List(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const unread = call.request.getUnread(); - const order = call.request.getOrder() as 'newest' | 'oldest'; - const numberField = call.request.getNumber(); - let number: number | 'all'; - if (numberField === 'all') { - number = numberField; - } else { - number = parseInt(numberField); - } - - const notifications = await notificationsManager.readNotifications({ - unread, - number, - order, - }); - - const notifMessages: Array = []; - for (const notif of notifications) { - const notificationsMessage = new notificationsPB.Notification(); - switch (notif.data.type) { - case 'General': { - const generalMessage = new notificationsPB.General(); - generalMessage.setMessage(notif.data.message); - notificationsMessage.setGeneral(generalMessage); - break; - } - case 'GestaltInvite': { - notificationsMessage.setGestaltInvite('GestaltInvite'); - break; - } - case 'VaultShare': { - const vaultShareMessage = new notificationsPB.Share(); - vaultShareMessage.setVaultId(notif.data.vaultId); - vaultShareMessage.setVaultName(notif.data.vaultName); - vaultShareMessage.setActionsList(Object.keys(notif.data.actions)); - notificationsMessage.setVaultShare(vaultShareMessage); - break; - } - } - notificationsMessage.setSenderId(notif.senderId); - notificationsMessage.setIsRead(notif.isRead); - notifMessages.push(notificationsMessage); - } - response.setNotificationList(notifMessages); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, 
- notificationsClear: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - await notificationsManager.clearNotifications(); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createNotificationsRPC; diff --git a/src/client/rpcSessions.ts b/src/client/rpcSessions.ts deleted file mode 100644 index e535c5f67..000000000 --- a/src/client/rpcSessions.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { SessionManager } from '../sessions'; -import type * as grpc from '@grpc/grpc-js'; -import type * as utils from './utils'; -import * as grpcUtils from '../grpc/utils'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; - -const createSessionsRPC = ({ - authenticate, - sessionManager, -}: { - authenticate: utils.Authenticate; - sessionManager: SessionManager; -}) => { - return { - sessionsUnlock: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - sessionsLockAll: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.EmptyMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - await sessionManager.resetKey(); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createSessionsRPC; diff --git a/src/client/rpcStatus.ts b/src/client/rpcStatus.ts deleted file mode 100644 index d0d1e5480..000000000 --- 
a/src/client/rpcStatus.ts +++ /dev/null @@ -1,60 +0,0 @@ -import type * as grpc from '@grpc/grpc-js'; -import type { Authenticate } from './utils'; -import type { KeyManager } from '../keys'; -import type { GRPCServer } from '../grpc'; -import type { ForwardProxy, ReverseProxy } from '../network'; -import type * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import process from 'process'; -import * as grpcUtils from '../grpc/utils'; -import * as agentPB from '../proto/js/polykey/v1/agent/agent_pb'; - -const createStatusRPC = ({ - authenticate, - keyManager, - grpcServerClient, - grpcServerAgent, - fwdProxy, - revProxy, -}: { - authenticate: Authenticate; - keyManager: KeyManager; - grpcServerClient: GRPCServer; - grpcServerAgent: GRPCServer; - fwdProxy: ForwardProxy; - revProxy: ReverseProxy; -}) => { - return { - agentStatus: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new agentPB.InfoMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - response.setPid(process.pid); - response.setNodeId(keyManager.getNodeId()); - response.setClientHost(grpcServerClient.host); - response.setClientPort(grpcServerClient.port); - response.setIngressHost(revProxy.ingressHost); - response.setIngressPort(revProxy.ingressPort); - response.setEgressHost(fwdProxy.egressHost); - response.setEgressPort(fwdProxy.egressPort); - response.setAgentHost(grpcServerAgent.host); - response.setAgentPort(grpcServerAgent.port); - response.setProxyHost(fwdProxy.proxyHost); - response.setProxyPort(fwdProxy.proxyPort); - response.setRootPublicKeyPem(keyManager.getRootKeyPairPem().publicKey); - response.setRootCertPem(keyManager.getRootCertPem()); - response.setRootCertChainPem(await keyManager.getRootCertChainPem()); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - }; -}; - -export default createStatusRPC; diff 
--git a/src/client/rpcVaults.ts b/src/client/rpcVaults.ts deleted file mode 100644 index 16be5875b..000000000 --- a/src/client/rpcVaults.ts +++ /dev/null @@ -1,690 +0,0 @@ -import type { Vault, VaultId, VaultName } from '../vaults/types'; -import type { VaultManager } from '../vaults'; -import type { FileSystem } from '../types'; - -import type * as utils from './utils'; -import type * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import { utils as idUtils } from '@matrixai/id'; -import * as grpc from '@grpc/grpc-js'; -import * as grpcUtils from '../grpc/utils'; -import { - vaultOps, - utils as vaultsUtils, - errors as vaultsErrors, -} from '../vaults'; -import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as secretsPB from '../proto/js/polykey/v1/secrets/secrets_pb'; - -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - -const createVaultRPC = ({ - vaultManager, - authenticate, - fs, -}: { - vaultManager: VaultManager; - authenticate: utils.Authenticate; - fs: FileSystem; -}) => { - return { - vaultsList: async ( - call: grpc.ServerWritableStream, - ): Promise => { - // Call.on('error', (e) => console.error(e)); - // call.on('close', () => console.log('Got close')); - // call.on('finish', () => console.log('Got finish')); - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaults = await vaultManager.listVaults(); - for await (const [vaultName, vaultId] of vaults) { - const vaultListMessage = new vaultsPB.List(); - vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); - await genWritable.next(((_) => vaultListMessage)()); - } - await genWritable.next(null); - return; - } catch (err) { 
- await genWritable.throw(err); - return; - } - }, - vaultsCreate: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.Vault(); - let vault: Vault; - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - vault = await vaultManager.createVault( - call.request.getNameOrId() as VaultName, - ); - response.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsRename: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.Vault(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const newName = call.request.getNewName() as VaultName; - await vaultManager.renameVault(vaultId, newName); - response.setNameOrId(vaultsUtils.makeVaultIdPretty(vaultId)); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsDelete: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const vaultMessage = call.request; - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new 
vaultsErrors.ErrorVaultUndefined(); - await vaultManager.destroyVault(vaultId); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsClone: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Vault id - // const vaultId = parseVaultInput(vaultMessage, vaultManager); - // Node id - // const id = makeNodeId(nodeMessage.getNodeId()); - - throw Error('Not implemented'); - // FIXME, not fully implemented - // await vaultManager.cloneVault(vaultId, id); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsPull: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Vault name - // const vaultId = await parseVaultInput(vaultMessage, vaultManager); - // Node id - // const id = makeNodeId(nodeMessage.getNodeId()); - - // Await vaultManager.pullVault(vaultId, id); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - 
callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsScan: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - // Const nodeId = makeNodeId(call.request.getNodeId()); - - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaults = await vaultManager.listVaults(); - vaults.forEach(async (vaultId, vaultName) => { - const vaultListMessage = new vaultsPB.List(); - vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); - await genWritable.next(vaultListMessage); - }); - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - vaultsSecretsList: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request; - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secrets = await vaultOps.listSecrets(vault); - let secretMessage: secretsPB.Secret; - for (const secret of secrets) { - secretMessage = new secretsPB.Secret(); - secretMessage.setSecretName(secret); - await genWritable.next(secretMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - vaultsSecretsMkdir: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMkdirMessge = call.request; - const 
vaultMessage = vaultMkdirMessge.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { - recursive: vaultMkdirMessge.getRecursive(), - }); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsStat: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.Stat(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - // Const vaultMessage = call.request; - // Const id = await parseVaultInput(vaultMessage, vaultManager); - // const vault = await vaultManager.openVault(id); - // FIXME, reimplement this. 
- throw Error('Not Implemented'); - // Const stats = await vaultManager.vaultStats(id); - // response.setStats(JSON.stringify(stats));); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsDelete: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secretName = call.request.getSecretName(); - await vaultOps.deleteSecret(vault, secretName); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsEdit: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const secretMessage = call.request; - if (secretMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const vaultMessage = secretMessage.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await 
vaultManager.openVault(vaultId); - const secretName = secretMessage.getSecretName(); - const secretContent = Buffer.from(secretMessage.getSecretContent()); - await vaultOps.updateSecret(vault, secretName, secretContent); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsGet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new secretsPB.Secret(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secretName = call.request.getSecretName(); - const secretContent = await vaultOps.getSecret(vault, secretName); - - response.setSecretContent(secretContent); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsRename: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const secretMessage = call.request.getOldSecret(); - if (!secretMessage) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const vaultMessage = secretMessage.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if 
(!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const oldSecret = secretMessage.getSecretName(); - const newSecret = call.request.getNewName(); - await vaultOps.renameSecret(vault, oldSecret, newSecret); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsNew: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secret = call.request.getSecretName(); - const content = Buffer.from(call.request.getSecretContent()); - await vaultOps.addSecret(vault, secret, content); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsSecretsNewDir: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new utilsPB.StatusMessage(); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = 
decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secretsPath = call.request.getSecretDirectory(); - await vaultOps.addSecretDirectory(vault, secretsPath, fs); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsPermissionsSet: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const node = makeNodeId(nodeMessage.getNodeId()); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - throw Error('Not Implemented'); - // Await vaultManager.setVaultPermissions(node, id); // FIXME - const response = new utilsPB.StatusMessage(); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsPermissionsUnset: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const node = makeNodeId(nodeMessage.getNodeId()); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - throw Error('Not implemented'); - // Await 
vaultManager.unsetVaultPermissions(node, id); // FIXME - const response = new utilsPB.StatusMessage(); - response.setSuccess(true); - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsPermissions: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - // Const node = nodeMessage.getNodeId(); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - // let perms: Record; - throw Error('Not implemented'); - // FIXME - // if (isNodeId(node)) { - // Perms = await vaultManager.getVaultPermissions(id, node); - // } else { - // Perms = await vaultManager.getVaultPermissions(id); - // } - // const permissionMessage = new vaultsPB.Permission(); - // For (const nodeId in perms) { - // permissionMessage.setNodeId(nodeId); - // if (perms[nodeId]['pull'] !== undefined) { - // permissionMessage.setAction('pull'); - // } - // await genWritable.next(permissionMessage); - // } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - vaultsVersion: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.VersionResult(); - try { - // Checking session token - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const vaultsVersionMessage = call.request; - - // Getting vault ID - const vaultMessage = vaultsVersionMessage.getVault(); - if (vaultMessage == null) { - callback({ code: 
grpc.status.NOT_FOUND }, null); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - - // Doing the deed - const vault = await vaultManager.openVault(vaultId); - const latestOid = (await vault.log())[0].oid; - const versionId = vaultsVersionMessage.getVersionId(); - - await vault.version(versionId); - const currentVersionId = (await vault.log(0, versionId))[0]?.oid; - - // Checking if latest version ID. - const isLatestVersion = latestOid === currentVersionId; - - // Creating message - response.setIsLatestVersion(isLatestVersion); - - // Sending message - callback(null, response); - return; - } catch (err) { - callback(grpcUtils.fromError(err), null); - return; - } - }, - vaultsLog: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - // Getting the vault. - const vaultsLogMessage = call.request; - const vaultMessage = vaultsLogMessage.getVault(); - if (vaultMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - const nameOrId = vaultMessage.getNameOrId(); - let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - - // Getting the log - const depth = vaultsLogMessage.getLogDepth(); - let commitId: string | undefined = vaultsLogMessage.getCommitId(); - commitId = commitId ? 
commitId : undefined; - const log = await vault.log(depth, commitId); - - const vaultsLogEntryMessage = new vaultsPB.LogEntry(); - for (const entry of log) { - vaultsLogEntryMessage.setOid(entry.oid); - vaultsLogEntryMessage.setCommitter(entry.committer); - vaultsLogEntryMessage.setTimeStamp(entry.timeStamp); - vaultsLogEntryMessage.setMessage(entry.message); - await genWritable.next(vaultsLogEntryMessage); - } - await genWritable.next(null); - return; - } catch (err) { - await genWritable.throw(err); - return; - } - }, - }; -}; - -export default createVaultRPC; diff --git a/src/client/service/agentLockAll.ts b/src/client/service/agentLockAll.ts new file mode 100644 index 000000000..3641e1f71 --- /dev/null +++ b/src/client/service/agentLockAll.ts @@ -0,0 +1,32 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { SessionManager } from '../../sessions'; +import * as grpcUtils from '../../grpc/utils'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function agentLockAll({ + sessionManager, + authenticate, +}: { + sessionManager: SessionManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + await sessionManager.resetKey(); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default agentLockAll; diff --git a/src/client/service/agentStatus.ts b/src/client/service/agentStatus.ts new file mode 100644 index 000000000..e71cf5a82 --- /dev/null +++ b/src/client/service/agentStatus.ts @@ -0,0 +1,58 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type { GRPCServer } from '../../grpc'; +import 
type { ForwardProxy, ReverseProxy } from '../../network'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import process from 'process'; +import * as grpcUtils from '../../grpc/utils'; +import * as agentPB from '../../proto/js/polykey/v1/agent/agent_pb'; + +function agentStatus({ + authenticate, + keyManager, + grpcServerClient, + grpcServerAgent, + fwdProxy, + revProxy, +}: { + authenticate: Authenticate; + keyManager: KeyManager; + grpcServerClient: GRPCServer; + grpcServerAgent: GRPCServer; + fwdProxy: ForwardProxy; + revProxy: ReverseProxy; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new agentPB.InfoMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + response.setPid(process.pid); + response.setNodeId(keyManager.getNodeId()); + response.setClientHost(grpcServerClient.host); + response.setClientPort(grpcServerClient.port); + response.setIngressHost(revProxy.getIngressHost()); + response.setIngressPort(revProxy.getIngressPort()); + response.setEgressHost(fwdProxy.getEgressHost()); + response.setEgressPort(fwdProxy.getEgressPort()); + response.setAgentHost(grpcServerAgent.host); + response.setAgentPort(grpcServerAgent.port); + response.setProxyHost(fwdProxy.getProxyHost()); + response.setProxyPort(fwdProxy.getProxyPort()); + response.setRootPublicKeyPem(keyManager.getRootKeyPairPem().publicKey); + response.setRootCertPem(keyManager.getRootCertPem()); + response.setRootCertChainPem(await keyManager.getRootCertChainPem()); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default agentStatus; diff --git a/src/client/service/agentStop.ts b/src/client/service/agentStop.ts new file mode 100644 index 000000000..94f3e0ff3 --- /dev/null +++ b/src/client/service/agentStop.ts @@ -0,0 +1,40 @@ +import type * as grpc from '@grpc/grpc-js'; 
+import type { Authenticate } from '../types'; +import type PolykeyAgent from '../../PolykeyAgent'; +import { status, running } from '@matrixai/async-init'; +import * as grpcUtils from '../../grpc/utils'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function agentStop({ + authenticate, + pkAgent, +}: { + authenticate: Authenticate; + pkAgent: PolykeyAgent; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + // If not running or in stopping status, then respond successfully + if (!pkAgent[running] || pkAgent[status] === 'stopping') { + callback(null, response); + return; + } + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Respond first to close the GRPC connection + callback(null, response); + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + // Stop is called after GRPC resources are cleared + await pkAgent.stop(); + return; + }; +} + +export default agentStop; diff --git a/src/client/service/agentUnlock.ts b/src/client/service/agentUnlock.ts new file mode 100644 index 000000000..1ddef6f11 --- /dev/null +++ b/src/client/service/agentUnlock.ts @@ -0,0 +1,24 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import * as grpcUtils from '../../grpc/utils'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function agentUnlock({ authenticate }: { authenticate: Authenticate }) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default agentUnlock; diff --git 
a/src/client/service/gestaltsActionsGetByIdentity.ts b/src/client/service/gestaltsActionsGetByIdentity.ts new file mode 100644 index 000000000..6c039cdec --- /dev/null +++ b/src/client/service/gestaltsActionsGetByIdentity.ts @@ -0,0 +1,48 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; + +function gestaltsActionsGetByIdentity({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new permissionsPB.Actions(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const providerId = info.getProviderId() as ProviderId; + const identityId = info.getIdentityId() as IdentityId; + const result = await gestaltGraph.getGestaltActionsByIdentity( + providerId, + identityId, + ); + if (result == null) { + // Node doesn't exist, so no permissions. might throw error instead TBD. 
+ response.setActionList([]); + } else { + // Contains permission + const actions = Object.keys(result); + response.setActionList(actions); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsGetByIdentity; diff --git a/src/client/service/gestaltsActionsGetByNode.ts b/src/client/service/gestaltsActionsGetByNode.ts new file mode 100644 index 000000000..9bc6d5e0b --- /dev/null +++ b/src/client/service/gestaltsActionsGetByNode.ts @@ -0,0 +1,45 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; + +function gestaltsActionsGetByNode({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new permissionsPB.Actions(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const result = await gestaltGraph.getGestaltActionsByNode( + nodesUtils.makeNodeId(info.getNodeId()), + ); + if (result == null) { + // Node doesn't exist, so no permissions. might throw error instead TBD. 
+ response.setActionList([]); + } else { + // Contains permission + const actions = Object.keys(result); + response.setActionList(actions); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsGetByNode; diff --git a/src/client/service/gestaltsActionsSetByIdentity.ts b/src/client/service/gestaltsActionsSetByIdentity.ts new file mode 100644 index 000000000..976e2450f --- /dev/null +++ b/src/client/service/gestaltsActionsSetByIdentity.ts @@ -0,0 +1,44 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as gestaltsUtils } from '../../gestalts'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function gestaltsActionsSetByIdentity({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Setting the action. 
+ const action = gestaltsUtils.makeGestaltAction(info.getAction()); + const providerId = info.getIdentity()?.getProviderId() as ProviderId; + const identityId = info.getIdentity()?.getIdentityId() as IdentityId; + await gestaltGraph.setGestaltActionByIdentity( + providerId, + identityId, + action, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsSetByIdentity; diff --git a/src/client/service/gestaltsActionsSetByNode.ts b/src/client/service/gestaltsActionsSetByNode.ts new file mode 100644 index 000000000..5c1303cdf --- /dev/null +++ b/src/client/service/gestaltsActionsSetByNode.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as gestaltsUtils } from '../../gestalts'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function gestaltsActionsSetByNode({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Setting the action. 
+ const action = gestaltsUtils.makeGestaltAction(info.getAction()); + const nodeId = nodesUtils.makeNodeId(info.getNode()?.getNodeId()); + await gestaltGraph.setGestaltActionByNode(nodeId, action); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsSetByNode; diff --git a/src/client/service/gestaltsActionsUnsetByIdentity.ts b/src/client/service/gestaltsActionsUnsetByIdentity.ts new file mode 100644 index 000000000..7d6bedd4e --- /dev/null +++ b/src/client/service/gestaltsActionsUnsetByIdentity.ts @@ -0,0 +1,44 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as gestaltsUtils } from '../../gestalts'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function gestaltsActionsUnsetByIdentity({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Setting the action. 
+ const action = gestaltsUtils.makeGestaltAction(info.getAction()); + const providerId = info.getIdentity()?.getProviderId() as ProviderId; + const identityId = info.getIdentity()?.getIdentityId() as IdentityId; + await gestaltGraph.unsetGestaltActionByIdentity( + providerId, + identityId, + action, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsUnsetByIdentity; diff --git a/src/client/service/gestaltsActionsUnsetByNode.ts b/src/client/service/gestaltsActionsUnsetByNode.ts new file mode 100644 index 000000000..1f9e3c297 --- /dev/null +++ b/src/client/service/gestaltsActionsUnsetByNode.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type * as permissionsPB from '../../proto/js/polykey/v1/permissions/permissions_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as gestaltsUtils } from '../../gestalts'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function gestaltsActionsUnsetByNode({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Setting the action. 
+ const action = gestaltsUtils.makeGestaltAction(info.getAction()); + const nodeId = nodesUtils.makeNodeId(info.getNode()?.getNodeId()); + await gestaltGraph.unsetGestaltActionByNode(nodeId, action); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsActionsUnsetByNode; diff --git a/src/client/service/gestaltsDiscoveryByIdentity.ts b/src/client/service/gestaltsDiscoveryByIdentity.ts new file mode 100644 index 000000000..11c54fb89 --- /dev/null +++ b/src/client/service/gestaltsDiscoveryByIdentity.ts @@ -0,0 +1,42 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { Discovery } from '../../discovery'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function gestaltsDiscoveryByIdentity({ + authenticate, + discovery, +}: { + authenticate: Authenticate; + discovery: Discovery; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Constructing identity info. 
+ const gen = discovery.discoverGestaltByIdentity( + info.getProviderId() as ProviderId, + info.getIdentityId() as IdentityId, + ); + for await (const _ of gen) { + // Empty + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsDiscoveryByIdentity; diff --git a/src/client/service/gestaltsDiscoveryByNode.ts b/src/client/service/gestaltsDiscoveryByNode.ts new file mode 100644 index 000000000..507215b8c --- /dev/null +++ b/src/client/service/gestaltsDiscoveryByNode.ts @@ -0,0 +1,41 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { Discovery } from '../../discovery'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function gestaltsDiscoveryByNode({ + authenticate, + discovery, +}: { + authenticate: Authenticate; + discovery: Discovery; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const info = call.request; + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Constructing identity info. 
+ const gen = discovery.discoverGestaltByNode( + nodesUtils.makeNodeId(info.getNodeId()), + ); + for await (const _ of gen) { + // Empty + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsDiscoveryByNode; diff --git a/src/client/service/gestaltsGestaltGetByIdentity.ts b/src/client/service/gestaltsGestaltGetByIdentity.ts new file mode 100644 index 000000000..eaf2be7cb --- /dev/null +++ b/src/client/service/gestaltsGestaltGetByIdentity.ts @@ -0,0 +1,40 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; + +function gestaltsGestaltGetByIdentity({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new gestaltsPB.Graph(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const gestalt = await gestaltGraph.getGestaltByIdentity( + call.request.getProviderId() as ProviderId, + call.request.getIdentityId() as IdentityId, + ); + if (gestalt != null) { + response.setGestaltGraph(JSON.stringify(gestalt)); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsGestaltGetByIdentity; diff --git a/src/client/service/gestaltsGestaltGetByNode.ts b/src/client/service/gestaltsGestaltGetByNode.ts new file mode 100644 index 000000000..beeab3df3 --- /dev/null +++ 
b/src/client/service/gestaltsGestaltGetByNode.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; + +function gestaltsGestaltGetByNode({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new gestaltsPB.Graph(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const gestalt = await gestaltGraph.getGestaltByNode( + nodesUtils.makeNodeId(call.request.getNodeId()), + ); + if (gestalt != null) { + response.setGestaltGraph(JSON.stringify(gestalt)); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default gestaltsGestaltGetByNode; diff --git a/src/client/service/gestaltsGestaltList.ts b/src/client/service/gestaltsGestaltList.ts new file mode 100644 index 000000000..e4daf338f --- /dev/null +++ b/src/client/service/gestaltsGestaltList.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { GestaltGraph } from '../../gestalts'; +import type { Gestalt } from '../../gestalts/types'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as gestaltsPB from '../../proto/js/polykey/v1/gestalts/gestalts_pb'; + +function gestaltsGestaltList({ + authenticate, + gestaltGraph, +}: { + authenticate: Authenticate; + gestaltGraph: GestaltGraph; +}) { + return async ( + call: 
grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + let gestaltMessage: gestaltsPB.Gestalt; + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const certs: Array = await gestaltGraph.getGestalts(); + for (const cert of certs) { + gestaltMessage = new gestaltsPB.Gestalt(); + gestaltMessage.setName(JSON.stringify(cert)); + await genWritable.next(gestaltMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default gestaltsGestaltList; diff --git a/src/client/service/identitiesAuthenticate.ts b/src/client/service/identitiesAuthenticate.ts new file mode 100644 index 000000000..6cb12f941 --- /dev/null +++ b/src/client/service/identitiesAuthenticate.ts @@ -0,0 +1,64 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { ProviderId } from '../../identities/types'; +import * as clientErrors from '../errors'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import { never } from '../../utils'; + +function identitiesAuthenticate({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream< + identitiesPB.Provider, + identitiesPB.AuthenticationProcess + >, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const provider = identitiesManager.getProvider( + call.request.getProviderId() as ProviderId, + ); + if (provider == null) { + throw new clientErrors.ErrorClientInvalidProvider(); + } + const authFlow = provider.authenticate(); + let authFlowResult = await authFlow.next(); + 
if (authFlowResult.done) { + never(); + } + const authProcess = new identitiesPB.AuthenticationProcess(); + const authRequest = new identitiesPB.AuthenticationRequest(); + authRequest.setUrl(authFlowResult.value.url); + const map = authRequest.getDataMap(); + for (const [k, v] of Object.entries(authFlowResult.value.data)) { + map.set(k, v); + } + authProcess.setRequest(authRequest); + await genWritable.next(authProcess); + authFlowResult = await authFlow.next(); + if (!authFlowResult.done) { + never(); + } + const authResponse = new identitiesPB.AuthenticationResponse(); + authResponse.setIdentityId(authFlowResult.value); + authProcess.setResponse(authResponse); + await genWritable.next(authProcess); + await genWritable.next(null); + return; + } catch (e) { + await genWritable.throw(e); + return; + } + }; +} + +export default identitiesAuthenticate; diff --git a/src/client/service/identitiesClaim.ts b/src/client/service/identitiesClaim.ts new file mode 100644 index 000000000..ba5d8740d --- /dev/null +++ b/src/client/service/identitiesClaim.ts @@ -0,0 +1,64 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import type { Sigchain } from '../../sigchain'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import * as clientErrors from '../errors'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as claimsUtils } from '../../claims'; +import { errors as identitiesErrors } from '../../identities'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +/** + * Augments the keynode with a new identity. 
+ */ +function identitiesClaim({ + identitiesManager, + sigchain, + nodeManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + sigchain: Sigchain; + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Check provider is authenticated + const providerId = call.request.getProviderId() as ProviderId; + const provider = identitiesManager.getProvider(providerId); + if (provider == null) throw new clientErrors.ErrorClientInvalidProvider(); + const identityId = call.request.getIdentityId() as IdentityId; + const identities = await provider.getAuthIdentityIds(); + if (!identities.includes(identityId)) { + throw new identitiesErrors.ErrorProviderUnauthenticated(); + } + // Create identity claim on our node + const [, claim] = await sigchain.addClaim({ + type: 'identity', + node: nodeManager.getNodeId(), + provider: providerId, + identity: identityId, + }); + // Publish claim on identity + const claimDecoded = claimsUtils.decodeClaim(claim); + await provider.publishClaim(identityId, claimDecoded); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesClaim; diff --git a/src/client/service/identitiesInfoGet.ts b/src/client/service/identitiesInfoGet.ts new file mode 100644 index 000000000..1c16c10c2 --- /dev/null +++ b/src/client/service/identitiesInfoGet.ts @@ -0,0 +1,49 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import type { Sigchain } from '../../sigchain'; +import type { IdentitiesManager } from '../../identities'; +import type { ProviderId } from '../../identities/types'; +import { utils as grpcUtils } from 
'../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; + +/** + * Gets the first identityId of the local keynode. + */ +function identitiesInfoGet({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + sigchain: Sigchain; + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new identitiesPB.Provider(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Gets an identity out of all identities. + const providerId = call.request.getProviderId() as ProviderId; + const provider = identitiesManager.getProvider(providerId); + if (provider !== undefined) { + const identities = await provider.getAuthIdentityIds(); + response.setProviderId(providerId); + if (identities.length !== 0) { + response.setIdentityId(identities[0]); + } + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesInfoGet; diff --git a/src/client/service/identitiesInfoGetConnected.ts b/src/client/service/identitiesInfoGetConnected.ts new file mode 100644 index 000000000..683f0702d --- /dev/null +++ b/src/client/service/identitiesInfoGetConnected.ts @@ -0,0 +1,60 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import * as clientErrors from '../errors'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; + +function identitiesInfoGetConnected({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: 
grpc.ServerWritableStream< + identitiesPB.ProviderSearch, + identitiesPB.Info + >, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const providerId = call.request + .getProvider() + ?.getProviderId() as ProviderId; + const identityId = call.request + .getProvider() + ?.getIdentityId() as IdentityId; + const provider = identitiesManager.getProvider(providerId); + if (provider == null) throw new clientErrors.ErrorClientInvalidProvider(); + + const identities = provider.getConnectedIdentityDatas( + identityId, + call.request.getSearchTermList(), + ); + + for await (const identity of identities) { + const identityInfoMessage = new identitiesPB.Info(); + const providerMessage = new identitiesPB.Provider(); + providerMessage.setProviderId(identity.providerId); + providerMessage.setIdentityId(identity.identityId); + identityInfoMessage.setProvider(providerMessage); + identityInfoMessage.setName(identity.name ?? ''); + identityInfoMessage.setEmail(identity.email ?? ''); + identityInfoMessage.setUrl(identity.url ?? 
''); + await genWritable.next(identityInfoMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default identitiesInfoGetConnected; diff --git a/src/client/service/identitiesProvidersList.ts b/src/client/service/identitiesProvidersList.ts new file mode 100644 index 000000000..62f883d65 --- /dev/null +++ b/src/client/service/identitiesProvidersList.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; + +function identitiesProvidersList({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new identitiesPB.Provider(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const providers = identitiesManager.getProviders(); + response.setProviderId(JSON.stringify(Object.keys(providers))); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesProvidersList; diff --git a/src/client/service/identitiesTokenDelete.ts b/src/client/service/identitiesTokenDelete.ts new file mode 100644 index 000000000..383f0d51d --- /dev/null +++ b/src/client/service/identitiesTokenDelete.ts @@ -0,0 +1,37 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import type * as identitiesPB from 
'../../proto/js/polykey/v1/identities/identities_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function identitiesTokenDelete({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + await identitiesManager.delToken( + call.request.getProviderId() as ProviderId, + call.request.getIdentityId() as IdentityId, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesTokenDelete; diff --git a/src/client/service/identitiesTokenGet.ts b/src/client/service/identitiesTokenGet.ts new file mode 100644 index 000000000..bb1f614d0 --- /dev/null +++ b/src/client/service/identitiesTokenGet.ts @@ -0,0 +1,37 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId } from '../../identities/types'; +import { utils as grpcUtils } from '../../grpc'; +import * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; + +function identitiesTokenGet({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new identitiesPB.Token(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const tokens = await identitiesManager.getToken( + call.request.getProviderId() as ProviderId, + call.request.getIdentityId() as IdentityId, + ); + 
response.setToken(JSON.stringify(tokens)); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesTokenGet; diff --git a/src/client/service/identitiesTokenPut.ts b/src/client/service/identitiesTokenPut.ts new file mode 100644 index 000000000..b447368b5 --- /dev/null +++ b/src/client/service/identitiesTokenPut.ts @@ -0,0 +1,42 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { IdentitiesManager } from '../../identities'; +import type { IdentityId, ProviderId, TokenData } from '../../identities/types'; +import type * as identitiesPB from '../../proto/js/polykey/v1/identities/identities_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function identitiesTokenPut({ + identitiesManager, + authenticate, +}: { + identitiesManager: IdentitiesManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall< + identitiesPB.TokenSpecific, + utilsPB.EmptyMessage + >, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const provider = call.request.getProvider(); + await identitiesManager.putToken( + provider?.getProviderId() as ProviderId, + provider?.getIdentityId() as IdentityId, + { accessToken: call.request.getToken() } as TokenData, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default identitiesTokenPut; diff --git a/src/client/service/index.ts b/src/client/service/index.ts new file mode 100644 index 000000000..88a6ca861 --- /dev/null +++ b/src/client/service/index.ts @@ -0,0 +1,181 @@ +import type PolykeyAgent from '../../PolykeyAgent'; +import type { KeyManager } from '../../keys'; 
+import type { VaultManager } from '../../vaults'; +import type { NodeManager } from '../../nodes'; +import type { IdentitiesManager } from '../../identities'; +import type { GestaltGraph } from '../../gestalts'; +import type { SessionManager } from '../../sessions'; +import type { NotificationsManager } from '../../notifications'; +import type { Discovery } from '../../discovery'; +import type { Sigchain } from '../../sigchain'; +import type { GRPCServer } from '../../grpc'; +import type { ForwardProxy, ReverseProxy } from '../../network'; +import type { IClientServiceServer } from '../../proto/js/polykey/v1/client_service_grpc_pb'; +import type { FileSystem } from '../../types'; +import Logger from '@matrixai/logger'; +import agentLockAll from './agentLockAll'; +import agentStatus from './agentStatus'; +import agentStop from './agentStop'; +import agentUnlock from './agentUnlock'; +import gestaltsActionsGetByIdentity from './gestaltsActionsGetByIdentity'; +import gestaltsActionsGetByNode from './gestaltsActionsGetByNode'; +import gestaltsActionsSetByIdentity from './gestaltsActionsSetByIdentity'; +import gestaltsActionsSetByNode from './gestaltsActionsSetByNode'; +import gestaltsActionsUnsetByIdentity from './gestaltsActionsUnsetByIdentity'; +import gestaltsActionsUnsetByNode from './gestaltsActionsUnsetByNode'; +import gestaltsDiscoveryByIdentity from './gestaltsDiscoveryByIdentity'; +import gestaltsDiscoveryByNode from './gestaltsDiscoveryByNode'; +import gestaltsGestaltGetByIdentity from './gestaltsGestaltGetByIdentity'; +import gestaltsGestaltGetByNode from './gestaltsGestaltGetByNode'; +import gestaltsGestaltList from './gestaltsGestaltList'; +import identitiesAuthenticate from './identitiesAuthenticate'; +import identitiesClaim from './identitiesClaim'; +import identitiesInfoGet from './identitiesInfoGet'; +import identitiesInfoGetConnected from './identitiesInfoGetConnected'; +import identitiesProvidersList from './identitiesProvidersList'; +import 
identitiesTokenDelete from './identitiesTokenDelete'; +import identitiesTokenGet from './identitiesTokenGet'; +import identitiesTokenPut from './identitiesTokenPut'; +import keysCertsChainGet from './keysCertsChainGet'; +import keysCertsGet from './keysCertsGet'; +import keysDecrypt from './keysDecrypt'; +import keysEncrypt from './keysEncrypt'; +import keysKeyPairRenew from './keysKeyPairRenew'; +import keysKeyPairReset from './keysKeyPairReset'; +import keysKeyPairRoot from './keysKeyPairRoot'; +import keysPasswordChange from './keysPasswordChange'; +import keysSign from './keysSign'; +import keysVerify from './keysVerify'; +import nodesAdd from './nodesAdd'; +import nodesClaim from './nodesClaim'; +import nodesFind from './nodesFind'; +import nodesPing from './nodesPing'; +import notificationsClear from './notificationsClear'; +import notificationsRead from './notificationsRead'; +import notificationsSend from './notificationsSend'; +import vaultsClone from './vaultsClone'; +import vaultsCreate from './vaultsCreate'; +import vaultsDelete from './vaultsDelete'; +import vaultsList from './vaultsList'; +import vaultsLog from './vaultsLog'; +import vaultsPermissions from './vaultsPermissions'; +import vaultsPermissionsSet from './vaultsPermissionsSet'; +import vaultsPermissionsUnset from './vaultsPermissionsUnset'; +import vaultsPull from './vaultsPull'; +import vaultsRename from './vaultsRename'; +import vaultsScan from './vaultsScan'; +import vaultsVersion from './vaultsVersion'; +import vaultsSecretsDelete from './vaultsSecretsDelete'; +import vaultsSecretsEdit from './vaultsSecretsEdit'; +import vaultsSecretsGet from './vaultsSecretsGet'; +import vaultsSecretsList from './vaultsSecretsList'; +import vaultsSecretsMkdir from './vaultsSecretsMkdir'; +import vaultsSecretsNew from './vaultsSecretsNew'; +import vaultsSecretsNewDir from './vaultsSecretsNewDir'; +import vaultsSecretsRename from './vaultsSecretsRename'; +import vaultsSecretsStat from 
/**
 * Creates the client service implementation.
 *
 * Builds a shared dependency container from the injected subsystems, creates
 * the session authenticator once, and binds every client RPC handler factory
 * to that container, returning the handler map required by the generated
 * `IClientServiceServer` interface.
 */
function createService({
  keyManager,
  sessionManager,
  logger = new Logger(createService.name),
  fs = require('fs'),
  ...containerRest
}: {
  pkAgent: PolykeyAgent;
  keyManager: KeyManager;
  vaultManager: VaultManager;
  nodeManager: NodeManager;
  identitiesManager: IdentitiesManager;
  gestaltGraph: GestaltGraph;
  sessionManager: SessionManager;
  notificationsManager: NotificationsManager;
  discovery: Discovery;
  sigchain: Sigchain;
  grpcServerClient: GRPCServer;
  grpcServerAgent: GRPCServer;
  fwdProxy: ForwardProxy;
  revProxy: ReverseProxy;
  logger?: Logger;
  fs?: FileSystem;
}) {
  // The authenticator is constructed once and shared by every handler
  const authenticate = clientUtils.authenticator(sessionManager, keyManager);
  // Each handler destructures only the dependencies it needs from this
  // container; the rest are carried along untouched
  const container = {
    ...containerRest,
    keyManager,
    sessionManager,
    logger,
    fs,
    authenticate,
  };
  const service: IClientServiceServer = {
    agentLockAll: agentLockAll(container),
    agentStatus: agentStatus(container),
    agentStop: agentStop(container),
    agentUnlock: agentUnlock(container),
    gestaltsActionsGetByIdentity: gestaltsActionsGetByIdentity(container),
    gestaltsActionsGetByNode: gestaltsActionsGetByNode(container),
    gestaltsActionsSetByIdentity: gestaltsActionsSetByIdentity(container),
    gestaltsActionsSetByNode: gestaltsActionsSetByNode(container),
    gestaltsActionsUnsetByIdentity: gestaltsActionsUnsetByIdentity(container),
    gestaltsActionsUnsetByNode: gestaltsActionsUnsetByNode(container),
    gestaltsDiscoveryByIdentity: gestaltsDiscoveryByIdentity(container),
    gestaltsDiscoveryByNode: gestaltsDiscoveryByNode(container),
    gestaltsGestaltGetByIdentity: gestaltsGestaltGetByIdentity(container),
    gestaltsGestaltGetByNode: gestaltsGestaltGetByNode(container),
    gestaltsGestaltList: gestaltsGestaltList(container),
    identitiesAuthenticate: identitiesAuthenticate(container),
    identitiesClaim: identitiesClaim(container),
    identitiesInfoGet: identitiesInfoGet(container),
    identitiesInfoGetConnected: identitiesInfoGetConnected(container),
    identitiesProvidersList: identitiesProvidersList(container),
    identitiesTokenDelete: identitiesTokenDelete(container),
    identitiesTokenGet: identitiesTokenGet(container),
    identitiesTokenPut: identitiesTokenPut(container),
    keysCertsChainGet: keysCertsChainGet(container),
    keysCertsGet: keysCertsGet(container),
    keysDecrypt: keysDecrypt(container),
    keysEncrypt: keysEncrypt(container),
    keysKeyPairRenew: keysKeyPairRenew(container),
    keysKeyPairReset: keysKeyPairReset(container),
    keysKeyPairRoot: keysKeyPairRoot(container),
    keysPasswordChange: keysPasswordChange(container),
    keysSign: keysSign(container),
    keysVerify: keysVerify(container),
    nodesAdd: nodesAdd(container),
    nodesClaim: nodesClaim(container),
    nodesFind: nodesFind(container),
    nodesPing: nodesPing(container),
    notificationsClear: notificationsClear(container),
    notificationsRead: notificationsRead(container),
    notificationsSend: notificationsSend(container),
    vaultsClone: vaultsClone(container),
    vaultsCreate: vaultsCreate(container),
    vaultsDelete: vaultsDelete(container),
    vaultsList: vaultsList(container),
    vaultsLog: vaultsLog(container),
    vaultsPermissions: vaultsPermissions(container),
    vaultsPermissionsSet: vaultsPermissionsSet(container),
    vaultsPermissionsUnset: vaultsPermissionsUnset(container),
    vaultsPull: vaultsPull(container),
    vaultsRename: vaultsRename(container),
    vaultsScan: vaultsScan(container),
    vaultsVersion: vaultsVersion(container),
    vaultsSecretsDelete: vaultsSecretsDelete(container),
    vaultsSecretsEdit: vaultsSecretsEdit(container),
    vaultsSecretsGet: vaultsSecretsGet(container),
    vaultsSecretsList: vaultsSecretsList(container),
    vaultsSecretsMkdir: vaultsSecretsMkdir(container),
    vaultsSecretsNew: vaultsSecretsNew(container),
    vaultsSecretsNewDir: vaultsSecretsNewDir(container),
    vaultsSecretsRename: vaultsSecretsRename(container),
    vaultsSecretsStat: vaultsSecretsStat(container),
  };
  return service;
}

export default createService;

export { ClientServiceService };
vaultsSecretsRename: vaultsSecretsRename(container), + vaultsSecretsStat: vaultsSecretsStat(container), + }; + return service; +} + +export default createService; + +export { ClientServiceService }; diff --git a/src/client/service/keysCertsChainGet.ts b/src/client/service/keysCertsChainGet.ts new file mode 100644 index 000000000..830381136 --- /dev/null +++ b/src/client/service/keysCertsChainGet.ts @@ -0,0 +1,39 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysCertsChainGet({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const certs: Array = await keyManager.getRootCertChainPems(); + let certMessage: keysPB.Certificate; + for (const cert of certs) { + certMessage = new keysPB.Certificate(); + certMessage.setCert(cert); + await genWritable.next(certMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default keysCertsChainGet; diff --git a/src/client/service/keysCertsGet.ts b/src/client/service/keysCertsGet.ts new file mode 100644 index 000000000..d70bff9b4 --- /dev/null +++ b/src/client/service/keysCertsGet.ts @@ -0,0 +1,34 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from 
'../../proto/js/polykey/v1/keys/keys_pb'; + +function keysCertsGet({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.Certificate(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const cert = keyManager.getRootCertPem(); + response.setCert(cert); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysCertsGet; diff --git a/src/client/service/keysDecrypt.ts b/src/client/service/keysDecrypt.ts new file mode 100644 index 000000000..2e5e601ee --- /dev/null +++ b/src/client/service/keysDecrypt.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysDecrypt({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.Crypto(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const data = await keyManager.decryptWithRootKeyPair( + Buffer.from(call.request.getData(), 'binary'), + ); + response.setData(data.toString('binary')); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysDecrypt; diff --git a/src/client/service/keysEncrypt.ts b/src/client/service/keysEncrypt.ts new file mode 100644 index 000000000..c092458b5 --- /dev/null +++ b/src/client/service/keysEncrypt.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; 
+import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysEncrypt({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.Crypto(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const data = await keyManager.encryptWithRootKeyPair( + Buffer.from(call.request.getData(), 'binary'), + ); + response.setData(data.toString('binary')); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysEncrypt; diff --git a/src/client/service/keysKeyPairRenew.ts b/src/client/service/keysKeyPairRenew.ts new file mode 100644 index 000000000..ba32b7b2b --- /dev/null +++ b/src/client/service/keysKeyPairRenew.ts @@ -0,0 +1,58 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type { NodeManager } from '../../nodes'; +import type { GRPCServer } from '../../grpc'; +import type { ForwardProxy, ReverseProxy } from '../../network'; +import type { TLSConfig } from '../../network/types'; +import type * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function keysKeyPairRenew({ + keyManager, + nodeManager, + fwdProxy, + revProxy, + grpcServerClient, + authenticate, +}: { + keyManager: KeyManager; + nodeManager: NodeManager; + fwdProxy: ForwardProxy; + revProxy: ReverseProxy; + grpcServerClient: GRPCServer; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: 
grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + // Lock the nodeManager - because we need to do a database refresh too + await nodeManager.transaction(async (nodeManager) => { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + await keyManager.renewRootKeyPair(call.request.getName()); + // Reset the TLS config with new keypair + certificate + const tlsConfig: TLSConfig = { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }; + fwdProxy.setTLSConfig(tlsConfig); + revProxy.setTLSConfig(tlsConfig); + grpcServerClient.setTLSConfig(tlsConfig); + // Finally, refresh the node buckets + await nodeManager.refreshBuckets(); + }); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysKeyPairRenew; diff --git a/src/client/service/keysKeyPairReset.ts b/src/client/service/keysKeyPairReset.ts new file mode 100644 index 000000000..fef7a2b7a --- /dev/null +++ b/src/client/service/keysKeyPairReset.ts @@ -0,0 +1,58 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type { NodeManager } from '../../nodes'; +import type { GRPCServer } from '../../grpc'; +import type { ForwardProxy, ReverseProxy } from '../../network'; +import type { TLSConfig } from '../../network/types'; +import type * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function keysKeyPairReset({ + keyManager, + nodeManager, + fwdProxy, + revProxy, + grpcServerClient, + authenticate, +}: { + keyManager: KeyManager; + nodeManager: NodeManager; + fwdProxy: ForwardProxy; + revProxy: ReverseProxy; + grpcServerClient: GRPCServer; + authenticate: Authenticate; +}) 
{ + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + // Lock the nodeManager - because we need to do a database refresh too + await nodeManager.transaction(async (nodeManager) => { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + await keyManager.resetRootKeyPair(call.request.getName()); + // Reset the TLS config with new keypair + certificate + const tlsConfig: TLSConfig = { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }; + fwdProxy.setTLSConfig(tlsConfig); + revProxy.setTLSConfig(tlsConfig); + grpcServerClient.setTLSConfig(tlsConfig); + // Finally, refresh the node buckets + await nodeManager.refreshBuckets(); + }); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysKeyPairReset; diff --git a/src/client/service/keysKeyPairRoot.ts b/src/client/service/keysKeyPairRoot.ts new file mode 100644 index 000000000..793c08e3c --- /dev/null +++ b/src/client/service/keysKeyPairRoot.ts @@ -0,0 +1,35 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysKeyPairRoot({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.KeyPair(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const keyPair = keyManager.getRootKeyPairPem(); + response.setPublic(keyPair.publicKey); + 
response.setPrivate(keyPair.privateKey); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysKeyPairRoot; diff --git a/src/client/service/keysPasswordChange.ts b/src/client/service/keysPasswordChange.ts new file mode 100644 index 000000000..80b128d3e --- /dev/null +++ b/src/client/service/keysPasswordChange.ts @@ -0,0 +1,34 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type * as sessionsPB from '../../proto/js/polykey/v1/sessions/sessions_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function keysPasswordChange({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + await keyManager.changePassword(call.request.getPassword()); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysPasswordChange; diff --git a/src/client/service/keysSign.ts b/src/client/service/keysSign.ts new file mode 100644 index 000000000..b48702e5a --- /dev/null +++ b/src/client/service/keysSign.ts @@ -0,0 +1,36 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import { utils as grpcUtils } from '../../grpc'; +import * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; + +function keysSign({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: 
grpc.sendUnaryData, + ): Promise => { + const response = new keysPB.Crypto(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const signature = await keyManager.signWithRootKeyPair( + Buffer.from(call.request.getData(), 'binary'), + ); + response.setSignature(signature.toString('binary')); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysSign; diff --git a/src/client/service/keysVerify.ts b/src/client/service/keysVerify.ts new file mode 100644 index 000000000..007ab1681 --- /dev/null +++ b/src/client/service/keysVerify.ts @@ -0,0 +1,37 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { KeyManager } from '../../keys'; +import type * as keysPB from '../../proto/js/polykey/v1/keys/keys_pb'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function keysVerify({ + keyManager, + authenticate, +}: { + keyManager: KeyManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const status = await keyManager.verifyWithRootKeyPair( + Buffer.from(call.request.getData(), 'binary'), + Buffer.from(call.request.getSignature(), 'binary'), + ); + response.setSuccess(status); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default keysVerify; diff --git a/src/client/service/nodesAdd.ts b/src/client/service/nodesAdd.ts new file mode 100644 index 000000000..4560c0d9a --- /dev/null +++ b/src/client/service/nodesAdd.ts @@ -0,0 +1,58 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } 
from '../types'; +import type { NodeManager } from '../../nodes'; +import type { NodeAddress } from '../../nodes/types'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as nodesUtils, errors as nodesErrors } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as networkUtils } from '../../network'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +/** + * Adds a node ID -> node address mapping into the buckets database. + * This is an unrestricted add: no validity checks are made for the correctness + * of the passed ID or host/port. + */ +function nodesAdd({ + nodeManager, + authenticate, +}: { + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Validate the passed node ID and host + const validNodeId = nodesUtils.isNodeId(call.request.getNodeId()); + if (!validNodeId) { + throw new nodesErrors.ErrorInvalidNodeId(); + } + const validHost = networkUtils.isValidHost( + call.request.getAddress()!.getHost(), + ); + if (!validHost) { + throw new nodesErrors.ErrorInvalidHost(); + } + await nodeManager.setNode( + nodesUtils.makeNodeId(call.request.getNodeId()), + { + host: call.request.getAddress()!.getHost(), + port: call.request.getAddress()!.getPort(), + } as NodeAddress, + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default nodesAdd; diff --git a/src/client/service/nodesClaim.ts b/src/client/service/nodesClaim.ts new file mode 100644 index 000000000..7c13ad584 --- /dev/null +++ b/src/client/service/nodesClaim.ts @@ -0,0 +1,59 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import 
type { NodeManager } from '../../nodes'; +import type { NotificationData } from '../../notifications/types'; +import type { NotificationsManager } from '../../notifications'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +/** + * Checks whether there is an existing Gestalt Invitation from the other node. + * If not, send an invitation, if so, create a cryptolink claim between the + * other node and host node. + */ +function nodesClaim({ + nodeManager, + notificationsManager, + authenticate, +}: { + nodeManager: NodeManager; + notificationsManager: NotificationsManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const remoteNodeId = nodesUtils.makeNodeId(call.request.getNodeId()); + const gestaltInvite = await notificationsManager.findGestaltInvite( + remoteNodeId, + ); + // Check first whether there is an existing gestalt invite from the remote node + // or if we want to force an invitation rather than a claim + if (gestaltInvite === undefined || call.request.getForceInvite()) { + const data = { + type: 'GestaltInvite', + } as NotificationData; + await notificationsManager.sendNotification(remoteNodeId, data); + response.setSuccess(false); + } else { + // There is an existing invitation, and we want to claim the node + await nodeManager.claimNode(remoteNodeId); + response.setSuccess(true); + } + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default nodesClaim; diff --git a/src/client/service/nodesFind.ts b/src/client/service/nodesFind.ts new file mode 
100644 index 000000000..070a904f8 --- /dev/null +++ b/src/client/service/nodesFind.ts @@ -0,0 +1,44 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; + +/** + * Attempts to get the node address of a provided node ID (by contacting + * keynodes in the wider Polykey network). + * @throws ErrorNodeGraphNodeNotFound if node address cannot be found + */ +function nodesFind({ + nodeManager, + authenticate, +}: { + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new nodesPB.NodeAddress(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const nodeId = nodesUtils.makeNodeId(call.request.getNodeId()); + const address = await nodeManager.findNode(nodeId); + response + .setNodeId(nodeId) + .setAddress( + new nodesPB.Address().setHost(address.host).setPort(address.port), + ); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default nodesFind; diff --git a/src/client/service/nodesPing.ts b/src/client/service/nodesPing.ts new file mode 100644 index 000000000..e4da23b73 --- /dev/null +++ b/src/client/service/nodesPing.ts @@ -0,0 +1,40 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NodeManager } from '../../nodes'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +/** + * Checks if a remote node is online. 
+ */ +function nodesPing({ + nodeManager, + authenticate, +}: { + nodeManager: NodeManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const status = await nodeManager.pingNode( + nodesUtils.makeNodeId(call.request.getNodeId()), + ); + response.setSuccess(status); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default nodesPing; diff --git a/src/client/service/notificationsClear.ts b/src/client/service/notificationsClear.ts new file mode 100644 index 000000000..adb139294 --- /dev/null +++ b/src/client/service/notificationsClear.ts @@ -0,0 +1,33 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NotificationsManager } from '../../notifications'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function notificationsClear({ + notificationsManager, + authenticate, +}: { + notificationsManager: NotificationsManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + await notificationsManager.clearNotifications(); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default notificationsClear; diff --git a/src/client/service/notificationsRead.ts b/src/client/service/notificationsRead.ts new file mode 100644 index 000000000..953e1e9a1 --- /dev/null +++ b/src/client/service/notificationsRead.ts @@ -0,0 +1,73 @@ +import type * as 
grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NotificationsManager } from '../../notifications'; +import { utils as grpcUtils } from '../../grpc'; +import * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; + +function notificationsRead({ + notificationsManager, + authenticate, +}: { + notificationsManager: NotificationsManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new notificationsPB.List(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const unread = call.request.getUnread(); + const order = call.request.getOrder() as 'newest' | 'oldest'; + const numberField = call.request.getNumber(); + let number: number | 'all'; + if (numberField === 'all') { + number = numberField; + } else { + number = parseInt(numberField); + } + const notifications = await notificationsManager.readNotifications({ + unread, + number, + order, + }); + const notifMessages: Array = []; + for (const notif of notifications) { + const notificationsMessage = new notificationsPB.Notification(); + switch (notif.data.type) { + case 'General': { + const generalMessage = new notificationsPB.General(); + generalMessage.setMessage(notif.data.message); + notificationsMessage.setGeneral(generalMessage); + break; + } + case 'GestaltInvite': { + notificationsMessage.setGestaltInvite('GestaltInvite'); + break; + } + case 'VaultShare': { + const vaultShareMessage = new notificationsPB.Share(); + vaultShareMessage.setVaultId(notif.data.vaultId); + vaultShareMessage.setVaultName(notif.data.vaultName); + vaultShareMessage.setActionsList(Object.keys(notif.data.actions)); + notificationsMessage.setVaultShare(vaultShareMessage); + break; + } + } + notificationsMessage.setSenderId(notif.senderId); + notificationsMessage.setIsRead(notif.isRead); + 
notifMessages.push(notificationsMessage); + } + response.setNotificationList(notifMessages); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default notificationsRead; diff --git a/src/client/service/notificationsSend.ts b/src/client/service/notificationsSend.ts new file mode 100644 index 000000000..9992f7e73 --- /dev/null +++ b/src/client/service/notificationsSend.ts @@ -0,0 +1,42 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import type { NotificationsManager } from '../../notifications'; +import type * as notificationsPB from '../../proto/js/polykey/v1/notifications/notifications_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as nodesUtils } from '../../nodes'; +import { utils as notificationsUtils } from '../../notifications'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function notificationsSend({ + notificationsManager, + authenticate, +}: { + notificationsManager: NotificationsManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.EmptyMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const receivingId = nodesUtils.makeNodeId(call.request.getReceiverId()); + const data = { + type: 'General', + message: call.request.getData()?.getMessage(), + }; + const validatedData = + notificationsUtils.validateGeneralNotification(data); + await notificationsManager.sendNotification(receivingId, validatedData); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default notificationsSend; diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts new file mode 100644 index 000000000..2a5e579d7 --- /dev/null +++ 
b/src/client/service/vaultsClone.ts @@ -0,0 +1,45 @@ +import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function vaultsClone({ authenticate }: { authenticate: Authenticate }) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Vault id + // const vaultId = parseVaultInput(vaultMessage, vaultManager); + // Node id + // const id = makeNodeId(nodeMessage.getNodeId()); + + throw Error('Not implemented'); + // FIXME, not fully implemented + // await vaultManager.cloneVault(vaultId, id); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsClone; diff --git a/src/client/service/vaultsCreate.ts b/src/client/service/vaultsCreate.ts new file mode 100644 index 000000000..6539c7083 --- /dev/null +++ b/src/client/service/vaultsCreate.ts @@ -0,0 +1,39 @@ +import type { Authenticate } from '../types'; +import type { Vault, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type * as grpc from '@grpc/grpc-js'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as vaultsUtils } from '../../vaults'; +import * as vaultsPB from 
'../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsCreate({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new vaultsPB.Vault(); + let vault: Vault; + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + vault = await vaultManager.createVault( + call.request.getNameOrId() as VaultName, + ); + response.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsCreate; diff --git a/src/client/service/vaultsDelete.ts b/src/client/service/vaultsDelete.ts new file mode 100644 index 000000000..34a6c692f --- /dev/null +++ b/src/client/service/vaultsDelete.ts @@ -0,0 +1,48 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type * as grpc from '@grpc/grpc-js'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { errors as vaultsErrors } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsDelete({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const vaultMessage = call.request; + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + await vaultManager.destroyVault(vaultId); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsDelete; diff --git a/src/client/service/vaultsList.ts b/src/client/service/vaultsList.ts new file mode 100644 index 000000000..237aaedf1 --- /dev/null +++ b/src/client/service/vaultsList.ts @@ -0,0 +1,43 @@ +import type { Authenticate } from '../types'; +import type { VaultManager } from '../../vaults'; +import type * as grpc from '@grpc/grpc-js'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as vaultsUtils } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsList({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + // Call.on('error', (e) => console.error(e)); + // call.on('close', () => console.log('Got close')); + // call.on('finish', () => console.log('Got finish')); + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + 
call.sendMetadata(metadata); + + const vaults = await vaultManager.listVaults(); + for await (const [vaultName, vaultId] of vaults) { + const vaultListMessage = new vaultsPB.List(); + vaultListMessage.setVaultName(vaultName); + vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); + await genWritable.next(((_) => vaultListMessage)()); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsList; diff --git a/src/client/service/vaultsLog.ts b/src/client/service/vaultsLog.ts new file mode 100644 index 000000000..6678cd52b --- /dev/null +++ b/src/client/service/vaultsLog.ts @@ -0,0 +1,66 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsLog({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Getting the vault. 
+ const vaultsLogMessage = call.request; + const vaultMessage = vaultsLogMessage.getVault(); + if (vaultMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + + // Getting the log + const depth = vaultsLogMessage.getLogDepth(); + let commitId: string | undefined = vaultsLogMessage.getCommitId(); + commitId = commitId ? commitId : undefined; + const log = await vault.log(depth, commitId); + + const vaultsLogEntryMessage = new vaultsPB.LogEntry(); + for (const entry of log) { + vaultsLogEntryMessage.setOid(entry.oid); + vaultsLogEntryMessage.setCommitter(entry.committer); + vaultsLogEntryMessage.setTimeStamp(entry.timeStamp); + vaultsLogEntryMessage.setMessage(entry.message); + await genWritable.next(vaultsLogEntryMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsLog; diff --git a/src/client/service/vaultsPermissions.ts b/src/client/service/vaultsPermissions.ts new file mode 100644 index 000000000..2a43ade68 --- /dev/null +++ b/src/client/service/vaultsPermissions.ts @@ -0,0 +1,53 @@ +import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; + +function vaultsPermissions({ authenticate }: { authenticate: Authenticate }) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const nodeMessage = call.request.getNode(); + if 
(nodeMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + // Const node = nodeMessage.getNodeId(); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + // Const id = await parseVaultInput(vaultMessage, vaultManager); + // let perms: Record; + throw Error('Not implemented'); + // FIXME + // if (isNodeId(node)) { + // Perms = await vaultManager.getVaultPermissions(id, node); + // } else { + // Perms = await vaultManager.getVaultPermissions(id); + // } + // const permissionMessage = new vaultsPB.Permission(); + // For (const nodeId in perms) { + // permissionMessage.setNodeId(nodeId); + // if (perms[nodeId]['pull'] !== undefined) { + // permissionMessage.setAction('pull'); + // } + // await genWritable.next(permissionMessage); + // } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsPermissions; diff --git a/src/client/service/vaultsPermissionsSet.ts b/src/client/service/vaultsPermissionsSet.ts new file mode 100644 index 000000000..adb31381d --- /dev/null +++ b/src/client/service/vaultsPermissionsSet.ts @@ -0,0 +1,45 @@ +import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function vaultsPermissionsSet({ + authenticate, +}: { + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Const node = 
makeNodeId(nodeMessage.getNodeId()); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Const id = await parseVaultInput(vaultMessage, vaultManager); + throw Error('Not Implemented'); + // Await vaultManager.setVaultPermissions(node, id); // FIXME + const response = new utilsPB.StatusMessage(); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsPermissionsSet; diff --git a/src/client/service/vaultsPermissionsUnset.ts b/src/client/service/vaultsPermissionsUnset.ts new file mode 100644 index 000000000..4840176d3 --- /dev/null +++ b/src/client/service/vaultsPermissionsUnset.ts @@ -0,0 +1,45 @@ +import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function vaultsPermissionsUnset({ + authenticate, +}: { + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Const node = makeNodeId(nodeMessage.getNodeId()); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Const id = await parseVaultInput(vaultMessage, vaultManager); + throw Error('Not implemented'); + // Await vaultManager.unsetVaultPermissions(node, id); // FIXME + const response = new utilsPB.StatusMessage(); + response.setSuccess(true); + callback(null, 
response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsPermissionsUnset; diff --git a/src/client/service/vaultsPull.ts b/src/client/service/vaultsPull.ts new file mode 100644 index 000000000..1e61964f9 --- /dev/null +++ b/src/client/service/vaultsPull.ts @@ -0,0 +1,43 @@ +import type { Authenticate } from '../types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function vaultsPull({ authenticate }: { authenticate: Authenticate }) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nodeMessage = call.request.getNode(); + if (nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Vault name + // const vaultId = await parseVaultInput(vaultMessage, vaultManager); + // Node id + // const id = makeNodeId(nodeMessage.getNodeId()); + + // Await vaultManager.pullVault(vaultId, id); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsPull; diff --git a/src/client/service/vaultsRename.ts b/src/client/service/vaultsRename.ts new file mode 100644 index 000000000..3c2bfd078 --- /dev/null +++ b/src/client/service/vaultsRename.ts @@ -0,0 +1,52 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc 
from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsRename({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new vaultsPB.Vault(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const newName = call.request.getNewName() as VaultName; + await vaultManager.renameVault(vaultId, newName); + response.setNameOrId(vaultsUtils.makeVaultIdPretty(vaultId)); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsRename; diff --git a/src/client/service/vaultsScan.ts b/src/client/service/vaultsScan.ts new file mode 100644 index 000000000..98fba456e --- /dev/null +++ b/src/client/service/vaultsScan.ts @@ -0,0 +1,42 @@ +import type { Authenticate } from '../types'; +import type { VaultManager } from '../../vaults'; +import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import type * as grpc from '@grpc/grpc-js'; +import { utils as grpcUtils } from '../../grpc'; +import { utils as vaultsUtils 
} from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsScan({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + // Const nodeId = makeNodeId(call.request.getNodeId()); + + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaults = await vaultManager.listVaults(); + vaults.forEach(async (vaultId, vaultName) => { + const vaultListMessage = new vaultsPB.List(); + vaultListMessage.setVaultName(vaultName); + vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); + await genWritable.next(vaultListMessage); + }); + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsScan; diff --git a/src/client/service/vaultsSecretsDelete.ts b/src/client/service/vaultsSecretsDelete.ts new file mode 100644 index 000000000..4a3a58fd1 --- /dev/null +++ b/src/client/service/vaultsSecretsDelete.ts @@ -0,0 +1,55 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsDelete({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secretName = call.request.getSecretName(); + await vaultOps.deleteSecret(vault, secretName); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsDelete; diff --git a/src/client/service/vaultsSecretsEdit.ts b/src/client/service/vaultsSecretsEdit.ts new file mode 100644 index 000000000..830232677 --- /dev/null +++ b/src/client/service/vaultsSecretsEdit.ts @@ -0,0 +1,61 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + 
? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsEdit({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const secretMessage = call.request; + if (secretMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const vaultMessage = secretMessage.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secretName = secretMessage.getSecretName(); + const secretContent = Buffer.from(secretMessage.getSecretContent()); + await vaultOps.updateSecret(vault, secretName, secretContent); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsEdit; diff --git a/src/client/service/vaultsSecretsGet.ts b/src/client/service/vaultsSecretsGet.ts new file mode 100644 index 000000000..f3c6b2094 --- /dev/null +++ b/src/client/service/vaultsSecretsGet.ts @@ -0,0 +1,55 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { vaultOps, errors as 
vaultsErrors } from '../../vaults'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsGet({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new secretsPB.Secret(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secretName = call.request.getSecretName(); + const secretContent = await vaultOps.getSecret(vault, secretName); + response.setSecretContent(secretContent); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsGet; diff --git a/src/client/service/vaultsSecretsList.ts b/src/client/service/vaultsSecretsList.ts new file mode 100644 index 000000000..778947b8d --- /dev/null +++ b/src/client/service/vaultsSecretsList.ts @@ -0,0 +1,53 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type * as grpc from '@grpc/grpc-js'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from 
'../../grpc'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsList({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const vaultMessage = call.request; + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secrets = await vaultOps.listSecrets(vault); + let secretMessage: secretsPB.Secret; + for (const secret of secrets) { + secretMessage = new secretsPB.Secret(); + secretMessage.setSecretName(secret); + await genWritable.next(secretMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsSecretsList; diff --git a/src/client/service/vaultsSecretsMkdir.ts b/src/client/service/vaultsSecretsMkdir.ts new file mode 100644 index 000000000..594640a66 --- /dev/null +++ b/src/client/service/vaultsSecretsMkdir.ts @@ -0,0 +1,57 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from 
'../../grpc'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsMkdir({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMkdirMessge = call.request; + const vaultMessage = vaultMkdirMessge.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { + recursive: vaultMkdirMessge.getRecursive(), + }); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsMkdir; diff --git a/src/client/service/vaultsSecretsNew.ts b/src/client/service/vaultsSecretsNew.ts new file mode 100644 index 000000000..0352e0241 --- /dev/null +++ b/src/client/service/vaultsSecretsNew.ts @@ -0,0 +1,56 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } 
from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsNew({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secret = call.request.getSecretName(); + const content = Buffer.from(call.request.getSecretContent()); + await vaultOps.addSecret(vault, secret, content); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsNew; diff --git a/src/client/service/vaultsSecretsNewDir.ts b/src/client/service/vaultsSecretsNewDir.ts new file mode 100644 index 000000000..9b3804ddf --- /dev/null +++ b/src/client/service/vaultsSecretsNewDir.ts @@ -0,0 +1,58 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import type { FileSystem } from '../../types'; +import type * as secretsPB from 
'../../proto/js/polykey/v1/secrets/secrets_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsNewDir({ + vaultManager, + authenticate, + fs, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + fs: FileSystem; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const secretsPath = call.request.getSecretDirectory(); + await vaultOps.addSecretDirectory(vault, secretsPath, fs); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsNewDir; diff --git a/src/client/service/vaultsSecretsRename.ts b/src/client/service/vaultsSecretsRename.ts new file mode 100644 index 000000000..dc3f98f2b --- /dev/null +++ b/src/client/service/vaultsSecretsRename.ts @@ -0,0 +1,60 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from 
'../../vaults'; +import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? (idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsSecretsRename({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new utilsPB.StatusMessage(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const secretMessage = call.request.getOldSecret(); + if (!secretMessage) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const vaultMessage = secretMessage.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + const vault = await vaultManager.openVault(vaultId); + const oldSecret = secretMessage.getSecretName(); + const newSecret = call.request.getNewName(); + await vaultOps.renameSecret(vault, oldSecret, newSecret); + response.setSuccess(true); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsRename; diff --git a/src/client/service/vaultsSecretsStat.ts b/src/client/service/vaultsSecretsStat.ts new file mode 100644 index 000000000..20b7308f3 --- /dev/null +++ 
b/src/client/service/vaultsSecretsStat.ts @@ -0,0 +1,32 @@ +import type * as grpc from '@grpc/grpc-js'; +import type { Authenticate } from '../types'; +import { utils as grpcUtils } from '../../grpc'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function vaultsSecretsStat({ authenticate }: { authenticate: Authenticate }) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new vaultsPB.Stat(); + try { + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + // Const vaultMessage = call.request; + // Const id = await parseVaultInput(vaultMessage, vaultManager); + // const vault = await vaultManager.openVault(id); + // FIXME, reimplement this. + throw Error('Not Implemented'); + // Const stats = await vaultManager.vaultStats(id); + // response.setStats(JSON.stringify(stats));); + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsSecretsStat; diff --git a/src/client/service/vaultsVersion.ts b/src/client/service/vaultsVersion.ts new file mode 100644 index 000000000..c05f0ac1f --- /dev/null +++ b/src/client/service/vaultsVersion.ts @@ -0,0 +1,70 @@ +import type { Authenticate } from '../types'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type { VaultManager } from '../../vaults'; +import * as grpc from '@grpc/grpc-js'; +import { utils as idUtils } from '@matrixai/id'; +import { utils as grpcUtils } from '../../grpc'; +import { errors as vaultsErrors } from '../../vaults'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; + +function decodeVaultId(input: string): VaultId | undefined { + return idUtils.fromMultibase(input) + ? 
(idUtils.fromMultibase(input) as VaultId) + : undefined; +} + +function vaultsVersion({ + vaultManager, + authenticate, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + const response = new vaultsPB.VersionResult(); + try { + // Checking session token + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + + const vaultsVersionMessage = call.request; + + // Getting vault ID + const vaultMessage = vaultsVersionMessage.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + if (!vaultId) vaultId = decodeVaultId(nameOrId); + if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + + // Doing the deed + const vault = await vaultManager.openVault(vaultId); + const latestOid = (await vault.log())[0].oid; + const versionId = vaultsVersionMessage.getVersionId(); + + await vault.version(versionId); + const currentVersionId = (await vault.log(0, versionId))[0]?.oid; + + // Checking if latest version ID. 
+ const isLatestVersion = latestOid === currentVersionId; + + // Creating message + response.setIsLatestVersion(isLatestVersion); + + // Sending message + callback(null, response); + return; + } catch (err) { + callback(grpcUtils.fromError(err), null); + return; + } + }; +} + +export default vaultsVersion; diff --git a/src/client/types.ts b/src/client/types.ts new file mode 100644 index 000000000..dc642800f --- /dev/null +++ b/src/client/types.ts @@ -0,0 +1,8 @@ +import type * as grpc from '@grpc/grpc-js'; + +type Authenticate = ( + metadataClient: grpc.Metadata, + metadataServer?: grpc.Metadata, +) => Promise; + +export type { Authenticate }; diff --git a/src/client/utils/utils.ts b/src/client/utils/utils.ts index 581d511e0..3e2021cf9 100644 --- a/src/client/utils/utils.ts +++ b/src/client/utils/utils.ts @@ -6,6 +6,7 @@ import type { import type { KeyManager } from '../../keys'; import type { Session, SessionManager } from '../../sessions'; import type { SessionToken } from '../../sessions/types'; +import type { Authenticate } from '../types'; import * as grpc from '@grpc/grpc-js'; import * as base64 from 'multiformats/bases/base64'; import * as clientErrors from '../errors'; @@ -49,11 +50,6 @@ function sessionInterceptor(session: Session): Interceptor { return interceptor; } -type Authenticate = ( - metadataClient: grpc.Metadata, - metadataServer?: grpc.Metadata, -) => Promise; - function authenticator( sessionManager: SessionManager, keyManager: KeyManager, @@ -143,5 +139,3 @@ export { encodeAuthFromSession, decodeAuthToSession, }; - -export type { Authenticate }; diff --git a/src/config.ts b/src/config.ts index 3f07c0ab7..ffcdf5418 100644 --- a/src/config.ts +++ b/src/config.ts @@ -56,7 +56,9 @@ const config = { defaults: { nodePath: getDefaultNodePath(), statusBase: 'status.json', + statusLockBase: 'status.lock', stateBase: 'state', + stateVersionBase: 'version', dbBase: 'db', keysBase: 'keys', vaultsBase: 'vaults', @@ -91,20 +93,10 @@ const config = { 
connConnectTime: 20000, connTimeoutTime: 20000, }, - // Note: this is not used by the `PolykeyAgent`, that is defaulting to `{}`. + // This is not used by the `PolykeyAgent` with defaults to `{}` network: { - mainnet: { - v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug: { - host: 'testnet.polykey.io', - port: 1314, - }, - }, - testnet: { - v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug: { - host: '127.0.0.3', - port: 1314, - }, - }, + mainnet: {}, + testnet: {}, }, }, }; diff --git a/src/errors.ts b/src/errors.ts index d29fe973a..dfa44bb1a 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -58,5 +58,6 @@ export * from './claims/errors'; export * from './sigchain/errors'; export * from './bootstrap/errors'; export * from './notifications/errors'; +export * from './schema/errors'; export * from './status/errors'; export * from './utils/errors'; diff --git a/src/grpc/GRPCServer.ts b/src/grpc/GRPCServer.ts index cb7b59b13..843cdab84 100644 --- a/src/grpc/GRPCServer.ts +++ b/src/grpc/GRPCServer.ts @@ -27,7 +27,7 @@ class GRPCServer { protected tlsConfig?: TLSConfig; protected _secured: boolean = false; - constructor({ logger }: { logger?: Logger }) { + constructor({ logger }: { logger?: Logger } = {}) { this.logger = logger ?? 
new Logger(this.constructor.name); } diff --git a/src/grpc/index.ts b/src/grpc/index.ts index 26d7451d0..16380a821 100644 --- a/src/grpc/index.ts +++ b/src/grpc/index.ts @@ -1,3 +1,9 @@ +/** + * Use this module when contacting Polykey + * If you use the upstream `@grpc/grpc-js`, it may give you mismatched dependencies + * For example the `Metadata` object has to be used when calling `PolykeyClient` + */ +export * as grpc from '@grpc/grpc-js'; export { default as GRPCServer } from './GRPCServer'; export { default as GRPCClient } from './GRPCClient'; export * as utils from './utils'; diff --git a/src/http/utils.ts b/src/http/utils.ts index 5308a92b4..a83e5dc21 100644 --- a/src/http/utils.ts +++ b/src/http/utils.ts @@ -58,8 +58,6 @@ function terminatingHttpServer( return terminating; } - // Why the fuck do we take this out of promise... - // Even tho it works... let resolveTerminating; let rejectTerminating; terminating = new Promise((resolve, reject) => { @@ -69,7 +67,7 @@ function terminatingHttpServer( // On new request. server.on('request', (incomingMessage, outgoingMessage) => { - // If this new request have not been responded. Close Connection. + // If this new request have not been responded. Close Connection. 
if (!outgoingMessage.headersSent) { outgoingMessage.setHeader('connection', 'close'); } diff --git a/src/network/Connection.ts b/src/network/Connection.ts index e901f7980..97aa44091 100644 --- a/src/network/Connection.ts +++ b/src/network/Connection.ts @@ -1,6 +1,5 @@ import type UTP from 'utp-native'; import type { Host, Port, Address, TLSConfig } from './types'; - import Logger from '@matrixai/logger'; import * as networkUtils from './utils'; import { promisify } from '../utils'; @@ -11,11 +10,27 @@ abstract class Connection { public readonly port: Port; public readonly address: Address; public readonly tlsConfig: Readonly; - public readonly timeoutTime: number; + /** + * Time used for keep-alive timeout + */ + public readonly keepAliveTimeoutTime: number; + /** + * Time used to gracefully wait for teardown + * Used for both UTP and client sockets in forward + * Used for both UTP and server sockets in reverse + */ + public readonly endTime: number; + /** + * Time used between each ping or pong message for hole-punching + */ + public readonly punchIntervalTime: number; + /** + * Time used between each ping or pong message for keep-alive + */ + public readonly keepAliveIntervalTime: number; protected logger: Logger; protected timeout: ReturnType; - protected _started: boolean = false; protected _composed: boolean = false; constructor({ @@ -23,14 +38,20 @@ abstract class Connection { host, port, tlsConfig, - timeoutTime = 20000, + keepAliveTimeoutTime = 20000, + endTime = 1000, + punchIntervalTime = 1000, + keepAliveIntervalTime = 1000, logger, }: { utpSocket: UTP; host: Host; port: Port; tlsConfig: TLSConfig; - timeoutTime?: number; + keepAliveTimeoutTime?: number; + endTime?: number; + punchIntervalTime?: number; + keepAliveIntervalTime?: number; logger?: Logger; }) { const address = networkUtils.buildAddress(host, port); @@ -43,11 +64,10 @@ abstract class Connection { this.port = port; this.tlsConfig = tlsConfig; this.address = address; - this.timeoutTime = 
timeoutTime; - } - - get started(): boolean { - return this._started; + this.keepAliveTimeoutTime = keepAliveTimeoutTime; + this.endTime = endTime; + this.punchIntervalTime = punchIntervalTime; + this.keepAliveIntervalTime = keepAliveIntervalTime; } get composed(): boolean { diff --git a/src/network/ConnectionForward.ts b/src/network/ConnectionForward.ts index 2ea6f8f62..66d2a6969 100644 --- a/src/network/ConnectionForward.ts +++ b/src/network/ConnectionForward.ts @@ -7,45 +7,105 @@ import type { NodeId } from '../nodes/types'; import type { AbstractConstructorParameters, Timer } from '../types'; import tls from 'tls'; +import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import Connection from './Connection'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; import { utils as keysUtils } from '../keys'; -import { promise } from '../utils'; +import { promise, timerStart, timerStop } from '../utils'; type ConnectionsForward = { ingress: Map; client: Map; }; +interface ConnectionForward extends StartStop {} +@StartStop() class ConnectionForward extends Connection { public readonly nodeId: NodeId; - public readonly pingIntervalTime: number; + public readonly endTime: number; protected connections: ConnectionsForward; protected pingInterval: ReturnType; protected utpConn: UTPConnection; protected tlsSocket: TLSSocket; + protected clientSocket?: Socket; protected clientHost: Host; protected clientPort: Port; protected clientAddress: Address; protected serverCertChain: Array; protected resolveReadyP: (value: void) => void; + protected handleMessage = async ( + data: Buffer, + remoteInfo: { address: string; port: number }, + ) => { + // Ignore messages not intended for this target + if (remoteInfo.address !== this.host || remoteInfo.port !== this.port) { + return; + } + let msg: NetworkMessage; + try { + msg = networkUtils.unserializeNetworkMessage(data); + } catch (e) { + return; + } + // Don't reset timeout until 
timeout is initialised + if (this.timeout != null) { + // Any message should reset the timeout + this.stopKeepAliveTimeout(); + this.startKeepAliveTimeout(); + } + if (msg.type === 'ping') { + this.resolveReadyP(); + // Respond with ready message + await this.send(networkUtils.pongBuffer); + } + }; + + protected handleError = async (e: Error) => { + this.logger.warn(`Forward Error: ${e.toString()}`); + await this.stop(); + }; + + /** + * Handles receiving `end` event for `this.tlsSocket` from reverse + * Handler is removed and not executed when `end` is initiated here + */ + protected handleEnd = async () => { + this.logger.debug('Receives tlsSocket ending'); + if (this.utpConn.destroyed) { + this.tlsSocket.destroy(); + this.logger.debug('Destroyed tlsSocket'); + } else { + this.logger.debug('Responds tlsSocket ending'); + this.tlsSocket.end(); + this.tlsSocket.destroy(); + this.logger.debug('Responded tlsSocket ending'); + } + await this.stop(); + }; + + /** + * Handles `close` event for `this.tlsSocket` + * Destroying `this.tlsSocket` triggers the close event + * If already stopped, then this does nothing + */ + protected handleClose = async () => { + await this.stop(); + }; + public constructor({ nodeId, connections, - pingIntervalTime = 1000, ...rest }: { nodeId: NodeId; connections: ConnectionsForward; - pingIntervalTime?: number; } & AbstractConstructorParameters[0]) { super(rest); this.nodeId = nodeId; this.connections = connections; - this.pingIntervalTime = pingIntervalTime; } public async start({ @@ -53,135 +113,152 @@ class ConnectionForward extends Connection { }: { timer?: Timer; } = {}): Promise { + this.logger.info('Starting Connection Forward'); + // Promise for ready + const { p: readyP, resolveP: resolveReadyP } = promise(); + // Promise for start errors + const { p: errorP, rejectP: rejectErrorP } = promise(); + // Promise for secure connection + const { p: secureConnectP, resolveP: resolveSecureConnectP } = + promise(); + this.resolveReadyP = 
resolveReadyP; + this.utpSocket.on('message', this.handleMessage); + const handleStartError = (e) => { + rejectErrorP(e); + }; + // Normal sockets defaults to `allowHalfOpen: false` + // But UTP defaults to `allowHalfOpen: true` + // Setting `allowHalfOpen: false` on UTP is buggy and cannot be used + this.utpConn = this.utpSocket.connect(this.port, this.host, { + allowHalfOpen: true, + }); + this.tlsSocket = tls.connect( + { + key: Buffer.from(this.tlsConfig.keyPrivatePem, 'ascii'), + cert: Buffer.from(this.tlsConfig.certChainPem, 'ascii'), + socket: this.utpConn, + rejectUnauthorized: false, + }, + () => { + resolveSecureConnectP(); + }, + ); + this.tlsSocket.once('error', handleStartError); + this.tlsSocket.on('end', this.handleEnd); + this.tlsSocket.on('close', this.handleClose); + let punchInterval; try { - if (this._started) { - return; - } - this.logger.info('Starting Connection Forward'); - this._started = true; - // Promise for ready - const { p: readyP, resolveP: resolveReadyP } = promise(); - // Promise for start errors - const { p: errorP, rejectP: rejectErrorP } = promise(); - // Promise for secure connection - const { p: secureConnectP, resolveP: resolveSecureConnectP } = - promise(); - this.resolveReadyP = resolveReadyP; - this.utpSocket.on('message', this.handleMessage); - const handleStartError = (e) => { - rejectErrorP(e); - }; - this.utpConn = this.utpSocket.connect(this.port, this.host); - this.tlsSocket = tls.connect( - { - key: Buffer.from(this.tlsConfig.keyPrivatePem, 'ascii'), - cert: Buffer.from(this.tlsConfig.certChainPem, 'ascii'), - socket: this.utpConn, - rejectUnauthorized: false, - }, - () => { - resolveSecureConnectP(); - }, - ); - this.tlsSocket.once('error', handleStartError); - this.tlsSocket.on('end', this.handleEnd); - this.tlsSocket.on('close', this.handleClose); - let punchInterval; - try { - // Send punch signal + // Send punch signal + await this.send(networkUtils.pingBuffer); + punchInterval = setInterval(async () => { await 
this.send(networkUtils.pingBuffer); - punchInterval = setInterval(async () => { - await this.send(networkUtils.pingBuffer); - }, 1000); - await Promise.race([ - Promise.all([readyP, secureConnectP]).then(() => {}), - errorP, - ...(timer != null ? [timer.timerP] : []), - ]); - } catch (e) { - await this.stop(); - throw new networkErrors.ErrorConnectionStart(e.message, { - code: e.code, - errno: e.errno, - syscall: e.syscall, - }); - } finally { - clearInterval(punchInterval); - } - if (timer?.timedOut) { - await this.stop(); - throw new networkErrors.ErrorConnectionStartTimeout(); + }, this.punchIntervalTime); + await Promise.race([ + Promise.all([readyP, secureConnectP]).then(() => {}), + errorP, + ...(timer != null ? [timer.timerP] : []), + ]); + } catch (e) { + // Clean up partial start + // TLSSocket isn't established yet, so it is destroyed + if (!this.tlsSocket.destroyed) { + this.tlsSocket.end(); + this.tlsSocket.destroy(); } - const serverCertChain = networkUtils.getCertificateChain(this.tlsSocket); - try { - networkUtils.verifyServerCertificateChain(this.nodeId, serverCertChain); - } catch (e) { - await this.stop(); - throw e; + this.utpSocket.off('message', this.handleMessage); + throw new networkErrors.ErrorConnectionStart(e.message, { + code: e.code, + errno: e.errno, + syscall: e.syscall, + }); + } finally { + clearInterval(punchInterval); + } + this.tlsSocket.on('error', this.handleError); + this.tlsSocket.off('error', handleStartError); + if (timer?.timedOut) { + // Clean up partial start + // TLSSocket isn't established yet, so it is destroyed + if (!this.tlsSocket.destroyed) { + this.tlsSocket.end(); + this.tlsSocket.destroy(); } - this.tlsSocket.off('error', handleStartError); - this.tlsSocket.on('error', this.handleError); - await this.startPingInterval(); - this.serverCertChain = serverCertChain; - this.connections.ingress.set(this.address, this); - this.startTimeout(); - this.logger.info('Started Connection Forward'); + 
this.utpSocket.off('message', this.handleMessage); + throw new networkErrors.ErrorConnectionStartTimeout(); + } + const serverCertChain = networkUtils.getCertificateChain(this.tlsSocket); + try { + networkUtils.verifyServerCertificateChain(this.nodeId, serverCertChain); } catch (e) { - this._started = false; + // Clean up partial start + this.utpSocket.off('message', this.handleMessage); + // TLSSocket is established, and is ended gracefully + this.logger.debug('Sends tlsSocket ending'); + // Graceful exit has its own end handler + this.tlsSocket.removeAllListeners('end'); + await this.endGracefully(this.tlsSocket, this.endTime); throw e; } + await this.startKeepAliveInterval(); + this.serverCertChain = serverCertChain; + this.connections.ingress.set(this.address, this); + this.startKeepAliveTimeout(); + this.logger.info('Started Connection Forward'); } public async stop(): Promise { - if (!this._started) { - return; - } this.logger.info('Stopping Connection Forward'); - this._started = false; this._composed = false; - this.stopTimeout(); - this.stopPingInterval(); + this.stopKeepAliveTimeout(); + this.stopKeepAliveInterval(); this.utpSocket.off('message', this.handleMessage); + const endPs: Array> = []; if (!this.tlsSocket.destroyed) { - this.tlsSocket.end(); - this.tlsSocket.destroy(); + this.logger.debug('Sends tlsSocket ending'); + this.tlsSocket.unpipe(); + // Graceful exit has its own end handler + this.tlsSocket.removeAllListeners('end'); + endPs.push(this.endGracefully(this.tlsSocket, this.endTime)); } + if (this.clientSocket != null && !this.clientSocket.destroyed) { + this.logger.debug('Sends clientSocket ending'); + this.clientSocket.unpipe(); + // Graceful exit has its own end handler + this.clientSocket.removeAllListeners('end'); + endPs.push(this.endGracefully(this.clientSocket, this.endTime)); + } + await Promise.all(endPs); this.connections.ingress.delete(this.address); this.connections.client.delete(this.clientAddress); this.logger.info('Stopped 
Connection Forward'); } + @ready(new networkErrors.ErrorConnectionNotRunning()) public compose(clientSocket: Socket): void { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } try { if (this._composed) { throw new networkErrors.ErrorConnectionComposed(); } + this._composed = true; + this.clientSocket = clientSocket; this.logger.info('Composing Connection Forward'); - this.tlsSocket.on('error', (e) => { - if (!clientSocket.destroyed) { - clientSocket.destroy(e); - } - }); - this.tlsSocket.on('close', () => { - clientSocket.destroy(); + clientSocket.on('error', async (e) => { + this.logger.warn(`Client Error: ${e.toString()}`); + await this.stop(); }); - clientSocket.on('end', () => { + clientSocket.on('end', async () => { + this.logger.debug('Receives clientSocket ending'); + this.logger.debug('Responds clientSocket ending'); clientSocket.end(); - }); - clientSocket.on('error', (e) => { - if (!this.tlsSocket.destroyed) { - this.tlsSocket.emit('error', e); - } clientSocket.destroy(); + this.logger.debug('Responded clientSocket ending'); + await this.stop(); }); - clientSocket.on('close', () => { - this.tlsSocket.destroy(); + clientSocket.on('close', async () => { + await this.stop(); }); - this.tlsSocket.pipe(clientSocket); - clientSocket.pipe(this.tlsSocket); + this.tlsSocket.pipe(clientSocket, { end: false }); + clientSocket.pipe(this.tlsSocket, { end: false }); const clientAddressInfo = clientSocket.address() as AddressInfo; this.clientHost = clientAddressInfo.address as Host; this.clientPort = clientAddressInfo.port as Port; @@ -190,7 +267,6 @@ class ConnectionForward extends Connection { this.clientPort, ); this.connections.client.set(this.clientAddress, this); - this._composed = true; this.logger.info('Composed Connection Forward'); } catch (e) { this._composed = false; @@ -212,89 +288,53 @@ class ConnectionForward extends Connection { return this.clientPort; } + @ready(new networkErrors.ErrorConnectionNotRunning()) public 
getServerCertificates(): Array { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } return this.serverCertChain.map((crt) => keysUtils.certCopy(crt)); } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getServerNodeIds(): Array { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } return this.serverCertChain.map((c) => networkUtils.certNodeId(c)); } - protected async startPingInterval(): Promise { + protected async startKeepAliveInterval(): Promise { await this.send(networkUtils.pingBuffer); this.pingInterval = setInterval(async () => { await this.send(networkUtils.pingBuffer); - }, this.pingIntervalTime); + }, this.keepAliveIntervalTime); } - protected stopPingInterval() { + protected stopKeepAliveInterval() { clearInterval(this.pingInterval); } - protected startTimeout() { + protected startKeepAliveTimeout() { this.timeout = setTimeout(() => { this.tlsSocket.emit('error', new networkErrors.ErrorConnectionTimeout()); - }, this.timeoutTime); + }, this.keepAliveTimeoutTime); } - protected stopTimeout() { + protected stopKeepAliveTimeout() { clearTimeout(this.timeout); } - protected handleMessage = async ( - data: Buffer, - remoteInfo: { address: string; port: number }, - ) => { - // Ignore messages not intended for this target - if (remoteInfo.address !== this.host || remoteInfo.port !== this.port) { - return; - } - let msg: NetworkMessage; - try { - msg = networkUtils.unserializeNetworkMessage(data); - } catch (e) { - return; - } - // Don't reset timeout until timeout is initialised - if (this.timeout != null) { - // Any message should reset the timeout - this.stopTimeout(); - this.startTimeout(); - } - if (msg.type === 'ping') { - this.resolveReadyP(); - // Respond with ready message - await this.send(networkUtils.pongBuffer); - } - }; - - protected handleError = (e: Error) => { - this.logger.warn(`Connection Error: ${e.toString()}`); - this.tlsSocket.destroy(); - }; - - /** - * 
Destroying the server socket triggers the close event - */ - protected handleClose = async () => { - await this.stop(); - }; - - protected handleEnd = () => { - if (this.utpConn.destroyed) { - // The utp connection may already be destroyed - this.tlsSocket.destroy(); + protected async endGracefully(socket: Socket, timeout: number) { + const { p: endP, resolveP: resolveEndP } = promise(); + socket.once('end', resolveEndP); + socket.end(); + const timer = timerStart(timeout); + await Promise.race([endP, timer.timerP]); + socket.removeListener('end', resolveEndP); + if (timer.timedOut) { + socket.emit('error', new networkErrors.ErrorConnectionEndTimeout()); } else { - // Prevent half open connections - this.tlsSocket.end(); + timerStop(timer); } - }; + // Must be destroyed if timed out + // If not timed out, force destroy the socket due to buggy tlsSocket and utpConn + socket.destroy(); + } } export default ConnectionForward; diff --git a/src/network/ConnectionReverse.ts b/src/network/ConnectionReverse.ts index 7e23f387f..fb138983b 100644 --- a/src/network/ConnectionReverse.ts +++ b/src/network/ConnectionReverse.ts @@ -1,4 +1,5 @@ import type { Socket, AddressInfo } from 'net'; +import type { TLSSocket } from 'tls'; import type UTPConnection from 'utp-native/lib/connection'; import type { Host, Port, Address, NetworkMessage } from './types'; import type { NodeId } from '../nodes/types'; @@ -7,29 +8,87 @@ import type { AbstractConstructorParameters, Timer } from '../types'; import net from 'net'; import tls from 'tls'; +import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import Connection from './Connection'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; import { utils as keysUtils } from '../keys'; -import { promise } from '../utils'; +import { promise, timerStart, timerStop } from '../utils'; type ConnectionsReverse = { egress: Map; proxy: Map; }; +interface ConnectionReverse extends StartStop {} +@StartStop() 
class ConnectionReverse extends Connection { public readonly serverHost: Host; public readonly serverPort: Port; protected connections: ConnectionsReverse; protected serverSocket: Socket; + protected tlsSocket?: TLSSocket; protected proxyHost: Host; protected proxyPort: Port; protected proxyAddress: Address; protected clientCertChain: Array; protected resolveReadyP: (value: void) => void; + protected handleMessage = async ( + data: Buffer, + remoteInfo: { address: string; port: number }, + ) => { + // Ignore messages not intended for this target + if (remoteInfo.address !== this.host || remoteInfo.port !== this.port) { + return; + } + let msg: NetworkMessage; + try { + msg = networkUtils.unserializeNetworkMessage(data); + } catch (e) { + return; + } + // Don't reset timeout until timeout is initialised + if (this.timeout != null) { + // Any message should reset the timeout + this.stopKeepAliveTimeout(); + this.startKeepAliveTimeout(); + } + if (msg.type === 'ping') { + await this.send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + this.resolveReadyP(); + } + }; + + protected handleError = async (e: Error) => { + this.logger.warn(`Server Error: ${e.toString()}`); + await this.stop(); + }; + + /** + * Handles receiving `end` event for `this.serverSocket` from server + * Handler is removed and not executed when `end` is initiated here + */ + protected handleEnd = async () => { + this.logger.debug('Receives serverSocket ending'); + this.logger.debug('Responds serverSocket ending'); + this.serverSocket.end(); + this.serverSocket.destroy(); + this.logger.debug('Responded serverSocket ending'); + await this.stop(); + }; + + /** + * Handles `close` event for `this.serverSocket` + * Destroying `this.serverSocket` triggers the close event + * If already stopped, then this does nothing + */ + protected handleClose = async () => { + await this.stop(); + }; + public constructor({ serverHost, serverPort, @@ -51,117 +110,120 @@ class ConnectionReverse extends 
Connection { }: { timer?: Timer; } = {}): Promise { + this.logger.info('Starting Connection Reverse'); + // Promise for ready + const { p: readyP, resolveP: resolveReadyP } = promise(); + // Promise for server connection + const { p: socketP, resolveP: resolveSocketP } = promise(); + // Promise for start errors + const { p: errorP, rejectP: rejectErrorP } = promise(); + this.resolveReadyP = resolveReadyP; + this.utpSocket.on('message', this.handleMessage); + this.serverSocket = net.connect(this.serverPort, this.serverHost, () => { + const proxyAddressInfo = this.serverSocket.address() as AddressInfo; + this.proxyHost = proxyAddressInfo.address as Host; + this.proxyPort = proxyAddressInfo.port as Port; + this.proxyAddress = networkUtils.buildAddress( + this.proxyHost, + this.proxyPort, + ); + resolveSocketP(); + }); + const handleStartError = (e) => { + rejectErrorP(e); + }; + this.serverSocket.once('error', handleStartError); + this.serverSocket.on('end', this.handleEnd); + this.serverSocket.on('close', this.handleClose); + let punchInterval; try { - if (this._started) { - return; - } - this.logger.info('Starting Connection Reverse'); - this._started = true; - // Promise for ready - const { p: readyP, resolveP: resolveReadyP } = promise(); - // Promise for server connection - const { p: socketP, resolveP: resolveSocketP } = promise(); - // Promise for start errors - const { p: errorP, rejectP: rejectErrorP } = promise(); - this.resolveReadyP = resolveReadyP; - this.utpSocket.on('message', this.handleMessage); - this.serverSocket = net.connect(this.serverPort, this.serverHost, () => { - const proxyAddressInfo = this.serverSocket.address() as AddressInfo; - this.proxyHost = proxyAddressInfo.address as Host; - this.proxyPort = proxyAddressInfo.port as Port; - this.proxyAddress = networkUtils.buildAddress( - this.proxyHost, - this.proxyPort, - ); - resolveSocketP(); - }); - const handleStartError = (e) => { - rejectErrorP(e); - }; - this.serverSocket.once('error', 
handleStartError); - this.serverSocket.on('end', this.handleEnd); - this.serverSocket.on('close', this.handleClose); - let punchInterval; - try { - await Promise.race([ - socketP, - errorP, - ...(timer != null ? [timer.timerP] : []), - ]); - // Send punch & ready signal + await Promise.race([ + socketP, + errorP, + ...(timer != null ? [timer.timerP] : []), + ]); + // Send punch & ready signal + await this.send(networkUtils.pingBuffer); + punchInterval = setInterval(async () => { await this.send(networkUtils.pingBuffer); - punchInterval = setInterval(async () => { - await this.send(networkUtils.pingBuffer); - }, 1000); - await Promise.race([ - readyP, - errorP, - ...(timer != null ? [timer.timerP] : []), - ]); - } catch (e) { - await this.stop(); - throw new networkErrors.ErrorConnectionStart(e.message, { - code: e.code, - errno: e.errno, - syscall: e.syscall, - }); - } finally { - clearInterval(punchInterval); - } - if (timer?.timedOut) { - await this.stop(); - throw new networkErrors.ErrorConnectionStartTimeout(); - } - this.serverSocket.off('error', handleStartError); - this.serverSocket.on('error', this.handleError); - this.connections.egress.set(this.address, this); - this.connections.proxy.set(this.proxyAddress, this); - this.startTimeout(); - this.logger.info('Started Connection Reverse'); + }, this.punchIntervalTime); + await Promise.race([ + readyP, + errorP, + ...(timer != null ? 
[timer.timerP] : []), + ]); } catch (e) { - this._started = false; - throw e; + // Clean up partial start + // Socket isn't established yet, so it is destroyed + this.serverSocket.destroy(); + this.utpSocket.off('message', this.handleMessage); + throw new networkErrors.ErrorConnectionStart(e.message, { + code: e.code, + errno: e.errno, + syscall: e.syscall, + }); + } finally { + clearInterval(punchInterval); } + this.serverSocket.on('error', this.handleError); + this.serverSocket.off('error', handleStartError); + if (timer?.timedOut) { + // Clean up partial start + // Socket isn't established yet, so it is destroyed + this.serverSocket.destroy(); + this.utpSocket.off('message', this.handleMessage); + throw new networkErrors.ErrorConnectionStartTimeout(); + } + this.connections.egress.set(this.address, this); + this.connections.proxy.set(this.proxyAddress, this); + this.startKeepAliveTimeout(); + this.logger.info('Started Connection Reverse'); } /** - * The close event should run the stop * Repeated invocations are noops */ public async stop() { - if (!this._started) { - return; - } this.logger.info('Stopping Connection Reverse'); - this._started = false; this._composed = false; - this.stopTimeout(); + this.stopKeepAliveTimeout(); this.utpSocket.off('message', this.handleMessage); + const endPs: Array> = []; if (!this.serverSocket.destroyed) { - this.serverSocket.end(); - this.serverSocket.destroy(); + this.logger.debug('Sends serverSocket ending'); + this.serverSocket.unpipe(); + // Graceful exit has its own end handler + this.serverSocket.removeAllListeners('end'); + endPs.push(this.endGracefully(this.serverSocket, this.endTime)); + } + if (this.tlsSocket != null && !this.tlsSocket.destroyed) { + this.logger.debug('Sends tlsSocket ending'); + this.tlsSocket.unpipe(); + // Graceful exit has its own end handler + this.tlsSocket.removeAllListeners('end'); + endPs.push(this.endGracefully(this.tlsSocket, this.endTime)); } + await Promise.all(endPs); 
this.connections.egress.delete(this.address); this.connections.proxy.delete(this.proxyAddress); this.logger.info('Stopped Connection Reverse'); } - /** - * Repeated invocations are noops - */ + @ready(new networkErrors.ErrorConnectionNotRunning(), true) public async compose(utpConn: UTPConnection, timer?: Timer): Promise { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } try { if (this._composed) { throw new networkErrors.ErrorConnectionComposed(); } + this._composed = true; this.logger.info('Composing Connection Reverse'); // Promise for secure establishment const { p: secureP, resolveP: resolveSecureP } = promise(); // Promise for compose errors const { p: errorP, rejectP: rejectErrorP } = promise(); + const handleComposeError = (e) => { + rejectErrorP(e); + }; const tlsSocket = new tls.TLSSocket(utpConn, { key: Buffer.from(this.tlsConfig.keyPrivatePem, 'ascii'), cert: Buffer.from(this.tlsConfig.certChainPem, 'ascii'), @@ -172,9 +234,6 @@ class ConnectionReverse extends Connection { tlsSocket.once('secure', () => { resolveSecureP(); }); - const handleComposeError = (e) => { - rejectErrorP(e); - }; tlsSocket.once('error', handleComposeError); try { await Promise.race([ @@ -183,49 +242,62 @@ class ConnectionReverse extends Connection { ...(timer != null ? 
[timer.timerP] : []), ]); } catch (e) { + // Clean up partial compose + if (!tlsSocket.destroyed) { + tlsSocket.end(); + tlsSocket.destroy(); + } throw new networkErrors.ErrorConnectionCompose(e.message, { code: e.code, errno: e.errno, syscall: e.syscall, }); } + tlsSocket.on('error', async (e) => { + this.logger.warn(`Reverse Error: ${e.toString()}`); + await this.stop(); + }); + tlsSocket.off('error', handleComposeError); if (timer?.timedOut) { + // Clean up partial compose + if (!tlsSocket.destroyed) { + tlsSocket.end(); + tlsSocket.destroy(); + } throw new networkErrors.ErrorConnectionComposeTimeout(); } const clientCertChain = networkUtils.getCertificateChain(tlsSocket); - networkUtils.verifyClientCertificateChain(clientCertChain); - tlsSocket.off('error', handleComposeError); - // Propagate end, error, close and data - tlsSocket.on('end', () => { - if (utpConn.destroyed) { - // The utp connection may already be destroyed - tlsSocket.destroy(); - } else { - // Prevent half open connections + try { + networkUtils.verifyClientCertificateChain(clientCertChain); + } catch (e) { + // Clean up partial compose + if (!tlsSocket.destroyed) { tlsSocket.end(); + tlsSocket.destroy(); } - }); - tlsSocket.on('error', (e) => { - if (!this.serverSocket.destroyed) { - this.serverSocket.emit('error', e); - } - tlsSocket.destroy(); - }); - tlsSocket.on('close', () => { - this.serverSocket.destroy(); - }); - this.serverSocket.on('error', (e) => { - if (!tlsSocket.destroyed) { - tlsSocket.destroy(e); + throw e; + } + // The TLSSocket is now established + this.tlsSocket = tlsSocket; + this.tlsSocket.on('end', async () => { + this.logger.debug('Receives tlsSocket ending'); + if (utpConn.destroyed) { + this.tlsSocket!.destroy(); + this.logger.debug('Destroyed tlsSocket'); + } else { + this.logger.debug('Responds tlsSocket ending'); + this.tlsSocket!.end(); + this.tlsSocket!.destroy(); + this.logger.debug('Responded tlsSocket ending'); } + await this.stop(); }); - 
this.serverSocket.on('close', () => { - tlsSocket.destroy(); + this.tlsSocket.on('close', async () => { + await this.stop(); }); - tlsSocket.pipe(this.serverSocket); - this.serverSocket.pipe(tlsSocket); + this.tlsSocket.pipe(this.serverSocket, { end: false }); + this.serverSocket.pipe(this.tlsSocket, { end: false }); this.clientCertChain = clientCertChain; - this._composed = true; this.logger.info('Composed Connection Reverse'); } catch (e) { this._composed = false; @@ -233,20 +305,17 @@ class ConnectionReverse extends Connection { } } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getProxyHost(): Host { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } return this.proxyHost; } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getProxyPort(): Port { - if (!this._started) { - throw new networkErrors.ErrorConnectionNotStarted(); - } return this.proxyPort; } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getClientCertificates(): Array { if (!this._composed) { throw new networkErrors.ErrorConnectionNotComposed(); @@ -254,6 +323,7 @@ class ConnectionReverse extends Connection { return this.clientCertChain.map((crt) => keysUtils.certCopy(crt)); } + @ready(new networkErrors.ErrorConnectionNotRunning()) public getClientNodeIds(): Array { if (!this._composed) { throw new networkErrors.ErrorConnectionNotComposed(); @@ -261,62 +331,44 @@ class ConnectionReverse extends Connection { return this.clientCertChain.map((c) => networkUtils.certNodeId(c)); } - protected startTimeout() { - this.timeout = setTimeout(() => { - this.serverSocket.emit( - 'error', - new networkErrors.ErrorConnectionTimeout(), - ); - }, this.timeoutTime); + protected startKeepAliveTimeout() { + this.timeout = setTimeout(async () => { + const e = new networkErrors.ErrorConnectionTimeout(); + // If the TLSSocket is established, emit the error so the + // tlsSocket error handler handles it + // This is not emitted on serverSocket in 
order maintain + // symmetry with ConnectionForward behaviour + if (this.tlsSocket != null && !this.tlsSocket.destroyed) { + this.tlsSocket.emit('error', e); + } else { + // Otherwise the composition has not occurred yet + // This means we have timed out waiting for a composition + this.logger.warn(`Reverse Error: ${e.toString()}`); + await this.stop(); + } + }, this.keepAliveTimeoutTime); } - protected stopTimeout() { + protected stopKeepAliveTimeout() { clearTimeout(this.timeout); } - protected handleMessage = async ( - data: Buffer, - remoteInfo: { address: string; port: number }, - ) => { - // Ignore messages not intended for this target - if (remoteInfo.address !== this.host || remoteInfo.port !== this.port) { - return; - } - let msg: NetworkMessage; - try { - msg = networkUtils.unserializeNetworkMessage(data); - } catch (e) { - return; - } - // Don't reset timeout until timeout is initialised - if (this.timeout != null) { - // Any message should reset the timeout - this.stopTimeout(); - this.startTimeout(); + protected async endGracefully(socket: Socket, timeout: number) { + const { p: endP, resolveP: resolveEndP } = promise(); + socket.once('end', resolveEndP); + socket.end(); + const timer = timerStart(timeout); + await Promise.race([endP, timer.timerP]); + socket.removeListener('end', resolveEndP); + if (timer.timedOut) { + socket.emit('error', new networkErrors.ErrorConnectionEndTimeout()); + } else { + timerStop(timer); } - if (msg.type === 'ping') { - await this.send(networkUtils.pongBuffer); - } else if (msg.type === 'pong') { - this.resolveReadyP(); - } - }; - - protected handleError = (e: Error) => { - this.logger.warn(`Connection Error: ${e.toString()}`); - this.serverSocket.destroy(); - }; - - /** - * Destroying the server socket triggers the close event - */ - protected handleClose = async () => { - await this.stop(); - }; - - protected handleEnd = () => { - // Prevent half open connections - this.serverSocket.end(); - }; + // Must be destroyed if 
timed out + // If not timed out, force destroy the socket due to buggy tlsSocket and utpConn + socket.destroy(); + } } export default ConnectionReverse; diff --git a/src/network/ForwardProxy.ts b/src/network/ForwardProxy.ts index 58dac9518..00a0d5b98 100644 --- a/src/network/ForwardProxy.ts +++ b/src/network/ForwardProxy.ts @@ -12,21 +12,23 @@ import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import ConnectionForward from './ConnectionForward'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; -import { promisify, sleep, timerStart, timerStop } from '../utils'; +import { promisify, timerStart, timerStop } from '../utils'; interface ForwardProxy extends StartStop {} @StartStop() class ForwardProxy { public readonly authToken: string; public readonly connConnectTime: number; - public readonly connTimeoutTime: number; - public readonly connPingIntervalTime: number; + public readonly connKeepAliveTimeoutTime: number; + public readonly connEndTime: number; + public readonly connPunchIntervalTime: number; + public readonly connKeepAliveIntervalTime: number; protected logger: Logger; - protected _proxyHost: Host; - protected _proxyPort: Port; - protected _egressHost: Host; - protected _egressPort: Port; + protected proxyHost: Host; + protected proxyPort: Port; + protected egressHost: Host; + protected egressPort: Port; protected server: http.Server; protected utpSocket: UTP; protected tlsConfig: TLSConfig; @@ -39,23 +41,28 @@ class ForwardProxy { constructor({ authToken, connConnectTime = 20000, - connTimeoutTime = 20000, - connPingIntervalTime = 1000, + connKeepAliveTimeoutTime = 20000, + connEndTime = 1000, + connPunchIntervalTime = 1000, + connKeepAliveIntervalTime = 1000, logger, }: { authToken: string; connConnectTime?: number; - connTimeoutTime?: number; - connPingIntervalTime?: number; - + connKeepAliveTimeoutTime?: number; + connEndTime?: number; + connPunchIntervalTime?: number; + connKeepAliveIntervalTime?: 
number; logger?: Logger; }) { this.logger = logger ?? new Logger(ForwardProxy.name); this.logger.info('Creating Forward Proxy'); this.authToken = authToken; this.connConnectTime = connConnectTime; - this.connTimeoutTime = connTimeoutTime; - this.connPingIntervalTime = connPingIntervalTime; + this.connKeepAliveTimeoutTime = connKeepAliveTimeoutTime; + this.connEndTime = connEndTime; + this.connPunchIntervalTime = connPunchIntervalTime; + this.connKeepAliveIntervalTime = connKeepAliveIntervalTime; this.server = http.createServer(); this.server.on('request', this.handleRequest); this.server.on('connect', this.handleConnect); @@ -78,28 +85,29 @@ class ForwardProxy { egressPort?: Port; tlsConfig: TLSConfig; }): Promise { - this._proxyHost = proxyHost; - this._egressHost = egressHost; - this.tlsConfig = tlsConfig; - - let proxyAddress = networkUtils.buildAddress(this._proxyHost, proxyPort); - let egressAddress = networkUtils.buildAddress(this._egressHost, egressPort); + let proxyAddress = networkUtils.buildAddress(proxyHost, proxyPort); + let egressAddress = networkUtils.buildAddress(egressHost, egressPort); this.logger.info( `Starting Forward Proxy from ${proxyAddress} to ${egressAddress}`, ); - const utpSocket = UTP({ allowHalfOpen: false }); + // Normal sockets defaults to `allowHalfOpen: false` + // But UTP defaults to `allowHalfOpen: true` + // Setting `allowHalfOpen: false` on UTP is buggy and cannot be used + const utpSocket = UTP({ allowHalfOpen: true }); const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); await utpSocketBind(egressPort, egressHost); - this._egressPort = utpSocket.address().port; + egressPort = utpSocket.address().port; const serverListen = promisify(this.server.listen).bind(this.server); - await serverListen(proxyPort, this._proxyHost); - this._proxyPort = (this.server.address() as AddressInfo).port as Port; - proxyAddress = networkUtils.buildAddress(this._proxyHost, this._proxyPort); - egressAddress = networkUtils.buildAddress( - 
this._egressHost, - this._egressPort, - ); + await serverListen(proxyPort, proxyHost); + proxyPort = (this.server.address() as AddressInfo).port as Port; + this.proxyHost = proxyHost; + this.proxyPort = proxyPort; + this.egressHost = egressHost; + this.egressPort = egressPort; this.utpSocket = utpSocket; + this.tlsConfig = tlsConfig; + proxyAddress = networkUtils.buildAddress(proxyHost, proxyPort); + egressAddress = networkUtils.buildAddress(egressHost, egressPort); this.logger.info( `Started Forward Proxy from ${proxyAddress} to ${egressAddress}`, ); @@ -107,15 +115,20 @@ class ForwardProxy { public async stop(): Promise { this.logger.info('Stopping Forward Proxy Server'); + // Ensure no new connections are created + this.server.removeAllListeners('connect'); + this.server.on('connect', async (_request, clientSocket) => { + const clientSocketEnd = promisify(clientSocket.end).bind(clientSocket); + await clientSocketEnd('HTTP/1.1 503 Service Unavailable\r\n' + '\r\n'); + clientSocket.destroy(); + }); + const connStops: Array> = []; + for (const [_, conn] of this.connections.ingress) { + connStops.push(conn.stop()); + } const serverClose = promisify(this.server.close).bind(this.server); await serverClose(); - // Ensure no new connections are created while this is iterating - await Promise.all( - Array.from(this.connections.ingress, ([, conn]) => conn.stop()), - ); - // Delay socket close by about 1 second - // this gives some time for the end/FIN packets to be sent - await sleep(1000); + await Promise.all(connStops); // Even when all connections are destroyed // the utp socket sometimes hangs in closing // here we asynchronously close and unreference it @@ -125,30 +138,30 @@ class ForwardProxy { this.logger.info('Stopped Forward Proxy Server'); } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) - get proxyHost(): Host { - return this._proxyHost; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public getProxyHost(): Host { + return this.proxyHost; 
} - @ready(new networkErrors.ErrorForwardProxyNotStarted()) - get proxyPort(): Port { - return this._proxyPort; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public getProxyPort(): Port { + return this.proxyPort; } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) - get egressHost(): Host { - return this._egressHost; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public getEgressHost(): Host { + return this.egressHost; } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) - get egressPort(): Port { - return this._egressPort; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public getEgressPort(): Port { + return this.egressPort; } - - public setTLSConfig(tlsConfig: TLSConfig): void { - this.tlsConfig = tlsConfig; + public getConnectionCount(): number { + return this.connections.ingress.size; } + @ready(new networkErrors.ErrorForwardProxyNotRunning()) public getConnectionInfoByClient( clientHost: Host, clientPort: Port, @@ -163,13 +176,14 @@ class ForwardProxy { return { nodeId: serverNodeIds[0], certificates: serverCertificates, - egressHost: this._egressHost, - egressPort: this._egressPort, + egressHost: this.egressHost, + egressPort: this.egressPort, ingressHost: conn.host, ingressPort: conn.port, }; } + @ready(new networkErrors.ErrorForwardProxyNotRunning()) public getConnectionInfoByIngress( ingressHost: Host, ingressPort: Port, @@ -184,24 +198,37 @@ class ForwardProxy { return { nodeId: serverNodeIds[0], certificates: serverCertificates, - egressHost: this._egressHost, - egressPort: this._egressPort, + egressHost: this.egressHost, + egressPort: this.egressPort, ingressHost: conn.host, ingressPort: conn.port, }; } - get connectionCount(): number { - return this.connections.ingress.size; + @ready(new networkErrors.ErrorForwardProxyNotRunning()) + public setTLSConfig(tlsConfig: TLSConfig): void { + this.tlsConfig = tlsConfig; } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) + /** + * Manually opens a 
connection with the ForwardProxy + * Usually you just use HTTP Connect requests to trigger handleConnect + * This will default to using `this.connConnectTime` if + * timer is not set or set to `undefined` + * It will only stop the timer if using the default timer + * Set timer to `null` explicitly to wait forever + */ + @ready(new networkErrors.ErrorForwardProxyNotRunning(), true) public async openConnection( nodeId: NodeId, ingressHost: Host, ingressPort: Port, timer?: Timer, ): Promise { + let timer_ = timer; + if (timer === undefined) { + timer_ = timerStart(this.connConnectTime); + } const ingressAddress = networkUtils.buildAddress(ingressHost, ingressPort); let lock = this.connectionLocks.get(ingressAddress); if (lock == null) { @@ -210,14 +237,17 @@ class ForwardProxy { } const release = await lock.acquire(); try { - await this.establishConnection(nodeId, ingressHost, ingressPort, timer); + await this.establishConnection(nodeId, ingressHost, ingressPort, timer_); } finally { + if (timer === undefined) { + timerStop(timer_!); + } release(); this.connectionLocks.delete(ingressAddress); } } - @ready(new networkErrors.ErrorForwardProxyNotStarted()) + @ready(new networkErrors.ErrorForwardProxyNotRunning(), true) public async closeConnection( ingressHost: Host, ingressPort: Port, @@ -311,9 +341,43 @@ class ForwardProxy { timer, ); } catch (e) { + if (e instanceof networkErrors.ErrorConnectionStartTimeout) { + if (!clientSocket.destroyed) { + await clientSocketEnd('HTTP/1.1 504 Gateway Timeout\r\n' + '\r\n'); + clientSocket.destroy(e); + } + return; + } + if (e instanceof networkErrors.ErrorConnectionStart) { + if (!clientSocket.destroyed) { + await clientSocketEnd('HTTP/1.1 502 Bad Gateway\r\n' + '\r\n'); + clientSocket.destroy(e); + } + return; + } + if (e instanceof networkErrors.ErrorCertChain) { + if (!clientSocket.destroyed) { + await clientSocketEnd( + 'HTTP/1.1 526 Invalid SSL Certificate\r\n' + '\r\n', + ); + clientSocket.destroy(e); + } + return; + } + if 
(e instanceof networkErrors.ErrorConnectionTimeout) { + if (!clientSocket.destroyed) { + await clientSocketEnd( + 'HTTP/1.1 524 A Timeout Occurred\r\n' + '\r\n', + ); + clientSocket.destroy(e); + } + return; + } if (e instanceof networkErrors.ErrorConnection) { if (!clientSocket.destroyed) { - await clientSocketEnd('HTTP/1.1 400 Bad Request\r\n' + '\r\n'); + await clientSocketEnd( + 'HTTP/1.1 500 Internal Server Error\r\n' + '\r\n', + ); clientSocket.destroy(e); } return; @@ -372,12 +436,14 @@ class ForwardProxy { conn = new ConnectionForward({ nodeId, connections: this.connections, - pingIntervalTime: this.connPingIntervalTime, utpSocket: this.utpSocket, host: ingressHost, port: ingressPort, tlsConfig: this.tlsConfig, - timeoutTime: this.connTimeoutTime, + keepAliveTimeoutTime: this.connKeepAliveTimeoutTime, + endTime: this.connEndTime, + punchIntervalTime: this.connPunchIntervalTime, + keepAliveIntervalTime: this.connKeepAliveIntervalTime, logger: this.logger.getChild( `${ConnectionForward.name} ${ingressAddress}`, ), @@ -401,7 +467,7 @@ class ForwardProxy { * Regular HTTP requests are not allowed */ protected handleRequest = ( - request: http.IncomingMessage, + _request: http.IncomingMessage, response: http.ServerResponse, ): void => { response.writeHead(405); diff --git a/src/network/ReverseProxy.ts b/src/network/ReverseProxy.ts index 7244e1483..5ecdf90c0 100644 --- a/src/network/ReverseProxy.ts +++ b/src/network/ReverseProxy.ts @@ -10,19 +10,21 @@ import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import ConnectionReverse from './ConnectionReverse'; import * as networkUtils from './utils'; import * as networkErrors from './errors'; -import { promisify, sleep, timerStart, timerStop } from '../utils'; +import { promisify, timerStart, timerStop } from '../utils'; interface ReverseProxy extends StartStop {} @StartStop() class ReverseProxy { public readonly connConnectTime: number; - public readonly connTimeoutTime: number; + public readonly 
connKeepAliveTimeoutTime: number; + public readonly connEndTime: number; + public readonly connPunchIntervalTime: number; protected logger: Logger; - protected _ingressHost: Host; - protected _ingressPort: Port; - protected _serverHost: Host; - protected _serverPort: Port; + protected ingressHost: Host; + protected ingressPort: Port; + protected serverHost: Host; + protected serverPort: Port; protected utpSocket: UTP; protected tlsConfig: TLSConfig; protected connectionLocks: Map = new Map(); @@ -33,17 +35,23 @@ class ReverseProxy { constructor({ connConnectTime = 20000, - connTimeoutTime = 20000, + connKeepAliveTimeoutTime = 20000, + connEndTime = 1000, + connPunchIntervalTime = 1000, logger, }: { connConnectTime?: number; - connTimeoutTime?: number; + connKeepAliveTimeoutTime?: number; + connEndTime?: number; + connPunchIntervalTime?: number; logger?: Logger; }) { this.logger = logger ?? new Logger(ReverseProxy.name); this.logger.info('Creating Reverse Proxy'); this.connConnectTime = connConnectTime; - this.connTimeoutTime = connTimeoutTime; + this.connKeepAliveTimeoutTime = connKeepAliveTimeoutTime; + this.connEndTime = connEndTime; + this.connPunchIntervalTime = connPunchIntervalTime; this.logger.info('Created Reverse Proxy'); } @@ -63,26 +71,30 @@ class ReverseProxy { ingressPort?: Port; tlsConfig: TLSConfig; }): Promise { - this._ingressHost = ingressHost; - this.tlsConfig = tlsConfig; let ingressAddress = networkUtils.buildAddress(ingressHost, ingressPort); let serverAddress = networkUtils.buildAddress(serverHost, serverPort); this.logger.info( `Starting Reverse Proxy from ${ingressAddress} to ${serverAddress}`, ); - const utpSocket = UTP.createServer(this.handleConnection, { - allowHalfOpen: false, - }); + // Normal sockets defaults to `allowHalfOpen: false` + // But UTP defaults to `allowHalfOpen: true` + // Setting `allowHalfOpen: false` on UTP is buggy and cannot be used + const utpSocket = UTP.createServer( + { + allowHalfOpen: true, + }, + 
this.handleConnection, + ); const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); - await utpSocketListen(ingressPort, this._ingressHost); - this._ingressPort = utpSocket.address().port; - this._serverHost = serverHost; - this._serverPort = serverPort; + await utpSocketListen(ingressPort, ingressHost); + ingressPort = utpSocket.address().port; + this.serverHost = serverHost; + this.serverPort = serverPort; + this.ingressHost = ingressHost; + this.ingressPort = ingressPort; this.utpSocket = utpSocket; - ingressAddress = networkUtils.buildAddress( - this._ingressHost, - this._ingressPort, - ); + this.tlsConfig = tlsConfig; + ingressAddress = networkUtils.buildAddress(ingressHost, ingressPort); serverAddress = networkUtils.buildAddress(serverHost, serverPort); this.logger.info( `Started Reverse Proxy from ${ingressAddress} to ${serverAddress}`, @@ -91,13 +103,17 @@ class ReverseProxy { public async stop(): Promise { this.logger.info('Stopping Reverse Proxy'); - // Ensure no new connections are created while this is iterating - await Promise.all( - Array.from(this.connections.egress, ([, conn]) => conn.stop()), - ); - // Delay socket close by about 1 second - // this gives some time for the end/FIN packets to be sent - await sleep(1000); + // Ensure no new connections are created + this.utpSocket.removeAllListeners('connection'); + this.utpSocket.on('connection', (utpConn: UTPConnection) => { + utpConn.end(); + utpConn.destroy(); + }); + const connStops: Array> = []; + for (const [_, conn] of this.connections.egress) { + connStops.push(conn.stop()); + } + await Promise.all(connStops); // Even when all connections are destroyed // the utp socket sometimes hangs in closing // here we asynchronously close and unreference it @@ -107,31 +123,31 @@ class ReverseProxy { this.logger.info('Stopped Reverse Proxy'); } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) - get ingressHost(): Host { - return this._ingressHost; + @ready(new 
networkErrors.ErrorReverseProxyNotRunning()) + public getIngressHost(): Host { + return this.ingressHost; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) - get ingressPort(): Port { - return this._ingressPort; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + public getIngressPort(): Port { + return this.ingressPort; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) - get serverHost(): Host { - return this._serverHost; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + public getServerHost(): Host { + return this.serverHost; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) - get serverPort(): Port { - return this._serverPort; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + public getServerPort(): Port { + return this.serverPort; } - public setTLSConfig(tlsConfig: TLSConfig): void { - this.tlsConfig = tlsConfig; + public getConnectionCount(): number { + return this.connections.egress.size; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) + @ready(new networkErrors.ErrorReverseProxyNotRunning()) public getConnectionInfoByProxy( proxyHost: Host, proxyPort: Port, @@ -148,12 +164,12 @@ class ReverseProxy { certificates: clientCertificates, egressHost: conn.host, egressPort: conn.port, - ingressHost: this._ingressHost, - ingressPort: this._ingressPort, + ingressHost: this.ingressHost, + ingressPort: this.ingressPort, }; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) + @ready(new networkErrors.ErrorReverseProxyNotRunning()) public getConnectionInfoByEgress( egressHost: Host, egressPort: Port, @@ -170,21 +186,26 @@ class ReverseProxy { certificates: clientCertificates, egressHost: conn.host, egressPort: conn.port, - ingressHost: this._ingressHost, - ingressPort: this._ingressPort, + ingressHost: this.ingressHost, + ingressPort: this.ingressPort, }; } - get connectionCount(): number { - return this.connections.egress.size; + @ready(new networkErrors.ErrorReverseProxyNotRunning()) + 
public setTLSConfig(tlsConfig: TLSConfig): void { + this.tlsConfig = tlsConfig; } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) + @ready(new networkErrors.ErrorReverseProxyNotRunning(), true) public async openConnection( egressHost: Host, egressPort: Port, timer?: Timer, ): Promise { + let timer_ = timer; + if (timer === undefined) { + timer_ = timerStart(this.connConnectTime); + } const egressAddress = networkUtils.buildAddress(egressHost, egressPort); let lock = this.connectionLocks.get(egressAddress); if (lock == null) { @@ -193,14 +214,17 @@ class ReverseProxy { } const release = await lock.acquire(); try { - await this.establishConnection(egressHost, egressPort, timer); + await this.establishConnection(egressHost, egressPort, timer_); } finally { + if (timer === undefined) { + timerStop(timer_!); + } release(); this.connectionLocks.delete(egressAddress); } } - @ready(new networkErrors.ErrorReverseProxyNotStarted()) + @ready(new networkErrors.ErrorReverseProxyNotRunning(), true) public async closeConnection( egressHost: Host, egressPort: Port, @@ -238,13 +262,6 @@ class ReverseProxy { } const release = await lock.acquire(); try { - const handleConnectionError = (e) => { - this.logger.warn( - `Failed connection from ${egressAddress} - ${e.toString()}`, - ); - utpConn.destroy(); - }; - utpConn.on('error', handleConnectionError); this.logger.info(`Handling connection from ${egressAddress}`); const timer = timerStart(this.connConnectTime); try { @@ -259,16 +276,14 @@ class ReverseProxy { throw e; } if (!utpConn.destroyed) { - utpConn.destroy(e); - } else { - this.logger.warn( - `Failed connection from ${egressAddress} - ${e.toString()}`, - ); + utpConn.destroy(); } + this.logger.warn( + `Failed connection from ${egressAddress} - ${e.toString()}`, + ); } finally { timerStop(timer); } - utpConn.off('error', handleConnectionError); this.logger.info(`Handled connection from ${egressAddress}`); } finally { release(); @@ -297,14 +312,16 @@ class ReverseProxy 
{ return conn; } conn = new ConnectionReverse({ - serverHost: this._serverHost, - serverPort: this._serverPort, + serverHost: this.serverHost, + serverPort: this.serverPort, connections: this.connections, utpSocket: this.utpSocket, host: egressHost, port: egressPort, tlsConfig: this.tlsConfig, - timeoutTime: this.connTimeoutTime, + keepAliveTimeoutTime: this.connKeepAliveTimeoutTime, + endTime: this.connEndTime, + punchIntervalTime: this.connPunchIntervalTime, logger: this.logger.getChild( `${ConnectionReverse.name} ${egressAddress}`, ), diff --git a/src/network/errors.ts b/src/network/errors.ts index 9b03420b4..868b482d0 100644 --- a/src/network/errors.ts +++ b/src/network/errors.ts @@ -1,109 +1,156 @@ -import { ErrorPolykey } from '../errors'; +import { ErrorPolykey, sysexits } from '../errors'; class ErrorNetwork extends ErrorPolykey {} -class ErrorForwardProxyNotStarted extends ErrorNetwork {} +class ErrorForwardProxy extends ErrorNetwork {} -class ErrorForwardProxyDestroyed extends ErrorNetwork {} +class ErrorForwardProxyNotRunning extends ErrorForwardProxy { + description = 'ForwardProxy is not running'; + exitCode = sysexits.USAGE; +} -class ErrorForwardProxyInvalidUrl extends ErrorNetwork {} +class ErrorForwardProxyInvalidUrl extends ErrorForwardProxy { + description = 'Invalid target host used for HTTP connect proxy'; + exitCode = sysexits.PROTOCOL; +} -class ErrorForwardProxyMissingNodeId extends ErrorNetwork {} +class ErrorForwardProxyMissingNodeId extends ErrorForwardProxy { + description = 'Node ID query parameter is required for HTTP connect proxy'; + exitCode = sysexits.PROTOCOL; +} -class ErrorForwardProxyAuth extends ErrorNetwork {} +class ErrorForwardProxyAuth extends ErrorForwardProxy { + description = 'Incorrect HTTP connect proxy password'; + exitCode = sysexits.NOPERM; +} -class ErrorReverseProxyNotStarted extends ErrorNetwork {} +class ErrorReverseProxy extends ErrorNetwork {} -class ErrorReverseProxyDestroyed extends ErrorNetwork {} +class 
ErrorReverseProxyNotRunning extends ErrorReverseProxy { + description = 'ReverseProxy is not running'; + exitCode = sysexits.USAGE; +} class ErrorConnection extends ErrorNetwork {} -class ErrorConnectionMessageParse extends ErrorConnection {} +class ErrorConnectionNotRunning extends ErrorConnection { + description = 'Connection is not running'; + exitCode = sysexits.USAGE; +} -class ErrorConnectionNotStarted extends ErrorConnection {} +class ErrorConnectionComposed extends ErrorConnection { + description = 'Connection is composed'; + exitCode = sysexits.USAGE; +} -// During start error -class ErrorConnectionStart extends ErrorConnection {} +class ErrorConnectionNotComposed extends ErrorConnection { + description = 'Connection is not composed'; + exitCode = sysexits.USAGE; +} -// Start timeout error -class ErrorConnectionStartTimeout extends ErrorConnectionStart {} +class ErrorConnectionMessageParse extends ErrorConnection { + description = 'Network message received is invalid'; + exitCode = sysexits.TEMPFAIL; +} -// During compose error -class ErrorConnectionCompose extends ErrorConnection {} +class ErrorConnectionTimeout extends ErrorConnection { + description = 'Connection keep-alive timed out'; + exitCode = sysexits.UNAVAILABLE; +} -// Compose timeout error -class ErrorConnectionComposeTimeout extends ErrorConnectionCompose {} - -// Connection is already composed -class ErrorConnectionComposed extends ErrorConnection {} - -// Not yet composed, cannot answer certain things -class ErrorConnectionNotComposed extends ErrorConnection {} - -// Was not able to keep alive -class ErrorConnectionTimeout extends ErrorConnection {} - -/** - * Certificate verification errors - */ -class ErrorCertChain extends ErrorNetwork {} +class ErrorConnectionEndTimeout extends ErrorConnection { + description = 'Connection end timed out'; + exitCode = sysexits.UNAVAILABLE; +} /** - * When the certificate chain is empty + * Used by ConnectionForward and ConnectionReverse */ -class 
ErrorCertChainEmpty extends ErrorCertChain {} +class ErrorConnectionStart extends ErrorConnection { + description = 'Connection start failed'; + exitCode = sysexits.PROTOCOL; +} -/** - * The target node id is not claimed by any certificate - */ -class ErrorCertChainUnclaimed extends ErrorCertChain {} +class ErrorConnectionStartTimeout extends ErrorConnectionStart { + description = 'Connection start timed out'; + exitCode = sysexits.NOHOST; +} /** - * If the signature chain is broken + * Used by ConnectionReverse */ -class ErrorCertChainBroken extends ErrorCertChain {} +class ErrorConnectionCompose extends ErrorConnection { + description = 'Connection compose failed'; + exitCode = sysexits.PROTOCOL; +} -/** - * Certificate in the chain was expired - */ -class ErrorCertChainDateInvalid extends ErrorCertChain {} +class ErrorConnectionComposeTimeout extends ErrorConnectionCompose { + description = 'Connection compose timed out'; + exitCode = sysexits.NOHOST; +} /** - * Certificate is missing the common name + * Used for certificate verification */ -class ErrorCertChainNameInvalid extends ErrorCertChain {} - -/** - * Certificate public key doesn't generate the node id - */ -class ErrorCertChainKeyInvalid extends ErrorCertChain {} +class ErrorCertChain extends ErrorNetwork {} -/** - * Certificate self-signed signature is invalid - */ -class ErrorCertChainSignatureInvalid extends ErrorCertChain {} +class ErrorCertChainEmpty extends ErrorCertChain { + description = 'Certificate chain is empty'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainUnclaimed extends ErrorCertChain { + description = 'The target node id is not claimed by any certificate'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainBroken extends ErrorCertChain { + description = 'The signature chain is broken'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainDateInvalid extends ErrorCertChain { + description = 'Certificate in the chain is expired'; + exitCode = sysexits.PROTOCOL; 
+} + +class ErrorCertChainNameInvalid extends ErrorCertChain { + description = 'Certificate is missing the common name'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainKeyInvalid extends ErrorCertChain { + description = 'Certificate public key does not generate the Node ID'; + exitCode = sysexits.PROTOCOL; +} + +class ErrorCertChainSignatureInvalid extends ErrorCertChain { + description = 'Certificate self-signed signature is invalid'; + exitCode = sysexits.PROTOCOL; +} class ErrorHostnameResolutionFailed extends ErrorNetwork {} export { ErrorNetwork, - ErrorForwardProxyNotStarted, - ErrorForwardProxyDestroyed, + ErrorForwardProxy, + ErrorForwardProxyNotRunning, ErrorForwardProxyInvalidUrl, ErrorForwardProxyMissingNodeId, ErrorForwardProxyAuth, - ErrorReverseProxyNotStarted, - ErrorReverseProxyDestroyed, + ErrorReverseProxy, + ErrorReverseProxyNotRunning, ErrorConnection, + ErrorConnectionNotRunning, + ErrorConnectionComposed, + ErrorConnectionNotComposed, ErrorConnectionMessageParse, - ErrorConnectionNotStarted, + ErrorConnectionTimeout, + ErrorConnectionEndTimeout, ErrorConnectionStart, ErrorConnectionStartTimeout, ErrorConnectionCompose, ErrorConnectionComposeTimeout, - ErrorConnectionComposed, - ErrorConnectionNotComposed, - ErrorConnectionTimeout, ErrorCertChain, ErrorCertChainEmpty, ErrorCertChainUnclaimed, diff --git a/src/nodes/NodeConnection.ts b/src/nodes/NodeConnection.ts index d4c9b61b8..fd5ac92b5 100644 --- a/src/nodes/NodeConnection.ts +++ b/src/nodes/NodeConnection.ts @@ -79,9 +79,9 @@ class NodeConnection { seedConnections?: Map; }): Promise { logger.info(`Creating ${this.name}`); - const proxyConfig: ProxyConfig = { - host: forwardProxy.proxyHost, - port: forwardProxy.proxyPort, + const proxyConfig = { + host: forwardProxy.getProxyHost(), + port: forwardProxy.getProxyPort(), authToken: forwardProxy.authToken, }; const nodeConnection = new NodeConnection({ @@ -146,8 +146,8 @@ class NodeConnection { this.logger.info(`Starting 
${this.constructor.name}`); // 1. Get the egress port of the fwdProxy (used for hole punching) const egressAddress = networkUtils.buildAddress( - this.fwdProxy.egressHost, - this.fwdProxy.egressPort, + this.fwdProxy.getEgressHost(), + this.fwdProxy.getEgressPort(), ); // Also need to sign this for authentication (i.e. from expected source) const signature = await this.keyManager.signWithRootKeyPair( diff --git a/src/nodes/NodeManager.ts b/src/nodes/NodeManager.ts index f00994697..9c643f046 100644 --- a/src/nodes/NodeManager.ts +++ b/src/nodes/NodeManager.ts @@ -42,13 +42,11 @@ class NodeManager { protected db: DB; protected logger: Logger; protected lock: Mutex = new Mutex(); - protected nodeGraph: NodeGraph; protected sigchain: Sigchain; protected keyManager: KeyManager; protected fwdProxy: ForwardProxy; protected revProxy: ReverseProxy; - // Active connections to other nodes protected connections: NodeConnectionMap = new Map(); // Node ID -> node address mappings for the seed nodes @@ -125,20 +123,26 @@ class NodeManager { }: { fresh?: boolean; } = {}) { - this.logger.info(`Starting ${this.constructor.name}`); - // Instantiate the node graph (containing Kademlia implementation) - this.nodeGraph = await NodeGraph.createNodeGraph({ - db: this.db, - nodeManager: this, - logger: this.logger, - fresh, - }); - // Add the seed nodes to the NodeGraph - for (const id in this.seedNodes) { - const seedNodeId = id as NodeId; - await this.nodeGraph.setNode(seedNodeId, this.seedNodes[seedNodeId]); + try { + this.logger.info(`Starting ${this.constructor.name}`); + // Instantiate the node graph (containing Kademlia implementation) + this.nodeGraph = await NodeGraph.createNodeGraph({ + db: this.db, + nodeManager: this, + logger: this.logger, + fresh, + }); + // Add the seed nodes to the NodeGraph + for (const id in this.seedNodes) { + const seedNodeId = id as NodeId; + await this.nodeGraph.setNode(seedNodeId, this.seedNodes[seedNodeId]); + } + this.logger.info(`Started 
${this.constructor.name}`); + } catch (e) { + this.logger.warn(`Failed Starting ${this.constructor.name}`); + await this.nodeGraph.stop(); + throw e; } - this.logger.info(`Started ${this.constructor.name}`); } public async stop() { diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts index fa5375154..ea1c11386 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts @@ -16,7 +16,7 @@ interface IAgentServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } -interface IAgentServiceService_IVaultsPermisssionsCheck extends grpc.MethodDefinition { - path: "/polykey.v1.AgentService/VaultsPermisssionsCheck"; +interface IAgentServiceService_IVaultsPermissionsCheck extends grpc.MethodDefinition { + path: "/polykey.v1.AgentService/VaultsPermissionsCheck"; requestStream: false; responseStream: false; requestSerialize: grpc.serialize; @@ -132,7 +132,7 @@ export interface IAgentServiceServer extends grpc.UntypedServiceImplementation { vaultsGitInfoGet: grpc.handleServerStreamingCall; vaultsGitPackGet: grpc.handleBidiStreamingCall; vaultsScan: grpc.handleServerStreamingCall; - vaultsPermisssionsCheck: grpc.handleUnaryCall; + vaultsPermissionsCheck: grpc.handleUnaryCall; nodesClosestLocalNodesGet: grpc.handleUnaryCall; nodesClaimsGet: grpc.handleUnaryCall; nodesChainDataGet: grpc.handleUnaryCall; @@ -152,9 +152,9 @@ export interface IAgentServiceClient { vaultsGitPackGet(metadata: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: 
polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; @@ -186,9 +186,9 @@ export class AgentServiceClient extends 
grpc.Client implements IAgentServiceClie public vaultsGitPackGet(metadata?: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: 
polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.js b/src/proto/js/polykey/v1/agent_service_grpc_pb.js index ad4682787..782ed2f8e 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.js @@ -209,8 +209,8 @@ vaultsGitInfoGet: { responseSerialize: serialize_polykey_v1_vaults_Vault, responseDeserialize: deserialize_polykey_v1_vaults_Vault, }, - vaultsPermisssionsCheck: { - path: '/polykey.v1.AgentService/VaultsPermisssionsCheck', + vaultsPermissionsCheck: { + path: '/polykey.v1.AgentService/VaultsPermissionsCheck', requestStream: false, responseStream: false, requestType: polykey_v1_vaults_vaults_pb.NodePermission, diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts index e666e9903..2de9a871d 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts @@ -19,10 +19,10 @@ import * as polykey_v1_vaults_vaults_pb from "../../polykey/v1/vaults/vaults_pb" import * as polykey_v1_utils_utils_pb from "../../polykey/v1/utils/utils_pb"; interface IClientServiceService extends grpc.ServiceDefinition { + agentLockAll: IClientServiceService_IAgentLockAll; agentStatus: IClientServiceService_IAgentStatus; agentStop: IClientServiceService_IAgentStop; - sessionsUnlock: IClientServiceService_ISessionsUnlock; - sessionsLockAll: IClientServiceService_ISessionsLockAll; + 
agentUnlock: IClientServiceService_IAgentUnlock; nodesAdd: IClientServiceService_INodesAdd; nodesPing: IClientServiceService_INodesPing; nodesClaim: IClientServiceService_INodesClaim; @@ -82,6 +82,15 @@ interface IClientServiceService extends grpc.ServiceDefinition { + path: "/polykey.v1.ClientService/AgentLockAll"; + requestStream: false; + responseStream: false; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} interface IClientServiceService_IAgentStatus extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/AgentStatus"; requestStream: false; @@ -100,17 +109,8 @@ interface IClientServiceService_IAgentStop extends grpc.MethodDefinition; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_ISessionsUnlock extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/SessionsUnlock"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IClientServiceService_ISessionsLockAll extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/SessionsLockAll"; +interface IClientServiceService_IAgentUnlock extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/AgentUnlock"; requestStream: false; responseStream: false; requestSerialize: grpc.serialize; @@ -635,10 +635,10 @@ interface IClientServiceService_INotificationsClear extends grpc.MethodDefinitio export const ClientServiceService: IClientServiceService; export interface IClientServiceServer extends grpc.UntypedServiceImplementation { + agentLockAll: grpc.handleUnaryCall; agentStatus: grpc.handleUnaryCall; agentStop: grpc.handleUnaryCall; - sessionsUnlock: grpc.handleUnaryCall; - sessionsLockAll: grpc.handleUnaryCall; + agentUnlock: grpc.handleUnaryCall; nodesAdd: grpc.handleUnaryCall; nodesPing: 
grpc.handleUnaryCall; nodesClaim: grpc.handleUnaryCall; @@ -699,18 +699,18 @@ export interface IClientServiceServer extends grpc.UntypedServiceImplementation } export interface IClientServiceClient { + agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | 
null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): 
grpc.ClientUnaryCall; nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; @@ -877,18 +877,18 @@ export interface IClientServiceClient { export class ClientServiceClient extends grpc.Client implements IClientServiceClient { constructor(address: string, credentials: grpc.ChannelCredentials, options?: Partial); + public agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; public agentStatus(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; public agentStatus(request: 
polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_agent_agent_pb.InfoMessage) => void): grpc.ClientUnaryCall; public agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public agentStop(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsLockAll(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; - public sessionsLockAll(request: 
polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; + public agentUnlock(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public nodesAdd(request: polykey_v1_nodes_nodes_pb.NodeAddress, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.js b/src/proto/js/polykey/v1/client_service_grpc_pb.js index 28e446dc1..7ab4f8f36 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.js @@ -479,31 +479,30 @@ function deserialize_polykey_v1_vaults_VersionResult(buffer_arg) { var ClientServiceService = exports.ClientServiceService = { // Agent -agentStatus: { - path: '/polykey.v1.ClientService/AgentStatus', 
+agentLockAll: { + path: '/polykey.v1.ClientService/AgentLockAll', requestStream: false, responseStream: false, requestType: polykey_v1_utils_utils_pb.EmptyMessage, - responseType: polykey_v1_agent_agent_pb.InfoMessage, + responseType: polykey_v1_utils_utils_pb.EmptyMessage, requestSerialize: serialize_polykey_v1_utils_EmptyMessage, requestDeserialize: deserialize_polykey_v1_utils_EmptyMessage, - responseSerialize: serialize_polykey_v1_agent_InfoMessage, - responseDeserialize: deserialize_polykey_v1_agent_InfoMessage, + responseSerialize: serialize_polykey_v1_utils_EmptyMessage, + responseDeserialize: deserialize_polykey_v1_utils_EmptyMessage, }, - agentStop: { - path: '/polykey.v1.ClientService/AgentStop', + agentStatus: { + path: '/polykey.v1.ClientService/AgentStatus', requestStream: false, responseStream: false, requestType: polykey_v1_utils_utils_pb.EmptyMessage, - responseType: polykey_v1_utils_utils_pb.EmptyMessage, + responseType: polykey_v1_agent_agent_pb.InfoMessage, requestSerialize: serialize_polykey_v1_utils_EmptyMessage, requestDeserialize: deserialize_polykey_v1_utils_EmptyMessage, - responseSerialize: serialize_polykey_v1_utils_EmptyMessage, - responseDeserialize: deserialize_polykey_v1_utils_EmptyMessage, + responseSerialize: serialize_polykey_v1_agent_InfoMessage, + responseDeserialize: deserialize_polykey_v1_agent_InfoMessage, }, - // Session -sessionsUnlock: { - path: '/polykey.v1.ClientService/SessionsUnlock', + agentStop: { + path: '/polykey.v1.ClientService/AgentStop', requestStream: false, responseStream: false, requestType: polykey_v1_utils_utils_pb.EmptyMessage, @@ -513,8 +512,8 @@ sessionsUnlock: { responseSerialize: serialize_polykey_v1_utils_EmptyMessage, responseDeserialize: deserialize_polykey_v1_utils_EmptyMessage, }, - sessionsLockAll: { - path: '/polykey.v1.ClientService/SessionsLockAll', + agentUnlock: { + path: '/polykey.v1.ClientService/AgentUnlock', requestStream: false, responseStream: false, requestType: 
polykey_v1_utils_utils_pb.EmptyMessage, diff --git a/src/proto/schemas/polykey/v1/agent_service.proto b/src/proto/schemas/polykey/v1/agent_service.proto index 5c27703ed..712ee5d8a 100644 --- a/src/proto/schemas/polykey/v1/agent_service.proto +++ b/src/proto/schemas/polykey/v1/agent_service.proto @@ -16,7 +16,7 @@ service AgentService { rpc VaultsGitInfoGet (polykey.v1.vaults.Vault) returns (stream polykey.v1.vaults.PackChunk); rpc VaultsGitPackGet(stream polykey.v1.vaults.PackChunk) returns (stream polykey.v1.vaults.PackChunk); rpc VaultsScan (polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.Vault); - rpc VaultsPermisssionsCheck (polykey.v1.vaults.NodePermission) returns (polykey.v1.vaults.NodePermissionAllowed); + rpc VaultsPermissionsCheck (polykey.v1.vaults.NodePermission) returns (polykey.v1.vaults.NodePermissionAllowed); // Nodes rpc NodesClosestLocalNodesGet (polykey.v1.nodes.Node) returns (polykey.v1.nodes.NodeTable); diff --git a/src/proto/schemas/polykey/v1/client_service.proto b/src/proto/schemas/polykey/v1/client_service.proto index 2f5d384fa..1edbd01d5 100644 --- a/src/proto/schemas/polykey/v1/client_service.proto +++ b/src/proto/schemas/polykey/v1/client_service.proto @@ -16,12 +16,10 @@ package polykey.v1; service ClientService { // Agent + rpc AgentLockAll (polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); rpc AgentStatus(polykey.v1.utils.EmptyMessage) returns (polykey.v1.agent.InfoMessage); rpc AgentStop(polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); - - // Session - rpc SessionsUnlock (polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); - rpc SessionsLockAll (polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); + rpc AgentUnlock (polykey.v1.utils.EmptyMessage) returns (polykey.v1.utils.EmptyMessage); // Nodes rpc NodesAdd(polykey.v1.nodes.NodeAddress) returns (polykey.v1.utils.EmptyMessage); diff --git a/src/schema/Schema.ts b/src/schema/Schema.ts index 
82b8da2c5..467139fa3 100644 --- a/src/schema/Schema.ts +++ b/src/schema/Schema.ts @@ -51,20 +51,23 @@ class Schema { public constructor({ statePath, - stateVersion, - lock, - fs, + stateVersion = config.stateVersion as StateVersion, + lock = new utils.RWLock(), + fs = require('fs'), logger, }: { statePath: string; - stateVersion: StateVersion; - lock: utils.RWLock; - fs: FileSystem; - logger: Logger; + stateVersion?: StateVersion; + lock?: utils.RWLock; + fs?: FileSystem; + logger?: Logger; }) { - this.logger = logger; + this.logger = logger ?? new Logger(this.constructor.name); this.statePath = statePath; - this.stateVersionPath = path.join(statePath, 'version'); + this.stateVersionPath = path.join( + statePath, + config.defaults.stateVersionBase, + ); this.stateVersion = stateVersion; this.lock = lock; this.fs = fs; diff --git a/src/sessions/Session.ts b/src/sessions/Session.ts index c4f53091a..47b5f8dbf 100644 --- a/src/sessions/Session.ts +++ b/src/sessions/Session.ts @@ -2,10 +2,7 @@ import type { SessionToken } from './types'; import type { FileSystem } from '../types'; import Logger from '@matrixai/logger'; -import { - CreateDestroyStartStop, - ready, -} from '@matrixai/async-init/dist/CreateDestroyStartStop'; +import { CreateDestroyStartStop } from '@matrixai/async-init/dist/CreateDestroyStartStop'; import lock from 'fd-lock'; import * as sessionErrors from './errors'; import * as utils from '../utils'; @@ -94,7 +91,6 @@ class Session { this.logger.info(`Destroyed ${this.constructor.name}`); } - @ready(new sessionErrors.ErrorSessionNotRunning()) public async readToken(): Promise { let sessionTokenFile; try { @@ -107,7 +103,7 @@ class Session { } const sessionTokenData = await sessionTokenFile.readFile('utf-8'); const sessionToken = sessionTokenData.trim(); - // WriteToken may create an empty session token file before it completes + // `writeToken` may create an empty session token file before it completes if (sessionToken === '') { return; } @@ -126,7 
+122,6 @@ class Session { } } - @ready(new sessionErrors.ErrorSessionNotRunning()) public async writeToken(sessionToken: SessionToken): Promise { let sessionTokenFile; try { diff --git a/src/sigchain/Sigchain.ts b/src/sigchain/Sigchain.ts index 0d179082e..220c8932c 100644 --- a/src/sigchain/Sigchain.ts +++ b/src/sigchain/Sigchain.ts @@ -220,7 +220,9 @@ class Sigchain { * Appends a claim (of any type) to the sigchain. */ @ready(new sigchainErrors.ErrorSigchainNotRunning()) - public async addClaim(claimData: ClaimData): Promise { + public async addClaim( + claimData: ClaimData, + ): Promise<[ClaimId, ClaimEncoded]> { return await this._transaction(async () => { const prevSequenceNumber = await this.getSequenceNumber(); const newSequenceNumber = prevSequenceNumber + 1; @@ -232,11 +234,12 @@ class Sigchain { }); // Add the claim to the sigchain database, and update the sequence number + const claimId = this.generateClaimId(); const ops: Array = [ { type: 'put', domain: this.sigchainClaimsDbDomain, - key: idUtils.toBuffer(this.generateClaimId()), + key: idUtils.toBuffer(claimId), value: claim, }, { @@ -247,7 +250,7 @@ class Sigchain { }, ]; await this.db.batch(ops); - return claim; + return [claimId, claim]; }); } @@ -423,21 +426,6 @@ class Sigchain { }); } - @ready(new sigchainErrors.ErrorSigchainNotRunning()) - public async getLatestClaimId(): Promise { - return await this._transaction(async () => { - let latestId: ClaimId | undefined; - const keyStream = this.sigchainClaimsDb.createKeyStream({ - limit: 1, - reverse: true, - }); - for await (const o of keyStream) { - latestId = o as any as ClaimId; - } - return latestId; - }); - } - @ready(new sigchainErrors.ErrorSigchainNotRunning()) public async getSeqMap(): Promise> { const map: Record = {}; @@ -462,6 +450,20 @@ class Sigchain { ); }); } + + protected async getLatestClaimId(): Promise { + return await this._transaction(async () => { + let latestId: ClaimId | undefined; + const keyStream = 
this.sigchainClaimsDb.createKeyStream({ + limit: 1, + reverse: true, + }); + for await (const o of keyStream) { + latestId = o as any as ClaimId; + } + return latestId; + }); + } } export default Sigchain; diff --git a/src/status/Status.ts b/src/status/Status.ts index 96d7aa1a9..2b1dda4e4 100644 --- a/src/status/Status.ts +++ b/src/status/Status.ts @@ -11,56 +11,60 @@ import lock from 'fd-lock'; import { StartStop, ready } from '@matrixai/async-init/dist/StartStop'; import * as statusErrors from './errors'; import * as statusUtils from './utils'; -import { poll } from '../utils'; +import { sleep, poll } from '../utils'; interface Status extends StartStop {} @StartStop() class Status { public readonly statusPath: string; + public readonly statusLockPath: string; protected logger: Logger; protected fs: FileSystem; - protected statusFile: FileHandle; + protected statusLockFile: FileHandle; public constructor({ statusPath, + statusLockPath, fs = require('fs'), logger, }: { statusPath: string; + statusLockPath: string; fs?: FileSystem; logger?: Logger; }) { this.logger = logger ?? 
new Logger(this.constructor.name); this.statusPath = statusPath; + this.statusLockPath = statusLockPath; this.fs = fs; } public async start(data: StatusStarting['data']): Promise { this.logger.info(`Starting ${this.constructor.name}`); - const statusFile = await this.fs.promises.open( - this.statusPath, + const statusLockFile = await this.fs.promises.open( + this.statusLockPath, this.fs.constants.O_WRONLY | this.fs.constants.O_CREAT, ); - if (!lock(statusFile.fd)) { - await statusFile.close(); + if (!lock(statusLockFile.fd)) { + await statusLockFile.close(); throw new statusErrors.ErrorStatusLocked(); } - this.statusFile = statusFile; + this.statusLockFile = statusLockFile; try { await this.writeStatus({ status: 'STARTING', data, }); } catch (e) { - lock.unlock(this.statusFile.fd); - await this.statusFile.close(); + lock.unlock(this.statusLockFile.fd); + await this.statusLockFile.close(); throw e; } this.logger.info(`${this.constructor.name} is STARTING`); } - @ready(new statusErrors.ErrorStatusNotRunning()) + @ready(new statusErrors.ErrorStatusNotRunning(), true) public async finishStart(data: StatusLive['data']): Promise { this.logger.info(`Finish ${this.constructor.name} STARTING`); await this.writeStatus({ @@ -70,7 +74,7 @@ class Status { this.logger.info(`${this.constructor.name} is LIVE`); } - @ready(new statusErrors.ErrorStatusNotRunning()) + @ready(new statusErrors.ErrorStatusNotRunning(), true) public async beginStop(data: StatusStopping['data']): Promise { this.logger.info(`Begin ${this.constructor.name} STOPPING`); await this.writeStatus({ @@ -86,8 +90,9 @@ class Status { status: 'DEAD', data, }); - lock.unlock(this.statusFile.fd); - await this.statusFile.close(); + lock.unlock(this.statusLockFile.fd); + await this.statusLockFile.close(); + await this.fs.promises.rm(this.statusLockPath, { force: true }); this.logger.info(`${this.constructor.name} is DEAD`); } @@ -96,50 +101,97 @@ class Status { * This can be used without running Status */ public async 
readStatus(): Promise { - let statusData: string; + let statusFile; try { - statusData = await this.fs.promises.readFile(this.statusPath, 'utf-8'); - } catch (e) { - if (e.code === 'ENOENT') { + try { + statusFile = await this.fs.promises.open(this.statusPath, 'r'); + } catch (e) { + if (e.code === 'ENOENT') { + return; + } + throw new statusErrors.ErrorStatusRead(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); + } + while (!lock(statusFile.fd)) { + await sleep(2); + } + let statusData; + try { + statusData = (await statusFile.readFile('utf-8')).trim(); + } catch (e) { + throw new statusErrors.ErrorStatusRead(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); + } + // `writeStatus` may create an empty status file before it completes + if (statusData === '') { return; } - throw new statusErrors.ErrorStatusRead(e.message, { - errno: e.errno, - syscall: e.syscall, - code: e.code, - path: e.path, - }); - } - let statusInfo; - try { - statusInfo = JSON.parse(statusData); - } catch (e) { - throw new statusErrors.ErrorStatusParse('JSON parsing failed'); - } - if (!statusUtils.statusValidate(statusInfo)) { - throw new statusErrors.ErrorStatusParse('StatusInfo validation failed', { - errors: statusUtils.statusValidate.errors, - }); + let statusInfo; + try { + statusInfo = JSON.parse(statusData); + } catch (e) { + throw new statusErrors.ErrorStatusParse('JSON parsing failed'); + } + if (!statusUtils.statusValidate(statusInfo)) { + throw new statusErrors.ErrorStatusParse( + 'StatusInfo validation failed', + { + errors: statusUtils.statusValidate.errors, + }, + ); + } + return statusInfo as StatusInfo; + } finally { + if (statusFile != null) { + lock.unlock(statusFile.fd); + await statusFile.close(); + } } - return statusInfo as StatusInfo; } protected async writeStatus(statusInfo: StatusInfo): Promise { this.logger.info(`Writing Status file to ${this.statusPath}`); + let statusFile; try { - 
await this.statusFile.truncate(); - await this.statusFile.write( - JSON.stringify(statusInfo, undefined, 2) + '\n', - 0, - 'utf-8', + // Cannot use 'w', it truncates immediately + // should truncate only while holding the lock + statusFile = await this.fs.promises.open( + this.statusPath, + this.fs.constants.O_WRONLY | this.fs.constants.O_CREAT, ); - } catch (e) { - throw new statusErrors.ErrorStatusWrite(e.message, { - errno: e.errno, - syscall: e.syscall, - code: e.code, - path: e.path, - }); + while (!lock(statusFile.fd)) { + // Write sleep should be half of read sleep + // this ensures write-preferring locking + await sleep(1); + } + try { + await statusFile.truncate(); + await statusFile.write( + JSON.stringify(statusInfo, undefined, 2) + '\n', + 0, + 'utf-8', + ); + } catch (e) { + throw new statusErrors.ErrorStatusWrite(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); + } + } finally { + if (statusFile != null) { + lock.unlock(statusFile.fd); + await statusFile.close(); + } } } @@ -164,7 +216,7 @@ class Status { if (statusInfo?.status === status) return true; return false; }, - 250, + 50, timeout, ); if (statusInfo == null) { diff --git a/src/status/errors.ts b/src/status/errors.ts index 216958b42..7de8a51d3 100644 --- a/src/status/errors.ts +++ b/src/status/errors.ts @@ -24,6 +24,11 @@ class ErrorStatusParse extends ErrorStatus { exitCode = sysexits.CONFIG; } +class ErrorStatusTimeout extends ErrorStatus { + description = 'Poll timed out'; + exitCode = sysexits.TEMPFAIL; +} + export { ErrorStatus, ErrorStatusNotRunning, @@ -31,4 +36,5 @@ export { ErrorStatusRead, ErrorStatusWrite, ErrorStatusParse, + ErrorStatusTimeout, }; diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 0c86446ec..dcd979393 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -140,30 +140,36 @@ class VaultManager { public async start({ fresh = false, }: { fresh?: boolean } = {}): Promise { - 
this.logger.info(`Starting ${this.constructor.name}`); - this.vaultsDbDomain = 'VaultManager'; - this.vaultsDb = await this.db.level(this.vaultsDbDomain); - this.vaultsNamesDbDomain = [this.vaultsDbDomain, 'names']; - this.vaultsNamesDb = await this.db.level( - this.vaultsNamesDbDomain[1], - this.vaultsDb, - ); - if (fresh) { - await this.vaultsDb.clear(); - await this.fs.promises.rm(this.vaultsPath, { - force: true, - recursive: true, + try { + this.logger.info(`Starting ${this.constructor.name}`); + this.vaultsDbDomain = 'VaultManager'; + this.vaultsDb = await this.db.level(this.vaultsDbDomain); + this.vaultsNamesDbDomain = [this.vaultsDbDomain, 'names']; + this.vaultsNamesDb = await this.db.level( + this.vaultsNamesDbDomain[1], + this.vaultsDb, + ); + if (fresh) { + await this.vaultsDb.clear(); + await this.fs.promises.rm(this.vaultsPath, { + force: true, + recursive: true, + }); + this.logger.info(`Removing vaults directory at '${this.vaultsPath}'`); + } + await utils.mkdirExists(this.fs, this.vaultsPath); + this.efs = await EncryptedFS.createEncryptedFS({ + dbPath: this.vaultsPath, + dbKey: this.vaultsKey, + logger: this.logger, }); - this.logger.info(`Removing vaults directory at '${this.vaultsPath}'`); + await this.efs.start(); + this.logger.info(`Started ${this.constructor.name}`); + } catch (e) { + this.logger.warn(`Failed Starting ${this.constructor.name}`); + await this.efs.stop(); + throw e; } - await utils.mkdirExists(this.fs, this.vaultsPath); - this.efs = await EncryptedFS.createEncryptedFS({ - dbPath: this.vaultsPath, - dbKey: this.vaultsKey, - logger: this.logger, - }); - await this.efs.start(); - this.logger.info(`Started ${this.constructor.name}`); } public async stop(): Promise { diff --git a/src/workers/index.ts b/src/workers/index.ts index a6cf25ab7..7ff9ca454 100644 --- a/src/workers/index.ts +++ b/src/workers/index.ts @@ -1,5 +1,5 @@ +export { WorkerManager } from '@matrixai/workers'; export { default as polykeyWorker } from 
'./polykeyWorkerModule'; export * as utils from './utils'; - export type { PolykeyWorkerModule } from './polykeyWorkerModule'; export type { PolykeyWorkerManagerInterface } from './types'; diff --git a/tests/PolykeyAgent.test.ts b/tests/PolykeyAgent.test.ts index 5fd643a08..9423050ab 100644 --- a/tests/PolykeyAgent.test.ts +++ b/tests/PolykeyAgent.test.ts @@ -1,190 +1,178 @@ +import type { StateVersion } from '@/schema/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; -import config from '@/config'; +import { utils as keysUtils } from '@/keys'; import { Status } from '@/status'; -import * as schemaErrors from '@/schema/errors'; - -// Mocks. -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import { Schema } from '@/schema'; +import * as errors from '@/errors'; +import config from '@/config'; +import * as testUtils from './utils'; -describe('Polykey', () => { +describe('PolykeyAgent', () => { const password = 'password'; const logger = new Logger('PolykeyAgent Test', LogLevel.WARN, [ new StreamHandler(), ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + }); let dataDir: string; - let pk: PolykeyAgent; beforeEach(async () => { 
dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); }); afterEach(async () => { - if (pk != null) { - await pk.stop(); - await pk.destroy(); - } await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - test( - 'Able to construct', - async () => { - const nodePath = path.join(dataDir, 'polykey'); - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - expect(pk).toBeInstanceOf(PolykeyAgent); - }, - global.polykeyStartupTimeout, - ); - test( - 'async start constructs node path', - async () => { - const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - const nodePathContents = await fs.promises.readdir( - path.join(nodePath, 'state'), - ); - expect(nodePathContents).toContain('keys'); - expect(nodePathContents).toContain('vaults'); - expect(nodePathContents).toContain('db'); - await pk.stop(); - }, - global.polykeyStartupTimeout, - ); - test( - 'async stop leaves the node path', - async () => { - const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - await pk.stop(); - const nodePathContents = await fs.promises.readdir( - path.join(nodePath, 'state'), - ); - expect(nodePathContents).toContain('keys'); - expect(nodePathContents).toContain('db'); - expect(nodePathContents).toContain('vaults'); - }, - global.polykeyStartupTimeout, - ); - test( - 'able to async start after async stop', - async () => { - const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - await pk.stop(); - await expect(pk.start({ password })).resolves.not.toThrowError(); - }, - global.polykeyStartupTimeout * 2, - ); - test('GithubProvider is registered', async () => { - const providerId = 'github.com'; + test('PolykeyAgent readiness', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const pkAgent 
= await PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }); + await expect(pkAgent.destroy()).rejects.toThrow( + errors.ErrorPolykeyAgentRunning, + ); + // Should be a noop + await pkAgent.start({ password }); + await pkAgent.stop(); + await pkAgent.destroy(); + await expect(pkAgent.start({ password })).rejects.toThrow( + errors.ErrorPolykeyAgentDestroyed, + ); + }); + test('start creates, stop leaves, and destroy destroys the node path', async () => { const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ + const pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath, logger, }); - const providers = pk.identitiesManager.getProviders(); - // Exists - expect(providers[providerId]).toBeTruthy(); - // Matches clientID in config. - expect(providers[providerId].clientId).toEqual( - config.providers[providerId].clientId, + let nodePathContents = await fs.promises.readdir(nodePath); + expect(nodePathContents).toContain(config.defaults.statusBase); + expect(nodePathContents).toContain(config.defaults.stateBase); + let stateContents = await fs.promises.readdir( + path.join(nodePath, config.defaults.stateBase), + ); + expect(stateContents).toContain(config.defaults.keysBase); + expect(stateContents).toContain(config.defaults.dbBase); + expect(stateContents).toContain(config.defaults.vaultsBase); + await pkAgent.stop(); + nodePathContents = await fs.promises.readdir(nodePath); + expect(nodePathContents).toContain(config.defaults.statusBase); + expect(nodePathContents).toContain(config.defaults.stateBase); + stateContents = await fs.promises.readdir( + path.join(nodePath, config.defaults.stateBase), ); + expect(stateContents).toContain(config.defaults.keysBase); + expect(stateContents).toContain(config.defaults.dbBase); + expect(stateContents).toContain(config.defaults.vaultsBase); + await pkAgent.destroy(); + nodePathContents = await fs.promises.readdir(nodePath); + // The status will be the only file left 
over + expect(nodePathContents).toHaveLength(1); + expect(nodePathContents).toContain(config.defaults.statusBase); }); - test( - 'throw error if state version does not match config', - async () => { - // Creating an old version file. - const nodePath = path.join(dataDir, 'polykey'); - const versionFilePath = path.join(nodePath, 'state', 'version'); - const versionInfo = { ...config }; // Cheeky clone - versionInfo.stateVersion = config.stateVersion + 1; - const versionInfoString = JSON.stringify(versionInfo); - await fs.promises.mkdir(path.join(nodePath, 'state'), { - recursive: true, - }); - await fs.promises.writeFile(versionFilePath, versionInfoString); - - // Attempt to start a polykeyAgent. - await expect(async () => { - pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - }).rejects.toThrow(schemaErrors.ErrorSchemaVersionParse); - }, - global.polykeyStartupTimeout, - ); - test( - 'Creates the version file when starting Polykey', - async () => { - // Creating an old version file. - const nodePath = path.join(dataDir, 'polykey'); - const versionFilePath = path.join(nodePath, 'state', 'version'); - - // Attempt to start a polykeyAgent. 
- pk = await PolykeyAgent.createPolykeyAgent({ + test('start after stop', async () => { + const nodePath = `${dataDir}/polykey`; + const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); + const pkAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }); + const status = new Status({ + statusPath, + statusLockPath, + fs, + logger, + }); + await pkAgent.stop(); + expect(await status.readStatus()).toMatchObject({ status: 'DEAD' }); + await expect(pkAgent.start({ password })).resolves.not.toThrowError(); + expect(await status.readStatus()).toMatchObject({ status: 'LIVE' }); + await pkAgent.stop(); + expect(await status.readStatus()).toMatchObject({ status: 'DEAD' }); + await expect( + pkAgent.start({ password: 'wrong password' }), + ).rejects.toThrowError(errors.ErrorRootKeysParse); + expect(await status.readStatus()).toMatchObject({ status: 'DEAD' }); + await pkAgent.destroy(); + expect(await status.readStatus()).toMatchObject({ status: 'DEAD' }); + }); + test('schema state version is maintained after start and stop', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const statePath = path.join(nodePath, config.defaults.stateBase); + const schema = new Schema({ + statePath, + }); + const pkAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }); + expect(await schema.readVersion()).toBe(config.stateVersion); + await pkAgent.stop(); + // Still exists after being stopped + expect(await schema.readVersion()).toBe(config.stateVersion); + }); + test('cannot start during state version mismatch', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const statePath = path.join(nodePath, config.defaults.stateBase); + await fs.promises.mkdir(nodePath); + let schema = await Schema.createSchema({ + statePath, + stateVersion: (config.stateVersion + 1) as StateVersion, + logger, + fresh: true, + }); + 
await schema.stop(); + await expect( + PolykeyAgent.createPolykeyAgent({ password, nodePath, logger, - }); - await pk.stop(); - - const versionFileContents = await fs.promises.readFile(versionFilePath); - const versionInfo = JSON.parse(versionFileContents.toString()); - expect(versionInfo).toStrictEqual(config.stateVersion); - }, - global.polykeyStartupTimeout, - ); - test( - 'Stopping and destroying properly stops Polykey', - async () => { - // Starting. - const nodePath = `${dataDir}/polykey`; - pk = await PolykeyAgent.createPolykeyAgent({ + }), + ).rejects.toThrow(errors.ErrorSchemaVersionTooNew); + // The 0 version will always be too old + // Because we started our PK's state version as 1 + schema = await Schema.createSchema({ + statePath, + stateVersion: 0 as StateVersion, + logger, + fresh: true, + }); + await schema.stop(); + await expect( + PolykeyAgent.createPolykeyAgent({ password, nodePath, logger, - }); - const statusPath = path.join(nodePath, 'status.json'); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('LIVE', 2000); - await pk.stop(); - await status.waitFor('DEAD', 2000); - await pk.destroy(); - await status.waitFor('DEAD', 2000); - }, - global.polykeyStartupTimeout * 2, - ); + }), + ).rejects.toThrow(errors.ErrorSchemaVersionTooOld); + }); }); diff --git a/tests/PolykeyClient.test.ts b/tests/PolykeyClient.test.ts index 8f97b80aa..c4c3a8ad3 100644 --- a/tests/PolykeyClient.test.ts +++ b/tests/PolykeyClient.test.ts @@ -5,26 +5,28 @@ import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { PolykeyClient, PolykeyAgent } from '@'; import { Session } from '@/sessions'; -import * as keysUtils from '@/keys/utils'; +import { utils as keysUtils } from '@/keys'; import config from '@/config'; import * as testUtils from './utils'; -jest - .spyOn(keysUtils, 'generateKeyPair') - .mockImplementation(testUtils.getGlobalKeyPair); -jest - .spyOn(keysUtils, 
'generateDeterministicKeyPair') - .mockImplementation(testUtils.getGlobalKeyPair); - describe('PolykeyClient', () => { const password = 'password'; const logger = new Logger('PolykeyClient Test', LogLevel.WARN, [ new StreamHandler(), ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; let dataDir: string; let nodePath: string; let pkAgent: PolykeyAgent; beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -41,6 +43,8 @@ describe('PolykeyClient', () => { force: true, recursive: true, }); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); }); test('create PolykeyClient and connect to PolykeyAgent', async () => { const pkClient = await PolykeyClient.createPolykeyClient({ diff --git a/tests/acl/ACL.test.ts b/tests/acl/ACL.test.ts index 117babd30..c40e11e53 100644 --- a/tests/acl/ACL.test.ts +++ b/tests/acl/ACL.test.ts @@ -9,12 +9,11 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; import { utils as idUtils } from '@matrixai/id'; import { ACL, errors as aclErrors } from '@/acl'; -import { makeVaultId } from '@/vaults/utils'; -import * as keysUtils from '@/keys/utils'; -import { makeCrypto } from '../utils'; +import { utils as keysUtils } from '@/keys'; +import { utils as vaultsUtils } from '@/vaults'; -describe('ACL', () => { - const logger = new Logger(`${ACL.name} Test`, LogLevel.WARN, [ +describe(ACL.name, () => { + const logger = new Logger(`${ACL.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); let dataDir: string; @@ -23,7 +22,6 @@ describe('ACL', () => { 
let vaultId2: VaultId; let vaultId3: VaultId; let vaultId4: VaultId; - beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), @@ -33,12 +31,18 @@ describe('ACL', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(dbKey), + crypto: { + key: dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); - vaultId1 = makeVaultId(idUtils.fromString('vault1xxxxxxxxxx')); - vaultId2 = makeVaultId(idUtils.fromString('vault2xxxxxxxxxx')); - vaultId3 = makeVaultId(idUtils.fromString('vault3xxxxxxxxxx')); - vaultId4 = makeVaultId(idUtils.fromString('vault4xxxxxxxxxx')); + vaultId1 = vaultsUtils.makeVaultId(idUtils.fromString('vault1xxxxxxxxxx')); + vaultId2 = vaultsUtils.makeVaultId(idUtils.fromString('vault2xxxxxxxxxx')); + vaultId3 = vaultsUtils.makeVaultId(idUtils.fromString('vault3xxxxxxxxxx')); + vaultId4 = vaultsUtils.makeVaultId(idUtils.fromString('vault4xxxxxxxxxx')); }); afterEach(async () => { await db.stop(); diff --git a/tests/acl/utils.test.ts b/tests/acl/utils.test.ts index 928ab2a72..c001ac695 100644 --- a/tests/acl/utils.test.ts +++ b/tests/acl/utils.test.ts @@ -1,6 +1,6 @@ import * as aclUtils from '@/acl/utils'; -describe('utils', () => { +describe('acl/utils', () => { test('merging permissions', async () => { const perm1 = { gestalt: { diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index 22295ec4d..a2a68c27c 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -2,14 +2,13 @@ import type * as grpc from '@grpc/grpc-js'; import type { NodeAddress, NodeInfo } from '@/nodes/types'; import type { ClaimIdString, ClaimIntermediary } from '@/claims/types'; import type { Host, Port, TLSConfig } from '@/network/types'; -import type { GRPCClientAgent } from '@/agent'; import fs from 'fs'; import os from 'os'; import path from 'path'; import Logger, { LogLevel, StreamHandler } from 
'@matrixai/logger'; import { Mutex } from 'async-mutex'; import { DB } from '@matrixai/db'; - +import { GRPCClientAgent } from '@/agent'; import { KeyManager } from '@/keys'; import { NodeManager } from '@/nodes'; import { VaultManager } from '@/vaults'; @@ -17,43 +16,49 @@ import { Sigchain } from '@/sigchain'; import { ACL } from '@/acl'; import { GestaltGraph } from '@/gestalts'; import { errors as agentErrors } from '@/agent'; -import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '@/proto/js/polykey/v1/nodes/nodes_pb'; import { ForwardProxy, ReverseProxy } from '@/network'; import { NotificationsManager } from '@/notifications'; import { utils as claimsUtils, errors as claimsErrors } from '@/claims'; import { makeNodeId } from '@/nodes/utils'; -import * as testUtils from './utils'; +import * as keysUtils from '@/keys/utils'; +import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; +import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; +import * as nodesPB from '@/proto/js/polykey/v1/nodes/nodes_pb'; +import * as testAgentUtils from './utils'; +import * as testUtils from '../utils'; import TestNodeConnection from '../nodes/TestNodeConnection'; -import { makeCrypto } from '../utils'; - -// Mocks. 
-jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); -describe('GRPC agent', () => { +describe(GRPCClientAgent.name, () => { const password = 'password'; - const logger = new Logger('AgentServerTest', LogLevel.WARN, [ + const logger = new Logger(`${GRPCClientAgent.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); const node1: NodeInfo = { id: makeNodeId('v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug'), chain: {}, }; - + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + }); let client: GRPCClientAgent; let server: grpc.Server; let port: number; - let dataDir: string; let keysPath: string; let vaultsPath: string; let dbPath: string; - let keyManager: KeyManager; let vaultManager: VaultManager; let nodeManager: NodeManager; @@ -62,10 +67,8 @@ describe('GRPC agent', () => { let gestaltGraph: GestaltGraph; let db: DB; let notificationsManager: NotificationsManager; - let fwdProxy: ForwardProxy; let revProxy: ReverseProxy; - beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), @@ -73,14 +76,12 @@ describe('GRPC agent', () => { keysPath = path.join(dataDir, 'keys'); vaultsPath = path.join(dataDir, 'vaults'); dbPath = path.join(dataDir, 'db'); - keyManager = await KeyManager.createKeyManager({ password, keysPath, fs: fs, logger: logger, }); - const tlsConfig: TLSConfig = { keyPrivatePem: 
keyManager.getRootKeyPairPem().privateKey, certChainPem: await keyManager.getRootCertChainPem(), @@ -92,35 +93,35 @@ describe('GRPC agent', () => { await fwdProxy.start({ tlsConfig, }); - revProxy = new ReverseProxy({ logger: logger, }); - db = await DB.createDB({ dbPath: dbPath, fs: fs, logger: logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); - acl = await ACL.createACL({ db: db, logger: logger, }); - gestaltGraph = await GestaltGraph.createGestaltGraph({ db: db, acl: acl, logger: logger, }); - sigchain = await Sigchain.createSigchain({ keyManager: keyManager, db: db, logger: logger, }); - nodeManager = await NodeManager.createNodeManager({ db: db, sigchain: sigchain, @@ -129,7 +130,6 @@ describe('GRPC agent', () => { revProxy: revProxy, logger: logger, }); - notificationsManager = await NotificationsManager.createNotificationsManager({ acl: acl, @@ -139,7 +139,6 @@ describe('GRPC agent', () => { messageCap: 5, logger: logger, }); - vaultManager = await VaultManager.createVaultManager({ keyManager: keyManager, vaultsPath: vaultsPath, @@ -151,21 +150,19 @@ describe('GRPC agent', () => { fs: fs, logger: logger, }); - await nodeManager.start(); - [server, port] = await testUtils.openTestAgentServer({ + [server, port] = await testAgentUtils.openTestAgentServer({ keyManager, vaultManager, nodeManager, sigchain, notificationsManager, }); - client = await testUtils.openTestAgentClient(port); + client = await testAgentUtils.openTestAgentClient(port); }, global.polykeyStartupTimeout); afterEach(async () => { - await testUtils.closeTestAgentClient(client); - await testUtils.closeTestAgentServer(server); - + await testAgentUtils.closeTestAgentClient(client); + await testAgentUtils.closeTestAgentServer(server); await vaultManager.stop(); await notificationsManager.stop(); await sigchain.stop(); @@ -175,13 +172,11 @@ describe('GRPC agent', () => 
{ await fwdProxy.stop(); await db.stop(); await keyManager.stop(); - await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - test('GRPCClientAgent readiness', async () => { await client.destroy(); await expect(async () => { @@ -204,10 +199,10 @@ describe('GRPC agent', () => { const vaultPermMessage = new vaultsPB.NodePermission(); vaultPermMessage.setNodeId(node1.id); // VaultPermMessage.setVaultId(vault.vaultId); - const response = await client.vaultsPermisssionsCheck(vaultPermMessage); + const response = await client.vaultsPermissionsCheck(vaultPermMessage); expect(response.getPermission()).toBeFalsy(); // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - const response2 = await client.vaultsPermisssionsCheck(vaultPermMessage); + const response2 = await client.vaultsPermissionsCheck(vaultPermMessage); expect(response2.getPermission()).toBeTruthy(); // Await vaultManager.deleteVault(vault.vaultId); }); diff --git a/tests/bin/agent/lock.test.ts b/tests/bin/agent/lock.test.ts index 74475b138..eb43b122b 100644 --- a/tests/bin/agent/lock.test.ts +++ b/tests/bin/agent/lock.test.ts @@ -7,21 +7,22 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { Session } from '@/sessions'; import config from '@/config'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; -/** - * Mock prompts module which is used prompt for password - */ jest.mock('prompts'); const mockedPrompts = mocked(prompts); describe('lock', () => { const logger = new Logger('lock test', LogLevel.WARN, [new StreamHandler()]); - let pkAgentClose; + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await 
globalAgentClose(); }); let dataDir: string; beforeEach(async () => { @@ -39,24 +40,21 @@ describe('lock', () => { await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); const { exitCode } = await testBinUtils.pkStdio( ['agent', 'lock'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); const session = await Session.createSession({ - sessionTokenPath: path.join( - global.binAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, }); @@ -64,7 +62,7 @@ describe('lock', () => { await session.stop(); }); test('lock ensures reauthentication is required', async () => { - const password = global.binAgentPassword; + const password = globalAgentPassword; mockedPrompts.mockClear(); mockedPrompts.mockImplementation(async (_opts: any) => { return { password }; @@ -72,26 +70,26 @@ describe('lock', () => { await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); // Session token is deleted await testBinUtils.pkStdio( ['agent', 'lock'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); // Will prompt to reauthenticate await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); // Prompted for password 1 time expect(mockedPrompts.mock.calls.length).toBe(1); diff --git a/tests/bin/agent/lockall.test.ts b/tests/bin/agent/lockall.test.ts index e6a5a3814..3c93cc1f8 100644 --- 
a/tests/bin/agent/lockall.test.ts +++ b/tests/bin/agent/lockall.test.ts @@ -8,6 +8,7 @@ import { Session } from '@/sessions'; import config from '@/config'; import * as clientErrors from '@/client/errors'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; /** * Mock prompts module which is used prompt for password @@ -19,12 +20,15 @@ describe('lockall', () => { const logger = new Logger('lockall test', LogLevel.WARN, [ new StreamHandler(), ]); - let pkAgentClose; + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); let dataDir: string; beforeEach(async () => { @@ -42,24 +46,21 @@ describe('lockall', () => { await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); const { exitCode } = await testBinUtils.pkStdio( ['agent', 'lockall'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); const session = await Session.createSession({ - sessionTokenPath: path.join( - global.binAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, }); @@ -67,21 +68,21 @@ describe('lockall', () => { await session.stop(); }); test('lockall ensures reauthentication is required', async () => { - const password = global.binAgentPassword; + const password = globalAgentPassword; await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: 
global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); await testBinUtils.pkStdio( ['agent', 'lockall'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); // Token is deleted, reauthentication is required mockedPrompts.mockClear(); @@ -91,9 +92,9 @@ describe('lockall', () => { await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); // Prompted for password 1 time expect(mockedPrompts.mock.calls.length).toBe(1); @@ -103,16 +104,13 @@ describe('lockall', () => { await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); const session = await Session.createSession({ - sessionTokenPath: path.join( - global.binAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, }); @@ -121,19 +119,19 @@ describe('lockall', () => { await testBinUtils.pkStdio( ['agent', 'lockall'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); // Old token is invalid const { exitCode, stderr } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_TOKEN: token, }, - global.binAgentDir, + globalAgentDir, ); testBinUtils.expectProcessError( exitCode, diff --git a/tests/bin/agent/start.test.ts b/tests/bin/agent/start.test.ts index 9a4ffd272..e9870b204 100644 --- a/tests/bin/agent/start.test.ts +++ b/tests/bin/agent/start.test.ts @@ -3,11 +3,13 @@ import os from 'os'; import path from 
'path'; import fs from 'fs'; import readline from 'readline'; +import * as jestMockProps from 'jest-mock-props'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { PolykeyAgent } from '@'; import { Status, errors as statusErrors } from '@/status'; import config from '@/config'; -import * as nodesUtils from '@/nodes/utils'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; describe('start', () => { const logger = new Logger('start test', LogLevel.WARN, [new StreamHandler()]); @@ -35,6 +37,8 @@ describe('start', () => { path.join(dataDir, 'polykey'), '--root-key-pair-bits', '1024', + '--workers', + '0', '--verbose', ], { @@ -62,6 +66,11 @@ describe('start', () => { // Check for graceful exit const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -89,6 +98,8 @@ describe('start', () => { path.join(dataDir, 'out.log'), '--background-err-file', path.join(dataDir, 'err.log'), + '--workers', + '0', '--verbose', ], { @@ -132,6 +143,11 @@ describe('start', () => { expect(polykeyAgentOut).toHaveLength(0); const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -146,13 +162,21 @@ describe('start', () => { global.defaultTimeout * 2, ); test( - 'concurrent starts are coalesced', + 'concurrent starts results in 1 success', async () => { const password = 'abc123'; // One of these processes is blocked const [agentProcess1, agentProcess2] = await Promise.all([ testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, 
@@ -161,7 +185,15 @@ describe('start', () => { logger.getChild('agentProcess1'), ), testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -220,13 +252,21 @@ describe('start', () => { global.defaultTimeout * 2, ); test( - 'concurrent bootstrap is coalesced', + 'concurrent with bootstrap results in 1 success', async () => { const password = 'abc123'; // One of these processes is blocked const [agentProcess, bootstrapProcess] = await Promise.all([ testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -235,7 +275,7 @@ describe('start', () => { logger.getChild('agentProcess'), ), testBinUtils.pkSpawn( - ['bootstrap', '--root-key-pair-bits', '1024', '--verbose'], + ['bootstrap', '--fresh', '--root-key-pair-bits', '1024', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -298,7 +338,15 @@ describe('start', () => { async () => { const password = 'abc123'; const agentProcess1 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -318,7 +366,15 @@ describe('start', () => { expect(exitCode1).toBe(null); expect(signal1).toBe('SIGHUP'); const agentProcess2 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), 
PK_PASSWORD: password, @@ -328,6 +384,11 @@ describe('start', () => { ); const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -349,7 +410,15 @@ describe('start', () => { async () => { const password = 'password'; const agentProcess1 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -383,6 +452,8 @@ describe('start', () => { 'start', '--root-key-pair-bits', '1024', + '--workers', + '0', '--fresh', '--verbose', ], @@ -410,6 +481,11 @@ describe('start', () => { // Check for graceful exit const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -425,6 +501,11 @@ describe('start', () => { const password2 = 'new password'; const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -436,6 +517,8 @@ describe('start', () => { path.join(dataDir, 'polykey'), '--root-key-pair-bits', '1024', + '--workers', + '0', '--verbose', ], { @@ -465,6 +548,8 @@ describe('start', () => { recoveryCodePath, '--root-key-pair-bits', '2048', + '--workers', + '0', '--verbose', ], { @@ -482,7 +567,7 @@ describe('start', () => { await testBinUtils.processExit(agentProcess2); // Check that the password has changed const agentProcess3 = await testBinUtils.pkSpawn( - ['agent', 'start', '--verbose'], + ['agent', 'start', '--workers', '0', '--verbose'], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password2, @@ 
-503,7 +588,15 @@ describe('start', () => { recursive: true, }); const agentProcess4 = await testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password2, @@ -526,6 +619,11 @@ describe('start', () => { async () => { const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -541,6 +639,8 @@ describe('start', () => { 'start', '--root-key-pair-bits', '1024', + '--workers', + '0', '--client-host', clientHost, '--client-port', @@ -570,245 +670,162 @@ describe('start', () => { }, global.defaultTimeout * 2, ); - describe('seed nodes', () => { - let seedNodeClose; - const connTimeoutTime = 500; - let seedNodeId; - const seedNodeHost = '127.0.0.1'; - let seedNodePort; - - const dummySeed1Id = nodesUtils.makeNodeId( - 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', - ); - const dummySeed1Host = '128.0.0.1'; - const dummySeed1Port = 1314; - const dummySeed2Id = nodesUtils.makeNodeId( - 'vrcacp9vsb4ht25hds6s4lpp2abfaso0mptcfnh499n35vfcn2gkg', - ); - const dummySeed2Host = '128.0.0.1'; - const dummySeed2Port = 1314; - + describe('start with global agent', () => { + let globalAgentStatus; + let globalAgentClose; + let agentDataDir; + let agent: PolykeyAgent; + let seedNodeId1; + let seedNodeHost1; + let seedNodePort1; + let seedNodeId2; + let seedNodeHost2; + let seedNodePort2; beforeAll(async () => { - seedNodeClose = await testBinUtils.pkAgent([ - '--connection-timeout', - connTimeoutTime.toString(), - '--ingress-host', - seedNodeHost, - ]); - const status = new Status({ - statusPath: path.join(global.binAgentDir, config.defaults.statusBase), - fs, + ({ globalAgentStatus, globalAgentClose } = + await 
testUtils.setupGlobalAgent(logger)); + // Additional seed node + agentDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + agent = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(agentDataDir, 'agent'), + keysConfig: { + rootKeyPairBits: 1024, + }, logger, }); - const statusInfo = await status.waitFor('LIVE', 5000); - // Get the dynamic seed node components - seedNodeId = statusInfo.data.nodeId; - seedNodePort = statusInfo.data.ingressPort; - }, global.maxTimeout); + seedNodeId1 = globalAgentStatus.data.nodeId; + seedNodeHost1 = globalAgentStatus.data.ingressHost; + seedNodePort1 = globalAgentStatus.data.ingressPort; + seedNodeId2 = agent.keyManager.getNodeId(); + seedNodeHost2 = agent.grpcServerAgent.host; + seedNodePort2 = agent.grpcServerAgent.port; + }, globalThis.maxTimeout); afterAll(async () => { - await seedNodeClose(); + await agent.stop(); + await globalAgentClose(); + await fs.promises.rm(agentDataDir, { + force: true, + recursive: true, + }); }); - test( - 'start with seed nodes as argument', + 'start with seed nodes option', async () => { const password = 'abc123'; - const passwordPath = path.join(dataDir, 'password'); - await fs.promises.writeFile(passwordPath, password); const nodePath = path.join(dataDir, 'polykey'); - + const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join( + nodePath, + config.defaults.statusLockBase, + ); + const status = new Status({ + statusPath, + statusLockPath, + fs, + logger, + }); + const mockedConfigDefaultsNetwork = jestMockProps + .spyOnProp(config.defaults, 'network') + .mockValue({ + mainnet: { + [seedNodeId2]: { + host: seedNodeHost2, + port: seedNodePort2, + }, + }, + testnet: {}, + }); await testBinUtils.pkStdio( [ 'agent', 'start', - '--node-path', - nodePath, - '--password-file', - passwordPath, '--root-key-pair-bits', '1024', + '--workers', + '0', '--seed-nodes', - 
`${seedNodeId}@${seedNodeHost}:${seedNodePort};${dummySeed1Id}@${dummySeed1Host}:${dummySeed1Port}`, - '--connection-timeout', - connTimeoutTime.toString(), + `${seedNodeId1}@${seedNodeHost1}:${seedNodePort1};`, + '--network', + 'mainnet', '--verbose', ], { - PK_SEED_NODES: `${dummySeed2Id}@${dummySeed2Host}:${dummySeed2Port}`, + PK_NODE_PATH: nodePath, + PK_PASSWORD: password, }, dataDir, ); - const statusPath = path.join(nodePath, 'status.json'); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('LIVE', 2000); - - // Check the seed nodes have been added to the node graph - const foundSeedNode = await testBinUtils.pkStdio([ - 'nodes', - 'find', - seedNodeId, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundSeedNode.exitCode).toBe(0); - expect(foundSeedNode.stdout).toContain( - `Found node at ${seedNodeHost}:${seedNodePort}`, - ); - const foundDummy1 = await testBinUtils.pkStdio([ - 'nodes', - 'find', - dummySeed1Id, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundDummy1.exitCode).toBe(0); - expect(foundDummy1.stdout).toContain( - `Found node at ${dummySeed1Host}:${dummySeed1Port}`, - ); - // Check the seed node in the environment variable was superseded by the - // ones provided as CLI arguments - const notFoundDummy2 = await testBinUtils.pkStdio([ - 'nodes', - 'find', - dummySeed2Id, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(notFoundDummy2.exitCode).toBe(1); - expect(notFoundDummy2.stdout).toContain( - `Failed to find node ${dummySeed2Id}`, - ); await testBinUtils.pkStdio( - [ - 'agent', - 'stop', - '--node-path', - nodePath, - '--password-file', - passwordPath, - ], - undefined, + ['agent', 'stop'], + { + PK_NODE_PATH: nodePath, + PK_PASSWORD: password, + }, dataDir, ); - await status.waitFor('DEAD', 5000); + mockedConfigDefaultsNetwork.mockRestore(); + await 
status.waitFor('DEAD'); }, global.defaultTimeout * 2, ); - test( - 'start with seed nodes from environment variable and config file', + 'start with seed nodes environment variable', async () => { const password = 'abc123'; - const passwordPath = path.join(dataDir, 'password'); - await fs.promises.writeFile(passwordPath, password); const nodePath = path.join(dataDir, 'polykey'); - + const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join( + nodePath, + config.defaults.statusLockBase, + ); + const status = new Status({ + statusPath, + statusLockPath, + fs, + logger, + }); + const mockedConfigDefaultsNetwork = jestMockProps + .spyOnProp(config.defaults, 'network') + .mockValue({ + mainnet: {}, + testnet: { + [seedNodeId2]: { + host: seedNodeHost2, + port: seedNodePort2, + }, + }, + }); await testBinUtils.pkStdio( [ 'agent', 'start', - '--node-path', - nodePath, - '--password-file', - passwordPath, '--root-key-pair-bits', '1024', - '--connection-timeout', - connTimeoutTime.toString(), + '--workers', + '0', '--verbose', ], { - PK_SEED_NODES: - `${seedNodeId}@${seedNodeHost}:${seedNodePort};` + - `${dummySeed1Id}@${dummySeed1Host}:${dummySeed1Port};` + - ``, + PK_NODE_PATH: nodePath, + PK_PASSWORD: password, + PK_SEED_NODES: `;${seedNodeId1}@${seedNodeHost1}:${seedNodePort1}`, + PK_NETWORK: 'testnet', }, dataDir, ); - const statusPath = path.join(nodePath, 'status.json'); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('LIVE', 2000); - - // Check the seed nodes have been added to the node graph - const foundSeedNode = await testBinUtils.pkStdio([ - 'nodes', - 'find', - seedNodeId, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundSeedNode.exitCode).toBe(0); - expect(foundSeedNode.stdout).toContain( - `Found node at ${seedNodeHost}:${seedNodePort}`, - ); - const foundDummy1 = await testBinUtils.pkStdio([ - 'nodes', - 'find', - 
dummySeed1Id, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundDummy1.exitCode).toBe(0); - expect(foundDummy1.stdout).toContain( - `Found node at ${dummySeed1Host}:${dummySeed1Port}`, - ); - // Check the seed node/s in config file were added from the flag - for (const configId in config.defaults.network.mainnet) { - const address = config.defaults.network.mainnet[configId]; - expect(address.host).toBeDefined(); - expect(address.port).toBeDefined(); - const foundConfig = await testBinUtils.pkStdio([ - 'nodes', - 'find', - configId, - '--node-path', - nodePath, - '--password-file', - passwordPath, - '--verbose', - ]); - expect(foundConfig.exitCode).toBe(0); - expect(foundConfig.stdout).toContain( - `Found node at ${address.host}:${address.port}`, - ); - } - await testBinUtils.pkStdio( - [ - 'agent', - 'stop', - '--node-path', - nodePath, - '--password-file', - passwordPath, - ], - undefined, + ['agent', 'stop'], + { + PK_NODE_PATH: nodePath, + PK_PASSWORD: password, + }, dataDir, ); - await status.waitFor('DEAD', 5000); + mockedConfigDefaultsNetwork.mockRestore(); + await status.waitFor('DEAD'); }, global.defaultTimeout * 2, ); diff --git a/tests/bin/agent/status.test.ts b/tests/bin/agent/status.test.ts index 4c31502e8..4181b1a16 100644 --- a/tests/bin/agent/status.test.ts +++ b/tests/bin/agent/status.test.ts @@ -6,6 +6,7 @@ import { Status } from '@/status'; import * as binErrors from '@/bin/errors'; import config from '@/config'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; describe('status', () => { const logger = new Logger('status test', LogLevel.WARN, [ @@ -30,11 +31,24 @@ describe('status', () => { const password = 'abc123'; const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); const agentProcess = await 
testBinUtils.pkSpawn( - ['agent', 'start', '--root-key-pair-bits', '1024', '--verbose'], + [ + 'agent', + 'start', + '--root-key-pair-bits', + '1024', + '--workers', + '0', + '--verbose', + ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), PK_PASSWORD: password, @@ -53,13 +67,16 @@ describe('status', () => { dataDir, )); expect(exitCode).toBe(0); + // If the command was slow, it may have become LIVE already expect(JSON.parse(stdout)).toMatchObject({ - status: 'STARTING', + status: expect.stringMatching(/STARTING|LIVE/), pid: agentProcess.pid, }); await status.waitFor('LIVE'); + const agentProcessExit = testBinUtils.processExit(agentProcess); agentProcess.kill('SIGTERM'); - await status.waitFor('STOPPING'); + // Cannot wait for STOPPING because waitFor polling may miss the transition + await status.waitFor('DEAD'); ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json'], { @@ -69,11 +86,12 @@ describe('status', () => { dataDir, )); expect(exitCode).toBe(0); + // If the command was slow, it may have become DEAD already + // If it is DEAD, then pid property will be `undefined` expect(JSON.parse(stdout)).toMatchObject({ - status: 'STOPPING', - pid: agentProcess.pid, + status: expect.stringMatching(/STOPPING|DEAD/), }); - await testBinUtils.processExit(agentProcess); + await agentProcessExit; ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json'], { @@ -103,16 +121,23 @@ describe('status', () => { ); }); describe('status with global agent', () => { - let pkAgentClose; + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); test('status on LIVE agent', async () => { const 
status = new Status({ - statusPath: path.join(global.binAgentDir, config.defaults.statusBase), + statusPath: path.join(globalAgentDir, config.defaults.statusBase), + statusLockPath: path.join( + globalAgentDir, + config.defaults.statusLockBase, + ), fs, logger, }); @@ -120,10 +145,10 @@ describe('status', () => { const { exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json', '--verbose'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); expect(JSON.parse(stdout)).toMatchObject({ @@ -147,16 +172,23 @@ describe('status', () => { }); test('status on remote LIVE agent', async () => { const passwordPath = path.join(dataDir, 'password'); - await fs.promises.writeFile(passwordPath, global.binAgentPassword); + await fs.promises.writeFile(passwordPath, globalAgentPassword); const status = new Status({ - statusPath: path.join(global.binAgentDir, config.defaults.statusBase), + statusPath: path.join(globalAgentDir, config.defaults.statusBase), + statusLockPath: path.join( + globalAgentDir, + config.defaults.statusLockBase, + ), fs, logger, }); const statusInfo = (await status.readStatus())!; + // This still needs a `nodePath` because of session token path const { exitCode, stdout } = await testBinUtils.pkStdio([ 'agent', 'status', + '--node-path', + dataDir, '--password-file', passwordPath, '--node-id', diff --git a/tests/bin/agent/stop.test.ts b/tests/bin/agent/stop.test.ts index 889b64590..2710fbc08 100644 --- a/tests/bin/agent/stop.test.ts +++ b/tests/bin/agent/stop.test.ts @@ -34,6 +34,8 @@ describe('stop', () => { // 1024 is the smallest size and is faster to start '--root-key-pair-bits', '1024', + '--workers', + '0', ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), @@ -44,6 +46,11 @@ describe('stop', () => { expect(exitCode).toBe(0); const status = new Status({ 
statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -67,6 +74,11 @@ describe('stop', () => { await fs.promises.writeFile(passwordPath, password); const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -77,6 +89,8 @@ describe('stop', () => { // 1024 is the smallest size and is faster to start '--root-key-pair-bits', '1024', + '--workers', + '0', ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), @@ -85,6 +99,7 @@ describe('stop', () => { dataDir, ); expect(exitCode).toBe(0); + await status.waitFor('LIVE'); // Simultaneous calls to stop must use pkExec const [agentStop1, agentStop2] = await Promise.all([ testBinUtils.pkExec( @@ -102,7 +117,10 @@ describe('stop', () => { dataDir, ), ]); - await status.waitFor('STOPPING'); + // Cannot await for STOPPING + // It's not reliable until file watching is implemented + // So just 1 ms delay until sending another stop command + await sleep(1); const agentStop3 = await testBinUtils.pkStdio( ['agent', 'stop', '--node-path', path.join(dataDir, 'polykey')], { @@ -118,8 +136,16 @@ describe('stop', () => { }, dataDir, ); - expect(agentStop1.exitCode).toBe(0); - expect(agentStop2.exitCode).toBe(0); + // If the GRPC server gets closed after the GRPC connection is established + // then it's possible that one of these exit codes is 1 + if (agentStop1.exitCode === 1) { + expect(agentStop2.exitCode).toBe(0); + } else if (agentStop2.exitCode === 1) { + expect(agentStop1.exitCode).toBe(0); + } else { + expect(agentStop1.exitCode).toBe(0); + expect(agentStop2.exitCode).toBe(0); + } expect(agentStop3.exitCode).toBe(0); expect(agentStop4.exitCode).toBe(0); }, @@ -131,6 +157,11 @@ describe('stop', () => { const password = 'abc123'; const status = new Status({ 
statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); @@ -141,6 +172,8 @@ describe('stop', () => { // 1024 is the smallest size and is faster to start '--root-key-pair-bits', '1024', + '--workers', + '0', '--verbose', ], { @@ -187,6 +220,8 @@ describe('stop', () => { // 1024 is the smallest size and is faster to start '--root-key-pair-bits', '1024', + '--workers', + '0', ], { PK_NODE_PATH: path.join(dataDir, 'polykey'), @@ -196,6 +231,11 @@ describe('stop', () => { ); const status = new Status({ statusPath: path.join(dataDir, 'polykey', config.defaults.statusBase), + statusLockPath: path.join( + dataDir, + 'polykey', + config.defaults.statusLockBase, + ), fs, logger, }); diff --git a/tests/bin/agent/unlock.test.ts b/tests/bin/agent/unlock.test.ts index 3cabdcd3e..530fb0492 100644 --- a/tests/bin/agent/unlock.test.ts +++ b/tests/bin/agent/unlock.test.ts @@ -5,15 +5,21 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { Session } from '@/sessions'; import config from '@/config'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; describe('unlock', () => { - const logger = new Logger('lock test', LogLevel.WARN, [new StreamHandler()]); - let pkAgentClose; + const logger = new Logger('unlock test', LogLevel.WARN, [ + new StreamHandler(), + ]); + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); + ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); let dataDir: string; beforeEach(async () => { @@ -30,10 +36,7 @@ describe('unlock', () => { test('unlock acquires session token', async () => { // Fresh session, to 
delete the token const session = await Session.createSession({ - sessionTokenPath: path.join( - global.binAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, fresh: true, @@ -42,19 +45,19 @@ describe('unlock', () => { ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'unlock'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); // Run command without password ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); expect(JSON.parse(stdout)).toMatchObject({ status: 'LIVE' }); @@ -62,10 +65,10 @@ describe('unlock', () => { ({ exitCode, stdout } = await testBinUtils.pkStdio( ['agent', 'status', '--format', 'json'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_TOKEN: await session.readToken(), }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); expect(JSON.parse(stdout)).toMatchObject({ status: 'LIVE' }); diff --git a/tests/bin/bootstrap.test.ts b/tests/bin/bootstrap.test.ts index 9b557f794..c108d0345 100644 --- a/tests/bin/bootstrap.test.ts +++ b/tests/bin/bootstrap.test.ts @@ -112,7 +112,7 @@ describe('bootstrap', () => { global.defaultTimeout * 2, ); test( - 'concurrent bootstrapping are coalesced', + 'concurrent bootstrapping results in 1 success', async () => { const password = 'password'; const [bootstrapProcess1, bootstrapProcess2] = await Promise.all([ diff --git a/tests/bin/identities.test.ts b/tests/bin/identities/identities.test.ts similarity index 86% rename from tests/bin/identities.test.ts rename to tests/bin/identities/identities.test.ts index 042c7039f..fbfcad359 100644 --- 
a/tests/bin/identities.test.ts +++ b/tests/bin/identities/identities.test.ts @@ -1,8 +1,4 @@ -import type { - IdentityId, - IdentityInfo, - ProviderId, -} from '../../src/identities/types'; +import type { IdentityId, IdentityInfo, ProviderId } from '@/identities/types'; import type { NodeInfo } from '@/nodes/types'; import type { ClaimLinkIdentity, ClaimLinkNode } from '@/claims/types'; import os from 'os'; @@ -13,14 +9,9 @@ import { PolykeyAgent } from '@'; import { makeNodeId } from '@/nodes/utils'; import * as claimsUtils from '@/claims/utils'; import * as identitiesUtils from '@/identities/utils'; -import * as testUtils from './utils'; -import * as utils from './utils'; -import { - addRemoteDetails, - cleanupRemoteKeynode, - setupRemoteKeynode, -} from '../utils'; -import TestProvider from '../identities/TestProvider'; +import * as testBinUtils from '../utils'; +import * as testNodesUtils from '../../nodes/utils'; +import TestProvider from '../../identities/TestProvider'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -132,7 +123,7 @@ describe('CLI Identities', () => { .mockImplementation(() => {}); // Authorize session - await utils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], {}, dataDir, @@ -163,7 +154,7 @@ describe('CLI Identities', () => { describe('commandAllowGestalts', () => { test('Should allow permissions on node.', async () => { const commands = genCommands(['allow', node1.id, 'notify']); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. 
const actions = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( node1.id, @@ -172,7 +163,7 @@ describe('CLI Identities', () => { expect(actionKeys).toContain('notify'); const command2 = genCommands(['allow', node1.id, 'scan']); - const result2 = await testUtils.pkStdio(command2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(command2, {}, dataDir); expect(result2.exitCode).toBe(0); // Succeeds. const actions2 = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( @@ -184,7 +175,7 @@ describe('CLI Identities', () => { // Should fail for invalid action. const command3 = genCommands(['allow', node1.id, 'invalid']); - const result3 = await testUtils.pkStdio(command3, {}, dataDir); + const result3 = await testBinUtils.pkStdio(command3, {}, dataDir); expect(result3.exitCode).toBe(1); // Should fail. }); test('Should allow permissions on Identity.', async () => { @@ -193,7 +184,7 @@ describe('CLI Identities', () => { identityString(identity1.providerId, identity1.identityId), 'notify', ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = @@ -209,7 +200,7 @@ describe('CLI Identities', () => { identityString(identity1.providerId, identity1.identityId), 'scan', ]); - const result2 = await testUtils.pkStdio(command2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(command2, {}, dataDir); expect(result2.exitCode).toBe(0); // Succeedes. const actions2 = @@ -227,13 +218,13 @@ describe('CLI Identities', () => { identityString(identity1.providerId, identity1.identityId), 'invalid', ]); - const result3 = await testUtils.pkStdio(command3, {}, dataDir); + const result3 = await testBinUtils.pkStdio(command3, {}, dataDir); expect(result3.exitCode).toBe(1); // Should fail. }); test('Should fail on invalid inputs.', async () => { let result; // Invalid node. 
- result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['allow', invaldNode.id, 'scan']), {}, dataDir, @@ -241,7 +232,7 @@ describe('CLI Identities', () => { expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid identity - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands([ 'allow', identityString( @@ -256,7 +247,7 @@ describe('CLI Identities', () => { expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid permission. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['allow', invaldNode.id, 'invalidPermission']), {}, dataDir, @@ -274,7 +265,7 @@ describe('CLI Identities', () => { await polykeyAgent.gestaltGraph.setGestaltActionByNode(node1.id, 'scan'); const commands = genCommands(['disallow', node1.id, 'notify']); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( @@ -302,7 +293,7 @@ describe('CLI Identities', () => { identityString(identity1.providerId, identity1.identityId), 'scan', ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = @@ -317,7 +308,7 @@ describe('CLI Identities', () => { test('Should fail on invalid inputs.', async () => { let result; // Invalid node. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['disallow', invaldNode.id, 'scan']), {}, dataDir, @@ -325,7 +316,7 @@ describe('CLI Identities', () => { expect(result.exitCode === 0).toBeFalsy(); // Fails.. 
// invalid identity - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands([ 'disallow', identityString( @@ -340,7 +331,7 @@ describe('CLI Identities', () => { expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid permission. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['disallow', node1.id, 'invalidPermission']), {}, dataDir, @@ -358,7 +349,7 @@ describe('CLI Identities', () => { await polykeyAgent.gestaltGraph.setGestaltActionByNode(node1.id, 'scan'); const commands = genCommands(['permissions', node1.id]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. // Print result. expect(result.stdout).toContain('notify'); @@ -381,7 +372,7 @@ describe('CLI Identities', () => { 'permissions', identityString(identity1.providerId, identity1.identityId), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. // Print result. expect(result.stdout).toContain('scan'); @@ -391,7 +382,7 @@ describe('CLI Identities', () => { describe('commandTrustGestalts', () => { test('Should set trust on Node.', async () => { const commands = genCommands(['trust', node1.id]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( @@ -405,7 +396,7 @@ describe('CLI Identities', () => { 'trust', identityString(identity1.providerId, identity1.identityId), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. 
const actions = @@ -419,7 +410,7 @@ describe('CLI Identities', () => { test('Should fail on invalid inputs.', async () => { let result; // Invalid node. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['trust', invaldNode.id]), {}, dataDir, @@ -427,7 +418,7 @@ describe('CLI Identities', () => { expect(result.exitCode === 0).toBeFalsy(); // Fails.. // invalid identity - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands([ 'trust', identityString( @@ -451,7 +442,7 @@ describe('CLI Identities', () => { await polykeyAgent.gestaltGraph.setGestaltActionByNode(node1.id, 'scan'); const commands = genCommands(['untrust', node1.id]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = await polykeyAgent.gestaltGraph.getGestaltActionsByNode( @@ -478,7 +469,7 @@ describe('CLI Identities', () => { 'untrust', identityString(identity1.providerId, identity1.identityId), ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. const actions = @@ -493,7 +484,7 @@ describe('CLI Identities', () => { test('Should fail on invalid inputs.', async () => { let result; // Invalid node. - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands(['trust', invaldNode.id]), {}, dataDir, @@ -501,7 +492,7 @@ describe('CLI Identities', () => { expect(result.exitCode === 0).toBeFalsy(); // Fails.. 
// invalid identity - result = await testUtils.pkStdio( + result = await testBinUtils.pkStdio( genCommands([ 'untrust', identityString( @@ -531,7 +522,7 @@ describe('CLI Identities', () => { testToken.providerId, testToken.identityId, ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. // Unauthenticate identity await polykeyAgent.identitiesManager.delToken( @@ -551,7 +542,7 @@ describe('CLI Identities', () => { testToken.providerId, testToken.identityId, ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode === 0).toBeFalsy(); // Fails.. }); }); @@ -566,7 +557,7 @@ describe('CLI Identities', () => { testToken.providerId, testToken.identityId, ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. 
expect(result.stdout).toContain('randomtestcode'); // Unauthenticate identity @@ -579,7 +570,7 @@ describe('CLI Identities', () => { describe('commandGetGestalts', () => { test('Should list gestalt by Node', async () => { const commands = ['identities', 'get', '-np', nodePath, node1.id]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain(node1.id); expect(result.stdout).toContain(identity1.providerId); @@ -593,7 +584,7 @@ describe('CLI Identities', () => { nodePath, identityString(identity1.providerId, identity1.identityId), ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain(node1.id); expect(result.stdout).toContain(identity1.providerId); @@ -610,7 +601,7 @@ describe('CLI Identities', () => { await polykeyAgent.gestaltGraph.setGestaltActionByNode(node2.id, 'scan'); const commands = ['identities', 'list', '-np', nodePath]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. expect(result.stdout).toContain('notify'); expect(result.stdout).toContain('scan'); @@ -628,7 +619,7 @@ describe('CLI Identities', () => { '--format', 'json', ]; - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); expect(result2.exitCode).toBe(0); // Succeeds. 
expect(result2.stdout).toContain('notify'); expect(result2.stdout).toContain('scan'); @@ -654,33 +645,49 @@ describe('CLI Identities', () => { nodePath, testToken.providerId, ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // Succeeds. expect(result.stdout).toContain(testToken.providerId); expect(result.stdout).toContain(testToken.identityId); }); }); describe('commandDiscoverGestalts', () => { + let rootDataDir; // Test variables let nodeB: PolykeyAgent; let nodeC: PolykeyAgent; // Let testProvider: TestProvider; let identityId: IdentityId; - beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); // Setup the remote gestalt state here // Setting up remote nodes. - nodeB = await setupRemoteKeynode({ logger }); - nodeC = await setupRemoteKeynode({ logger }); + nodeB = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeB'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + nodeC = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeC'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); // Forming links // B->C // Adding connection details. - await addRemoteDetails(polykeyAgent, nodeB); - await addRemoteDetails(nodeB, polykeyAgent); - await addRemoteDetails(nodeB, nodeC); - await addRemoteDetails(nodeC, nodeB); - + await testNodesUtils.nodesConnect(polykeyAgent, nodeB); + await testNodesUtils.nodesConnect(nodeB, polykeyAgent); + await testNodesUtils.nodesConnect(nodeB, nodeC); + await testNodesUtils.nodesConnect(nodeC, nodeB); // Adding sigchain details. 
const claimBtoC: ClaimLinkNode = { type: 'node', @@ -708,17 +715,20 @@ describe('CLI Identities', () => { provider: testProvider.id, identity: identityId, }; - const claimEncoded = await nodeB.sigchain.addClaim(claimIdentToB); + const [, claimEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claim = claimsUtils.decodeClaim(claimEncoded); await testProvider.publishClaim(identityId, claim); }, global.polykeyStartupTimeout * 2); afterAll(async () => { - // Clean up the remote gestalt state here. - await cleanupRemoteKeynode(nodeB); - await cleanupRemoteKeynode(nodeC); + await nodeC.stop(); + await nodeB.stop(); // Unclaim identity testProvider.links = {}; testProvider.linkIdCounter = 0; + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); }); afterEach(async () => { // Clean the local nodes gestalt graph here. @@ -741,7 +751,7 @@ describe('CLI Identities', () => { nodePath, nodeB.nodeManager.getNodeId(), ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); // We expect to find a gestalt now. @@ -764,7 +774,6 @@ describe('CLI Identities', () => { identityId, testToken.tokenData, ); - const commands = [ 'identities', 'discover', @@ -772,9 +781,8 @@ describe('CLI Identities', () => { nodePath, identityString(testProvider.id, identityId), ]; - const result = await testUtils.pkStdio(commands, {}, dataDir); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); expect(result.exitCode).toBe(0); - // We expect to find a gestalt now. 
const gestalt = await polykeyAgent.gestaltGraph.getGestalts(); expect(gestalt.length).not.toBe(0); diff --git a/tests/bin/keys.test.ts b/tests/bin/keys/keys.test.ts similarity index 79% rename from tests/bin/keys.test.ts rename to tests/bin/keys/keys.test.ts index 970eb9348..89192eb97 100644 --- a/tests/bin/keys.test.ts +++ b/tests/bin/keys/keys.test.ts @@ -3,7 +3,7 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; -import * as utils from './utils'; +import * as testBinUtils from '../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -11,19 +11,6 @@ jest.mock('@/keys/utils', () => ({ jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ describe('CLI keys', () => { const logger = new Logger('CLI Test', LogLevel.WARN, [new StreamHandler()]); let dataDir: string; @@ -59,7 +46,7 @@ describe('CLI keys', () => { }); beforeEach(async () => { // Authorize session - await utils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], {}, dataDir, @@ -69,24 +56,28 @@ describe('CLI keys', () => { describe('commandCertChain', () => { test('should get the certificate chain', async () => { command = ['keys', 'certchain', '-np', nodePath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); describe('commandGetCert', () => { test('should get the certificate', async () => { command = ['keys', 'cert', '-np', nodePath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); describe('commandGetRootKeypair', () => { test('should get the root keypair', async () => { command = ['keys', 'root', '-np', nodePath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const result2 = await utils.pkStdio([...command, '-pk'], {}, dataDir); + const result2 = await testBinUtils.pkStdio( + [...command, '-pk'], + {}, + dataDir, + ); expect(result2.exitCode).toBe(0); }); }); @@ -97,7 +88,7 @@ describe('CLI keys', () => { encoding: 'binary', }); command = ['keys', 'encrypt', '-np', nodePath, dataPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -110,7 +101,7 @@ describe('CLI keys', () => { ); await fs.promises.writeFile(dataPath, encrypted, { encoding: 'binary' }); command = 
['keys', 'decrypt', '-np', nodePath, dataPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -121,7 +112,7 @@ describe('CLI keys', () => { command = ['keys', 'sign', '-np', nodePath, dataPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -139,7 +130,7 @@ describe('CLI keys', () => { command = ['keys', 'verify', '-np', nodePath, dataPath, signatureTrue]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -156,7 +147,7 @@ describe('CLI keys', () => { command = ['keys', 'renew', '-np', nodePath, '-pnf', passPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const rootKeypairNew = polykeyAgent.keyManager.getRootKeyPair(); @@ -185,7 +176,7 @@ describe('CLI keys', () => { command = ['keys', 'reset', '-np', nodePath, '-pnf', passPath]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const rootKeypairNew = polykeyAgent.keyManager.getRootKeyPair(); @@ -213,7 +204,7 @@ describe('CLI keys', () => { command = ['keys', 'password', '-np', nodePath, '-pnf', passPath]; - const result2 = await utils.pkStdio([...command], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); await polykeyAgent.stop(); diff --git a/tests/bin/nodes.test.ts b/tests/bin/nodes.test.ts deleted file mode 100644 index 5b42f9b64..000000000 --- a/tests/bin/nodes.test.ts +++ /dev/null @@ -1,394 +0,0 @@ 
-import type { Host, Port } from '@/network/types'; -import type { NodeId, NodeAddress } from '@/nodes/types'; -import os from 'os'; -import path from 'path'; -import fs from 'fs'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import PolykeyAgent from '@/PolykeyAgent'; -import { makeNodeId } from '@/nodes/utils'; -import * as testUtils from './utils'; -import * as testKeynodeUtils from '../utils'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); - -describe('CLI Nodes', () => { - const password = 'password'; - const logger = new Logger('pkStdio Test', LogLevel.WARN, [ - new StreamHandler(), - ]); - let dataDir: string; - let nodePath: string; - let passwordFile: string; - let polykeyAgent: PolykeyAgent; - let remoteOnline: PolykeyAgent; - let remoteOffline: PolykeyAgent; - - let keynodeId: NodeId; - let remoteOnlineNodeId: NodeId; - let remoteOfflineNodeId: NodeId; - - let remoteOnlineHost: Host; - let remoteOnlinePort: Port; - let remoteOfflineHost: Host; - let remoteOfflinePort: Port; - - const nodeId1 = makeNodeId( - 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', - ); - const nodeId2 = makeNodeId( - 'vrcacp9vsb4ht25hds6s4lpp2abfaso0mptcfnh499n35vfcn2gkg', - ); - const nodeId3 = makeNodeId( - 'v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug', - ); - - // Helper functions - function genCommands(options: Array) { - return ['nodes', ...options, '-np', nodePath]; - } - - beforeAll(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - nodePath = path.join(dataDir, 'keynode'); - passwordFile = path.join(dataDir, 'passwordFile'); - await fs.promises.writeFile(passwordFile, 'password'); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: nodePath, - logger: logger, - }); - keynodeId = polykeyAgent.nodeManager.getNodeId(); - - // 
Setting up a remote keynode - remoteOnline = await testKeynodeUtils.setupRemoteKeynode({ - logger, - }); - remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); - remoteOnlineHost = remoteOnline.revProxy.ingressHost; - remoteOnlinePort = remoteOnline.revProxy.ingressPort; - await testKeynodeUtils.addRemoteDetails(polykeyAgent, remoteOnline); - - // Setting up an offline remote keynode - remoteOffline = await testKeynodeUtils.setupRemoteKeynode({ - logger, - }); - remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); - remoteOfflineHost = remoteOffline.revProxy.ingressHost; - remoteOfflinePort = remoteOffline.revProxy.ingressPort; - await testKeynodeUtils.addRemoteDetails(polykeyAgent, remoteOffline); - await remoteOffline.stop(); - - // Authorize session - await testUtils.pkStdio( - ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], - {}, - nodePath, - ); - }, global.polykeyStartupTimeout * 3); - afterAll(async () => { - await polykeyAgent.stop(); - await polykeyAgent.destroy(); - await testKeynodeUtils.cleanupRemoteKeynode(remoteOnline); - await testKeynodeUtils.cleanupRemoteKeynode(remoteOffline); - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - describe('commandClaimNode', () => { - beforeAll(async () => { - await remoteOnline.nodeManager.setNode(keynodeId, { - host: polykeyAgent.revProxy.ingressHost, - port: polykeyAgent.revProxy.ingressPort, - } as NodeAddress); - await polykeyAgent.acl.setNodePerm(remoteOnlineNodeId, { - gestalt: { - notify: null, - }, - vaults: {}, - }); - await remoteOnline.acl.setNodePerm(keynodeId, { - gestalt: { - notify: null, - }, - vaults: {}, - }); - }); - afterEach(async () => { - await polykeyAgent.notificationsManager.clearNotifications(); - await remoteOnline.notificationsManager.clearNotifications(); - await polykeyAgent.sigchain.clearDB(); - await remoteOnline.sigchain.clearDB(); - }); - afterAll(async () => { - await 
polykeyAgent.acl.setNodePerm(remoteOnlineNodeId, { - gestalt: {}, - vaults: {}, - }); - await remoteOnline.acl.setNodePerm(keynodeId, { - gestalt: {}, - vaults: {}, - }); - await remoteOnline.nodeManager.clearDB(); - }); - test( - 'Should send a gestalt invite', - async () => { - const commands = genCommands(['claim', remoteOnlineNodeId]); - const result = await testUtils.pkStdio(commands); - expect(result.exitCode).toBe(0); // Succeeds. - expect(result.stdout).toContain('Gestalt Invite'); - expect(result.stdout).toContain(remoteOnlineNodeId); - }, - global.polykeyStartupTimeout * 4, - ); - test('Should send a gestalt invite (force invite)', async () => { - await remoteOnline.notificationsManager.sendNotification(keynodeId, { - type: 'GestaltInvite', - }); - const commands = genCommands([ - 'claim', - remoteOnlineNodeId, - '--force-invite', - ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); // Succeeds. - expect(result.stdout).toContain('Gestalt Invite'); - expect(result.stdout).toContain(remoteOnlineNodeId); - }); - test('Should claim remote node', async () => { - await remoteOnline.notificationsManager.sendNotification(keynodeId, { - type: 'GestaltInvite', - }); - const commands = genCommands(['claim', remoteOnlineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); // Succeeds. - expect(result.stdout).toContain('cryptolink claim'); - expect(result.stdout).toContain(remoteOnlineNodeId); - }); - }); - describe('commandPingNode', () => { - test( - 'Should return failure when pinging an offline node', - async () => { - const commands = genCommands(['ping', remoteOfflineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(1); // Should fail with no response. for automation purposes. 
- expect(result.stdout).toContain('No response received'); - - // Checking for json output - const commands2 = genCommands([ - 'ping', - remoteOfflineNodeId, - '--format', - 'json', - ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(1); // Should fail with no response. for automation purposes. - expect(result2.stdout).toContain('No response received'); - }, - global.failedConnectionTimeout * 2, - ); - test( - "Should return failure if can't find the node", - async () => { - const fakeNodeId = nodeId1; - const commands = genCommands(['ping', fakeNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).not.toBe(0); // Should fail if node doesn't exist. - expect(result.stdout).toContain('Failed to resolve node ID'); - - // Json format. - const commands2 = genCommands(['ping', fakeNodeId, '--format', 'json']); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).not.toBe(0); // Should fail if node doesn't exist. - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('false'); - expect(result2.stdout).toContain('message'); - expect(result2.stdout).toContain('Failed to resolve node ID'); - }, - global.failedConnectionTimeout * 2, - ); - test('Should return success when pinging a live node', async () => { - const commands = genCommands(['ping', remoteOnlineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('Node is Active.'); - - // Checking for Json output. 
- const commands2 = genCommands([ - 'ping', - remoteOnlineNodeId, - '--format', - 'json', - ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(0); - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('true'); - expect(result2.stdout).toContain('message'); - expect(result2.stdout).toContain('Node is Active'); - }); - }); - describe('commandFindNode', () => { - test('Should find an online node', async () => { - const commands = genCommands(['find', remoteOnlineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('Found node at'); - expect(result.stdout).toContain(remoteOnlineHost); - expect(result.stdout).toContain(remoteOnlinePort); - - // Checking json format. - const commands2 = genCommands([ - 'find', - remoteOnlineNodeId, - '--format', - 'json', - ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(0); - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('true'); - expect(result2.stdout).toContain('message'); - expect(result2.stdout).toContain( - `Found node at ${remoteOnlineHost}:${remoteOnlinePort}`, - ); - expect(result2.stdout).toContain('host'); - expect(result2.stdout).toContain('port'); - expect(result2.stdout).toContain('id'); - expect(result2.stdout).toContain(remoteOnlineNodeId); - }); - test('Should find an offline node', async () => { - const commands = genCommands(['find', remoteOfflineNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); - expect(result.stdout).toContain('Found node at'); - expect(result.stdout).toContain(remoteOfflineHost); - expect(result.stdout).toContain(remoteOfflinePort); - - // Checking json format. 
- const commands2 = genCommands([ - 'find', - remoteOfflineNodeId, - '--format', - 'json', - ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(0); - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('true'); - expect(result2.stdout).toContain('message'); - expect(result2.stdout).toContain( - `Found node at ${remoteOfflineHost}:${remoteOfflinePort}`, - ); - expect(result2.stdout).toContain('host'); - expect(result2.stdout).toContain('port'); - expect(result2.stdout).toContain('id'); - expect(result2.stdout).toContain(remoteOfflineNodeId); - }); - test( - 'Should fail to find an unknown node', - async () => { - const unknownNodeId = nodeId2; - const commands = genCommands(['find', unknownNodeId]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(1); - expect(result.stdout).toContain(`Failed to find node ${unknownNodeId}`); - - // Checking json format. - const commands2 = genCommands([ - 'find', - unknownNodeId, - '--format', - 'json', - ]); - const result2 = await testUtils.pkStdio(commands2, {}, dataDir); - expect(result2.exitCode).toBe(1); - expect(result2.stdout).toContain(`message`); - expect(result2.stdout).toContain( - `Failed to find node ${unknownNodeId}`, - ); - expect(result2.stdout).toContain('id'); - expect(result2.stdout).toContain(unknownNodeId); - expect(result2.stdout).toContain('port'); - expect(result2.stdout).toContain('0'); - expect(result2.stdout).toContain('host'); - expect(result2.stdout).toContain('success'); - expect(result2.stdout).toContain('false'); - }, - global.failedConnectionTimeout * 2, - ); - }); - describe('commandAddNode', () => { - const validNodeId = nodeId3; - const invalidNodeId = 'INVALIDID' as NodeId; - const validHost = '0.0.0.0'; - const invalidHost = 'INVALIDHOST'; - const port = 55555; - afterEach(async () => { - await polykeyAgent.nodeManager.clearDB(); - }); - afterAll(async () => { - // 
Restore removed nodes - await testKeynodeUtils.addRemoteDetails(polykeyAgent, remoteOnline); - await testKeynodeUtils.addRemoteDetails(polykeyAgent, remoteOffline); - }); - test('Should add the node', async () => { - const commands = genCommands([ - 'add', - validNodeId, - validHost, - port.toString(), - ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).toBe(0); - - // Checking if node was added. - const res = await polykeyAgent.nodeManager.getNode(validNodeId); - expect(res).toBeTruthy(); - expect(res!.host).toEqual(validHost); - expect(res!.port).toEqual(port); - }); - test( - 'Should fail to add the node (invalid node ID)', - async () => { - const commands = genCommands([ - 'add', - invalidNodeId, - validHost, - port.toString(), - ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).not.toBe(0); - expect(result.stderr).toContain('Invalid node ID.'); - }, - global.failedConnectionTimeout, - ); - test( - 'Should fail to add the node (invalid IP address)', - async () => { - const commands = genCommands([ - 'add', - validNodeId, - invalidHost, - port.toString(), - ]); - const result = await testUtils.pkStdio(commands, {}, dataDir); - expect(result.exitCode).not.toBe(0); - expect(result.stderr).toContain('Invalid IP address.'); - - // Checking if node was added. 
- const res = await polykeyAgent.nodeManager.getNode(validNodeId); - expect(res).toBeUndefined(); - }, - global.failedConnectionTimeout, - ); - }); -}); diff --git a/tests/bin/nodes/add.test.ts b/tests/bin/nodes/add.test.ts new file mode 100644 index 000000000..b238c9479 --- /dev/null +++ b/tests/bin/nodes/add.test.ts @@ -0,0 +1,119 @@ +import type { NodeId } from '@/nodes/types'; +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import * as nodesUtils from '@/nodes/utils'; +import * as testBinUtils from '../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + +describe('add', () => { + const password = 'password'; + const logger = new Logger('add test', LogLevel.WARN, [new StreamHandler()]); + let dataDir: string; + let nodePath: string; + let passwordFile: string; + let polykeyAgent: PolykeyAgent; + + const validNodeId = nodesUtils.makeNodeId( + 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', + ); + const invalidNodeId = 'INVALIDID' as NodeId; + const validHost = '0.0.0.0'; + const invalidHost = 'INVALIDHOST'; + const port = 55555; + + // Helper functions + function genCommands(options: Array) { + return ['nodes', ...options, '-np', nodePath]; + } + + beforeAll(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'keynode'); + passwordFile = path.join(dataDir, 'passwordFile'); + await fs.promises.writeFile(passwordFile, 'password'); + polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: nodePath, + logger: logger, + }); + + // Authorize session + await testBinUtils.pkStdio( + ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], + {}, + nodePath, + ); + }, 
global.polykeyStartupTimeout * 3); + afterEach(async () => { + await polykeyAgent.nodeManager.clearDB(); + }); + afterAll(async () => { + await polykeyAgent.stop(); + await polykeyAgent.destroy(); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + + test('add a node', async () => { + const commands = genCommands([ + 'add', + validNodeId, + validHost, + port.toString(), + ]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); + + // Checking if node was added. + const res = await polykeyAgent.nodeManager.getNode(validNodeId); + expect(res).toBeTruthy(); + expect(res!.host).toEqual(validHost); + expect(res!.port).toEqual(port); + }); + test( + 'fail to add a node (invalid node ID)', + async () => { + const commands = genCommands([ + 'add', + invalidNodeId, + validHost, + port.toString(), + ]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).not.toBe(0); + expect(result.stderr).toContain('Invalid node ID.'); + }, + global.failedConnectionTimeout, + ); + test( + 'fail to add a node (invalid IP address)', + async () => { + const commands = genCommands([ + 'add', + validNodeId, + invalidHost, + port.toString(), + ]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).not.toBe(0); + expect(result.stderr).toContain('Invalid IP address.'); + + // Checking if node was added. 
+ const res = await polykeyAgent.nodeManager.getNode(validNodeId); + expect(res).toBeUndefined(); + }, + global.failedConnectionTimeout, + ); +}); diff --git a/tests/bin/nodes/claim.test.ts b/tests/bin/nodes/claim.test.ts new file mode 100644 index 000000000..67f20bc53 --- /dev/null +++ b/tests/bin/nodes/claim.test.ts @@ -0,0 +1,143 @@ +import type { NodeId } from '@/nodes/types'; +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import * as testBinUtils from '../utils'; +import * as testNodesUtils from '../../nodes/utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + +describe('claim', () => { + const password = 'password'; + const logger = new Logger('claim test', LogLevel.WARN, [new StreamHandler()]); + let rootDataDir: string; + let dataDir: string; + let nodePath: string; + let passwordFile: string; + let polykeyAgent: PolykeyAgent; + let remoteOnline: PolykeyAgent; + + let keynodeId: NodeId; + let remoteOnlineNodeId: NodeId; + + // Helper functions + function genCommands(options: Array) { + return ['nodes', ...options, '-np', nodePath]; + } + + beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'keynode'); + passwordFile = path.join(dataDir, 'passwordFile'); + await fs.promises.writeFile(passwordFile, 'password'); + polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: nodePath, + logger: logger, + }); + keynodeId = polykeyAgent.nodeManager.getNodeId(); + // Setting up a remote keynode + remoteOnline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: 
path.join(rootDataDir, 'remoteOnline'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOnline); + + await remoteOnline.nodeManager.setNode(keynodeId, { + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }); + await polykeyAgent.acl.setNodePerm(remoteOnlineNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + await remoteOnline.acl.setNodePerm(keynodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + // Authorize session + await testBinUtils.pkStdio( + ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], + {}, + nodePath, + ); + }, global.polykeyStartupTimeout * 2); + + afterEach(async () => { + await polykeyAgent.notificationsManager.clearNotifications(); + await remoteOnline.notificationsManager.clearNotifications(); + await polykeyAgent.sigchain.clearDB(); + await remoteOnline.sigchain.clearDB(); + }); + afterAll(async () => { + await polykeyAgent.stop(); + await polykeyAgent.destroy(); + await remoteOnline.stop(); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + test( + 'send a gestalt invite', + async () => { + const commands = genCommands(['claim', remoteOnlineNodeId]); + const result = await testBinUtils.pkStdio(commands); + expect(result.exitCode).toBe(0); // Succeeds. 
+ expect(result.stdout).toContain('Gestalt Invite'); + expect(result.stdout).toContain(remoteOnlineNodeId); + }, + global.polykeyStartupTimeout * 4, + ); + test('send a gestalt invite (force invite)', async () => { + await remoteOnline.notificationsManager.sendNotification(keynodeId, { + type: 'GestaltInvite', + }); + // Needs to be forced, as the local node has already received an invitation + const commands = genCommands([ + 'claim', + remoteOnlineNodeId, + '--force-invite', + ]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); // Succeeds. + expect(result.stdout).toContain('Gestalt Invite'); + expect(result.stdout).toContain(remoteOnlineNodeId); + }); + test('claim the remote node', async () => { + await remoteOnline.notificationsManager.sendNotification(keynodeId, { + type: 'GestaltInvite', + }); + // Received an invitation, so will attempt to perform the claiming process + const commands = genCommands(['claim', remoteOnlineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); // Succeeds. 
+ expect(result.stdout).toContain('cryptolink claim'); + expect(result.stdout).toContain(remoteOnlineNodeId); + }); +}); diff --git a/tests/bin/nodes/find.test.ts b/tests/bin/nodes/find.test.ts new file mode 100644 index 000000000..7393a2fee --- /dev/null +++ b/tests/bin/nodes/find.test.ts @@ -0,0 +1,197 @@ +import type { Host, Port } from '@/network/types'; +import type { NodeId } from '@/nodes/types'; +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import * as nodesUtils from '@/nodes/utils'; +import * as testBinUtils from '../utils'; +import * as testNodesUtils from '../../nodes/utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + +describe('find', () => { + const password = 'password'; + const logger = new Logger('find test', LogLevel.WARN, [new StreamHandler()]); + let rootDataDir: string; + let dataDir: string; + let nodePath: string; + let passwordFile: string; + let polykeyAgent: PolykeyAgent; + let remoteOnline: PolykeyAgent; + let remoteOffline: PolykeyAgent; + + let remoteOnlineNodeId: NodeId; + let remoteOfflineNodeId: NodeId; + + let remoteOnlineHost: Host; + let remoteOnlinePort: Port; + let remoteOfflineHost: Host; + let remoteOfflinePort: Port; + + // Helper functions + function genCommands(options: Array) { + return ['nodes', ...options, '-np', nodePath]; + } + + beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'keynode'); + passwordFile = path.join(dataDir, 'passwordFile'); + await fs.promises.writeFile(passwordFile, 'password'); + polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + 
nodePath: nodePath, + logger: logger, + }); + + // Setting up a remote keynode + remoteOnline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOnline'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); + remoteOnlineHost = remoteOnline.revProxy.getIngressHost(); + remoteOnlinePort = remoteOnline.revProxy.getIngressPort(); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOnline); + + // Setting up an offline remote keynode + remoteOffline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOffline'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); + remoteOfflineHost = remoteOffline.revProxy.getIngressHost(); + remoteOfflinePort = remoteOffline.revProxy.getIngressPort(); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOffline); + await remoteOffline.stop(); + + // Authorize session + await testBinUtils.pkStdio( + ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], + {}, + nodePath, + ); + }, global.polykeyStartupTimeout * 3); + afterAll(async () => { + await polykeyAgent.stop(); + await polykeyAgent.destroy(); + await remoteOnline.stop(); + await remoteOffline.stop(); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); + }); + + test('find an online node', async () => { + const commands = genCommands(['find', remoteOnlineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('Found node at'); + expect(result.stdout).toContain(remoteOnlineHost); + expect(result.stdout).toContain(remoteOnlinePort); + + // Checking json format. 
+ const commands2 = genCommands([ + 'find', + remoteOnlineNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(0); + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('true'); + expect(result2.stdout).toContain('message'); + expect(result2.stdout).toContain( + `Found node at ${remoteOnlineHost}:${remoteOnlinePort}`, + ); + expect(result2.stdout).toContain('host'); + expect(result2.stdout).toContain('port'); + expect(result2.stdout).toContain('id'); + expect(result2.stdout).toContain(remoteOnlineNodeId); + }); + test('find an offline node', async () => { + const commands = genCommands(['find', remoteOfflineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('Found node at'); + expect(result.stdout).toContain(remoteOfflineHost); + expect(result.stdout).toContain(remoteOfflinePort); + + // Checking json format. 
+ const commands2 = genCommands([ + 'find', + remoteOfflineNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(0); + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('true'); + expect(result2.stdout).toContain('message'); + expect(result2.stdout).toContain( + `Found node at ${remoteOfflineHost}:${remoteOfflinePort}`, + ); + expect(result2.stdout).toContain('host'); + expect(result2.stdout).toContain('port'); + expect(result2.stdout).toContain('id'); + expect(result2.stdout).toContain(remoteOfflineNodeId); + }); + test( + 'fail to find an unknown node', + async () => { + const unknownNodeId = nodesUtils.makeNodeId( + 'vrcacp9vsb4ht25hds6s4lpp2abfaso0mptcfnh499n35vfcn2gkg', + ); + const commands = genCommands(['find', unknownNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(1); + expect(result.stdout).toContain(`Failed to find node ${unknownNodeId}`); + + // Checking json format. 
+ const commands2 = genCommands([ + 'find', + unknownNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(1); + expect(result2.stdout).toContain(`message`); + expect(result2.stdout).toContain(`Failed to find node ${unknownNodeId}`); + expect(result2.stdout).toContain('id'); + expect(result2.stdout).toContain(unknownNodeId); + expect(result2.stdout).toContain('port'); + expect(result2.stdout).toContain('0'); + expect(result2.stdout).toContain('host'); + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('false'); + }, + global.failedConnectionTimeout * 2, + ); +}); diff --git a/tests/bin/nodes/ping.test.ts b/tests/bin/nodes/ping.test.ts new file mode 100644 index 000000000..902d7e915 --- /dev/null +++ b/tests/bin/nodes/ping.test.ts @@ -0,0 +1,158 @@ +import type { NodeId } from '@/nodes/types'; +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import PolykeyAgent from '@/PolykeyAgent'; +import * as nodesUtils from '@/nodes/utils'; +import * as testBinUtils from '../utils'; +import * as testNodesUtils from '../../nodes/utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); + +describe('ping', () => { + const password = 'password'; + const logger = new Logger('ping test', LogLevel.WARN, [new StreamHandler()]); + let rootDataDir: string; + let dataDir: string; + let nodePath: string; + let passwordFile: string; + let polykeyAgent: PolykeyAgent; + let remoteOnline: PolykeyAgent; + let remoteOffline: PolykeyAgent; + + let remoteOnlineNodeId: NodeId; + let remoteOfflineNodeId: NodeId; + + // Helper functions + function genCommands(options: Array) { + return ['nodes', ...options, '-np', nodePath]; + } + + beforeAll(async () => { + dataDir = await 
fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'keynode'); + passwordFile = path.join(dataDir, 'passwordFile'); + await fs.promises.writeFile(passwordFile, 'password'); + polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: nodePath, + logger: logger, + }); + + // Setting up a remote keynode + remoteOnline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOnline'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + remoteOnlineNodeId = remoteOnline.nodeManager.getNodeId(); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOnline); + + // Setting up an offline remote keynode + remoteOffline = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'remoteOffline'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + remoteOfflineNodeId = remoteOffline.nodeManager.getNodeId(); + await testNodesUtils.nodesConnect(polykeyAgent, remoteOffline); + await remoteOffline.stop(); + + // Authorize session + await testBinUtils.pkStdio( + ['agent', 'unlock', '-np', nodePath, '--password-file', passwordFile], + {}, + nodePath, + ); + }, global.polykeyStartupTimeout * 3); + afterAll(async () => { + await polykeyAgent.stop(); + await polykeyAgent.destroy(); + await remoteOnline.stop(); + await remoteOffline.stop(); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); + }); + test( + 'fail when pinging an offline node', + async () => { + const commands = genCommands(['ping', remoteOfflineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(1); // Should fail with no response. for automation purposes. 
+ expect(result.stdout).toContain('No response received'); + + // Checking for json output + const commands2 = genCommands([ + 'ping', + remoteOfflineNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(1); // Should fail with no response. for automation purposes. + expect(result2.stdout).toContain('No response received'); + }, + global.failedConnectionTimeout * 2, + ); + test( + 'fail if node cannot be found', + async () => { + const fakeNodeId = nodesUtils.makeNodeId( + 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0', + ); + const commands = genCommands(['ping', fakeNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).not.toBe(0); // Should fail if node doesn't exist. + expect(result.stdout).toContain('Failed to resolve node ID'); + + // Json format. + const commands2 = genCommands(['ping', fakeNodeId, '--format', 'json']); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).not.toBe(0); // Should fail if node doesn't exist. + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('false'); + expect(result2.stdout).toContain('message'); + expect(result2.stdout).toContain('Failed to resolve node ID'); + }, + global.failedConnectionTimeout * 2, + ); + test('succeed when pinging a live node', async () => { + const commands = genCommands(['ping', remoteOnlineNodeId]); + const result = await testBinUtils.pkStdio(commands, {}, dataDir); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('Node is Active.'); + + // Checking for Json output. 
+ const commands2 = genCommands([ + 'ping', + remoteOnlineNodeId, + '--format', + 'json', + ]); + const result2 = await testBinUtils.pkStdio(commands2, {}, dataDir); + expect(result2.exitCode).toBe(0); + expect(result2.stdout).toContain('success'); + expect(result2.stdout).toContain('true'); + expect(result2.stdout).toContain('message'); + expect(result2.stdout).toContain('Node is Active'); + }); +}); diff --git a/tests/bin/notifications.test.ts b/tests/bin/notifications/notifications.test.ts similarity index 84% rename from tests/bin/notifications.test.ts rename to tests/bin/notifications/notifications.test.ts index a866b79d6..387767d08 100644 --- a/tests/bin/notifications.test.ts +++ b/tests/bin/notifications/notifications.test.ts @@ -8,8 +8,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { utils as idUtils } from '@matrixai/id'; import PolykeyAgent from '@/PolykeyAgent'; import { makeVaultId } from '@/vaults/utils'; -import * as utils from './utils'; -import * as testUtils from './utils'; +import * as testBinUtils from '../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -64,12 +63,12 @@ describe('CLI Notifications', () => { senderNodeId = senderPolykeyAgent.nodeManager.getNodeId(); receiverNodeId = receiverPolykeyAgent.nodeManager.getNodeId(); await senderPolykeyAgent.nodeManager.setNode(receiverNodeId, { - host: receiverPolykeyAgent.revProxy.ingressHost, - port: receiverPolykeyAgent.revProxy.ingressPort, + host: receiverPolykeyAgent.revProxy.getIngressHost(), + port: receiverPolykeyAgent.revProxy.getIngressPort(), } as NodeAddress); // Authorize session - await utils.pkStdio([ + await testBinUtils.pkStdio([ 'agent', 'unlock', '-np', @@ -77,7 +76,7 @@ describe('CLI Notifications', () => { '--password-file', senderPasswordFile, ]); - await utils.pkStdio([ + await testBinUtils.pkStdio([ 'agent', 'unlock', '-np', @@ -108,7 +107,7 @@ describe('CLI Notifications', () => { vaults: {}, }); const 
commands = genCommandsSender(['send', receiverNodeId, 'msg']); - const result = await testUtils.pkStdio(commands, {}, senderDataDir); + const result = await testBinUtils.pkStdio(commands, {}, senderDataDir); expect(result.exitCode).toBe(0); // Succeeds const notifications = await receiverPolykeyAgent.notificationsManager.readNotifications(); @@ -123,7 +122,7 @@ describe('CLI Notifications', () => { vaults: {}, }); const commands = genCommandsSender(['send', receiverNodeId, 'msg']); - const result = await testUtils.pkStdio(commands, {}, senderDataDir); + const result = await testBinUtils.pkStdio(commands, {}, senderDataDir); expect(result.exitCode).toBe(0); // Succeeds const notifications = await receiverPolykeyAgent.notificationsManager.readNotifications(); @@ -153,11 +152,11 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommands = genCommandsReceiver(['read']); - const result1 = await testUtils.pkStdio( + const result1 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -171,8 +170,8 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg4', ]); - await testUtils.pkStdio(senderCommands4, {}, senderDataDir); - const result2 = await testUtils.pkStdio( + await testBinUtils.pkStdio(senderCommands4, {}, senderDataDir); + const result2 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -205,19 +204,19 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await 
testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommands1 = genCommandsReceiver(['read']); - await testUtils.pkStdio(receiverCommands1, {}, receiverDataDir); + await testBinUtils.pkStdio(receiverCommands1, {}, receiverDataDir); const senderCommands4 = genCommandsSender([ 'send', receiverNodeId, 'msg4', ]); - await testUtils.pkStdio(senderCommands4, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands4, {}, senderDataDir); const receiverCommands2 = genCommandsReceiver(['read', '--unread']); - const result = await testUtils.pkStdio( + const result = await testBinUtils.pkStdio( receiverCommands2, {}, receiverDataDir, @@ -250,11 +249,11 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommands = genCommandsReceiver(['read', '--number', '2']); - const result = await testUtils.pkStdio( + const result = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -286,9 +285,9 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); 
const receiverCommands = genCommandsReceiver([ 'read', '--unread', @@ -297,17 +296,17 @@ describe('CLI Notifications', () => { '--order', 'oldest', ]); - const result1 = await testUtils.pkStdio( + const result1 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, ); - const result2 = await testUtils.pkStdio( + const result2 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, ); - const result3 = await testUtils.pkStdio( + const result3 = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -321,7 +320,7 @@ describe('CLI Notifications', () => { }); test('Should read no notifications.', async () => { const receiverCommands = genCommandsReceiver(['read']); - const result = await testUtils.pkStdio( + const result = await testBinUtils.pkStdio( receiverCommands, {}, receiverDataDir, @@ -365,7 +364,7 @@ describe('CLI Notifications', () => { notificationData3, ); const commands = genCommandsReceiver(['read']); - const result = await testUtils.pkStdio(commands, {}, receiverDataDir); + const result = await testBinUtils.pkStdio(commands, {}, receiverDataDir); expect(result.exitCode).toBe(0); expect(result.stdout).toContain('Message from Keynode'); expect(result.stdout).toContain('invited you to join their Gestalt'); @@ -395,13 +394,13 @@ describe('CLI Notifications', () => { receiverNodeId, 'msg3', ]); - await testUtils.pkStdio(senderCommands1, {}, senderDataDir); - await testUtils.pkStdio(senderCommands2, {}, senderDataDir); - await testUtils.pkStdio(senderCommands3, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands1, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands2, {}, senderDataDir); + await testBinUtils.pkStdio(senderCommands3, {}, senderDataDir); const receiverCommandsClear = genCommandsReceiver(['clear']); const receiverCommandsRead = genCommandsReceiver(['read']); - await testUtils.pkStdio(receiverCommandsClear); - const result = await testUtils.pkStdio( + await 
testBinUtils.pkStdio(receiverCommandsClear); + const result = await testBinUtils.pkStdio( receiverCommandsRead, {}, receiverDataDir, diff --git a/tests/bin/polykey.test.ts b/tests/bin/polykey.test.ts index 688ec66d9..28bb328f6 100644 --- a/tests/bin/polykey.test.ts +++ b/tests/bin/polykey.test.ts @@ -1,8 +1,8 @@ -import { pkStdio } from './utils'; +import * as testBinUtils from './utils'; describe('polykey', () => { test('default help display', async () => { - const result = await pkStdio([]); + const result = await testBinUtils.pkStdio([]); expect(result.exitCode).toBe(0); expect(result.stdout).toBe(''); expect(result.stderr.length > 0).toBe(true); diff --git a/tests/bin/secret.test.ts b/tests/bin/secrets/secrets.test.ts similarity index 83% rename from tests/bin/secret.test.ts rename to tests/bin/secrets/secrets.test.ts index a9092d94c..292dacdc9 100644 --- a/tests/bin/secret.test.ts +++ b/tests/bin/secrets/secrets.test.ts @@ -5,7 +5,7 @@ import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import { vaultOps } from '@/vaults'; -import * as utils from './utils'; +import * as testBinUtils from '../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -13,19 +13,6 @@ jest.mock('@/keys/utils', () => ({ jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. 
- * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ describe('CLI secrets', () => { const password = 'password'; const logger = new Logger('CLI Test', LogLevel.WARN, [new StreamHandler()]); @@ -46,7 +33,7 @@ describe('CLI secrets', () => { logger: logger, }); // Authorize session - await utils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', dataDir, '--password-file', passwordFile], {}, dataDir, @@ -79,7 +66,7 @@ describe('CLI secrets', () => { `${vaultName}:MySecret`, ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const list = await vaultOps.listSecrets(vault); @@ -103,7 +90,7 @@ describe('CLI secrets', () => { command = ['secrets', 'delete', '-np', dataDir, `${vaultName}:MySecret`]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); list = await vaultOps.listSecrets(vault); @@ -119,7 +106,7 @@ describe('CLI secrets', () => { command = ['secrets', 'get', '-np', dataDir, `${vaultName}:MySecret`]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -134,7 +121,7 @@ describe('CLI secrets', () => { command = ['secrets', 'list', '-np', dataDir, vaultName]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -152,7 +139,7 @@ describe('CLI secrets', () => { '-r', ]; - const result = await 
utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); await vaultOps.addSecret(vault, 'dir1/MySecret1', 'this is the secret 1'); @@ -184,7 +171,7 @@ describe('CLI secrets', () => { 'MyRenamedSecret', ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const list = await vaultOps.listSecrets(vault); @@ -214,7 +201,7 @@ describe('CLI secrets', () => { `${vaultName}:MySecret`, ]; - const result2 = await utils.pkStdio([...command], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); const list = await vaultOps.listSecrets(vault); @@ -249,7 +236,7 @@ describe('CLI secrets', () => { command = ['secrets', 'dir', '-np', dataDir, secretDir, vaultName]; - const result2 = await utils.pkStdio([...command], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); list = await vaultOps.listSecrets(vault); diff --git a/tests/bin/sessions.test.ts b/tests/bin/sessions.test.ts index 6cb95c393..b688eb2ef 100644 --- a/tests/bin/sessions.test.ts +++ b/tests/bin/sessions.test.ts @@ -14,23 +14,24 @@ import { sleep } from '@/utils'; import config from '@/config'; import * as clientErrors from '@/client/errors'; import * as testBinUtils from './utils'; +import * as testUtils from '../utils'; -/** - * Mock prompts module which is used prompt for password - */ jest.mock('prompts'); const mockedPrompts = mocked(prompts); -describe('CLI Sessions', () => { +describe('sessions', () => { const logger = new Logger('sessions test', LogLevel.WARN, [ new StreamHandler(), ]); - let pkAgentClose; + let globalAgentDir; + let globalAgentPassword; + let globalAgentClose; beforeAll(async () => { - pkAgentClose = await testBinUtils.pkAgent(); - }, global.maxTimeout); 
+ ({ globalAgentDir, globalAgentPassword, globalAgentClose } = + await testUtils.setupGlobalAgent(logger)); + }, globalThis.maxTimeout); afterAll(async () => { - await pkAgentClose(); + await globalAgentClose(); }); let dataDir: string; beforeEach(async () => { @@ -46,10 +47,7 @@ describe('CLI Sessions', () => { }); test('serial commands refresh the session token', async () => { const session = await Session.createSession({ - sessionTokenPath: path.join( - global.binAgentDir, - config.defaults.tokenBase, - ), + sessionTokenPath: path.join(globalAgentDir, config.defaults.tokenBase), fs, logger, }); @@ -57,10 +55,10 @@ describe('CLI Sessions', () => { ({ exitCode } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); const token1 = await session.readToken(); @@ -71,10 +69,10 @@ describe('CLI Sessions', () => { ({ exitCode } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, + PK_NODE_PATH: globalAgentDir, + PK_PASSWORD: globalAgentPassword, }, - global.binAgentDir, + globalAgentDir, )); expect(exitCode).toBe(0); const token2 = await session.readToken(); @@ -87,11 +85,11 @@ describe('CLI Sessions', () => { ({ exitCode, stderr } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_PASSWORD: 'invalid', PK_TOKEN: 'token', }, - global.binAgentDir, + globalAgentDir, )); testBinUtils.expectProcessError( exitCode, @@ -102,11 +100,11 @@ describe('CLI Sessions', () => { ({ exitCode, stderr } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_PASSWORD: 'invalid', PK_TOKEN: undefined, }, - global.binAgentDir, + globalAgentDir, )); 
testBinUtils.expectProcessError( exitCode, @@ -117,11 +115,11 @@ describe('CLI Sessions', () => { ({ exitCode, stderr } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, PK_PASSWORD: undefined, PK_TOKEN: 'token', }, - global.binAgentDir, + globalAgentDir, )); testBinUtils.expectProcessError( exitCode, @@ -130,13 +128,13 @@ describe('CLI Sessions', () => { ); }); test('prompt for password to authenticate attended commands', async () => { - const password = global.binAgentPassword; + const password = globalAgentPassword; await testBinUtils.pkStdio( ['agent', 'lock'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); mockedPrompts.mockClear(); mockedPrompts.mockImplementation(async (_opts: any) => { @@ -145,9 +143,9 @@ describe('CLI Sessions', () => { const { exitCode } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); // Prompted for password 1 time @@ -158,11 +156,11 @@ describe('CLI Sessions', () => { await testBinUtils.pkStdio( ['agent', 'lock'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); - const validPassword = global.binAgentPassword; + const validPassword = globalAgentPassword; const invalidPassword = 'invalid'; mockedPrompts.mockClear(); mockedPrompts @@ -171,9 +169,9 @@ describe('CLI Sessions', () => { const { exitCode } = await testBinUtils.pkStdio( ['agent', 'status'], { - PK_NODE_PATH: global.binAgentDir, + PK_NODE_PATH: globalAgentDir, }, - global.binAgentDir, + globalAgentDir, ); expect(exitCode).toBe(0); // Prompted for password 2 times diff --git a/tests/bin/utils.retryAuth.test.ts b/tests/bin/utils.retryAuthentication.test.ts similarity index 98% rename from tests/bin/utils.retryAuth.test.ts rename to 
tests/bin/utils.retryAuthentication.test.ts index 79f3fce08..9a97f050f 100644 --- a/tests/bin/utils.retryAuth.test.ts +++ b/tests/bin/utils.retryAuthentication.test.ts @@ -4,9 +4,6 @@ import mockedEnv from 'mocked-env'; import { utils as clientUtils, errors as clientErrors } from '@/client'; import * as binUtils from '@/bin/utils'; -/** - * Mock prompts module which is used prompt for password - */ jest.mock('prompts'); const mockedPrompts = mocked(prompts); diff --git a/tests/bin/utils.ts b/tests/bin/utils.ts index 8b8bf115b..47211b018 100644 --- a/tests/bin/utils.ts +++ b/tests/bin/utils.ts @@ -9,13 +9,9 @@ import readline from 'readline'; import * as mockProcess from 'jest-mock-process'; import mockedEnv from 'mocked-env'; import nexpect from 'nexpect'; -import lock from 'fd-lock'; import Logger from '@matrixai/logger'; import main from '@/bin/polykey'; import * as binUtils from '@/bin/utils'; -import { Status, errors as statusErrors } from '@/status'; -import config from '@/config'; -import { never, sleep } from '@/utils'; /** * Runs pk command functionally @@ -43,6 +39,10 @@ async function pkStdio( }> { cwd = cwd ?? (await fs.promises.mkdtemp(path.join(os.tmpdir(), 'polykey-test-'))); + // Recall that we attempt to connect to all specified seed nodes on agent start. + // Therefore, for testing purposes only, we default the seed nodes as empty + // (if not defined in the env) to ensure no attempted connections. A regular + // PolykeyAgent is expected to initially connect to the mainnet seed nodes env['PK_SEED_NODES'] = env['PK_SEED_NODES'] ?? ''; // Parse the arguments of process.stdout.write and process.stderr.write const parseArgs = (args) => { @@ -132,6 +132,10 @@ async function pkExec( ...process.env, ...env, }; + // Recall that we attempt to connect to all specified seed nodes on agent start. + // Therefore, for testing purposes only, we default the seed nodes as empty + // (if not defined in the env) to ensure no attempted connections. 
A regular + // PolykeyAgent is expected to initially connect to the mainnet seed nodes env['PK_SEED_NODES'] = env['PK_SEED_NODES'] ?? ''; const tsConfigPath = path.resolve( path.join(global.projectDir, 'tsconfig.json'), @@ -197,6 +201,10 @@ async function pkSpawn( ...process.env, ...env, }; + // Recall that we attempt to connect to all specified seed nodes on agent start. + // Therefore, for testing purposes only, we default the seed nodes as empty + // (if not defined in the env) to ensure no attempted connections. A regular + // PolykeyAgent is expected to initially connect to the mainnet seed nodes env['PK_SEED_NODES'] = env['PK_SEED_NODES'] ?? ''; const tsConfigPath = path.resolve( path.join(global.projectDir, 'tsconfig.json'), @@ -260,6 +268,10 @@ async function pkExpect({ ...process.env, ...env, }; + // Recall that we attempt to connect to all specified seed nodes on agent start. + // Therefore, for testing purposes only, we default the seed nodes as empty + // (if not defined in the env) to ensure no attempted connections. A regular + // PolykeyAgent is expected to initially connect to the mainnet seed nodes env['PK_SEED_NODES'] = env['PK_SEED_NODES'] ?? 
''; const tsConfigPath = path.resolve( path.join(global.projectDir, 'tsconfig.json'), @@ -309,111 +321,6 @@ async function pkExpect({ }); } -/** - * Creates a PK agent running in the global path - * Use this in beforeAll, and use the result in afterAll - * Uses a references directory as a reference count - * Uses fd-lock to serialise access to the pkAgent - * This means all test modules using this will be serialised - * Any beforeAll must use global.maxTimeout - * Tips for usage: - * * Do not restart this global agent - * * Ensure client-side side-effects are removed at the end of each test - * * Ensure server-side side-effects are removed at the end of each test - */ -async function pkAgent( - args: Array = [], - env: Record = {}, -): Promise<() => Promise> { - // The references directory will act like our reference count - await fs.promises.mkdir(path.join(global.binAgentDir, 'references'), { - recursive: true, - }); - const reference = Math.floor(Math.random() * 1000).toString(); - // Plus 1 to the reference count - await fs.promises.writeFile( - path.join(global.binAgentDir, 'references', reference), - reference, - ); - // This lock ensures serialised usage of global pkAgent - // It is placed after reference counting - // Because multiple test processes will queue up references - const testLockPath = path.join(global.binAgentDir, 'test.lock'); - const testLockFile = await fs.promises.open( - testLockPath, - fs.constants.O_WRONLY | fs.constants.O_CREAT, - ); - while (!lock(testLockFile.fd)) { - await sleep(1000); - } - // Here the agent server is part of the jest process - const { exitCode, stderr } = await pkStdio( - [ - 'agent', - 'start', - // 1024 is the smallest size and is faster to start - '--root-key-pair-bits', - '1024', - ...args, - ], - { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, - ...env, - }, - global.binAgentDir, - ); - // If the status is locked, we can ignore the start call - if (exitCode !== 0) { - // Last line 
of STDERR - const stdErrLine = stderr.trim().split('\n').pop(); - const e = new statusErrors.ErrorStatusLocked(); - // Expected output for ErrorStatusLocked - const eOutput = binUtils - .outputFormatter({ - type: 'error', - name: e.name, - description: e.description, - message: e.message, - }) - .trim(); - if (exitCode !== e.exitCode || stdErrLine !== eOutput) { - never(); - } - } - return async () => { - await fs.promises.rm( - path.join(global.binAgentDir, 'references', reference), - ); - lock.unlock(testLockFile.fd); - await testLockFile.close(); - // If the pids directory is not empty, there are other processes still running - try { - await fs.promises.rmdir(path.join(global.binAgentDir, 'references')); - } catch (e) { - if (e.code === 'ENOTEMPTY') { - return; - } - throw e; - } - const status = new Status({ - statusPath: path.join(global.binAgentDir, config.defaults.statusBase), - fs, - }); - await pkStdio( - ['agent', 'stop'], - { - PK_NODE_PATH: global.binAgentDir, - PK_PASSWORD: global.binAgentPassword, - }, - global.binAgentDir, - ); - // `pk agent stop` is asynchronous, need to wait for it to be DEAD - // This also means STDERR from the stopping agent may appear on the test logs - await status.waitFor('DEAD'); - }; -} - /** * Waits for child process to exit * When process is terminated with signal @@ -457,7 +364,6 @@ export { pkExec, pkSpawn, pkExpect, - pkAgent, processExit, expectProcessError, }; diff --git a/tests/bin/vaults.test.ts b/tests/bin/vaults/vaults.test.ts similarity index 89% rename from tests/bin/vaults.test.ts rename to tests/bin/vaults/vaults.test.ts index 818e919a8..86cadb604 100644 --- a/tests/bin/vaults.test.ts +++ b/tests/bin/vaults/vaults.test.ts @@ -7,7 +7,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; import { makeNodeId } from '@/nodes/utils'; import { makeVaultIdPretty } from '@/vaults/utils'; -import * as utils from './utils'; +import * as testBinUtils from 
'../utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -86,7 +86,7 @@ describe('CLI vaults', () => { vaultNumber = 0; // Authorize session - await utils.pkStdio( + await testBinUtils.pkStdio( ['agent', 'unlock', '-np', dataDir, '--password-file', passwordFile], {}, dataDir, @@ -111,16 +111,16 @@ describe('CLI vaults', () => { await polykeyAgent.vaultManager.createVault('Vault1' as VaultName); await polykeyAgent.vaultManager.createVault('Vault2' as VaultName); - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); }); }); describe('commandCreateVaults', () => { test('should create vaults', async () => { command = ['vaults', 'create', '-np', dataDir, 'MyTestVault']; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const result2 = await utils.pkStdio( + const result2 = await testBinUtils.pkStdio( ['vaults', 'touch', '-np', dataDir, 'MyTestVault2'], {}, dataDir, @@ -143,7 +143,7 @@ describe('CLI vaults', () => { const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const list = (await polykeyAgent.vaultManager.listVaults()).keys(); @@ -166,7 +166,7 @@ describe('CLI vaults', () => { const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); // Exit code of the exception expect(result.exitCode).toBe(10); @@ -188,7 +188,7 @@ describe('CLI vaults', () => { id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result2 = await 
utils.pkStdio([...command], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); const list = (await polykeyAgent.vaultManager.listVaults()).keys(); @@ -206,7 +206,7 @@ describe('CLI vaults', () => { const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); }); }); @@ -217,7 +217,7 @@ describe('CLI vaults', () => { const id = await polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); fail(); // FIXME methods not implemented. @@ -245,7 +245,7 @@ describe('CLI vaults', () => { // await polykeyAgent.vaults.setVaultPermissions(node2.id, id!); // await polykeyAgent.vaults.setVaultPermissions(node3.id, id!); - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); // Const sharedNodes = await polykeyAgent.vaults.getVaultPermissions( // id!, @@ -272,7 +272,7 @@ describe('CLI vaults', () => { // await polykeyAgent.vaults.unsetVaultPermissions(node2.id, vault.vaultId); - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); }); }); @@ -306,16 +306,16 @@ describe('CLI vaults', () => { // ); const targetNodeId = targetPolykeyAgent.nodeManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.ingressHost; - const targetPort = targetPolykeyAgent.revProxy.ingressPort; + const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); + const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); await 
polykeyAgent.nodeManager.setNode(targetNodeId, { host: targetHost, port: targetPort, }); // Client agent: Start sending hole-punching packets to the target await polykeyAgent.nodeManager.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.egressHost; - const clientEgressPort = polykeyAgent.fwdProxy.egressPort; + const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); + const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); // Server agent: start sending hole-punching packets back to the 'client' // agent (in order to establish a connection) await targetPolykeyAgent.nodeManager.openConnection( @@ -337,7 +337,7 @@ describe('CLI vaults', () => { // Vault does not exist on the source PolykeyAgent so the pull command throws an error which // caught, the error is checked and if it is ErrorVaultUndefined, then the Agent attempts a // clone instead - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); // Const list = (await polykeyAgent.vaults.listVaults()).map( @@ -382,16 +382,16 @@ describe('CLI vaults', () => { // ); const targetNodeId = targetPolykeyAgent.nodeManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.ingressHost; - const targetPort = targetPolykeyAgent.revProxy.ingressPort; + const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); + const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); await polykeyAgent.nodeManager.setNode(targetNodeId, { host: targetHost, port: targetPort, }); // Client agent: Start sending hole-punching packets to the target await polykeyAgent.nodeManager.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.egressHost; - const clientEgressPort = polykeyAgent.fwdProxy.egressPort; + const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); + const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); // Server agent: start 
sending hole-punching packets back to the 'client' // agent (in order to establish a connection) await targetPolykeyAgent.nodeManager.openConnection( @@ -425,7 +425,7 @@ describe('CLI vaults', () => { targetNodeId, ]; - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); // Await expect(clonedVault.listSecrets()).resolves.toStrictEqual([ @@ -454,16 +454,16 @@ describe('CLI vaults', () => { }); const targetNodeId = targetPolykeyAgent.nodeManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.ingressHost; - const targetPort = targetPolykeyAgent.revProxy.ingressPort; + const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); + const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); await polykeyAgent.nodeManager.setNode(targetNodeId, { host: targetHost, port: targetPort, }); // Client agent: Start sending hole-punching packets to the target await polykeyAgent.nodeManager.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.egressHost; - const clientEgressPort = polykeyAgent.fwdProxy.egressPort; + const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); + const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); // Server agent: start sending hole-punching packets back to the 'client' // agent (in order to establish a connection) await targetPolykeyAgent.nodeManager.openConnection( @@ -498,7 +498,7 @@ describe('CLI vaults', () => { '-ni', targetNodeId as string, ]; - const result = await utils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command]); expect(result.exitCode).toBe(0); await targetPolykeyAgent.stop(); @@ -526,7 +526,7 @@ describe('CLI vaults', () => { const command = ['vaults', 'version', '-np', dataDir, vaultName, ver1Oid]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); 
expect(result.exitCode).toBe(0); const fileContents = await vault.access(async (efs) => { @@ -553,12 +553,12 @@ describe('CLI vaults', () => { const command = ['vaults', 'version', '-np', dataDir, vaultName, ver1Oid]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); const command2 = ['vaults', 'version', '-np', dataDir, vaultName, 'last']; - const result2 = await utils.pkStdio([...command2], {}, dataDir); + const result2 = await testBinUtils.pkStdio([...command2], {}, dataDir); expect(result2.exitCode).toBe(0); }); test('should handle invalid version IDs', async () => { @@ -575,7 +575,7 @@ describe('CLI vaults', () => { 'NOT_A_VALID_CHECKOUT_ID', ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(10); expect(result.stderr).toContain('ErrorVaultCommitUndefined'); @@ -590,7 +590,7 @@ describe('CLI vaults', () => { 'NOT_A_VALID_CHECKOUT_ID', ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(10); expect(result.stderr).toContain('ErrorVaultUndefined'); }); @@ -629,7 +629,7 @@ describe('CLI vaults', () => { test('Should get all commits', async () => { const command = ['vaults', 'log', '-np', dataDir, vaultName]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); expect(result.stdout).toContain(commit1Oid); expect(result.stdout).toContain(commit2Oid); @@ -638,7 +638,7 @@ describe('CLI vaults', () => { test('should get a part of the log', async () => { const command = ['vaults', 'log', '-np', dataDir, '-d', '2', vaultName]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result 
= await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); expect(result.stdout).not.toContain(commit1Oid); expect(result.stdout).toContain(commit2Oid); @@ -657,7 +657,7 @@ describe('CLI vaults', () => { commit2Oid, ]; - const result = await utils.pkStdio([...command], {}, dataDir); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); expect(result.stdout).not.toContain(commit1Oid); expect(result.stdout).toContain(commit2Oid); diff --git a/tests/bootstrap/bootstrap.test.ts b/tests/bootstrap/bootstrap.test.ts deleted file mode 100644 index 7bad74e73..000000000 --- a/tests/bootstrap/bootstrap.test.ts +++ /dev/null @@ -1,81 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import os from 'os'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import PolykeyAgent from '@/PolykeyAgent'; -import * as bootstrapUtils from '@/bootstrap/utils'; -import { Status } from '@/status'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); - -describe('Bootstrap', () => { - const logger = new Logger('AgentServerTest', LogLevel.WARN, [ - new StreamHandler(), - ]); - let dataDir: string; - let nodePath: string; - - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'status-test-')); - nodePath = path.join(dataDir, 'Node'); - await fs.promises.mkdir(nodePath); - }); - afterEach(async () => { - await fs.promises.rm(nodePath, { - force: true, - recursive: true, - }); - }); - - describe('BootstrapPolykeyState', () => { - const password = 'password123'; - test( - 'should create state if no directory', - async () => { - // Await fs.promises.rmdir(nodePath); - await bootstrapUtils.bootstrapState({ nodePath, password, logger }); - // Should have keynode state; - }, - global.polykeyStartupTimeout * 4, - ); - - test('should create 
state if empty directory', async () => { - await bootstrapUtils.bootstrapState({ - nodePath, - password, - logger, - }); - }); - - test( - 'should be able to start agent on created state.', - async () => { - await bootstrapUtils.bootstrapState({ - nodePath, - password, - logger, - }); - const polykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath, - logger, - }); - const statusPath = path.join(nodePath, 'status.json'); - const status = new Status({ - statusPath, - fs, - logger, - }); - await status.waitFor('LIVE', 10000); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); - await status.waitFor('DEAD', 10000); - }, - global.polykeyStartupTimeout * 2, - ); - }); -}); diff --git a/tests/bootstrap/utils.test.ts b/tests/bootstrap/utils.test.ts new file mode 100644 index 000000000..7c4adbe53 --- /dev/null +++ b/tests/bootstrap/utils.test.ts @@ -0,0 +1,169 @@ +import fs from 'fs'; +import path from 'path'; +import os from 'os'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import * as bootstrapUtils from '@/bootstrap/utils'; +import * as bootstrapErrors from '@/bootstrap/errors'; +import { utils as keysUtils } from '@/keys'; +import { errors as statusErrors } from '@/status'; +import config from '@/config'; +import * as testUtils from '../utils'; + +describe('bootstrap/utils', () => { + const logger = new Logger('bootstrap/utils test', LogLevel.WARN, [ + new StreamHandler(), + ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + 
mockedGenerateDeterministicKeyPair.mockRestore(); + }); + let dataDir: string; + beforeEach(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + }); + afterEach(async () => { + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + test('bootstraps new node path', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const password = 'password'; + const recoveryCode = await bootstrapUtils.bootstrapState({ + password, + nodePath, + fs, + logger, + }); + expect(typeof recoveryCode).toBe('string'); + expect( + recoveryCode.split(' ').length === 12 || + recoveryCode.split(' ').length === 24, + ).toBe(true); + const nodePathContents = await fs.promises.readdir(nodePath); + expect(nodePathContents.length > 0).toBe(true); + expect(nodePathContents).toContain(config.defaults.statusBase); + expect(nodePathContents).toContain(config.defaults.stateBase); + const stateContents = await fs.promises.readdir( + path.join(nodePath, config.defaults.stateBase), + ); + expect(stateContents).toContain(config.defaults.keysBase); + expect(stateContents).toContain(config.defaults.dbBase); + expect(stateContents).toContain(config.defaults.vaultsBase); + }); + test('bootstraps existing but empty node path', async () => { + const nodePath = path.join(dataDir, 'polykey'); + await fs.promises.mkdir(nodePath); + const password = 'password'; + const recoveryCode = await bootstrapUtils.bootstrapState({ + password, + nodePath, + fs, + logger, + }); + expect(typeof recoveryCode).toBe('string'); + expect( + recoveryCode.split(' ').length === 12 || + recoveryCode.split(' ').length === 24, + ).toBe(true); + const nodePathContents = await fs.promises.readdir(nodePath); + expect(nodePathContents.length > 0).toBe(true); + expect(nodePathContents).toContain(config.defaults.statusBase); + expect(nodePathContents).toContain(config.defaults.stateBase); + const stateContents = await fs.promises.readdir( + 
path.join(nodePath, config.defaults.stateBase), + ); + expect(stateContents).toContain(config.defaults.keysBase); + expect(stateContents).toContain(config.defaults.dbBase); + expect(stateContents).toContain(config.defaults.vaultsBase); + }); + test('bootstrap fails if non-empty node path', async () => { + // Normal file + const nodePath1 = path.join(dataDir, 'polykey1'); + await fs.promises.mkdir(nodePath1); + await fs.promises.writeFile( + path.join(nodePath1, 'random'), + 'normal file', + 'utf-8', + ); + const password = 'password'; + await expect( + bootstrapUtils.bootstrapState({ + password, + nodePath: nodePath1, + fs, + logger, + }), + ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); + // Hidden file + const nodePath2 = path.join(dataDir, 'polykey2'); + await fs.promises.mkdir(nodePath2); + await fs.promises.writeFile( + path.join(nodePath2, '.random'), + 'hidden file', + 'utf-8', + ); + await expect( + bootstrapUtils.bootstrapState({ + password, + nodePath: nodePath2, + fs, + logger, + }), + ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); + // Directory + const nodePath3 = path.join(dataDir, 'polykey3'); + await fs.promises.mkdir(nodePath3); + await fs.promises.mkdir(path.join(nodePath3, 'random')); + await expect( + bootstrapUtils.bootstrapState({ + password, + nodePath: nodePath3, + fs, + logger, + }), + ).rejects.toThrowError(bootstrapErrors.ErrorBootstrapExistingState); + }); + test('concurrent bootstrapping results in 1 success', async () => { + const nodePath = path.join(dataDir, 'polykey'); + const password = 'password'; + const [result1, result2] = await Promise.allSettled([ + bootstrapUtils.bootstrapState({ + password, + nodePath, + fs, + logger, + }), + bootstrapUtils.bootstrapState({ + password, + nodePath, + fs, + logger, + }), + ]); + expect( + (result1.status === 'rejected' && + result1.reason instanceof statusErrors.ErrorStatusLocked) || + (result2.status === 'rejected' && + result2.reason instanceof 
statusErrors.ErrorStatusLocked), + ).toBe(true); + expect( + (result1.status === 'fulfilled' && typeof result1.value === 'string') || + (result2.status === 'fulfilled' && typeof result2.value === 'string'), + ).toBe(true); + }); +}); diff --git a/tests/claims/utils.test.ts b/tests/claims/utils.test.ts index fccd00136..f00a32bfb 100644 --- a/tests/claims/utils.test.ts +++ b/tests/claims/utils.test.ts @@ -3,59 +3,24 @@ import type { PrivateKeyPem, PublicKeyPem } from '@/keys/types'; import type { IdentityId, ProviderId } from '@/identities/types'; import type { NodeId } from '@/nodes/types'; import type { Claim } from '@/claims/types'; -import os from 'os'; -import path from 'path'; -import fs from 'fs'; import { createPublicKey, createPrivateKey } from 'crypto'; import { generalVerify, GeneralSign } from 'jose'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import canonicalize from 'canonicalize'; -import { KeyManager } from '@/keys'; import { sleep } from '@/utils'; - import * as claimsUtils from '@/claims/utils'; import * as claimsErrors from '@/claims/errors'; import * as keysUtils from '@/keys/utils'; +import * as testUtils from '../utils'; -// Mocks. 
-jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); - -describe('Claims utils', () => { - const password = 'password'; - const logger = new Logger('Claims Test', LogLevel.WARN, [ - new StreamHandler(), - ]); - let dataDir: string; - let keyManager: KeyManager; +describe('claims/utils', () => { let publicKey: PublicKeyPem; let privateKey: PrivateKeyPem; - - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const keysPath = `${dataDir}/keys`; - keyManager = await KeyManager.createKeyManager({ - password, - keysPath, - logger, - }); - publicKey = keyManager.getRootKeyPairPem().publicKey; - privateKey = keyManager.getRootKeyPairPem().privateKey; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + const globalKeyPairPem = keysUtils.keyPairToPem(globalKeyPair); + publicKey = globalKeyPairPem.publicKey; + privateKey = globalKeyPairPem.privateKey; }); - afterEach(async () => { - await keyManager.stop(); - await keyManager.destroy(); - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - test('creates a claim (both node and identity)', async () => { const nodeClaim = await claimsUtils.createClaim({ privateKey, @@ -348,7 +313,7 @@ describe('Claims utils', () => { expect(await claimsUtils.verifyClaimSignature(claim, publicKey)).toBe(true); // Create some dummy public key, and check that this does not verify - const dummyKeyPair = await keysUtils.generateKeyPair(4096); + const dummyKeyPair = await keysUtils.generateKeyPair(2048); const dummyPublicKey = await keysUtils.publicKeyToPem( dummyKeyPair.publicKey, ); diff --git a/tests/client/GRPCClientClient.test.ts b/tests/client/GRPCClientClient.test.ts index f32216641..1bd989c18 100644 --- a/tests/client/GRPCClientClient.test.ts +++ b/tests/client/GRPCClientClient.test.ts @@ -6,64 +6,71 
@@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { GRPCClientClient } from '@/client'; -import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import { PolykeyAgent } from '@'; +import { utils as keysUtils } from '@/keys'; import { Status } from '@/status'; -import * as binProcessors from '@/bin/utils/processors'; import { Session } from '@/sessions'; -import config from '@/config'; import { errors as clientErrors } from '@/client'; -import * as testUtils from './utils'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import config from '@/config'; +import * as binProcessors from '@/bin/utils/processors'; +import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; +import * as testClientUtils from './utils'; +import * as testUtils from '../utils'; -describe('GRPCClientClient', () => { +describe(GRPCClientClient.name, () => { const password = 'password'; - const logger = new Logger('GRPCClientClientTest', LogLevel.WARN, [ + const logger = new Logger(`${GRPCClientClient.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; let client: GRPCClientClient; let server: grpc.Server; let port: number; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let dataDir: string; let nodePath: string; let nodeId: NodeId; let session: Session; beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); nodePath = 
path.join(dataDir, 'node'); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath, logger: logger, }); - nodeId = polykeyAgent.nodeManager.getNodeId(); - [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + nodeId = pkAgent.nodeManager.getNodeId(); + [server, port] = await testClientUtils.openTestClientServer({ + pkAgent, }); const sessionTokenPath = path.join(nodePath, 'sessionToken'); const session = new Session({ sessionTokenPath, fs, logger }); - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); await session.start({ sessionToken, }); - }, global.polykeyStartupTimeout); + }); afterAll(async () => { await client.destroy(); - await testUtils.closeTestClientServer(server); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await testClientUtils.closeTestClientServer(server); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, recursive: true, }); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); }); test('cannot be called when destroyed', async () => { client = await GRPCClientClient.createGRPCClientClient({ @@ -97,6 +104,7 @@ describe('GRPCClientClient', () => { ); const status = new Status({ statusPath: path.join(nodePath, config.defaults.statusBase), + statusLockPath: path.join(nodePath, config.defaults.statusLockBase), fs, logger, }); diff --git a/tests/client/rpcAgent.test.ts b/tests/client/rpcAgent.test.ts deleted file mode 100644 index 68a623f5f..000000000 --- a/tests/client/rpcAgent.test.ts +++ /dev/null @@ -1,123 +0,0 @@ -import type * as grpc from '@grpc/grpc-js'; -import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb'; -import os from 'os'; -import path from 'path'; -import fs from 'fs'; -import Logger, { LogLevel, StreamHandler } from 
'@matrixai/logger'; -import { PolykeyAgent } from '@'; -import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; -import { KeyManager } from '@/keys'; -import { ForwardProxy } from '@/network'; -import * as grpcUtils from '@/grpc/utils'; -import { Status } from '@/status'; -import * as testUtils from './utils'; - -// Mocks. -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); - -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ -describe('Agent client service', () => { - const password = 'password'; - const logger = new Logger('AgentClientServerTest', LogLevel.WARN, [ - new StreamHandler(), - ]); - let client: ClientServiceClient; - let server: grpc.Server; - let port: number; - let dataDir: string; - let polykeyAgent: PolykeyAgent; - let keyManager: KeyManager; - let passwordFile: string; - let callCredentials: grpc.Metadata; - - beforeAll(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - - passwordFile = path.join(dataDir, 'password'); - await fs.promises.writeFile(passwordFile, 'password'); - const keysPath = path.join(dataDir, 'keys'); - - keyManager = await KeyManager.createKeyManager({ - keysPath, - password, - logger, - }); - - const fwdProxy = new ForwardProxy({ - authToken: 'abc', - logger: logger, - }); - - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir, - logger, - fwdProxy, - keyManager, - }); - - [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, - secure: false, - }); - - client = await testUtils.openSimpleClientClient(port); - }, global.polykeyStartupTimeout); - afterAll(async () => { - await testUtils.closeTestClientServer(server); - testUtils.closeSimpleClientClient(client); - - await polykeyAgent.stop(); - await polykeyAgent.destroy(); - - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - await fs.promises.rm(passwordFile); - }); - beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); - callCredentials = testUtils.createCallCredentials(sessionToken); - }); - - test( - 'stopping the agent', - async () => { - const agentStop = grpcUtils.promisifyUnaryCall( - client, - client.agentStop, - ); - - const emptyMessage = new utilsPB.EmptyMessage(); - await agentStop(emptyMessage, callCredentials); - - const statusPath = path.join(polykeyAgent.nodePath, 'status'); - const status = new Status({ - 
statusPath, - fs, - logger, - }); - await status.waitFor('DEAD', 10000); - }, - global.polykeyStartupTimeout * 2, - ); -}); diff --git a/tests/client/rpcGestalts.test.ts b/tests/client/rpcGestalts.test.ts index 3fd020e88..72e6fe96d 100644 --- a/tests/client/rpcGestalts.test.ts +++ b/tests/client/rpcGestalts.test.ts @@ -24,26 +24,12 @@ import * as nodesUtils from '@/nodes/utils'; import * as testUtils from './utils'; import TestProvider from '../identities/TestProvider'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ describe('Client service', () => { const password = 'password'; const logger = new Logger('ClientServerTest', LogLevel.WARN, [ @@ -53,7 +39,7 @@ describe('Client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let nodeManager: NodeManager; let gestaltGraph: GestaltGraph; @@ -110,7 +96,7 @@ describe('Client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -118,16 +104,16 @@ describe('Client service', () => { keyManager, }); - nodeManager = polykeyAgent.nodeManager; - gestaltGraph = polykeyAgent.gestaltGraph; - identitiesManager = polykeyAgent.identitiesManager; + nodeManager = pkAgent.nodeManager; + gestaltGraph = pkAgent.gestaltGraph; + identitiesManager = pkAgent.identitiesManager; // Adding provider const testProvider = new TestProvider(); identitiesManager.registerProvider(testProvider); [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -142,8 +128,8 @@ describe('Client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -152,7 +138,7 @@ describe('Client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); afterEach(async () => { diff --git a/tests/client/rpcIdentities.test.ts b/tests/client/rpcIdentities.test.ts index 47da0d94c..b31cb5e1a 100644 --- a/tests/client/rpcIdentities.test.ts +++ 
b/tests/client/rpcIdentities.test.ts @@ -15,26 +15,12 @@ import * as grpcUtils from '@/grpc/utils'; import * as testUtils from './utils'; import TestProvider from '../identities/TestProvider'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ describe('Identities Client service', () => { const password = 'password'; const logger = new Logger('IdentitiesClientServerTest', LogLevel.WARN, [ @@ -44,7 +30,7 @@ describe('Identities Client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let identitiesManager: IdentitiesManager; let passwordFile: string; @@ -79,7 +65,7 @@ describe('Identities Client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -87,14 +73,14 @@ describe('Identities Client service', () => { keyManager, }); - identitiesManager = polykeyAgent.identitiesManager; + identitiesManager = pkAgent.identitiesManager; // Adding provider. testProvider = new TestProvider(); identitiesManager.registerProvider(testProvider); [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -104,8 +90,8 @@ describe('Identities Client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -114,7 +100,7 @@ describe('Identities Client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); @@ -153,13 +139,13 @@ describe('Identities Client service', () => { step++; } expect( - await polykeyAgent.identitiesManager.getToken( + await pkAgent.identitiesManager.getToken( testToken.providerId, testToken.identityId, ), ).toEqual(testToken.tokenData); 
expect(genReadable.stream.destroyed).toBeTruthy(); - await polykeyAgent.identitiesManager.delToken( + await pkAgent.identitiesManager.delToken( testToken.providerId, testToken.identityId, ); @@ -293,9 +279,9 @@ describe('Identities Client service', () => { expect(claim.payload.data.type).toBe('identity'); expect(claim.payload.data.provider).toBe(testToken.providerId); expect(claim.payload.data.identity).toBe(testToken.identityId); - expect(claim.payload.data.node).toBe(polykeyAgent.nodeManager.getNodeId()); + expect(claim.payload.data.node).toBe(pkAgent.nodeManager.getNodeId()); - await polykeyAgent.identitiesManager.delToken( + await pkAgent.identitiesManager.delToken( testToken.providerId, testToken.identityId, ); diff --git a/tests/client/rpcKeys.test.ts b/tests/client/rpcKeys.test.ts index 14a926fba..b48c41832 100644 --- a/tests/client/rpcKeys.test.ts +++ b/tests/client/rpcKeys.test.ts @@ -14,26 +14,12 @@ import { ForwardProxy } from '@/network'; import * as grpcUtils from '@/grpc/utils'; import * as testUtils from './utils'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. 
- * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ describe('Keys client service', () => { const password = 'password'; const logger = new Logger('KeysClientServerTest', LogLevel.WARN, [ @@ -44,7 +30,7 @@ describe('Keys client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let nodeManager: NodeManager; let passwordFile: string; @@ -70,7 +56,7 @@ describe('Keys client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -78,10 +64,10 @@ describe('Keys client service', () => { keyManager, }); - nodeManager = polykeyAgent.nodeManager; + nodeManager = pkAgent.nodeManager; [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -91,8 +77,8 @@ describe('Keys client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -101,7 +87,7 @@ describe('Keys client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); @@ -133,11 +119,11 @@ describe('Keys client service', () => { const keyPair = keyManager.getRootKeyPairPem(); const nodeId1 = nodeManager.getNodeId(); // @ts-ignore - get protected property - const fwdTLSConfig1 = polykeyAgent.fwdProxy.tlsConfig; + const fwdTLSConfig1 = pkAgent.fwdProxy.tlsConfig; // @ts-ignore - get protected property - const revTLSConfig1 = 
polykeyAgent.revProxy.tlsConfig; + const revTLSConfig1 = pkAgent.revProxy.tlsConfig; // @ts-ignore - get protected property - const serverTLSConfig1 = polykeyAgent.grpcServerClient.tlsConfig; + const serverTLSConfig1 = pkAgent.grpcServerClient.tlsConfig; const expectedTLSConfig1: TLSConfig = { keyPrivatePem: keyPair.privateKey, certChainPem: await keyManager.getRootCertChainPem(), @@ -153,11 +139,11 @@ describe('Keys client service', () => { const key = await getRootKeyPair(emptyMessage, callCredentials); const nodeId2 = nodeManager.getNodeId(); // @ts-ignore - get protected property - const fwdTLSConfig2 = polykeyAgent.fwdProxy.tlsConfig; + const fwdTLSConfig2 = pkAgent.fwdProxy.tlsConfig; // @ts-ignore - get protected property - const revTLSConfig2 = polykeyAgent.revProxy.tlsConfig; + const revTLSConfig2 = pkAgent.revProxy.tlsConfig; // @ts-ignore - get protected property - const serverTLSConfig2 = polykeyAgent.grpcServerClient.tlsConfig; + const serverTLSConfig2 = pkAgent.grpcServerClient.tlsConfig; const expectedTLSConfig2: TLSConfig = { keyPrivatePem: key.getPrivate(), certChainPem: await keyManager.getRootCertChainPem(), @@ -182,11 +168,11 @@ describe('Keys client service', () => { const rootKeyPair1 = keyManager.getRootKeyPairPem(); const nodeId1 = nodeManager.getNodeId(); // @ts-ignore - get protected property - const fwdTLSConfig1 = polykeyAgent.fwdProxy.tlsConfig; + const fwdTLSConfig1 = pkAgent.fwdProxy.tlsConfig; // @ts-ignore - get protected property - const revTLSConfig1 = polykeyAgent.revProxy.tlsConfig; + const revTLSConfig1 = pkAgent.revProxy.tlsConfig; // @ts-ignore - get protected property - const serverTLSConfig1 = polykeyAgent.grpcServerClient.tlsConfig; + const serverTLSConfig1 = pkAgent.grpcServerClient.tlsConfig; const expectedTLSConfig1: TLSConfig = { keyPrivatePem: rootKeyPair1.privateKey, certChainPem: await keyManager.getRootCertChainPem(), @@ -200,11 +186,11 @@ describe('Keys client service', () => { const rootKeyPair2 = 
keyManager.getRootKeyPairPem(); const nodeId2 = nodeManager.getNodeId(); // @ts-ignore - get protected property - const fwdTLSConfig2 = polykeyAgent.fwdProxy.tlsConfig; + const fwdTLSConfig2 = pkAgent.fwdProxy.tlsConfig; // @ts-ignore - get protected property - const revTLSConfig2 = polykeyAgent.revProxy.tlsConfig; + const revTLSConfig2 = pkAgent.revProxy.tlsConfig; // @ts-ignore - get protected property - const serverTLSConfig2 = polykeyAgent.grpcServerClient.tlsConfig; + const serverTLSConfig2 = pkAgent.grpcServerClient.tlsConfig; const expectedTLSConfig2: TLSConfig = { keyPrivatePem: rootKeyPair2.privateKey, certChainPem: await keyManager.getRootCertChainPem(), diff --git a/tests/client/rpcNodes.test.ts b/tests/client/rpcNodes.test.ts index 13769ec6b..833892a13 100644 --- a/tests/client/rpcNodes.test.ts +++ b/tests/client/rpcNodes.test.ts @@ -17,42 +17,31 @@ import { ForwardProxy } from '@/network'; import * as grpcUtils from '@/grpc/utils'; import * as nodesErrors from '@/nodes/errors'; import { makeNodeId } from '@/nodes/utils'; +import config from '@/config'; import { Status } from '@/status'; +import { sleep } from '@/utils'; import * as testUtils from './utils'; -import * as testKeynodeUtils from '../utils'; +import * as testNodesUtils from '../nodes/utils'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. 
- * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. - */ describe('Client service', () => { const password = 'password'; - const logger = new Logger('rpcNodes Test', LogLevel.WARN, [ + const logger = new Logger('rpcNodes Test', LogLevel.DEBUG, [ new StreamHandler(), ]); let client: ClientServiceClient; let server: grpc.Server; let port: number; + let rootDataDir: string; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let nodeManager: NodeManager; let polykeyServer: PolykeyAgent; @@ -70,6 +59,10 @@ describe('Client service', () => { ); beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -89,27 +82,36 @@ describe('Client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, fwdProxy, keyManager, + forwardProxyConfig: { + connTimeoutTime: 2000, + }, }); - nodeManager = polykeyAgent.nodeManager; + nodeManager = pkAgent.nodeManager; [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); client = await testUtils.openSimpleClientClient(port); - polykeyServer = await testKeynodeUtils.setupRemoteKeynode({ - logger: logger, + polykeyServer = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'polykeyServer'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, }); - await polykeyAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { + + await 
pkAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { gestalt: { notify: null, }, @@ -123,14 +125,18 @@ describe('Client service', () => { }); }, global.polykeyStartupTimeout); afterAll(async () => { - await testKeynodeUtils.cleanupRemoteKeynode(polykeyServer); + await polykeyServer.stop(); await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); await fs.promises.rm(dataDir, { force: true, recursive: true, @@ -138,18 +144,18 @@ describe('Client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); - await polykeyAgent.notificationsManager.clearNotifications(); + await pkAgent.notificationsManager.clearNotifications(); await polykeyServer.notificationsManager.clearNotifications(); }); afterEach(async () => { await polykeyServer.start({ password }); - await polykeyAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { + await pkAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { gestalt: {}, vaults: {}, }); - await polykeyAgent.nodeManager.clearDB(); + await pkAgent.nodeManager.clearDB(); await polykeyServer.nodeManager.clearDB(); }); @@ -178,11 +184,19 @@ describe('Client service', () => { 'should ping a node (online + offline)', async () => { const serverNodeId = polykeyServer.nodeManager.getNodeId(); - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); await polykeyServer.stop(); - const statusPath = path.join(polykeyServer.nodePath, 'status.json'); + const statusPath = path.join( + polykeyServer.nodePath, + 
config.defaults.statusBase, + ); + const statusLockPath = path.join( + polykeyServer.nodePath, + config.defaults.statusLockBase, + ); const status = new Status({ statusPath, + statusLockPath, fs, logger, }); @@ -202,12 +216,14 @@ describe('Client service', () => { await polykeyServer.start({ password: 'password' }); await status.waitFor('LIVE', 10000); // Update the details (changed because we started again) - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); const res2 = await nodesPing(nodeMessage, callCredentials); expect(res2.getSuccess()).toEqual(true); // Case 3: pre-existing connection no longer active, so offline await polykeyServer.stop(); await status.waitFor('DEAD', 10000); + // Currently need this timeout - also set COnnectionForward setTImeout to 1000 + await sleep(3000); const res3 = await nodesPing(nodeMessage, callCredentials); expect(res3.getSuccess()).toEqual(false); }, @@ -236,7 +252,7 @@ describe('Client service', () => { test( 'should find a node (contacts remote node)', async () => { - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); // Case 2: node can be found on the remote node const nodeId = nodeId1; const nodeAddress: NodeAddress = { @@ -261,7 +277,7 @@ describe('Client service', () => { test( 'should fail to find a node (contacts remote node)', async () => { - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); // Case 3: node exhausts all contacts and cannot find node const nodeId = nodeId1; // Add a single dummy node to the server node graph database @@ -285,7 +301,7 @@ describe('Client service', () => { global.failedConnectionTimeout * 2, ); test('should send a gestalt invite (no existing invitation)', async () => { - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); + await 
testNodesUtils.nodesConnect(pkAgent, polykeyServer); // Node Claim Case 1: No invitations have been received const nodesClaim = grpcUtils.promisifyUnaryCall( client, @@ -299,8 +315,8 @@ describe('Client service', () => { expect(res.getSuccess()).not.toBeTruthy(); }); test('should send a gestalt invite (existing invitation)', async () => { - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); - await testKeynodeUtils.addRemoteDetails(polykeyServer, polykeyAgent); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(polykeyServer, pkAgent); // Node Claim Case 2: Already received an invite; force invite await polykeyServer.notificationsManager.sendNotification( nodeManager.getNodeId(), @@ -320,7 +336,7 @@ describe('Client service', () => { expect(res.getSuccess()).not.toBeTruthy(); }); test('should claim node', async () => { - await polykeyAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { + await pkAgent.acl.setNodePerm(polykeyServer.nodeManager.getNodeId(), { gestalt: { notify: null, }, @@ -332,8 +348,8 @@ describe('Client service', () => { }, vaults: {}, }); - await testKeynodeUtils.addRemoteDetails(polykeyAgent, polykeyServer); - await testKeynodeUtils.addRemoteDetails(polykeyServer, polykeyAgent); + await testNodesUtils.nodesConnect(pkAgent, polykeyServer); + await testNodesUtils.nodesConnect(polykeyServer, pkAgent); // Node Claim Case 3: Already received an invite; claim node await polykeyServer.notificationsManager.sendNotification( nodeManager.getNodeId(), diff --git a/tests/client/rpcNotifications.test.ts b/tests/client/rpcNotifications.test.ts index ce9187334..9d85038c9 100644 --- a/tests/client/rpcNotifications.test.ts +++ b/tests/client/rpcNotifications.test.ts @@ -1,5 +1,5 @@ import type * as grpc from '@grpc/grpc-js'; -import type { NodeInfo } from '@/nodes/types'; +import type { NodeInfo, NodeAddress } from '@/nodes/types'; import type { NodeManager } from '@/nodes'; import type { 
NotificationData } from '@/notifications/types'; import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb'; @@ -8,7 +8,6 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { utils as idUtils } from '@matrixai/id'; - import { PolykeyAgent } from '@'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import * as notificationsPB from '@/proto/js/polykey/v1/notifications/notifications_pb'; @@ -17,28 +16,14 @@ import { ForwardProxy } from '@/network'; import * as grpcUtils from '@/grpc/utils'; import * as vaultsUtils from '@/vaults/utils'; import * as testUtils from './utils'; -import * as testKeynodeUtils from '../utils'; +import * as testNodesUtils from '../nodes/utils'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ describe('Notifications client service', () => { const password = 'password'; const logger = new Logger('NotificationsClientServerTest', LogLevel.WARN, [ @@ -48,7 +33,7 @@ describe('Notifications client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let nodeManager: NodeManager; let passwordFile: string; @@ -75,7 +60,7 @@ describe('Notifications client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -83,10 +68,10 @@ describe('Notifications client service', () => { keyManager, }); - nodeManager = polykeyAgent.nodeManager; + nodeManager = pkAgent.nodeManager; [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -101,8 +86,8 @@ describe('Notifications client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -111,28 +96,49 @@ describe('Notifications client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); describe('Notifications RPC', () => { + let receiverDataDir: string; + let senderDataDir: string; let receiver: PolykeyAgent; let sender: PolykeyAgent; beforeAll(async () => { - receiver = await testKeynodeUtils.setupRemoteKeynode({ logger }); - sender = await testKeynodeUtils.setupRemoteKeynode({ logger }); - - await sender.nodeManager.setNode(node1.id, { - host: polykeyAgent.revProxy.ingressHost, - 
port: polykeyAgent.revProxy.ingressPort, + receiverDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + senderDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + receiver = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: receiverDataDir, + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, }); + sender = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: senderDataDir, + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + await sender.nodeManager.setNode(node1.id, { + host: pkAgent.revProxy.getIngressHost(), + port: pkAgent.revProxy.getIngressPort(), + } as NodeAddress); await receiver.acl.setNodePerm(node1.id, { gestalt: { notify: null, }, vaults: {}, }); - await polykeyAgent.acl.setNodePerm(sender.nodeManager.getNodeId(), { + await pkAgent.acl.setNodePerm(sender.nodeManager.getNodeId(), { gestalt: { notify: null, }, @@ -140,17 +146,25 @@ describe('Notifications client service', () => { }); }, global.polykeyStartupTimeout * 2); afterAll(async () => { - await testKeynodeUtils.cleanupRemoteKeynode(receiver); - await testKeynodeUtils.cleanupRemoteKeynode(sender); + await sender.stop(); + await receiver.stop(); + await fs.promises.rm(senderDataDir, { + force: true, + recursive: true, + }); + await fs.promises.rm(receiverDataDir, { + force: true, + recursive: true, + }); }); afterEach(async () => { await receiver.notificationsManager.clearNotifications(); await sender.notificationsManager.clearNotifications(); - await polykeyAgent.notificationsManager.clearNotifications(); + await pkAgent.notificationsManager.clearNotifications(); }); test('should send notifications.', async () => { // Set up a remote node receiver and add its details to agent - await testKeynodeUtils.addRemoteDetails(polykeyAgent, receiver); + await testNodesUtils.nodesConnect(pkAgent, receiver); const notificationsSend = grpcUtils.promisifyUnaryCall( @@ -171,7 +185,7 
@@ describe('Notifications client service', () => { type: 'General', message: 'msg', }); - expect(notifs[0].senderId).toEqual(polykeyAgent.nodeManager.getNodeId()); + expect(notifs[0].senderId).toEqual(pkAgent.nodeManager.getNodeId()); expect(notifs[0].isRead).toBeTruthy(); }); test('should read all notifications.', async () => { @@ -292,8 +306,7 @@ describe('Notifications client service', () => { await notificationsClear(emptyMessage, callCredentials); // Call read notifications to check there are none - const notifs = - await polykeyAgent.notificationsManager.readNotifications(); + const notifs = await pkAgent.notificationsManager.readNotifications(); expect(notifs).toEqual([]); }); }); diff --git a/tests/client/rpcSessions.test.ts b/tests/client/rpcSessions.test.ts index 581e93725..89240d8cb 100644 --- a/tests/client/rpcSessions.test.ts +++ b/tests/client/rpcSessions.test.ts @@ -12,7 +12,6 @@ import * as grpcUtils from '@/grpc/utils'; import * as clientUtils from '@/client/utils'; import * as testUtils from './utils'; -// Mocks. 
jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: @@ -28,7 +27,7 @@ describe('Sessions client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let passwordFile: string; let callCredentials: grpc.Metadata; @@ -53,7 +52,7 @@ describe('Sessions client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -62,7 +61,7 @@ describe('Sessions client service', () => { }); [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -72,8 +71,8 @@ describe('Sessions client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -82,30 +81,30 @@ describe('Sessions client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); test('can request a session', async () => { const unlock = grpcUtils.promisifyUnaryCall( client, - client.sessionsUnlock, + client.agentUnlock, ); const pCall = unlock(new utilsPB.EmptyMessage(), callCredentials); const meta = await pCall.meta; const token = clientUtils.decodeAuthToSession(meta); - const result = await polykeyAgent.sessionManager.verifyToken(token!); + const result = await pkAgent.sessionManager.verifyToken(token!); expect(result).toBeTruthy(); }); test('can lock all sessions', async () => { const lockall = grpcUtils.promisifyUnaryCall( client, - 
client.sessionsLockAll, + client.agentLockAll, ); await lockall(new utilsPB.EmptyMessage(), callCredentials); const prevToken = clientUtils.decodeAuthToSession(callCredentials); - const result = await polykeyAgent.sessionManager.verifyToken(prevToken!); + const result = await pkAgent.sessionManager.verifyToken(prevToken!); expect(result).toBeFalsy(); }); }); diff --git a/tests/client/rpcVaults.test.ts b/tests/client/rpcVaults.test.ts index 9c1c28542..5b051f25f 100644 --- a/tests/client/rpcVaults.test.ts +++ b/tests/client/rpcVaults.test.ts @@ -18,26 +18,12 @@ import * as vaultsUtils from '@/vaults/utils'; import { vaultOps } from '@/vaults'; import * as testUtils from './utils'; -// Mocks. jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), generateDeterministicKeyPair: jest.requireActual('@/keys/utils').generateKeyPair, })); -/** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. - * Setting up the PolykeyAgent has been done in a beforeAll block. - * Keep this in mind when adding or editing tests. - * Any side effects need to be undone when the test has completed. - * Preferably within a `afterEach()` since any cleanup will be skipped inside a failing test. - * - * - left over state can cause a test to fail in certain cases. - * - left over state can cause similar tests to succeed when they should fail. - * - starting or stopping the agent within tests should be done on a new instance of the polykey agent. - * - when in doubt test each modified or added test on it's own as well as the whole file. - * - Looking into adding a way to safely clear each domain's DB information with out breaking modules. 
- */ describe('Vaults client service', () => { const password = 'password'; const logger = new Logger('VaultsClientServerTest', LogLevel.WARN, [ @@ -55,7 +41,7 @@ describe('Vaults client service', () => { let server: grpc.Server; let port: number; let dataDir: string; - let polykeyAgent: PolykeyAgent; + let pkAgent: PolykeyAgent; let keyManager: KeyManager; let vaultManager: VaultManager; let passwordFile: string; @@ -81,7 +67,7 @@ describe('Vaults client service', () => { logger: logger, }); - polykeyAgent = await PolykeyAgent.createPolykeyAgent({ + pkAgent = await PolykeyAgent.createPolykeyAgent({ password, nodePath: dataDir, logger, @@ -89,10 +75,10 @@ describe('Vaults client service', () => { keyManager, }); - vaultManager = polykeyAgent.vaultManager; + vaultManager = pkAgent.vaultManager; [server, port] = await testUtils.openTestClientServer({ - polykeyAgent, + pkAgent, secure: false, }); @@ -102,8 +88,8 @@ describe('Vaults client service', () => { await testUtils.closeTestClientServer(server); testUtils.closeSimpleClientClient(client); - await polykeyAgent.stop(); - await polykeyAgent.destroy(); + await pkAgent.stop(); + await pkAgent.destroy(); await fs.promises.rm(dataDir, { force: true, @@ -112,7 +98,7 @@ describe('Vaults client service', () => { await fs.promises.rm(passwordFile); }); beforeEach(async () => { - const sessionToken = await polykeyAgent.sessionManager.createToken(); + const sessionToken = await pkAgent.sessionManager.createToken(); callCredentials = testUtils.createCallCredentials(sessionToken); }); afterEach(async () => { diff --git a/tests/client/service/agentStop.test.ts b/tests/client/service/agentStop.test.ts new file mode 100644 index 000000000..c93b11e37 --- /dev/null +++ b/tests/client/service/agentStop.test.ts @@ -0,0 +1,174 @@ +import type { Host, Port } from '@/network/types'; +import type { SessionToken } from '@/sessions/types'; +import fs from 'fs'; +import path from 'path'; +import os from 'os'; +import Logger, { LogLevel, 
StreamHandler } from '@matrixai/logger'; +import { running } from '@matrixai/async-init'; +import { PolykeyAgent } from '@'; +import { utils as keysUtils } from '@/keys'; +import { GRPCServer } from '@/grpc'; +import { Status } from '@/status'; +import { + GRPCClientClient, + ClientServiceService, + utils as clientUtils, + errors as clientErrors, +} from '@/client'; +import agentStop from '@/client/service/agentStop'; +import config from '@/config'; +import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; +import * as testUtils from '../../utils'; + +describe('agentStop', () => { + const logger = new Logger('agentStop test', LogLevel.WARN, [ + new StreamHandler(), + ]); + const password = 'helloworld'; + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + }); + let dataDir: string; + let nodePath: string; + let pkAgent: PolykeyAgent; + let grpcServer: GRPCServer; + let grpcClient: GRPCClientClient; + beforeEach(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + nodePath = path.join(dataDir, 'polykey'); + // Note that by doing this, the agent the call is stopping is a separate agent + pkAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath, + logger, + }); + const authenticate = clientUtils.authenticator( + pkAgent.sessionManager, + pkAgent.keyManager, + ); + const clientService = { + agentStop: agentStop({ + authenticate, + pkAgent: pkAgent as unknown as PolykeyAgent, + }), + }; + grpcServer = 
new GRPCServer({ logger }); + await grpcServer.start({ + services: [[ClientServiceService, clientService]], + host: '127.0.0.1' as Host, + port: 0 as Port, + }); + grpcClient = await GRPCClientClient.createGRPCClientClient({ + nodeId: pkAgent.keyManager.getNodeId(), + host: '127.0.0.1' as Host, + port: grpcServer.port, + logger, + }); + }); + afterEach(async () => { + await grpcClient.destroy(); + await grpcServer.stop(); + await pkAgent.stop(); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); + }); + test('stops the agent with password', async () => { + const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); + const status = new Status({ + statusPath, + statusLockPath, + fs, + logger, + }); + const request = new utilsPB.EmptyMessage(); + const response = await grpcClient.agentStop( + request, + clientUtils.encodeAuthFromPassword(password), + ); + expect(response).toBeInstanceOf(utilsPB.EmptyMessage); + // While the `agentStop` is asynchronous + // There is a synchronous switch to `running` + expect(pkAgent[running]).toBe(false); + // It may already be stopping + expect(await status.readStatus()).toMatchObject({ + status: expect.stringMatching(/LIVE|STOPPING|DEAD/), + }); + await status.waitFor('DEAD'); + expect(pkAgent[running]).toBe(false); + }); + test('stops the agent with token', async () => { + const token = await pkAgent.sessionManager.createToken(); + const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); + const status = new Status({ + statusPath, + statusLockPath, + fs, + logger, + }); + const request = new utilsPB.EmptyMessage(); + const response = await grpcClient.agentStop( + request, + clientUtils.encodeAuthFromSession(token), + ); + expect(response).toBeInstanceOf(utilsPB.EmptyMessage); + // While the `agentStop` is asynchronous + // There is a 
synchronous switch to `running` + expect(pkAgent[running]).toBe(false); + // It may already be stopping + expect(await status.readStatus()).toMatchObject({ + status: expect.stringMatching(/LIVE|STOPPING|DEAD/), + }); + await status.waitFor('DEAD'); + expect(pkAgent[running]).toBe(false); + }); + test('cannot stop the agent if not authenticated', async () => { + const statusPath = path.join(nodePath, config.defaults.statusBase); + const statusLockPath = path.join(nodePath, config.defaults.statusLockBase); + const status = new Status({ + statusPath, + statusLockPath, + fs, + logger, + }); + const request = new utilsPB.EmptyMessage(); + await expect(async () => { + await grpcClient.agentStop(request); + }).rejects.toThrow(clientErrors.ErrorClientAuthMissing); + expect(pkAgent[running]).toBe(true); + await expect(async () => { + await grpcClient.agentStop( + request, + clientUtils.encodeAuthFromPassword('wrong password'), + ); + }).rejects.toThrow(clientErrors.ErrorClientAuthDenied); + expect(pkAgent[running]).toBe(true); + await expect(async () => { + await grpcClient.agentStop( + request, + clientUtils.encodeAuthFromSession('wrong token' as SessionToken), + ); + }).rejects.toThrow(clientErrors.ErrorClientAuthDenied); + expect(pkAgent[running]).toBe(true); + expect(await status.readStatus()).toMatchObject({ + status: 'LIVE', + }); + }); +}); diff --git a/tests/client/utils.ts b/tests/client/utils.ts index 7f7181daa..5ba0079cd 100644 --- a/tests/client/utils.ts +++ b/tests/client/utils.ts @@ -16,35 +16,35 @@ import { promisify } from '@/utils'; import * as grpcUtils from '@/grpc/utils'; async function openTestClientServer({ - polykeyAgent, + pkAgent, secure, }: { - polykeyAgent: PolykeyAgent; + pkAgent: PolykeyAgent; secure?: boolean; }) { const _secure = secure ?? 
true; const clientService: IClientServiceServer = createClientService({ - polykeyAgent, - keyManager: polykeyAgent.keyManager, - vaultManager: polykeyAgent.vaultManager, - nodeManager: polykeyAgent.nodeManager, - identitiesManager: polykeyAgent.identitiesManager, - gestaltGraph: polykeyAgent.gestaltGraph, - sessionManager: polykeyAgent.sessionManager, - notificationsManager: polykeyAgent.notificationsManager, - discovery: polykeyAgent.discovery, - sigchain: polykeyAgent.sigchain, - fwdProxy: polykeyAgent.fwdProxy, - revProxy: polykeyAgent.revProxy, - grpcServerClient: polykeyAgent.grpcServerClient, - grpcServerAgent: polykeyAgent.grpcServerAgent, - fs: polykeyAgent.fs, + pkAgent, + keyManager: pkAgent.keyManager, + vaultManager: pkAgent.vaultManager, + nodeManager: pkAgent.nodeManager, + identitiesManager: pkAgent.identitiesManager, + gestaltGraph: pkAgent.gestaltGraph, + sessionManager: pkAgent.sessionManager, + notificationsManager: pkAgent.notificationsManager, + discovery: pkAgent.discovery, + sigchain: pkAgent.sigchain, + fwdProxy: pkAgent.fwdProxy, + revProxy: pkAgent.revProxy, + grpcServerClient: pkAgent.grpcServerClient, + grpcServerAgent: pkAgent.grpcServerAgent, + fs: pkAgent.fs, }); const callCredentials = _secure ? 
grpcUtils.serverSecureCredentials( - polykeyAgent.keyManager.getRootKeyPairPem().privateKey, - await polykeyAgent.keyManager.getRootCertChainPem(), + pkAgent.keyManager.getRootKeyPairPem().privateKey, + await pkAgent.keyManager.getRootCertChainPem(), ) : grpcUtils.serverInsecureCredentials(); diff --git a/tests/discovery/Discovery.test.ts b/tests/discovery/Discovery.test.ts index f912635e3..e60d77d3c 100644 --- a/tests/discovery/Discovery.test.ts +++ b/tests/discovery/Discovery.test.ts @@ -3,17 +3,13 @@ import type { IdentityId, ProviderId } from '@/identities/types'; import fs from 'fs'; import path from 'path'; import os from 'os'; -import Logger, { LogLevel } from '@matrixai/logger'; -import { Discovery } from '@/discovery'; -import PolykeyAgent from '@/PolykeyAgent'; -import * as discoveryErrors from '@/discovery/errors'; -import * as claimsUtils from '@/claims/utils'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { destroyed } from '@matrixai/async-init'; +import { PolykeyAgent } from '@'; +import { utils as claimsUtils } from '@/claims'; +import { Discovery, errors as discoveryErrors } from '@/discovery'; +import * as testNodesUtils from '../nodes/utils'; import TestProvider from '../identities/TestProvider'; -import { - addRemoteDetails, - cleanupRemoteKeynode, - setupRemoteKeynode, -} from '../utils'; // Mocks. jest.mock('@/keys/utils', () => ({ @@ -23,9 +19,10 @@ jest.mock('@/keys/utils', () => ({ })); describe('Discovery', () => { - // Constants. const password = 'password'; - const logger = new Logger('Discovery Tests', LogLevel.WARN); + const logger = new Logger('Discovery Tests', LogLevel.WARN, [ + new StreamHandler(), + ]); const testToken = { providerId: 'test-provider' as ProviderId, identityId: 'test_user' as IdentityId, @@ -73,16 +70,17 @@ describe('Discovery', () => { 'Starts and stops', async () => { // Not started. - expect(discovery.destroyed).toBeFalsy(); + expect(discovery[destroyed]).toBeFalsy(); // Starting. 
await discovery.destroy(); - expect(discovery.destroyed).toBeTruthy(); + expect(discovery[destroyed]).toBeTruthy(); }, global.polykeyStartupTimeout, ); }); describe('Discovery process', () => { + let rootDataDir; // Nodes should form the chain A->B->C let nodeA: PolykeyAgent; let nodeB: PolykeyAgent; @@ -91,18 +89,41 @@ describe('Discovery', () => { let identityId: IdentityId; beforeAll(async () => { - // Setting up remote nodes. - nodeA = await setupRemoteKeynode({ logger }); - nodeB = await setupRemoteKeynode({ logger }); - nodeC = await setupRemoteKeynode({ logger }); - + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + // Setting up remote nodes + nodeA = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeA'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + nodeB = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeB'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + nodeC = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeC'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); // Forming links // A->B->C // Adding connection details. - await addRemoteDetails(nodeA, nodeB); - await addRemoteDetails(nodeB, nodeA); - await addRemoteDetails(nodeB, nodeC); - await addRemoteDetails(nodeC, nodeB); + await testNodesUtils.nodesConnect(nodeA, nodeB); + await testNodesUtils.nodesConnect(nodeB, nodeA); + await testNodesUtils.nodesConnect(nodeB, nodeC); + await testNodesUtils.nodesConnect(nodeC, nodeB); // Adding sigchain details. 
const claimBtoC: ClaimLinkNode = { type: 'node', @@ -133,16 +154,20 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityId, }; - const claimEncoded = await nodeB.sigchain.addClaim(claimIdentToB); + const [, claimEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claim = await claimsUtils.decodeClaim(claimEncoded); await testProvider.publishClaim(identityId, claim); }, global.polykeyStartupTimeout * 3); afterAll(async () => { - await cleanupRemoteKeynode(nodeA); - await cleanupRemoteKeynode(nodeB); - await cleanupRemoteKeynode(nodeC); + await nodeC.stop(); + await nodeB.stop(); + await nodeA.stop(); testProvider.links = {}; testProvider.linkIdCounter = 0; + await fs.promises.rm(rootDataDir, { + force: true, + recursive: true, + }); }); beforeEach(async () => { await nodeA.gestaltGraph.clearDB(); @@ -195,6 +220,7 @@ describe('Discovery', () => { }); }); describe('End-to-end discovery between two gestalts', () => { + let rootDataDir; // Gestalt 1 let nodeA: PolykeyAgent; let nodeB: PolykeyAgent; @@ -206,19 +232,50 @@ describe('Discovery', () => { let testProvider: TestProvider; beforeAll(async () => { + rootDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); // Setting up remote nodes. 
- nodeA = await setupRemoteKeynode({ logger }); - nodeB = await setupRemoteKeynode({ logger }); - nodeC = await setupRemoteKeynode({ logger }); - nodeD = await setupRemoteKeynode({ logger }); + nodeA = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeA'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + nodeB = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeB'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + nodeC = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeC'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); + nodeD = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(rootDataDir, 'nodeD'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); // Adding connection details - await addRemoteDetails(nodeA, nodeB); - await addRemoteDetails(nodeA, nodeD); - await addRemoteDetails(nodeB, nodeA); - await addRemoteDetails(nodeC, nodeB); - await addRemoteDetails(nodeC, nodeD); - await addRemoteDetails(nodeD, nodeC); + await testNodesUtils.nodesConnect(nodeA, nodeB); + await testNodesUtils.nodesConnect(nodeA, nodeD); + await testNodesUtils.nodesConnect(nodeB, nodeA); + await testNodesUtils.nodesConnect(nodeC, nodeB); + await testNodesUtils.nodesConnect(nodeC, nodeD); + await testNodesUtils.nodesConnect(nodeD, nodeC); // Setting up identity provider testProvider = new TestProvider(); @@ -237,10 +294,14 @@ describe('Discovery', () => { identityIdB = (await gen2.next()).value as IdentityId; }, global.polykeyStartupTimeout * 4); afterAll(async () => { - await cleanupRemoteKeynode(nodeA); - await cleanupRemoteKeynode(nodeB); - await cleanupRemoteKeynode(nodeC); - await cleanupRemoteKeynode(nodeD); + await nodeD.stop(); + await nodeC.stop(); + await nodeB.stop(); + await nodeA.stop(); + await fs.promises.rm(rootDataDir, { 
+ force: true, + recursive: true, + }); }); afterEach(async () => { await nodeA.gestaltGraph.clearDB(); @@ -263,7 +324,7 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityIdB, }; - const claimBEncoded = await nodeD.sigchain.addClaim(claimIdentToD); + const [, claimBEncoded] = await nodeD.sigchain.addClaim(claimIdentToD); const claimB = claimsUtils.decodeClaim(claimBEncoded); await testProvider.publishClaim(identityIdB, claimB); @@ -298,7 +359,7 @@ describe('Discovery', () => { provider: testProvider.id, identity: identityIdA, }; - const claimAEncoded = await nodeB.sigchain.addClaim(claimIdentToB); + const [, claimAEncoded] = await nodeB.sigchain.addClaim(claimIdentToB); const claimA = claimsUtils.decodeClaim(claimAEncoded); await testProvider.publishClaim(identityIdA, claimA); diff --git a/tests/gestalts/GestaltGraph.test.ts b/tests/gestalts/GestaltGraph.test.ts index 74739e2e9..b818c9842 100644 --- a/tests/gestalts/GestaltGraph.test.ts +++ b/tests/gestalts/GestaltGraph.test.ts @@ -22,7 +22,6 @@ import { } from '@/gestalts'; import { ACL } from '@/acl'; import * as keysUtils from '@/keys/utils'; -import { makeCrypto } from '../utils'; describe('GestaltGraph', () => { const logger = new Logger('GestaltGraph Test', LogLevel.WARN, [ @@ -49,7 +48,13 @@ describe('GestaltGraph', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(await keysUtils.generateKey()), + crypto: { + key: await keysUtils.generateKey(), + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); acl = await ACL.createACL({ db, logger }); diff --git a/tests/global.d.ts b/tests/global.d.ts new file mode 100644 index 000000000..8fe267fd3 --- /dev/null +++ b/tests/global.d.ts @@ -0,0 +1,12 @@ +/* eslint-disable no-var */ +/** + * Follows the globals in jest.config.ts + * @module + */ +declare var projectDir: string; +declare var testDir: string; +declare var dataDir: string; +declare var defaultTimeout: number; +declare 
var polykeyStartupTimeout: number; +declare var failedConnectionTimeout: number; +declare var maxTimeout: number; diff --git a/tests/globalSetup.ts b/tests/globalSetup.ts index 8a976ac68..fde412205 100644 --- a/tests/globalSetup.ts +++ b/tests/globalSetup.ts @@ -1,45 +1,16 @@ +/* eslint-disable no-console */ +import process from 'process'; + /** * Global setup for all jest tests * Side-effects are performed here - * No variable context is passed to the test modules * Jest does not support `@/` imports here - * @module */ -import os from 'os'; -import fs from 'fs'; -import path from 'path'; -import * as keysUtils from '../src/keys/utils'; - async function setup() { - // eslint-disable-next-line no-console console.log('\nGLOBAL SETUP'); - // Globals defined in setup.ts must be copied here - const keyPairDir = path.join(os.tmpdir(), 'polykey-test-keypair'); - const binAgentDir = path.join(os.tmpdir(), 'polykey-test-bin'); - // Setup global root key pair - // eslint-disable-next-line no-console - console.log(`Creating global.keyPairDir: ${keyPairDir}`); - await fs.promises.rm(keyPairDir, { force: true, recursive: true }); - await fs.promises.mkdir(keyPairDir); - const rootKeyPair = await keysUtils.generateKeyPair(1024); - const rootKeyPairPem = keysUtils.keyPairToPem(rootKeyPair); - await Promise.all([ - fs.promises.writeFile( - path.join(keyPairDir, 'root.pub'), - rootKeyPairPem.publicKey, - 'utf-8', - ), - fs.promises.writeFile( - path.join(keyPairDir, 'root.key'), - rootKeyPairPem.privateKey, - 'utf-8', - ), - ]); - // Setup global agent directory - // eslint-disable-next-line no-console - console.log(`Creating global.binAgentDir: ${binAgentDir}`); - await fs.promises.rm(binAgentDir, { force: true, recursive: true }); - await fs.promises.mkdir(binAgentDir); + // The globalDataDir is already created + const globalDataDir = process.env['GLOBAL_DATA_DIR']!; + console.log(`Global Data Dir: ${globalDataDir}`); } export default setup; diff --git a/tests/globalTeardown.ts 
b/tests/globalTeardown.ts index 0959608ad..c199c4d5b 100644 --- a/tests/globalTeardown.ts +++ b/tests/globalTeardown.ts @@ -1,27 +1,16 @@ +/* eslint-disable no-console */ +import fs from 'fs'; + /** * Global teardown for all jest tests * Side-effects are performed here - * No variable context is inherited from test modules * Jest does not support `@/` imports here - * @module */ -import os from 'os'; -import fs from 'fs'; -import path from 'path'; - async function teardown() { - // eslint-disable-next-line no-console console.log('GLOBAL TEARDOWN'); - // Globals defined in setup.ts must be copied here - const keyPairDir = path.join(os.tmpdir(), 'polykey-test-keypair'); - const binAgentDir = path.join(os.tmpdir(), 'polykey-test-bin'); - // eslint-disable-next-line no-console - console.log(`Destroying global.keyPairDir: ${keyPairDir}`); - await fs.promises.rm(keyPairDir, { force: true, recursive: true }); - // The global agent directory must be fresh - // eslint-disable-next-line no-console - console.log(`Destroying global.binAgentDir: ${binAgentDir}`); - await fs.promises.rm(binAgentDir, { force: true, recursive: true }); + const globalDataDir = process.env['GLOBAL_DATA_DIR']!; + console.log(`Destroying Global Data Dir: ${globalDataDir}`); + await fs.promises.rm(globalDataDir, { recursive: true }); } export default teardown; diff --git a/tests/grpc/GRPCServer.test.ts b/tests/grpc/GRPCServer.test.ts index ac7f18c69..49ba78a28 100644 --- a/tests/grpc/GRPCServer.test.ts +++ b/tests/grpc/GRPCServer.test.ts @@ -1,38 +1,40 @@ +import type { Authenticate } from '@/client/types'; import type { Host, Port } from '@/network/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; - import { GRPCServer, utils as grpcUtils } from '@/grpc'; -import { KeyManager } from '@/keys'; -import { utils as networkUtils } from '@/network'; +import { KeyManager, utils as 
keysUtils } from '@/keys'; +import { SessionManager } from '@/sessions'; +import * as networkUtils from '@/network/utils'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import * as grpcErrors from '@/grpc/errors'; -import { SessionManager } from '@/sessions'; import * as clientUtils from '@/client/utils'; -import * as keysUtils from '@/keys/utils'; -import * as utils from './utils'; - -// Mocks. -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import * as testGrpcUtils from './utils'; +import * as testUtils from '../utils'; describe('GRPCServer', () => { + const logger = new Logger('GRPCServer Test', LogLevel.WARN, [ + new StreamHandler(), + ]); const password = 'password'; + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; let dataDir: string; let keyManager: KeyManager; let db: DB; let sessionManager: SessionManager; - let authenticate: clientUtils.Authenticate; - const logger = new Logger('GRPCServer Test', LogLevel.WARN, [ - new StreamHandler(), - ]); + let authenticate: Authenticate; beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -70,6 +72,8 @@ describe('GRPCServer', () => { force: true, recursive: true, }); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); }); test('GRPCServer readiness', async () => { const server = new GRPCServer({ @@ -99,8 +103,8 @@ describe('GRPCServer', () => { await server.start({ services: [ [ - utils.TestServiceService, - 
utils.createTestService({ authenticate, logger }), + testGrpcUtils.TestServiceService, + testGrpcUtils.createTestService({ authenticate, logger }), ], ], host: '127.0.0.1' as Host, @@ -128,8 +132,8 @@ describe('GRPCServer', () => { await server.start({ services: [ [ - utils.TestServiceService, - utils.createTestService({ authenticate, logger }), + testGrpcUtils.TestServiceService, + testGrpcUtils.createTestService({ authenticate, logger }), ], ], host: '127.0.0.1' as Host, @@ -147,7 +151,7 @@ describe('GRPCServer', () => { clientKeyPair.privateKey, 31536000, ); - const client = await utils.openTestClientSecure( + const client = await testGrpcUtils.openTestClientSecure( nodeIdServer, server.port, keysUtils.privateKeyToPem(clientKeyPair.privateKey), @@ -163,7 +167,7 @@ describe('GRPCServer', () => { expect(pCall.call.getPeer()).toBe(`dns:127.0.0.1:${server.port}`); const m_ = await pCall; expect(m_.getChallenge()).toBe(m.getChallenge()); - utils.closeTestClientSecure(client); + testGrpcUtils.closeTestClientSecure(client); await server.stop(); }); test('changing the private key and certificate on the fly', async () => { @@ -180,8 +184,8 @@ describe('GRPCServer', () => { await server.start({ services: [ [ - utils.TestServiceService, - utils.createTestService({ authenticate, logger }), + testGrpcUtils.TestServiceService, + testGrpcUtils.createTestService({ authenticate, logger }), ], ], host: '127.0.0.1' as Host, @@ -200,7 +204,7 @@ describe('GRPCServer', () => { ); // First client connection const nodeIdServer1 = networkUtils.certNodeId(serverCert1); - const client1 = await utils.openTestClientSecure( + const client1 = await testGrpcUtils.openTestClientSecure( nodeIdServer1, server.port, keysUtils.privateKeyToPem(clientKeyPair.privateKey), @@ -237,7 +241,7 @@ describe('GRPCServer', () => { expect(m2_.getChallenge()).toBe(m2.getChallenge()); // Second client connection const nodeIdServer2 = networkUtils.certNodeId(serverCert2); - const client2 = await 
utils.openTestClientSecure( + const client2 = await testGrpcUtils.openTestClientSecure( nodeIdServer2, server.port, keysUtils.privateKeyToPem(clientKeyPair.privateKey), @@ -253,8 +257,8 @@ describe('GRPCServer', () => { expect(pCall3.call.getPeer()).toBe(`dns:127.0.0.1:${server.port}`); const m3_ = await pCall3; expect(m3_.getChallenge()).toBe(m3.getChallenge()); - utils.closeTestClientSecure(client1); - utils.closeTestClientSecure(client2); + testGrpcUtils.closeTestClientSecure(client1); + testGrpcUtils.closeTestClientSecure(client2); await server.stop(); }); test('authenticated commands acquire a token', async () => { @@ -271,8 +275,8 @@ describe('GRPCServer', () => { await server.start({ services: [ [ - utils.TestServiceService, - utils.createTestService({ authenticate, logger }), + testGrpcUtils.TestServiceService, + testGrpcUtils.createTestService({ authenticate, logger }), ], ], host: '127.0.0.1' as Host, @@ -290,7 +294,7 @@ describe('GRPCServer', () => { clientKeyPair.privateKey, 31536000, ); - const client = await utils.openTestClientSecure( + const client = await testGrpcUtils.openTestClientSecure( nodeIdServer, server.port, keysUtils.privateKeyToPem(clientKeyPair.privateKey), @@ -310,7 +314,7 @@ describe('GRPCServer', () => { expect(typeof token).toBe('string'); expect(token!.length > 0).toBe(true); expect(await sessionManager.verifyToken(token!)).toBe(true); - utils.closeTestClientSecure(client); + testGrpcUtils.closeTestClientSecure(client); await server.stop(); }); }); diff --git a/tests/grpc/utils/GRPCClientTest.ts b/tests/grpc/utils/GRPCClientTest.ts index ba96a4882..ddcc21dbe 100644 --- a/tests/grpc/utils/GRPCClientTest.ts +++ b/tests/grpc/utils/GRPCClientTest.ts @@ -63,9 +63,7 @@ class GRPCClientTest extends GRPCClient { } public async destroy() { - this.logger.info(`Destroying ${this.constructor.name}`); await super.destroy(); - this.logger.info(`Destroyed ${this.constructor.name}`); } @ready() diff --git a/tests/grpc/utils/testService.ts 
b/tests/grpc/utils/testService.ts index b6ee75236..8d126025d 100644 --- a/tests/grpc/utils/testService.ts +++ b/tests/grpc/utils/testService.ts @@ -4,7 +4,7 @@ * streaming, client streaming and bidirectional streaming * @module */ - +import type { Authenticate } from '@/client/types'; import type { SessionToken } from '@/sessions/types'; import type { ITestServiceServer } from '@/proto/js/polykey/v1/test_service_grpc_pb'; import Logger from '@matrixai/logger'; @@ -17,7 +17,7 @@ function createTestService({ authenticate, logger = new Logger('TestService'), }: { - authenticate: clientUtils.Authenticate; + authenticate: Authenticate; logger?: Logger; }) { const testService: ITestServiceServer = { diff --git a/tests/grpc/utils/utils.ts b/tests/grpc/utils/utils.ts index 8ab8e295a..9aa2bff36 100644 --- a/tests/grpc/utils/utils.ts +++ b/tests/grpc/utils/utils.ts @@ -1,6 +1,6 @@ import type Logger from '@matrixai/logger'; +import type { Authenticate } from '@/client/types'; import type { NodeId } from '@/nodes/types'; -import type * as clientUtils from '@/client/utils'; import * as grpc from '@grpc/grpc-js'; import { utils as grpcUtils } from '@/grpc'; import { promisify } from '@/utils'; @@ -11,7 +11,7 @@ import { import createTestService from './testService'; async function openTestServer( - authenticate: clientUtils.Authenticate, + authenticate: Authenticate, logger?: Logger, ): Promise<[grpc.Server, number]> { const testService = createTestService({ authenticate, logger }); @@ -80,7 +80,7 @@ function closeTestClientSecure(client: TestServiceClient) { async function openTestServerSecure( keyPrivatePem, certChainPem, - authenticate: clientUtils.Authenticate, + authenticate: Authenticate, logger?: Logger, ): Promise<[grpc.Server, number]> { const testService = createTestService({ authenticate, logger }); diff --git a/tests/identities/IdentitiesManager.test.ts b/tests/identities/IdentitiesManager.test.ts index cbc2aeb04..497f6592c 100644 --- 
a/tests/identities/IdentitiesManager.test.ts +++ b/tests/identities/IdentitiesManager.test.ts @@ -17,7 +17,6 @@ import { IdentitiesManager, providers } from '@/identities'; import * as identitiesErrors from '@/identities/errors'; import * as keysUtils from '@/keys/utils'; import TestProvider from './TestProvider'; -import { makeCrypto } from '../utils'; describe('IdentitiesManager', () => { const logger = new Logger('IdentitiesManager Test', LogLevel.WARN, [ @@ -33,7 +32,13 @@ describe('IdentitiesManager', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(await keysUtils.generateKey()), + crypto: { + key: await keysUtils.generateKey(), + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); }); afterEach(async () => { diff --git a/tests/index.test.ts b/tests/index.test.ts index 952f089a8..b8be30718 100644 --- a/tests/index.test.ts +++ b/tests/index.test.ts @@ -1,33 +1,9 @@ -import fs from 'fs'; -import path from 'path'; -import os from 'os'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { PolykeyAgent } from '@'; - -// Mocks. 
-jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import * as polykey from '@'; describe('index', () => { - const logger = new Logger('index test', LogLevel.WARN, [new StreamHandler()]); - let dataDir; - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - }); - test('construction of Polykey', async () => { - const password = 'password'; - const pk = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir, - logger, - }); - expect(pk).toBeInstanceOf(PolykeyAgent); - await pk.stop(); - await pk.destroy(); + test('exports PolykeyAgent, PolykeyClient and errors', async () => { + expect('PolykeyAgent' in polykey).toBe(true); + expect('PolykeyClient' in polykey).toBe(true); + expect('errors' in polykey).toBe(true); }); }); diff --git a/tests/keys/KeyManager.test.ts b/tests/keys/KeyManager.test.ts index a1771bcbb..7dafd65c4 100644 --- a/tests/keys/KeyManager.test.ts +++ b/tests/keys/KeyManager.test.ts @@ -11,7 +11,6 @@ import * as keysErrors from '@/keys/errors'; import * as workersUtils from '@/workers/utils'; import * as keysUtils from '@/keys/utils'; import { sleep } from '@/utils'; -import { makeCrypto } from '../utils'; describe('KeyManager', () => { const password = 'password'; @@ -24,7 +23,7 @@ describe('KeyManager', () => { let mockedGenerateDeterministicKeyPair; beforeAll(async () => { // Key pair generated once for mocking - keyPair = await keysUtils.generateKeyPair(4096); + keyPair = await keysUtils.generateKeyPair(1024); workerManager = await workersUtils.createWorkerManager({ cores: 1, logger, @@ -54,8 +53,8 @@ describe('KeyManager', () => { test('KeyManager readiness', async () => { const keysPath = `${dataDir}/keys`; const keyManager = await KeyManager.createKeyManager({ - password, keysPath, + password, logger, }); await expect(async () => { @@ -78,8 +77,8 @@ 
describe('KeyManager', () => { test('constructs root key pair, root cert, root certs and db key', async () => { const keysPath = `${dataDir}/keys`; const keyManager = await KeyManager.createKeyManager({ - password, keysPath, + password, logger, }); const keysPathContents = await fs.promises.readdir(keysPath); @@ -210,8 +209,12 @@ describe('KeyManager', () => { }); // No way we can encrypt 1000 bytes without a ridiculous key size const plainText = Buffer.from(new Array(1000 + 1).join('A')); + const maxSize = keysUtils.maxEncryptSize( + keysUtils.publicKeyBitSize(keyPair.publicKey) / 8, + 32, + ); await expect(keyManager.encryptWithRootKeyPair(plainText)).rejects.toThrow( - 'Maximum plain text byte size is 446', + `Maximum plain text byte size is ${maxSize}`, ); await keyManager.stop(); }); @@ -257,16 +260,12 @@ describe('KeyManager', () => { logger, }); }).rejects.toThrow(keysErrors.ErrorRootKeysParse); - await expect( - (async () => { - await KeyManager.createKeyManager({ - password: 'newpassword', - keysPath, - logger, - }); - await keyManager.stop(); - })(), - ).resolves.toBeUndefined(); + await KeyManager.createKeyManager({ + password: 'newpassword', + keysPath, + logger, + }); + await keyManager.stop(); }); test('can reset root certificate', async () => { const keysPath = `${dataDir}/keys`; @@ -305,7 +304,13 @@ describe('KeyManager', () => { const db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); const rootKeyPair1 = keyManager.getRootKeyPair(); const rootCert1 = keyManager.getRootCert(); @@ -348,7 +353,13 @@ describe('KeyManager', () => { const db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); const rootKeyPair1 = 
keyManager.getRootKeyPair(); const rootCert1 = keyManager.getRootCert(); diff --git a/tests/keys/utils.test.ts b/tests/keys/utils.test.ts index 9d3e30a6e..11b2a1563 100644 --- a/tests/keys/utils.test.ts +++ b/tests/keys/utils.test.ts @@ -3,14 +3,14 @@ import * as keysUtils from '@/keys/utils'; describe('utils', () => { test('key pair copy', async () => { - const keyPair = await keysUtils.generateKeyPair(4096); + const keyPair = await keysUtils.generateKeyPair(1024); const keyPairPem = keysUtils.keyPairToPem(keyPair); const keyPair2 = keysUtils.keyPairCopy(keyPair); const keyPairPem2 = keysUtils.keyPairToPem(keyPair2); expect(keyPairPem).toStrictEqual(keyPairPem2); }); test('to and from der encoding', async () => { - const keyPair = await keysUtils.generateKeyPair(4096); + const keyPair = await keysUtils.generateKeyPair(1024); const cert = keysUtils.generateCertificate( keyPair.publicKey, keyPair.privateKey, @@ -24,7 +24,7 @@ describe('utils', () => { expect(certPem).toBe(certPem_); }); test('certificate copy', async () => { - const keyPair = await keysUtils.generateKeyPair(4096); + const keyPair = await keysUtils.generateKeyPair(1024); const cert = keysUtils.generateCertificate( keyPair.publicKey, keyPair.privateKey, @@ -37,7 +37,7 @@ describe('utils', () => { expect(certPem).toBe(certPem2); }); test('encryption and decryption of private key', async () => { - const keyPair = await keysUtils.generateKeyPair(4096); + const keyPair = await keysUtils.generateKeyPair(1024); // Try first password const password = (await keysUtils.getRandomBytes(10)).toString('base64'); const privateKeyPemEncrypted = keysUtils.encryptPrivateKey( diff --git a/tests/network/ForwardProxy.test.ts b/tests/network/ForwardProxy.test.ts index 67c354c28..a8dd2fb5c 100644 --- a/tests/network/ForwardProxy.test.ts +++ b/tests/network/ForwardProxy.test.ts @@ -1,5 +1,5 @@ import type { Socket } from 'net'; - +import type { KeyPairPem } from '@/keys/types'; import type { Host, Port } from 
'@/network/types'; import type { NodeId } from '@/nodes/types'; import http from 'http'; @@ -13,85 +13,86 @@ import { errors as networkErrors, } from '@/network'; import * as keysUtils from '@/keys/utils'; -import { promisify, promise, timerStart, timerStop } from '@/utils'; - -describe('ForwardProxy', () => { - const logger = new Logger('ForwardProxy Test', LogLevel.WARN, [ - new StreamHandler(), - ]); - let keyPairPem, certPem; - let fwdProxy: ForwardProxy; - let authToken: string; +import { promisify, promise, timerStart, timerStop, poll } from '@/utils'; +import * as testUtils from '../utils'; - // Helper functions - async function connect( - host: string, - port: number, - token: string, - path: string, - ): Promise { - const socket = await new Promise((resolve, reject) => { - const req = http.request({ - method: 'CONNECT', - path: path, - host: host, - port: port, - headers: { - 'Proxy-Authorization': `Basic ${token}`, - }, - }); - req.end(); - req.once('connect', (res, clientSocket) => { - if (res.statusCode === 200) { - resolve(clientSocket); - } else { - reject(new Error(res.statusCode!.toString())); - } - }); - req.once('error', (e) => { - reject(e); - }); +/** + * Mock HTTP Connect Request + * This is what clients to the ForwardProxy should be doing + * Returns the network socket established + * @throws Error on failure to connect, may contain status code as message + */ +async function httpConnect( + host: string, + port: number, + token: string, + path: string, +): Promise { + const tokenEncoded = Buffer.from(token, 'utf-8').toString('base64'); + const socket = await new Promise((resolve, reject) => { + const req = http.request({ + method: 'CONNECT', + path: path, + host: host, + port: port, + headers: { + 'Proxy-Authorization': `Basic ${tokenEncoded}`, + }, + }); + req.end(); + req.once('connect', (res, clientSocket) => { + if (res.statusCode === 200) { + resolve(clientSocket); + } else { + reject(new Error(res.statusCode!.toString())); + } + }); + 
req.once('error', (e) => { + reject(e); }); - return socket; - } + }); + return socket; +} +describe(ForwardProxy.name, () => { + const logger = new Logger(`${ForwardProxy.name} test`, LogLevel.WARN, [ + new StreamHandler(), + ]); + const authToken = 'abc123'; + let keyPairPem: KeyPairPem; + let certPem: string; beforeAll(async () => { - const keyPair = await keysUtils.generateKeyPair(4096); - keyPairPem = keysUtils.keyPairToPem(keyPair); + const globalKeyPair = await testUtils.setupGlobalKeypair(); + keyPairPem = keysUtils.keyPairToPem(globalKeyPair); const cert = keysUtils.generateCertificate( - keyPair.publicKey, - keyPair.privateKey, - keyPair.privateKey, + globalKeyPair.publicKey, + globalKeyPair.privateKey, + globalKeyPair.privateKey, 86400, ); certPem = keysUtils.certToPem(cert); }); - beforeEach(async () => { - authToken = 'sdafjs8'; - fwdProxy = new ForwardProxy({ + test('forward proxy readiness', async () => { + const fwdProxy = new ForwardProxy({ authToken, logger, }); + // Should be a noop (already stopped) + await fwdProxy.stop(); await fwdProxy.start({ - proxyHost: '::1' as Host, tlsConfig: { keyPrivatePem: keyPairPem.privateKey, certChainPem: certPem, }, }); - }); - afterEach(async () => { - await fwdProxy.stop(); - }); - - test('reverseProxy readiness', async () => { - const fwdProxy = new ForwardProxy({ - authToken: '', - logger: logger, - }); - - // Should be a noop - await fwdProxy.stop(); + expect(typeof fwdProxy.getProxyHost()).toBe('string'); + expect(typeof fwdProxy.getProxyPort()).toBe('number'); + expect(fwdProxy.getProxyPort()).toBeGreaterThan(0); + expect(typeof fwdProxy.getEgressHost()).toBe('string'); + expect(typeof fwdProxy.getEgressPort()).toBe('number'); + expect(fwdProxy.getEgressPort()).toBeGreaterThan(0); + expect(fwdProxy.getConnectionCount()).toBe(0); + // Should be a noop (already started) await fwdProxy.start({ tlsConfig: { keyPrivatePem: keyPairPem.privateKey, @@ -100,21 +101,12 @@ describe('ForwardProxy', () => { }); await 
fwdProxy.stop(); expect(() => { - fwdProxy.proxyHost; - }).toThrow(networkErrors.ErrorForwardProxyNotStarted); + fwdProxy.getProxyHost(); + }).toThrow(networkErrors.ErrorForwardProxyNotRunning); await expect(async () => { await fwdProxy.closeConnection('::1' as Host, 1 as Port); - }).rejects.toThrow(networkErrors.ErrorForwardProxyNotStarted); - }); - test('starting and stopping the forward proxy', async () => { - expect(typeof fwdProxy.proxyHost).toBe('string'); - expect(typeof fwdProxy.proxyPort).toBe('number'); - expect(fwdProxy.proxyPort).toBeGreaterThan(0); - expect(typeof fwdProxy.egressHost).toBe('string'); - expect(typeof fwdProxy.egressPort).toBe('number'); - expect(fwdProxy.egressPort).toBeGreaterThan(0); - expect(fwdProxy.connectionCount).toBe(0); - await fwdProxy.stop(); + }).rejects.toThrow(networkErrors.ErrorForwardProxyNotRunning); + // Start it again await fwdProxy.start({ proxyHost: '::1' as Host, tlsConfig: { @@ -122,68 +114,115 @@ describe('ForwardProxy', () => { certChainPem: certPem, }, }); - expect(fwdProxy.proxyHost).toBe('::1'); + expect(fwdProxy.getProxyHost()).toBe('::1'); await fwdProxy.stop(); }); - test('connect failures to the forward proxy', async () => { - const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); + test('HTTP CONNECT bad request failures to the forward proxy', async () => { + // The forward proxy will emit error logs when this occurs + // In production these connect errors should never happen + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy CONNECT bad request'), + }); + await fwdProxy.start({ + proxyHost: '::1' as Host, + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); // Incorrect auth token await expect(() => - connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - 'sdfisojfo', + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + 'incorrect auth token', 
`127.0.0.1:80?nodeId=${encodeURIComponent('SOMENODEID')}`, ), ).rejects.toThrow('407'); // No node id await expect(() => - connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, '127.0.0.1:80', ), ).rejects.toThrow('400'); // Missing target await expect(() => - connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, - `?nodeId=${encodeURIComponent('123')}`, - ), - ).rejects.toThrow('400'); - // Targetting an un-used port - await expect(() => - connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, - `127.0.0.1:0?nodeId=${encodeURIComponent('123')}`, + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `?nodeId=${encodeURIComponent('SOMENODEID')}`, ), ).rejects.toThrow('400'); await fwdProxy.stop(); }); - test('open connection to port 0 fails', async () => { + test('connection to port 0 fails', async () => { + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy port 0'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); // Cannot open connection to port 0 await expect(() => fwdProxy.openConnection('abc' as NodeId, '127.0.0.1' as Host, 0 as Port), ).rejects.toThrow(networkErrors.ErrorConnectionStart); + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `127.0.0.1:0?nodeId=${encodeURIComponent('abc')}`, + ), + ).rejects.toThrow('502'); await fwdProxy.stop(); }); - test('open connection timeout due to hanging remote', async () => { + test('connection start timeout due to hanging remote', async () => { + // 1 seconds to wait to establish a connection + // Must reduce the ping interval time to 100ms + // Also reduce the end tome to 100ms + // So that we can test timeouts quicker + const fwdProxy = new ForwardProxy({ + authToken, + connConnectTime: 
1000, + connKeepAliveIntervalTime: 100, + connEndTime: 100, + logger: logger.getChild('ForwardProxy connection timeout'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); // This UTP server will just hang and not respond - let receivedConnection = false; + let recievedCount = 0; const utpSocketHang = UTP.createServer(() => { - receivedConnection = true; + recievedCount++; }); const utpSocketHangListen = promisify(utpSocketHang.listen).bind( utpSocketHang, ); await utpSocketHangListen(0, '127.0.0.1'); const utpSocketHangPort = utpSocketHang.address().port; - const timer = timerStart(3000); + await expect(() => + fwdProxy.openConnection( + 'abc' as NodeId, + '127.0.0.1' as Host, + utpSocketHangPort as Port, + ), + ).rejects.toThrow(networkErrors.ErrorConnectionStartTimeout); + expect(recievedCount).toBe(1); + // Can override the timer + const timer = timerStart(2000); await expect(() => fwdProxy.openConnection( 'abc' as NodeId, @@ -193,17 +232,36 @@ describe('ForwardProxy', () => { ), ).rejects.toThrow(networkErrors.ErrorConnectionStartTimeout); timerStop(timer); - expect(receivedConnection).toBe(true); + expect(recievedCount).toBe(2); + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `127.0.0.1:${utpSocketHangPort}?nodeId=${encodeURIComponent('abc')}`, + ), + ).rejects.toThrow('504'); + expect(recievedCount).toBe(3); utpSocketHang.close(); utpSocketHang.unref(); await fwdProxy.stop(); }); - test('open connection reset due to ending remote', async () => { + test('connection reset due to ending remote', async () => { + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy connection reset'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); // This UTP Server will immediately end and destroy // the connection upon receiving a connection 
- let receivedConnection = false; + let recievedCount = 0; const utpSocketEnd = UTP.createServer((utpConn) => { - receivedConnection = true; + recievedCount++; utpConn.end(); utpConn.destroy(); }); @@ -219,7 +277,7 @@ describe('ForwardProxy', () => { utpSocketEndPort as Port, ), ).rejects.toThrow(networkErrors.ErrorConnectionStart); - expect(receivedConnection).toBe(true); + expect(recievedCount).toBe(1); // The actual error is UTP_ECONNRESET to be precise await expect(() => fwdProxy.openConnection( @@ -228,43 +286,960 @@ describe('ForwardProxy', () => { utpSocketEndPort as Port, ), ).rejects.toThrow(/UTP_ECONNRESET/); + expect(recievedCount).toBe(2); + // 502 Bad Gateway on HTTP Connect + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `127.0.0.1:${utpSocketEndPort}?nodeId=${encodeURIComponent('abc')}`, + ), + ).rejects.toThrow('502'); + expect(recievedCount).toBe(3); utpSocketEnd.close(); utpSocketEnd.unref(); await fwdProxy.stop(); }); test('open connection fails due to missing certificates', async () => { - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy missing certificates'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + // All TLS servers must have a certificate and 
associated key + // This is TLS socket is therefore dead on arrival by not providing + // any certificate nor key + const tlsSocket = new tls.TLSSocket(utpConn, { + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + // TLS socket will be closed as soon as error is emitted + // Therefore this will never be called + // However the socket is ended anyway automatically + tlsSocket.on('end', () => { + tlsSocketEnd(); + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + // This is a TLS handshake failure + await expect(() => + fwdProxy.openConnection( + 'somerandomnodeid' as NodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ), + ).rejects.toThrow(networkErrors.ErrorConnectionStart); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + // The TLS 
socket throw an error because there's no suitable signature algorithm + expect(tlsSocketError.mock.calls.length).toBe(1); + // Expect(tlsSocketError.mock.calls[0][0]).toBeInstanceOf(Error); + expect(tlsSocketError.mock.calls[0][0]).toHaveProperty( + 'code', + 'ERR_SSL_NO_SUITABLE_SIGNATURE_ALGORITHM', + ); + // The TLS socket end event never was emitted + expect(tlsSocketEnd.mock.calls.length).toBe(0); + // The TLS socket close event is emitted with error + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(true); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT fails due to missing certificates', async () => { + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy missing certificates'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + // All TLS servers must have a certificate and associated key + // This is TLS socket is therefore dead on arrival by not providing + // any certificate nor key + const tlsSocket = new tls.TLSSocket(utpConn, { + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + // TLS socket will be closed as soon as error is emitted + // Therefore this will never be called + // However the 
socket is ended anyway automatically + tlsSocket.on('end', () => { + tlsSocketEnd(); + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + // This is an TLS handshake failure + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + 'somerandomnodeid', + )}`, + ), + ).rejects.toThrow('502'); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + // The TLS socket throw an error because there's no suitable signature algorithm + expect(tlsSocketError.mock.calls.length).toBe(1); + // Expect(tlsSocketError.mock.calls[0][0]).toBeInstanceOf(Error); + expect(tlsSocketError.mock.calls[0][0]).toHaveProperty( + 'code', + 'ERR_SSL_NO_SUITABLE_SIGNATURE_ALGORITHM', + ); + // The TLS socket end event never was emitted + 
expect(tlsSocketEnd.mock.calls.length).toBe(0); + // The TLS socket close event is emitted with error + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(true); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('open connection fails due to invalid node id', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy invalid node id'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + let secured = false; + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + secured = true; + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + 
logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(() => + fwdProxy.openConnection( + 'somerandomnodeid' as NodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ), + ).rejects.toThrow(networkErrors.ErrorCertChainUnclaimed); + await expect(remoteReadyP).resolves.toBeUndefined(); + expect(secured).toBe(true); + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + // No TLS socket errors this time + // The client side figured that the node id is incorect + expect(tlsSocketError.mock.calls.length).toBe(0); + // This time the tls socket is ended from the client side + expect(tlsSocketEnd.mock.calls.length).toBe(1); + // The TLS socket close event is emitted without error + 
expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT fails due to invalid node id', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild('ForwardProxy invalid node id'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + let secured = false; + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + secured = true; + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: 
responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(() => + httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + 'somerandomnodeid', + )}`, + ), + ).rejects.toThrow('526'); + await expect(remoteReadyP).resolves.toBeUndefined(); + expect(secured).toBe(true); + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + // No TLS socket errors this time + // The client side figured taht the node id is incorect + expect(tlsSocketError.mock.calls.length).toBe(0); + // This time the tls socket is ended from the client side + expect(tlsSocketEnd.mock.calls.length).toBe(1); + // The TLS socket close event is emitted without error + expect(tlsSocketClose.mock.calls.length).toBe(1); + 
expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('open connection success - forward initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild( + 'ForwardProxy open connection success - forward initiates end', + ), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if 
(utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + await fwdProxy.closeConnection( + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(0); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + 
expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('open connection success - reverse initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + connEndTime: 5000, + logger: logger.getChild( + 'ForwardProxy open connection success - reverse initiates end', + ), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // Will use this to simulate reverse initiating end + let tlsSocket_: tls.TLSSocket; + // This UTP server will hold the connection + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: 
false, + }); + tlsSocket_ = tlsSocket; + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + utpConn.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + // Start the 
graceful ending of the tls socket + logger.debug('Reverse: begins tlsSocket ending'); + const { p: endP, resolveP: resolveEndP } = promise(); + tlsSocket_!.removeAllListeners('end'); + tlsSocket_!.once('end', resolveEndP); + tlsSocket_!.end(); + await endP; + // Force destroy the socket due to buggy tlsSocket and utpConn + tlsSocket_!.destroy(); + logger.debug('Reverse: finishes tlsSocket ending'); + await expect(remoteClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect( + poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + // This time the reverse side initiates the end + // Therefore, this handler is removed + expect(tlsSocketEnd.mock.calls.length).toBe(0); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT success - forward initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild( + 'ForwardProxy HTTP CONNECT success - forward initiates end', + ), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); + const { 
p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); + const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = + promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // This UTP server will hold the connection + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); + tlsSocket.end(); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { + await send(networkUtils.pingBuffer); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); + const handleMessage = async (data: Buffer) => { + const msg = networkUtils.unserializeNetworkMessage(data); + if (msg.type === 'ping') { + await send(networkUtils.pongBuffer); + } else if (msg.type === 'pong') { + resolveRemoteReadyP(); + } + }; + utpSocket.on('message', handleMessage); + const send = async (data: Buffer) => { + const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); + await utpSocketSend(data, 0, data.byteLength, egressPort, 
egressHost); + }; + const utpSocketListen = promisify(utpSocket.listen).bind(utpSocket); + await utpSocketListen(0, '127.0.0.1'); + const utpSocketHost = utpSocket.address().address; + const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + // Normal sockets defaults to `allowHalfOpen: false` + // Therefore this isn't strictly necessary + // Here we are just adding it in ensure consistent behaviour + // If this wasn't done by default, then there should be an error + // emitted on the ConnectionForward tlsSocket as ErrorConnectionEndTimeout + const clientSocketEnd = jest.fn(); + clientSocket.on('end', () => { + clientSocketEnd(); + clientSocket.end(); + }); + clientSocket.on('close', () => { + resolveLocalClosedP(); + }); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + await fwdProxy.closeConnection( + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(0); + expect(clientSocketEnd.mock.calls.length).toBe(1); + await expect(localClosedP).resolves.toBeUndefined(); + await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + 
expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); + utpSocket.off('message', handleMessage); + utpSocket.close(); + utpSocket.unref(); + await fwdProxy.stop(); + }); + test('HTTP CONNECT success - reverse initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); + const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverCert = keysUtils.generateCertificate( + serverKeyPair.publicKey, + serverKeyPair.privateKey, + serverKeyPair.privateKey, + 86400, + ); + const serverCertPem = keysUtils.certToPem(serverCert); + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild( + 'ForwardProxy HTTP CONNECT success - reverse initiates end', + ), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); + // Will use this to simulate reverse initiating end + let tlsSocket_: tls.TLSSocket; // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + 
utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket_ = tlsSocket; + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); tlsSocket.end(); - }); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -282,23 +1257,81 @@ describe('ForwardProxy', () => { await utpSocketListen(0, '127.0.0.1'); const utpSocketHost = utpSocket.address().address; const utpSocketPort = utpSocket.address().port; - expect(fwdProxy.connectionCount).toBe(0); - // This is an SSL handshake failure - await expect(() => - fwdProxy.openConnection( - 'somerandomnodeid' as NodeId, - utpSocketHost as Host, - utpSocketPort as Port, - ), - ).rejects.toThrow(networkErrors.ErrorConnectionStart); + expect(fwdProxy.getConnectionCount()).toBe(0); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + 
fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, + ); + await expect(remoteReadyP).resolves.toBeUndefined(); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + // Normal sockets defaults to `allowHalfOpen: false` + // Therefore this isn't strictly necessary + // Here we are just adding it in ensure consistent behaviour + // If this wasn't done by default, then there should be an error + // emitted on the ConnectionForward tlsSocket as ErrorConnectionEndTimeout + const clientSocketEnd = jest.fn(); + clientSocket.on('end', () => { + clientSocketEnd(); + clientSocket.end(); + }); + clientSocket.on('close', () => { + resolveLocalClosedP(); + }); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + // Start the graceful ending of the tls socket + logger.debug('Reverse: begins tlsSocket ending'); + const { p: endP, resolveP: resolveEndP } = promise(); + tlsSocket_!.removeAllListeners('end'); + tlsSocket_!.once('end', resolveEndP); + tlsSocket_!.end(); + await endP; + // Force destroy the socket due to buggy tlsSocket and utpConn + tlsSocket_!.destroy(); + logger.debug('Reverse: finishes tlsSocket ending'); + await expect(localClosedP).resolves.toBeUndefined(); + expect(clientSocketEnd.mock.calls.length).toBe(1); await expect(remoteClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect( + poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + 
).resolves.toBe(0); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + // This time the reverse side initiates the end + // Therefore, this handler is removed + expect(tlsSocketEnd.mock.calls.length).toBe(0); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); await fwdProxy.stop(); }); - test('open connection fails due to invalid node id', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); + test('HTTP CONNECT success - client initiates end', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); const serverCert = keysUtils.generateCertificate( serverKeyPair.publicKey, @@ -307,44 +1340,72 @@ describe('ForwardProxy', () => { 86400, ); const serverCertPem = keysUtils.certToPem(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; + const serverNodeId = networkUtils.certNodeId(serverCert); + const fwdProxy = new ForwardProxy({ + authToken, + logger: logger.getChild( + 'ForwardProxy HTTP CONNECT success - client initiates end', + ), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); + const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = + promise(); const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - let secured = false; - const utpSocket = UTP.createServer( - async (utpConn) => { - const 
tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - secured = true; - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); tlsSocket.end(); - }); + tlsSocket.destroy(); + logger.debug('Reverse: responded tlsSocket ending'); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -362,27 +1423,71 @@ describe('ForwardProxy', () => { await 
utpSocketListen(0, '127.0.0.1'); const utpSocketHost = utpSocket.address().address; const utpSocketPort = utpSocket.address().port; - expect(fwdProxy.connectionCount).toBe(0); - await expect(() => - fwdProxy.openConnection( - 'somerandomnodeid' as NodeId, - utpSocketHost as Host, - utpSocketPort as Port, - ), - ).rejects.toThrow(networkErrors.ErrorCertChainUnclaimed); + expect(fwdProxy.getConnectionCount()).toBe(0); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, + ); await expect(remoteReadyP).resolves.toBeUndefined(); - // The secure event won't be fired - // because the connection will be ended before that happens - expect(secured).toBe(false); - expect(fwdProxy.connectionCount).toBe(0); + await expect(remoteSecureP).resolves.toBeUndefined(); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + clientSocket.on('close', () => { + resolveLocalClosedP(); + }); + // Opening a duplicate connection is noop + await fwdProxy.openConnection( + serverNodeId, + utpSocketHost as Host, + utpSocketPort as Port, + ); + expect(fwdProxy.getConnectionCount()).toBe(1); + const { p: endP, resolveP: resolveEndP } = promise(); + // By default net sockets have `allowHalfOpen: false` + // Here we override the behaviour by removing the end listener + // And replacing it with our own, and remember to also force destroy + clientSocket.removeAllListeners('end'); + clientSocket.on('end', () => { + resolveEndP(); + clientSocket.destroy(); + }); + logger.debug('Client: begins clientSocket ending'); + clientSocket.end(); + await endP; + logger.debug('Client: finishes clientSocket ending'); + await expect(localClosedP).resolves.toBeUndefined(); 
await expect(remoteClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect( + poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); await fwdProxy.stop(); }); - test('open connection success', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); + test('HTTP CONNECT success by opening connection first', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); const serverCert = keysUtils.generateCertificate( serverKeyPair.publicKey, @@ -392,45 +1497,68 @@ describe('ForwardProxy', () => { ); const serverCertPem = keysUtils.certToPem(serverCert); const serverNodeId = networkUtils.certNodeId(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; + const fwdProxy = new ForwardProxy({ + authToken, + logger, + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = promise(); const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = 
UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); tlsSocket.end(); - }); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -448,7 +1576,6 @@ describe('ForwardProxy', () => { await 
utpSocketListen(0, '127.0.0.1'); const utpSocketHost = utpSocket.address().address; const utpSocketPort = utpSocket.address().port; - expect(fwdProxy.connectionCount).toBe(0); await fwdProxy.openConnection( serverNodeId, utpSocketHost as Host, @@ -456,26 +1583,39 @@ describe('ForwardProxy', () => { ); await expect(remoteReadyP).resolves.toBeUndefined(); await expect(remoteSecureP).resolves.toBeUndefined(); - // Opening a duplicate connection is noop - await fwdProxy.openConnection( - serverNodeId, - utpSocketHost as Host, - utpSocketPort as Port, + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, + `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( + serverNodeId, + )}`, ); - expect(fwdProxy.connectionCount).toBe(1); + expect(clientSocket).toBeInstanceOf(net.Socket); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); + const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); + clientSocket.on('close', () => { + resolveLocalClosedP(); + }); await fwdProxy.closeConnection( utpSocketHost as Host, utpSocketPort as Port, ); - expect(fwdProxy.connectionCount).toBe(0); + await expect(localClosedP).resolves.toBeUndefined(); await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); await fwdProxy.stop(); }); - test('connect success by opening connection first', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); + test('open connection keepalive timeout', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); const serverKeyPairPem = 
keysUtils.keyPairToPem(serverKeyPair); const serverCert = keysUtils.generateCertificate( serverKeyPair.publicKey, @@ -485,49 +1625,75 @@ describe('ForwardProxy', () => { ); const serverCertPem = keysUtils.certToPem(serverCert); const serverNodeId = networkUtils.certNodeId(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; + const fwdProxy = new ForwardProxy({ + authToken, + connKeepAliveTimeoutTime: 1000, + connKeepAliveIntervalTime: 100, + logger: logger.getChild('ForwardProxy open connection keepalive timeout'), + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = promise(); const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + 
requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); tlsSocket.end(); - }); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { - await send(networkUtils.pongBuffer); + // Do nothing here + // To trigger keep alive timeout } else if (msg.type === 'pong') { resolveRemoteReadyP(); } @@ -541,6 +1707,7 @@ describe('ForwardProxy', () => { await utpSocketListen(0, '127.0.0.1'); const utpSocketHost = utpSocket.address().address; const utpSocketPort = utpSocket.address().port; + expect(fwdProxy.getConnectionCount()).toBe(0); await fwdProxy.openConnection( serverNodeId, utpSocketHost as Host, @@ -548,35 +1715,22 @@ describe('ForwardProxy', () => { ); await expect(remoteReadyP).resolves.toBeUndefined(); await expect(remoteSecureP).resolves.toBeUndefined(); - const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); - const clientSocket = await connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, - 
`${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( - serverNodeId, - )}`, - ); - expect(clientSocket).toBeInstanceOf(net.Socket); - expect(clientSocket.remoteAddress).toBe(fwdProxy.proxyHost); - expect(clientSocket.remotePort).toBe(fwdProxy.proxyPort); - const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); - clientSocket.on('close', () => { - resolveLocalClosedP(); - }); - await fwdProxy.closeConnection( - utpSocketHost as Host, - utpSocketPort as Port, - ); - await expect(localClosedP).resolves.toBeUndefined(); + expect(fwdProxy.getConnectionCount()).toBe(1); + // When ErrorConnectionTimeout is triggered + // This results in the destruction of the socket await expect(remoteClosedP).resolves.toBeUndefined(); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); await fwdProxy.stop(); }); - test('connect success by direct connection', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); + test('HTTP CONNECT keepalive timeout', async () => { + const serverKeyPair = await keysUtils.generateKeyPair(1024); const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); const serverCert = keysUtils.generateCertificate( serverKeyPair.publicKey, @@ -586,49 +1740,75 @@ describe('ForwardProxy', () => { ); const serverCertPem = keysUtils.certToPem(serverCert); const serverNodeId = networkUtils.certNodeId(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; + const fwdProxy = new ForwardProxy({ + authToken, + connKeepAliveTimeoutTime: 1000, + connKeepAliveIntervalTime: 100, + logger: logger.getChild('ForwardProxy HTTP CONNECT keepalive timeout'), + }); + await fwdProxy.start({ + tlsConfig: { + 
keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = promise(); const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = promise(); + const utpConnError = jest.fn(); + const tlsSocketError = jest.fn(); + const tlsSocketEnd = jest.fn(); + const tlsSocketClose = jest.fn(); // This UTP server will hold the connection - const utpSocket = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { + const utpSocket = UTP.createServer(async (utpConn) => { + utpConn.on('error', (e) => { + utpConnError(e); + }); + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('error', (e) => { + tlsSocketError(e); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + tlsSocketEnd(); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); tlsSocket.end(); - }); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', (hadError) => { + tlsSocketClose(hadError); + resolveRemoteClosedP(); + }); + await 
send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { - await send(networkUtils.pongBuffer); + // Do nothing here + // To trigger keep alive timeout } else if (msg.type === 'pong') { resolveRemoteReadyP(); } @@ -642,11 +1822,11 @@ describe('ForwardProxy', () => { await utpSocketListen(0, '127.0.0.1'); const utpSocketHost = utpSocket.address().address; const utpSocketPort = utpSocket.address().port; - const authTokenEncoded = Buffer.from(authToken, 'utf-8').toString('base64'); - const clientSocket = await connect( - fwdProxy.proxyHost, - fwdProxy.proxyPort, - authTokenEncoded, + expect(fwdProxy.getConnectionCount()).toBe(0); + const clientSocket = await httpConnect( + fwdProxy.getProxyHost(), + fwdProxy.getProxyPort(), + authToken, `${utpSocketHost}:${utpSocketPort}?nodeId=${encodeURIComponent( serverNodeId, )}`, @@ -654,25 +1834,42 @@ describe('ForwardProxy', () => { await expect(remoteReadyP).resolves.toBeUndefined(); await expect(remoteSecureP).resolves.toBeUndefined(); expect(clientSocket).toBeInstanceOf(net.Socket); - expect(clientSocket.remoteAddress).toBe(fwdProxy.proxyHost); - expect(clientSocket.remotePort).toBe(fwdProxy.proxyPort); + expect(clientSocket.remoteAddress).toBe(fwdProxy.getProxyHost()); + expect(clientSocket.remotePort).toBe(fwdProxy.getProxyPort()); const { p: localClosedP, resolveP: resolveLocalClosedP } = promise(); clientSocket.on('close', () => { resolveLocalClosedP(); }); - await fwdProxy.closeConnection( - utpSocketHost as Host, - utpSocketPort as Port, - ); + 
expect(fwdProxy.getConnectionCount()).toBe(1); + // When ErrorConnectionTimeout is triggered + // This results in the destruction of the socket await expect(localClosedP).resolves.toBeUndefined(); await expect(remoteClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect( + poll( + async () => { + return fwdProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); + expect(utpConnError.mock.calls.length).toBe(0); + expect(tlsSocketError.mock.calls.length).toBe(0); + expect(tlsSocketEnd.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls.length).toBe(1); + expect(tlsSocketClose.mock.calls[0][0]).toBe(false); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); await fwdProxy.stop(); }); test('stopping the proxy with open connections', async () => { - const serverKeyPair = await keysUtils.generateKeyPair(4096); + const serverKeyPair = await keysUtils.generateKeyPair(1024); const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); const serverCert = keysUtils.generateCertificate( serverKeyPair.publicKey, @@ -682,44 +1879,55 @@ describe('ForwardProxy', () => { ); const serverCertPem = keysUtils.certToPem(serverCert); const serverNodeId = networkUtils.certNodeId(serverCert); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; + const fwdProxy = new ForwardProxy({ + authToken, + logger, + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); const { p: remoteReadyP, resolveP: resolveRemoteReadyP } = promise(); const { p: remoteSecureP, resolveP: resolveRemoteSecureP } = promise(); const { p: remoteClosedP, resolveP: resolveRemoteClosedP } = promise(); - const utpSocket = UTP.createServer( - async (utpConn) => { - const 
tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('secure', () => { - resolveRemoteSecureP(); - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { + const utpSocket = UTP.createServer(async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('secure', () => { + resolveRemoteSecureP(); + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP(); + }); + tlsSocket.on('end', () => { + logger.debug('Reverse: receives tlsSocket ending'); + if (utpConn.destroyed) { + logger.debug('Reverse: destroys tlsSocket'); + tlsSocket.destroy(); + } else { + logger.debug('Reverse: responds tlsSocket ending'); tlsSocket.end(); - }); + tlsSocket.destroy(); + } + }); + await send(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); + }, 1000); + await remoteReadyP; + clearInterval(punchInterval); + }); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -737,7 +1945,7 @@ describe('ForwardProxy', () => { await utpSocketListen(0, '127.0.0.1'); const utpSocketHost = utpSocket.address().address; const utpSocketPort = utpSocket.address().port; - expect(fwdProxy.connectionCount).toBe(0); + expect(fwdProxy.getConnectionCount()).toBe(0); await 
fwdProxy.openConnection( serverNodeId, utpSocketHost as Host, @@ -745,9 +1953,9 @@ describe('ForwardProxy', () => { ); await expect(remoteReadyP).resolves.toBeUndefined(); await expect(remoteSecureP).resolves.toBeUndefined(); - expect(fwdProxy.connectionCount).toBe(1); + expect(fwdProxy.getConnectionCount()).toBe(1); await fwdProxy.stop(); - expect(fwdProxy.connectionCount).toBe(0); + expect(fwdProxy.getConnectionCount()).toBe(0); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); @@ -755,7 +1963,7 @@ describe('ForwardProxy', () => { }); test('open connection to multiple servers', async () => { // First server keys - const serverKeyPair1 = await keysUtils.generateKeyPair(4096); + const serverKeyPair1 = await keysUtils.generateKeyPair(1024); const serverKeyPairPem1 = keysUtils.keyPairToPem(serverKeyPair1); const serverCert1 = keysUtils.generateCertificate( serverKeyPair1.publicKey, @@ -766,7 +1974,7 @@ describe('ForwardProxy', () => { const serverCertPem1 = keysUtils.certToPem(serverCert1); const serverNodeId1 = networkUtils.certNodeId(serverCert1); // Second server keys - const serverKeyPair2 = await keysUtils.generateKeyPair(4096); + const serverKeyPair2 = await keysUtils.generateKeyPair(1024); const serverKeyPairPem2 = keysUtils.keyPairToPem(serverKeyPair2); const serverCert2 = keysUtils.generateCertificate( serverKeyPair2.publicKey, @@ -776,8 +1984,18 @@ describe('ForwardProxy', () => { ); const serverCertPem2 = keysUtils.certToPem(serverCert2); const serverNodeId2 = networkUtils.certNodeId(serverCert2); - const egressHost = fwdProxy.egressHost; - const egressPort = fwdProxy.egressPort; + const fwdProxy = new ForwardProxy({ + authToken, + logger, + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyPairPem.privateKey, + certChainPem: certPem, + }, + }); + const egressHost = fwdProxy.getEgressHost(); + const egressPort = fwdProxy.getEgressPort(); // First signals const { p: remoteReadyP1, resolveP: resolveRemoteReadyP1 } = 
promise(); @@ -788,34 +2006,32 @@ describe('ForwardProxy', () => { promise(); const { p: remoteClosedP2, resolveP: resolveRemoteClosedP2 } = promise(); - const utpSocket1 = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: Buffer.from(serverKeyPairPem1.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem1, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP1(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { + const utpSocket1 = UTP.createServer(async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem1.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem1, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP1(); + }); + tlsSocket.on('end', () => { + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { tlsSocket.end(); - }); + tlsSocket.destroy(); + } + }); + await send1(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send1(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send1(networkUtils.pingBuffer); - }, 1000); - await remoteReadyP1; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); + }, 1000); + await remoteReadyP1; + clearInterval(punchInterval); + }); const handleMessage1 = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -833,34 +2049,32 @@ describe('ForwardProxy', () => { await utpSocketListen1(0, '127.0.0.1'); const utpSocketHost1 = utpSocket1.address().address; const utpSocketPort1 = utpSocket1.address().port; - const utpSocket2 = UTP.createServer( - async (utpConn) => { - const tlsSocket = new tls.TLSSocket(utpConn, { - key: 
Buffer.from(serverKeyPairPem2.privateKey, 'ascii'), - cert: Buffer.from(serverCertPem2, 'ascii'), - isServer: true, - requestCert: true, - rejectUnauthorized: false, - }); - tlsSocket.on('close', () => { - resolveRemoteClosedP2(); - }); - // AllowHalfOpen is buggy - // this ends the connection in case it doesn't work - tlsSocket.on('end', () => { + const utpSocket2 = UTP.createServer(async (utpConn) => { + const tlsSocket = new tls.TLSSocket(utpConn, { + key: Buffer.from(serverKeyPairPem2.privateKey, 'ascii'), + cert: Buffer.from(serverCertPem2, 'ascii'), + isServer: true, + requestCert: true, + rejectUnauthorized: false, + }); + tlsSocket.on('close', () => { + resolveRemoteClosedP2(); + }); + tlsSocket.on('end', () => { + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { tlsSocket.end(); - }); + tlsSocket.destroy(); + } + }); + await send2(networkUtils.pingBuffer); + const punchInterval = setInterval(async () => { await send2(networkUtils.pingBuffer); - const punchInterval = setInterval(async () => { - await send2(networkUtils.pingBuffer); - }, 2000); - await remoteReadyP2; - clearInterval(punchInterval); - }, - { - allowHalfOpen: false, - }, - ); + }, 2000); + await remoteReadyP2; + clearInterval(punchInterval); + }); const handleMessage2 = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { @@ -878,7 +2092,7 @@ describe('ForwardProxy', () => { await utpSocketListen2(0, '127.0.0.1'); const utpSocketHost2 = utpSocket2.address().address; const utpSocketPort2 = utpSocket2.address().port; - expect(fwdProxy.connectionCount).toBe(0); + expect(fwdProxy.getConnectionCount()).toBe(0); await fwdProxy.openConnection( serverNodeId1, utpSocketHost1 as Host, @@ -889,7 +2103,7 @@ describe('ForwardProxy', () => { utpSocketHost2 as Host, utpSocketPort2 as Port, ); - expect(fwdProxy.connectionCount).toBe(2); + expect(fwdProxy.getConnectionCount()).toBe(2); await expect(remoteReadyP1).resolves.toBeUndefined(); 
await expect(remoteReadyP2).resolves.toBeUndefined(); await fwdProxy.closeConnection( @@ -900,7 +2114,7 @@ describe('ForwardProxy', () => { utpSocketHost2 as Host, utpSocketPort2 as Port, ); - expect(fwdProxy.connectionCount).toBe(0); + expect(fwdProxy.getConnectionCount()).toBe(0); await expect(remoteClosedP1).resolves.toBeUndefined(); await expect(remoteClosedP2).resolves.toBeUndefined(); utpSocket1.off('message', handleMessage1); diff --git a/tests/network/ReverseProxy.test.ts b/tests/network/ReverseProxy.test.ts index f319d66b4..8f0f3550c 100644 --- a/tests/network/ReverseProxy.test.ts +++ b/tests/network/ReverseProxy.test.ts @@ -1,5 +1,4 @@ import type { AddressInfo } from 'net'; - import type { Host, Port } from '@/network/types'; import type { KeyPairPem } from '@/keys/types'; import net from 'net'; @@ -12,54 +11,75 @@ import { errors as networkErrors, } from '@/network'; import * as keysUtils from '@/keys/utils'; -import { promisify, promise, timerStart, timerStop, sleep } from '@/utils'; - -describe('ReverseProxy', () => { - const logger = new Logger('ReverseProxy Test', LogLevel.WARN, [ - new StreamHandler(), - ]); - let keyPairPem: KeyPairPem, certPem: string; +import { promisify, promise, timerStart, timerStop, poll } from '@/utils'; +import * as testUtils from '../utils'; - // Helper functions - function server(end: boolean = false) { - const { p: serverConnP, resolveP: resolveServerConnP } = promise(); - const { p: serverConnClosedP, resolveP: resolveServerConnClosedP } = - promise(); - const server = net.createServer((conn) => { +/** + * Mock TCP server + * This is the server that the ReverseProxy will be proxying to + */ +function tcpServer(end: boolean = false) { + const { p: serverConnP, resolveP: resolveServerConnP } = promise(); + const { p: serverConnEndP, resolveP: resolveServerConnEndP } = + promise(); + const { p: serverConnClosedP, resolveP: resolveServerConnClosedP } = + promise(); + const server = net.createServer( + { + allowHalfOpen: 
false, + }, + (conn) => { resolveServerConnP(); + conn.on('end', () => { + resolveServerConnEndP(); + conn.end(); + conn.destroy(); + }); conn.once('close', () => { resolveServerConnClosedP(); }); if (end) { + conn.removeAllListeners('end'); + conn.on('end', () => { + resolveServerConnEndP(); + conn.destroy(); + }); conn.end(); - conn.destroy(); } - }); - const serverClose = promisify(server.close).bind(server); - const serverListen = promisify(server.listen).bind(server); - const serverHost = () => { - return (server.address() as AddressInfo).address as Host; - }; - const serverPort = () => { - return (server.address() as AddressInfo).port as Port; - }; - return { - serverListen, - serverClose, - serverConnP, - serverConnClosedP, - serverHost, - serverPort, - }; - } + }, + ); + const serverClose = promisify(server.close).bind(server); + const serverListen = promisify(server.listen).bind(server); + const serverHost = () => { + return (server.address() as AddressInfo).address as Host; + }; + const serverPort = () => { + return (server.address() as AddressInfo).port as Port; + }; + return { + serverListen, + serverClose, + serverConnP, + serverConnEndP, + serverConnClosedP, + serverHost, + serverPort, + }; +} +describe(ReverseProxy.name, () => { + const logger = new Logger(`${ReverseProxy.name} test`, LogLevel.WARN, [ + new StreamHandler(), + ]); + let keyPairPem: KeyPairPem; + let certPem: string; beforeAll(async () => { - const keyPair = await keysUtils.generateKeyPair(4096); - keyPairPem = keysUtils.keyPairToPem(keyPair); + const globalKeyPair = await testUtils.setupGlobalKeypair(); + keyPairPem = keysUtils.keyPairToPem(globalKeyPair); const cert = keysUtils.generateCertificate( - keyPair.publicKey, - keyPair.privateKey, - keyPair.privateKey, + globalKeyPair.publicKey, + globalKeyPair.privateKey, + globalKeyPair.privateKey, 86400, ); certPem = keysUtils.certToPem(cert); @@ -68,7 +88,6 @@ describe('ReverseProxy', () => { const revProxy = new ReverseProxy({ logger: 
logger, }); - // Should be a noop await revProxy.stop(); await revProxy.start({ @@ -80,19 +99,13 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - await revProxy.stop(); - expect(() => { - revProxy.ingressHost; - }).toThrow(networkErrors.ErrorReverseProxyNotStarted); - expect(() => { - revProxy.getConnectionInfoByProxy('::1' as Host, 1 as Port); - }).toThrow(networkErrors.ErrorReverseProxyNotStarted); - }); - test('starting and stopping the reverse proxy', async () => { - const revProxy = new ReverseProxy({ - logger: logger, - }); - // Starting the rev proxy doesn't start a connection to the server + expect(typeof revProxy.getServerHost()).toBe('string'); + expect(typeof revProxy.getServerPort()).toBe('number'); + expect(revProxy.getServerPort()).toBeGreaterThan(0); + expect(typeof revProxy.getIngressHost()).toBe('string'); + expect(typeof revProxy.getIngressPort()).toBe('number'); + expect(revProxy.getIngressPort()).toBeGreaterThan(0); + // Should be a noop (already started) await revProxy.start({ serverHost: '::1' as Host, serverPort: 1 as Port, @@ -102,15 +115,14 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - expect(typeof revProxy.ingressHost).toBe('string'); - expect(typeof revProxy.ingressPort).toBe('number'); - expect(revProxy.ingressPort).toBeGreaterThan(0); - expect(typeof revProxy.serverHost).toBe('string'); - expect(typeof revProxy.serverPort).toBe('number'); - expect(revProxy.connectionCount).toBe(0); - expect(revProxy.serverHost).toBe('::1'); - expect(revProxy.serverPort).toBe(1); await revProxy.stop(); + expect(() => { + revProxy.getIngressHost(); + }).toThrow(networkErrors.ErrorReverseProxyNotRunning); + expect(() => { + revProxy.getConnectionInfoByProxy('::1' as Host, 1 as Port); + }).toThrow(networkErrors.ErrorReverseProxyNotRunning); + // Start it again await revProxy.start({ serverHost: '::1' as Host, serverPort: 1 as Port, @@ -120,12 +132,12 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); 
- expect(revProxy.ingressHost).toBe('127.0.0.1'); + expect(revProxy.getServerHost()).toBe('::1'); await revProxy.stop(); }); test('open connection to port 0 fails', async () => { const revProxy = new ReverseProxy({ - logger: logger, + logger: logger.getChild('ReverseProxy port 0'), }); const { serverListen, @@ -134,7 +146,7 @@ describe('ReverseProxy', () => { serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -164,7 +176,7 @@ describe('ReverseProxy', () => { serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -207,7 +219,7 @@ describe('ReverseProxy', () => { serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -218,8 +230,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const utpSocket = UTP(); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -236,7 +248,7 @@ describe('ReverseProxy', () => { await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); await revProxy.closeConnection('127.0.0.1' as Host, utpSocketPort as Port); await expect(serverConnP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); @@ -257,7 +269,7 @@ describe('ReverseProxy', () => { serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: 
serverHost(), @@ -268,8 +280,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); // First client const utpSocket1 = UTP(); const handleMessage1 = async (data: Buffer) => { @@ -304,10 +316,10 @@ describe('ReverseProxy', () => { const utpSocketPort2 = utpSocket2.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort1 as Port); await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort2 as Port); - expect(revProxy.connectionCount).toBe(2); + expect(revProxy.getConnectionCount()).toBe(2); await revProxy.closeConnection('127.0.0.1' as Host, utpSocketPort1 as Port); await revProxy.closeConnection('127.0.0.1' as Host, utpSocketPort2 as Port); - expect(revProxy.connectionCount).toBe(0); + expect(revProxy.getConnectionCount()).toBe(0); await expect(serverConnP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); utpSocket1.off('message', handleMessage1); @@ -328,10 +340,11 @@ describe('ReverseProxy', () => { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(true); + } = tcpServer(true); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -342,8 +355,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const utpSocket = UTP(); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -360,14 +373,14 @@ describe('ReverseProxy', () => { await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort 
as Port); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); await expect(serverConnP).resolves.toBeUndefined(); - // The server closed the connection + // The server receives the end confirmation for graceful exit + await expect(serverConnEndP).resolves.toBeUndefined(); + // The server is closed await expect(serverConnClosedP).resolves.toBeUndefined(); - // Wait for the end signal to be received - await sleep(2000); // The rev proxy won't have this connection - expect(revProxy.connectionCount).toBe(0); + expect(revProxy.getConnectionCount()).toBe(0); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); @@ -375,23 +388,27 @@ describe('ReverseProxy', () => { await serverClose(); }); test('connect timeout due to hanging client', async () => { - // ConnConnectTime will affect ErrorConnectionComposeTimeout during compose - // connTimeoutTime will affect ErrorConnectionTimeout which is needed - // because failing to connect to the open connection - // doesn't automatically mean the connection is destroyed + // `connConnectTime` will affect ErrorConnectionComposeTimeout + // `connKeepAliveTimeoutTime` will affect ErrorConnectionTimeout which is needed + // This should trigger both ErrorConnectionComposeTimeout and ErrorConnectionTimeout + // ErrorConnectionComposeTimeout results in a failed composition + // ErrorConnectionTimeout results in stopping the connection + // Failing to connect to the open connection doesn't + // automatically mean the connection is destroyed const revProxy = new ReverseProxy({ connConnectTime: 3000, - connTimeoutTime: 3000, + connKeepAliveTimeoutTime: 3000, logger: logger, }); const { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -402,8 +419,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - 
const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const utpSocket = UTP(); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -420,8 +437,9 @@ describe('ReverseProxy', () => { await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); // This retries multiple times + // This will eventually fail and trigger a ErrorConnectionComposeTimeout const utpConn = utpSocket.connect(ingressPort, ingressHost); utpConn.setTimeout(2000, () => { utpConn.emit('error', new Error('TIMED OUT')); @@ -439,11 +457,22 @@ describe('ReverseProxy', () => { // The client connection times out await expect(utpConnErrorP).rejects.toThrow(/TIMED OUT/); await utpConnClosedP; - // Wait for the open connection to timeout - await sleep(3000); - expect(revProxy.connectionCount).toBe(0); await expect(serverConnP).resolves.toBeUndefined(); + await expect(serverConnEndP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect( + poll( + async () => { + return revProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); @@ -451,23 +480,23 @@ describe('ReverseProxy', () => { await serverClose(); }); test('connect fails due to missing client certificates', async () => { - // ConnConnectTime will affect ErrorConnectionComposeTimeout during compose - // connTimeoutTime will affect ErrorConnectionTimeout which is needed - // because failing to connect to the open connection + // 
`connKeepAliveTimeoutTime` will affect ErrorConnectionTimeout + // Note that failing to connect to the open connection // doesn't automatically mean the connection is destroyed + // reverse proxy keeps the connection alive until `connKeepAliveTimeoutTime` expires const revProxy = new ReverseProxy({ - connConnectTime: 3000, - connTimeoutTime: 3000, + connKeepAliveTimeoutTime: 2000, logger: logger, }); const { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0); await revProxy.start({ serverHost: serverHost(), @@ -478,8 +507,8 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const utpSocket = UTP(); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -496,17 +525,12 @@ describe('ReverseProxy', () => { await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); const { p: tlsSocketClosedP, resolveP: resolveTlsSocketClosedP } = promise(); const utpConn = utpSocket.connect(ingressPort, ingressHost); - // This will propagate the error to tlsSocket - utpConn.setTimeout(2000, () => { - utpConn.emit('error', new Error('TIMED OUT')); - }); - // TLS socket without a certificate - // should also cause a timeout - // the secure event never occurs + // This will send an empty certificate chain + // Expect `ErrorCertChainEmpty` let secureConnection = false; const tlsSocket = tls.connect( { @@ -527,6 +551,7 @@ describe('ReverseProxy', () => { tlsSocket.destroy(); } else { tlsSocket.end(); + tlsSocket.destroy(); } }); 
tlsSocket.on('close', () => { @@ -537,11 +562,23 @@ describe('ReverseProxy', () => { // We won't receive an error because it will be closed expect(errored).toBe(false); expect(secureConnection).toBe(true); - // Wait for the open connection to timeout - await sleep(3000); - expect(revProxy.connectionCount).toBe(0); await expect(serverConnP).resolves.toBeUndefined(); + // Eventually `ErrorConnectionTimeout` occurs, and these will be gracefully closed + await expect(serverConnEndP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); + // Connection count should reach 0 eventually + await expect( + poll( + async () => { + return revProxy.getConnectionCount(); + }, + (_, result) => { + if (result === 0) return true; + return false; + }, + 100, + ), + ).resolves.toBe(0); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); @@ -549,7 +586,7 @@ describe('ReverseProxy', () => { await serverClose(); }); test('connect success', async () => { - const clientKeyPair = await keysUtils.generateKeyPair(4096); + const clientKeyPair = await keysUtils.generateKeyPair(1024); const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); const clientCert = keysUtils.generateCertificate( clientKeyPair.publicKey, @@ -562,10 +599,11 @@ describe('ReverseProxy', () => { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(); + } = tcpServer(); await serverListen(0, '127.0.0.1'); const revProxy = new ReverseProxy({ logger: logger, @@ -579,13 +617,13 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); const { p: clientReadyP, resolveP: resolveClientReadyP } = promise(); const { p: clientSecureConnectP, resolveP: resolveClientSecureConnectP } = promise(); const { p: clientCloseP, 
resolveP: resolveClientCloseP } = promise(); - const utpSocket = UTP({ allowHalfOpen: false }); + const utpSocket = UTP({ allowHalfOpen: true }); const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); @@ -614,26 +652,30 @@ describe('ReverseProxy', () => { resolveClientSecureConnectP(); }, ); + let tlsSocketEnded = false; tlsSocket.on('end', () => { - logger.info('GOT THE END EVENT'); + tlsSocketEnded = true; if (utpConn.destroyed) { tlsSocket.destroy(); } else { tlsSocket.end(); + tlsSocket.destroy(); } }); tlsSocket.on('close', () => { resolveClientCloseP(); }); await send(networkUtils.pingBuffer); - expect(revProxy.connectionCount).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); await clientReadyP; await clientSecureConnectP; await serverConnP; await revProxy.closeConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(0); + expect(revProxy.getConnectionCount()).toBe(0); await clientCloseP; + await serverConnEndP; await serverConnClosedP; + expect(tlsSocketEnded).toBe(true); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); @@ -641,18 +683,28 @@ describe('ReverseProxy', () => { await serverClose(); }); test('stopping the proxy with open connections', async () => { - const revProxy = new ReverseProxy({ - logger: logger, - }); + const clientKeyPair = await keysUtils.generateKeyPair(1024); + const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); + const clientCert = keysUtils.generateCertificate( + clientKeyPair.publicKey, + clientKeyPair.privateKey, + clientKeyPair.privateKey, + 86400, + ); + const clientCertPem = keysUtils.certToPem(clientCert); const { serverListen, serverClose, serverConnP, + serverConnEndP, serverConnClosedP, serverHost, serverPort, - } = server(); - await serverListen(0); + } = tcpServer(); + await serverListen(0, '127.0.0.1'); + const 
revProxy = new ReverseProxy({ + logger: logger, + }); await revProxy.start({ serverHost: serverHost(), serverPort: serverPort(), @@ -662,12 +714,18 @@ describe('ReverseProxy', () => { certChainPem: certPem, }, }); - const ingressHost = revProxy.ingressHost; - const ingressPort = revProxy.ingressPort; - const utpSocket = UTP(); + const ingressHost = revProxy.getIngressHost(); + const ingressPort = revProxy.getIngressPort(); + const { p: clientReadyP, resolveP: resolveClientReadyP } = promise(); + const { p: clientSecureConnectP, resolveP: resolveClientSecureConnectP } = + promise(); + const { p: clientCloseP, resolveP: resolveClientCloseP } = promise(); + const utpSocket = UTP({ allowHalfOpen: true }); + const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); const handleMessage = async (data: Buffer) => { const msg = networkUtils.unserializeNetworkMessage(data); if (msg.type === 'ping') { + resolveClientReadyP(); await send(networkUtils.pongBuffer); } }; @@ -676,15 +734,46 @@ describe('ReverseProxy', () => { const utpSocketSend = promisify(utpSocket.send).bind(utpSocket); await utpSocketSend(data, 0, data.byteLength, ingressPort, ingressHost); }; - const utpSocketBind = promisify(utpSocket.bind).bind(utpSocket); await utpSocketBind(0, '127.0.0.1'); const utpSocketPort = utpSocket.address().port; await revProxy.openConnection('127.0.0.1' as Host, utpSocketPort as Port); - expect(revProxy.connectionCount).toBe(1); + const utpConn = utpSocket.connect(ingressPort, ingressHost); + const tlsSocket = tls.connect( + { + key: Buffer.from(clientKeyPairPem.privateKey, 'ascii'), + cert: Buffer.from(clientCertPem, 'ascii'), + socket: utpConn, + rejectUnauthorized: false, + }, + () => { + resolveClientSecureConnectP(); + }, + ); + let tlsSocketEnded = false; + tlsSocket.on('end', () => { + tlsSocketEnded = true; + if (utpConn.destroyed) { + tlsSocket.destroy(); + } else { + tlsSocket.end(); + tlsSocket.destroy(); + } + }); + tlsSocket.on('close', () => { + 
resolveClientCloseP(); + }); + await send(networkUtils.pingBuffer); + expect(revProxy.getConnectionCount()).toBe(1); + await clientReadyP; + await clientSecureConnectP; + await serverConnP; + // Stopping with 1 active connection (not just opened) await revProxy.stop(); - expect(revProxy.connectionCount).toBe(0); - await expect(serverConnP).resolves.toBeUndefined(); + expect(revProxy.getConnectionCount()).toBe(0); + await clientCloseP; + await expect(serverConnEndP).resolves.toBeUndefined(); await expect(serverConnClosedP).resolves.toBeUndefined(); + expect(tlsSocketEnded).toBe(true); utpSocket.off('message', handleMessage); utpSocket.close(); utpSocket.unref(); diff --git a/tests/network/index.test.ts b/tests/network/index.test.ts index 128aad672..913e045a0 100644 --- a/tests/network/index.test.ts +++ b/tests/network/index.test.ts @@ -4,40 +4,54 @@ import grpc from '@grpc/grpc-js'; import { utils as keysUtils } from '@/keys'; import { ForwardProxy, ReverseProxy, utils as networkUtils } from '@/network'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; +import { sleep } from '@/utils'; import { openTestServer, closeTestServer, GRPCClientTest } from '../grpc/utils'; describe('network index', () => { const logger = new Logger('Network Test', LogLevel.WARN, [ new StreamHandler(), ]); - test('integration of forward and reverse proxy', async () => { + const authenticate = async (_metaClient, metaServer = new grpc.Metadata()) => + metaServer; + let clientKeyPairPem; + let clientCertPem; + let clientNodeId; + let serverKeyPairPem; + let serverCertPem; + let serverNodeId; + beforeAll(async () => { // Client keys - const clientKeyPair = await keysUtils.generateKeyPair(4096); - const clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); + const clientKeyPair = await keysUtils.generateKeyPair(1024); + clientKeyPairPem = keysUtils.keyPairToPem(clientKeyPair); const clientCert = keysUtils.generateCertificate( clientKeyPair.publicKey, clientKeyPair.privateKey, 
clientKeyPair.privateKey, 12332432423, ); - const clientCertPem = keysUtils.certToPem(clientCert); - const clientNodeId = networkUtils.certNodeId(clientCert); + clientCertPem = keysUtils.certToPem(clientCert); + clientNodeId = networkUtils.certNodeId(clientCert); // Server keys - const serverKeyPair = await keysUtils.generateKeyPair(4096); - const serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); + const serverKeyPair = await keysUtils.generateKeyPair(1024); + serverKeyPairPem = keysUtils.keyPairToPem(serverKeyPair); const serverCert = keysUtils.generateCertificate( serverKeyPair.publicKey, serverKeyPair.privateKey, serverKeyPair.privateKey, 12332432423, ); - const serverCertPem = keysUtils.certToPem(serverCert); - const serverNodeId = networkUtils.certNodeId(serverCert); - const authenticate = async (metaClient, metaServer = new grpc.Metadata()) => - metaServer; - const [server, serverPort] = await openTestServer(authenticate, logger); - const revProxy = new ReverseProxy({ - logger, + serverCertPem = keysUtils.certToPem(serverCert); + serverNodeId = networkUtils.certNodeId(serverCert); + }); + let server; + let revProxy; + let fwdProxy; + let client; + beforeEach(async () => { + let serverPort; + [server, serverPort] = await openTestServer(authenticate, logger); + revProxy = new ReverseProxy({ + logger: logger.getChild('ReverseProxy integration'), }); await revProxy.start({ serverHost: '127.0.0.1' as Host, @@ -49,9 +63,9 @@ describe('network index', () => { certChainPem: serverCertPem, }, }); - const fwdProxy = new ForwardProxy({ + fwdProxy = new ForwardProxy({ authToken: 'abc', - logger, + logger: logger.getChild('ForwardProxy integration'), }); await fwdProxy.start({ tlsConfig: { @@ -63,17 +77,27 @@ describe('network index', () => { egressHost: '127.0.0.1' as Host, egressPort: 0 as Port, }); - const client = await GRPCClientTest.createGRPCClientTest({ + client = await GRPCClientTest.createGRPCClientTest({ nodeId: serverNodeId, - host: 
revProxy.ingressHost, - port: revProxy.ingressPort, + host: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), proxyConfig: { - host: fwdProxy.proxyHost, - port: fwdProxy.proxyPort, + host: fwdProxy.getProxyHost(), + port: fwdProxy.getProxyPort(), authToken: fwdProxy.authToken, }, logger, }); + }); + afterEach(async () => { + // All calls here are idempotent + // they will work even when they are already shutdown + await client.destroy(); + await fwdProxy.stop(); + await revProxy.stop(); + await closeTestServer(server); + }); + test('grpc integration with unary and stream calls', async () => { const m = new utilsPB.EchoMessage(); const challenge = 'Hello!'; m.setChallenge(challenge); @@ -105,36 +129,65 @@ describe('network index', () => { expect(duplexStreamResponse.value.getChallenge()).toBe(m.getChallenge()); } // Ensure that the connection count is the same - expect(fwdProxy.connectionCount).toBe(1); - expect(revProxy.connectionCount).toBe(1); + expect(fwdProxy.getConnectionCount()).toBe(1); + expect(revProxy.getConnectionCount()).toBe(1); expect( fwdProxy.getConnectionInfoByIngress(client.host, client.port), ).toEqual( expect.objectContaining({ nodeId: serverNodeId, - egressHost: fwdProxy.egressHost, - egressPort: fwdProxy.egressPort, - ingressHost: revProxy.ingressHost, - ingressPort: revProxy.ingressPort, + egressHost: fwdProxy.getEgressHost(), + egressPort: fwdProxy.getEgressPort(), + ingressHost: revProxy.getIngressHost(), + ingressPort: revProxy.getIngressPort(), }), ); expect( revProxy.getConnectionInfoByEgress( - fwdProxy.egressHost, - fwdProxy.egressPort, + fwdProxy.getEgressHost(), + fwdProxy.getEgressPort(), ), ).toEqual( expect.objectContaining({ nodeId: clientNodeId, - egressHost: fwdProxy.egressHost, - egressPort: fwdProxy.egressPort, - ingressHost: revProxy.ingressHost, - ingressPort: revProxy.ingressPort, + egressHost: fwdProxy.getEgressHost(), + egressPort: fwdProxy.getEgressPort(), + ingressHost: revProxy.getIngressHost(), + 
ingressPort: revProxy.getIngressPort(), }), ); + }); + test('client initiates end', async () => { + // Wait for network to settle + await sleep(100); + // GRPC client end simultaneously triggers the server to end the connection + // This is because the GRPC send ending frames at HTTP2-level await client.destroy(); + // Wait for network to settle + await sleep(100); + }); + test('server initiates end', async () => { + // Wait for network to settle + await sleep(100); + // Closing the GRPC server will automatically change the state of the client + // However because the GRPCClient has not integrated state changes of the underlying channel + // Then the GRPCClient won't be in a destroyed state until we explicitly destroy it + await closeTestServer(server); + // Wait for network to settle + await sleep(100); + }); + test('forward initiates end', async () => { + // Wait for network to settle + await sleep(100); await fwdProxy.stop(); + // Wait for network to settle + await sleep(100); + }); + test('reverse initiates end', async () => { + // Wait for network to settle + await sleep(100); await revProxy.stop(); - await closeTestServer(server); + // Wait for network to settle + await sleep(100); }); }); diff --git a/tests/nodes/NodeConnection.test.ts b/tests/nodes/NodeConnection.test.ts index 3407119a0..35aac2fb9 100644 --- a/tests/nodes/NodeConnection.test.ts +++ b/tests/nodes/NodeConnection.test.ts @@ -9,7 +9,7 @@ import { DB } from '@matrixai/db'; import { ForwardProxy, ReverseProxy } from '@/network'; import { NodeConnection, NodeManager } from '@/nodes'; import { VaultManager } from '@/vaults'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import GRPCServer from '@/grpc/GRPCServer'; import { AgentServiceService, createAgentService } from '@/agent'; import { ACL } from '@/acl'; @@ -23,7 +23,6 @@ import * as networkErrors from '@/network/errors'; import { makeNodeId } from '@/nodes/utils'; import { poll } from '@/utils'; 
import * as nodesTestUtils from './utils'; -import { makeCrypto } from '../utils'; // Mocks. jest.mock('@/keys/utils', () => ({ @@ -134,7 +133,13 @@ describe('NodeConnection', () => { dbPath: serverDbPath, fs: fs, logger: logger, - crypto: makeCrypto(serverKeyManager.dbKey), + crypto: { + key: serverKeyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); serverACL = await ACL.createACL({ db: serverDb, @@ -402,8 +407,8 @@ describe('NodeConnection', () => { await conn.stop(); await serverRevProxy.closeConnection( - clientFwdProxy.egressHost, - clientFwdProxy.egressPort, + clientFwdProxy.getEgressHost(), + clientFwdProxy.getEgressPort(), ); await conn.destroy(); }); @@ -461,8 +466,8 @@ describe('NodeConnection', () => { await conn.stop(); await serverRevProxy.closeConnection( - clientFwdProxy.egressHost, - clientFwdProxy.egressPort, + clientFwdProxy.getEgressHost(), + clientFwdProxy.getEgressPort(), ); await conn.destroy(); }); diff --git a/tests/nodes/NodeGraph.test.ts b/tests/nodes/NodeGraph.test.ts index 71c8ff1de..c9f9ac4ad 100644 --- a/tests/nodes/NodeGraph.test.ts +++ b/tests/nodes/NodeGraph.test.ts @@ -7,13 +7,12 @@ import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; import { NodeManager, errors as nodesErrors } from '@/nodes'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import { ForwardProxy, ReverseProxy } from '@/network'; import * as nodesUtils from '@/nodes/utils'; import { Sigchain } from '@/sigchain'; import { makeNodeId } from '@/nodes/utils'; import * as nodesTestUtils from './utils'; -import { makeCrypto } from '../utils'; // Mocks. 
jest.mock('@/keys/utils', () => ({ @@ -130,7 +129,13 @@ describe('NodeGraph', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); sigchain = await Sigchain.createSigchain({ keyManager: keyManager, diff --git a/tests/nodes/NodeManager.test.ts b/tests/nodes/NodeManager.test.ts index 060bb6fc0..c91123f60 100644 --- a/tests/nodes/NodeManager.test.ts +++ b/tests/nodes/NodeManager.test.ts @@ -2,24 +2,19 @@ import type { ClaimIdString } from '@/claims/types'; import type { CertificatePem, KeyPairPem, PublicKeyPem } from '@/keys/types'; import type { Host, Port } from '@/network/types'; import type { NodeId, NodeAddress } from '@/nodes/types'; -import type { PolykeyAgent } from '@'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; - import { DB } from '@matrixai/db'; -import { KeyManager } from '@/keys'; -import { NodeManager } from '@/nodes'; +import { PolykeyAgent } from '@'; +import { KeyManager, utils as keysUtils } from '@/keys'; +import { NodeManager, errors as nodesErrors } from '@/nodes'; import { ForwardProxy, ReverseProxy } from '@/network'; import { Sigchain } from '@/sigchain'; -import { sleep } from '@/utils'; -import * as nodesErrors from '@/nodes/errors'; -import * as claimsUtils from '@/claims/utils'; +import { utils as claimsUtils } from '@/claims'; import { makeNodeId } from '@/nodes/utils'; -import * as keysUtils from '@/keys/utils'; -import { makeCrypto } from '../utils'; -import * as testUtils from '../utils'; +import { sleep } from '@/utils'; // Mocks. 
jest.mock('@/keys/utils', () => ({ @@ -98,7 +93,13 @@ describe('NodeManager', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); sigchain = await Sigchain.createSigchain({ keyManager, db, logger }); @@ -144,22 +145,39 @@ describe('NodeManager', () => { // await expect(nodeManager.writeToken()).rejects.toThrow(nodesErrors.ErrorNodeManagerNotRunning); }); describe('getConnectionToNode', () => { + let targetDataDir: string; let target: PolykeyAgent; let targetNodeId: NodeId; let targetNodeAddress: NodeAddress; beforeAll(async () => { - target = await testUtils.setupRemoteKeynode({ - logger: logger, + targetDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + target = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: targetDataDir, + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, }); }, global.polykeyStartupTimeout); + afterAll(async () => { + await target.stop(); + await fs.promises.rm(targetDataDir, { + force: true, + recursive: true, + }); + }); + beforeEach(async () => { await target.start({ password: 'password' }); targetNodeId = target.keyManager.getNodeId(); targetNodeAddress = { - host: target.revProxy.ingressHost, - port: target.revProxy.ingressPort, + host: target.revProxy.getIngressHost(), + port: target.revProxy.getIngressPort(), }; await nodeManager.setNode(targetNodeId, targetNodeAddress); }); @@ -169,10 +187,6 @@ describe('NodeManager', () => { await target.stop(); }); - afterAll(async () => { - await testUtils.cleanupRemoteKeynode(target); - }); - test('creates new connection to node', async () => { // @ts-ignore get connection + lock from protected NodeConnectionMap const initialConnLock = nodeManager.connections.get(targetNodeId); @@ -254,13 +268,18 @@ describe('NodeManager', () => { test( 'pings node', async () => { 
- const server = await testUtils.setupRemoteKeynode({ + const server = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(dataDir, 'server'), + keysConfig: { + rootKeyPairBits: 2048, + }, logger: logger, }); const serverNodeId = server.nodeManager.getNodeId(); let serverNodeAddress: NodeAddress = { - host: server.revProxy.ingressHost, - port: server.revProxy.ingressPort, + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), }; await nodeManager.setNode(serverNodeId, serverNodeAddress); @@ -274,8 +293,8 @@ describe('NodeManager', () => { await server.start({ password: 'password' }); // Update the node address (only changes because we start and stop) serverNodeAddress = { - host: server.revProxy.ingressHost, - port: server.revProxy.ingressPort, + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), }; await nodeManager.setNode(serverNodeId, serverNodeAddress); // Check if active @@ -292,8 +311,6 @@ describe('NodeManager', () => { // Case 3: pre-existing connection no longer active, so offline const active3 = await nodeManager.pingNode(serverNodeId); expect(active3).toBe(false); - - await testUtils.cleanupRemoteKeynode(server); }, global.failedConnectionTimeout * 2, ); // Ping needs to timeout (takes 20 seconds + setup + pulldown) @@ -319,16 +336,25 @@ describe('NodeManager', () => { host: '127.0.0.1' as Host, port: 11111 as Port, }; - const server = await testUtils.setupRemoteKeynode({ logger: logger }); + + const server = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(dataDir, 'server'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger: logger, + }); + await nodeManager.setNode(server.nodeManager.getNodeId(), { - host: server.revProxy.ingressHost, - port: server.revProxy.ingressPort, + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), } as NodeAddress); await server.nodeManager.setNode(nodeId, 
nodeAddress); const foundAddress2 = await nodeManager.findNode(nodeId); expect(foundAddress2).toStrictEqual(nodeAddress); - await testUtils.cleanupRemoteKeynode(server); + await server.stop(); }, global.polykeyStartupTimeout, ); @@ -337,10 +363,17 @@ describe('NodeManager', () => { async () => { // Case 3: node exhausts all contacts and cannot find node const nodeId = nodeId1; - const server = await testUtils.setupRemoteKeynode({ logger: logger }); + const server = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(dataDir, 'server'), + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, + }); await nodeManager.setNode(server.nodeManager.getNodeId(), { - host: server.revProxy.ingressHost, - port: server.revProxy.ingressPort, + host: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), } as NodeAddress); // Add a dummy node to the server node graph database // Server will not be able to connect to this node (the only node in its @@ -353,8 +386,7 @@ describe('NodeManager', () => { await expect(() => nodeManager.findNode(nodeId)).rejects.toThrowError( nodesErrors.ErrorNodeGraphNodeNotFound, ); - - await testUtils.cleanupRemoteKeynode(server); + await server.stop(); }, global.failedConnectionTimeout * 2, ); @@ -380,34 +412,53 @@ describe('NodeManager', () => { // We're unable to mock the actions of the server, but we can ensure the // state on each side is as expected. 
+ let xDataDir: string; let x: PolykeyAgent; let xNodeId: NodeId; let xNodeAddress: NodeAddress; let xPublicKey: PublicKeyPem; + let yDataDir: string; let y: PolykeyAgent; let yNodeId: NodeId; let yNodeAddress: NodeAddress; let yPublicKey: PublicKeyPem; beforeAll(async () => { - x = await testUtils.setupRemoteKeynode({ - logger: logger, + xDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + x = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: xDataDir, + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, }); + xNodeId = x.nodeManager.getNodeId(); xNodeAddress = { - host: x.revProxy.ingressHost, - port: x.revProxy.ingressPort, + host: x.revProxy.getIngressHost(), + port: x.revProxy.getIngressPort(), }; xPublicKey = x.keyManager.getRootKeyPairPem().publicKey; - y = await testUtils.setupRemoteKeynode({ - logger: logger, + yDataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + y = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: yDataDir, + keysConfig: { + rootKeyPairBits: 2048, + }, + logger, }); yNodeId = y.nodeManager.getNodeId(); yNodeAddress = { - host: y.revProxy.ingressHost, - port: y.revProxy.ingressPort, + host: y.revProxy.getIngressHost(), + port: y.revProxy.getIngressPort(), }; yPublicKey = y.keyManager.getRootKeyPairPem().publicKey; @@ -415,8 +466,16 @@ describe('NodeManager', () => { await y.nodeManager.setNode(xNodeId, xNodeAddress); }, global.polykeyStartupTimeout * 2); afterAll(async () => { - await testUtils.cleanupRemoteKeynode(x); - await testUtils.cleanupRemoteKeynode(y); + await y.stop(); + await x.stop(); + await fs.promises.rm(yDataDir, { + force: true, + recursive: true, + }); + await fs.promises.rm(xDataDir, { + force: true, + recursive: true, + }); }); // Make sure to remove any side-effects after each test diff --git a/tests/nodes/TestNodeConnection.ts b/tests/nodes/TestNodeConnection.ts index 56c1eb4a4..953c0d106 
100644 --- a/tests/nodes/TestNodeConnection.ts +++ b/tests/nodes/TestNodeConnection.ts @@ -35,8 +35,8 @@ class TestNodeConnection extends NodeConnection { }): Promise { const logger_ = logger ?? new Logger('NodeConnection'); const proxyConfig_ = { - host: forwardProxy.proxyHost, - port: forwardProxy.proxyPort, + host: forwardProxy.getProxyHost(), + port: forwardProxy.getProxyPort(), authToken: forwardProxy.authToken, } as ProxyConfig; return new TestNodeConnection({ diff --git a/tests/nodes/utils.ts b/tests/nodes/utils.ts index da824bae8..960e844d3 100644 --- a/tests/nodes/utils.ts +++ b/tests/nodes/utils.ts @@ -1,5 +1,6 @@ -import type { NodeId } from '@/nodes/types'; +import type { NodeId, NodeAddress } from '@/nodes/types'; +import type { PolykeyAgent } from '@'; import * as nodesUtils from '@/nodes/utils'; import { makeNodeId } from '@/nodes/utils'; import { fromMultibase } from '@/GenericIdTypes'; @@ -82,4 +83,12 @@ function bigIntToBuffer(number: BigInt) { return u8; } -export { generateNodeIdForBucket, incrementNodeId }; +async function nodesConnect(localNode: PolykeyAgent, remoteNode: PolykeyAgent) { + // Add remote node's details to local node + await localNode.nodeManager.setNode(remoteNode.nodeManager.getNodeId(), { + host: remoteNode.revProxy.getIngressHost(), + port: remoteNode.revProxy.getIngressPort(), + } as NodeAddress); +} + +export { generateNodeIdForBucket, incrementNodeId, nodesConnect }; diff --git a/tests/notifications/NotificationsManager.test.ts b/tests/notifications/NotificationsManager.test.ts index 6ef1a048a..258b41319 100644 --- a/tests/notifications/NotificationsManager.test.ts +++ b/tests/notifications/NotificationsManager.test.ts @@ -12,7 +12,7 @@ import { DB } from '@matrixai/db'; import { ACL } from '@/acl'; import { Sigchain } from '@/sigchain'; import { GRPCServer } from '@/grpc'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import { VaultManager } from '@/vaults'; import { 
GestaltGraph } from '@/gestalts'; import { NodeManager } from '@/nodes'; @@ -22,7 +22,6 @@ import { AgentServiceService, createAgentService } from '@/agent'; import * as networkUtils from '@/network/utils'; import { generateVaultId } from '@/vaults/utils'; -import { makeCrypto } from '../utils'; // Mocks. jest.mock('@/keys/utils', () => ({ @@ -130,7 +129,13 @@ describe('NotificationsManager', () => { dbPath: receiverDbPath, fs: fs, logger: logger, - crypto: makeCrypto(receiverKeyManager.dbKey), + crypto: { + key: receiverKeyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); receiverACL = await ACL.createACL({ db: receiverDb, @@ -204,7 +209,7 @@ describe('NotificationsManager', () => { ingressHost: receiverHost, tlsConfig: revTLSConfig, }); - receiverIngressPort = revProxy.ingressPort; + receiverIngressPort = revProxy.getIngressPort(); }, global.polykeyStartupTimeout * 2); beforeEach(async () => { @@ -222,7 +227,13 @@ describe('NotificationsManager', () => { dbPath: senderDbPath, fs, logger, - crypto: makeCrypto(senderKeyManager.dbKey), + crypto: { + key: senderKeyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); senderACL = await ACL.createACL({ db: senderDb, logger }); senderSigchain = await Sigchain.createSigchain({ diff --git a/tests/sessions/Session.test.ts b/tests/sessions/Session.test.ts index b853eba35..f79456b6a 100644 --- a/tests/sessions/Session.test.ts +++ b/tests/sessions/Session.test.ts @@ -37,12 +37,6 @@ describe('Session', () => { await expect(session.start()).rejects.toThrow( sessionErrors.ErrorSessionDestroyed, ); - await expect(session.readToken()).rejects.toThrow( - sessionErrors.ErrorSessionNotRunning, - ); - await expect(session.writeToken('abc' as SessionToken)).rejects.toThrow( - sessionErrors.ErrorSessionNotRunning, - ); }); test('creating session', async () => { const session1 = await Session.createSession({ diff --git 
a/tests/sessions/SessionManager.test.ts b/tests/sessions/SessionManager.test.ts index beaca9f0f..31461996b 100644 --- a/tests/sessions/SessionManager.test.ts +++ b/tests/sessions/SessionManager.test.ts @@ -3,23 +3,18 @@ import os from 'os'; import path from 'path'; import { DB } from '@matrixai/db'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { KeyManager } from '@/keys'; -import SessionManager from '@/sessions/SessionManager'; -import * as sessionsErrors from '@/sessions/errors'; +import { KeyManager, utils as keysUtils } from '@/keys'; +import { SessionManager, errors as sessionsErrors } from '@/sessions'; import { sleep } from '@/utils'; -import * as keysUtils from '@/keys/utils'; - -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import * as testUtils from '../utils'; describe('SessionManager', () => { const password = 'password'; const logger = new Logger(`${SessionManager.name} Test`, LogLevel.WARN, [ new StreamHandler(), ]); + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; /** * Shared db, keyManager for all tests */ @@ -27,6 +22,13 @@ describe('SessionManager', () => { let db: DB; let keyManager: KeyManager; beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -56,6 +58,8 @@ describe('SessionManager', () => { force: true, recursive: true, }); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); }); test('session manager readiness', async () => { const 
sessionManager = await SessionManager.createSessionManager({ diff --git a/tests/setup.ts b/tests/setup.ts index 5cac9b6aa..e69de29bb 100644 --- a/tests/setup.ts +++ b/tests/setup.ts @@ -1,59 +0,0 @@ -import os from 'os'; -import path from 'path'; - -declare global { - namespace NodeJS { - interface Global { - projectDir: string; - testDir: string; - keyPairDir: string; - binAgentDir: string; - binAgentPassword: string; - defaultTimeout: number; - polykeyStartupTimeout: number; - failedConnectionTimeout: number; - maxTimeout: number; - } - } -} - -/** - * Absolute directory to the project root - */ -global.projectDir = path.join(__dirname, '../'); - -/** - * Absolute directory to the test root - */ -global.testDir = __dirname; - -/** - * Absolute directory to shared keypair directory - * Generating the root key pair takes time - * This global key pair can be used by mocks - */ -global.keyPairDir = path.join(os.tmpdir(), 'polykey-test-keypair'); - -/** - * Absolute directory to a shared data directory used by bin tests - * This has to be a static path - * The setup.ts is copied into each test module - */ -global.binAgentDir = path.join(os.tmpdir(), 'polykey-test-bin'); - -/** - * Shared password for agent used by for bin tests - */ -global.binAgentPassword = 'hello world'; - -/** - * Default asynchronous test timeout - */ -global.defaultTimeout = 20000; -global.polykeyStartupTimeout = 30000; -global.failedConnectionTimeout = 50000; - -/** - * Timeouts rely on setTimeout which takes 32 bit numbers - */ -global.maxTimeout = Math.pow(2, 31) - 1; diff --git a/tests/sigchain/Sigchain.test.ts b/tests/sigchain/Sigchain.test.ts index c38a92333..ab0f65330 100644 --- a/tests/sigchain/Sigchain.test.ts +++ b/tests/sigchain/Sigchain.test.ts @@ -6,29 +6,36 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as 
keysUtils } from '@/keys'; import { Sigchain } from '@/sigchain'; import * as claimsUtils from '@/claims/utils'; import * as sigchainErrors from '@/sigchain/errors'; -import { makeCrypto } from '../utils'; - -// Mocks. -jest.mock('@/keys/utils', () => ({ - ...jest.requireActual('@/keys/utils'), - generateDeterministicKeyPair: - jest.requireActual('@/keys/utils').generateKeyPair, -})); +import * as testUtils from '../utils'; describe('Sigchain', () => { - const password = 'password'; const logger = new Logger('Sigchain Test', LogLevel.WARN, [ new StreamHandler(), ]); + const password = 'password'; + const srcNodeId = 'NodeId1' as NodeId; + let mockedGenerateKeyPair: jest.SpyInstance; + let mockedGenerateDeterministicKeyPair: jest.SpyInstance; + beforeAll(async () => { + const globalKeyPair = await testUtils.setupGlobalKeypair(); + mockedGenerateKeyPair = jest + .spyOn(keysUtils, 'generateKeyPair') + .mockResolvedValue(globalKeyPair); + mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockResolvedValue(globalKeyPair); + }); + afterAll(async () => { + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + }); let dataDir: string; let keyManager: KeyManager; let db: DB; - const srcNodeId = 'NodeId1' as NodeId; - beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), @@ -43,7 +50,13 @@ describe('Sigchain', () => { db = await DB.createDB({ dbPath, logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); }); afterEach(async () => { @@ -72,9 +85,6 @@ describe('Sigchain', () => { await expect(async () => { await sigchain.getSequenceNumber(); }).rejects.toThrow(sigchainErrors.ErrorSigchainNotRunning); - await expect(async () => { - await sigchain.getLatestClaimId(); - 
}).rejects.toThrow(sigchainErrors.ErrorSigchainNotRunning); }); test('async start initialises the sequence number', async () => { const sigchain = await Sigchain.createSigchain({ keyManager, db, logger }); @@ -89,9 +99,8 @@ describe('Sigchain', () => { node1: srcNodeId, node2: 'NodeId2' as NodeId, }; - await sigchain.addClaim(cryptolink); + const [claimId] = await sigchain.addClaim(cryptolink); - const claimId = await sigchain.getLatestClaimId(); expect(claimId).toBeTruthy(); const claim = await sigchain.getClaim(claimId!); @@ -134,16 +143,14 @@ describe('Sigchain', () => { node1: srcNodeId, node2: 'NodeId2' as NodeId, }; - await sigchain.addClaim(cryptolink); - const claimId1 = await sigchain.getLatestClaimId(); + const [claimId1] = await sigchain.addClaim(cryptolink); const cryptolink2: ClaimData = { type: 'node', node1: srcNodeId, node2: 'NodeId3' as NodeId, }; - await sigchain.addClaim(cryptolink2); - const claimId2 = await sigchain.getLatestClaimId(); + const [claimId2] = await sigchain.addClaim(cryptolink2); const claim1 = await sigchain.getClaim(claimId1!); const claim2 = await sigchain.getClaim(claimId2!); diff --git a/tests/status/Status.test.ts b/tests/status/Status.test.ts index ce8f9248c..02c04ae15 100644 --- a/tests/status/Status.test.ts +++ b/tests/status/Status.test.ts @@ -4,57 +4,57 @@ import fs from 'fs'; import os from 'os'; import path from 'path'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { sleep, errors as utilsErrors } from '@/utils'; +import config from '@/config'; import { Status, errors as statusErrors } from '@/status'; -describe('Lockfile is', () => { - const logger = new Logger('Lockfile Test', LogLevel.WARN, [ +describe('Status', () => { + const logger = new Logger(`${Status.name} Test`, LogLevel.WARN, [ new StreamHandler(), ]); - const waitForTimeout = 1000; let dataDir: string; - let status: Status; - let statusPath: string; - beforeEach(async () => { dataDir = await 
fs.promises.mkdtemp(path.join(os.tmpdir(), 'status-test-')); - statusPath = path.join(dataDir, 'status'); - status = new Status({ - statusPath, - fs: fs, - logger: logger, - }); }); - afterEach(async () => { - await status.stop({}); await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - - test('type correct', () => { - expect(status).toBeInstanceOf(Status); - }); - - test('starting and stopping with correct side effects', async () => { + test('status readiness', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + await status.start({ pid: 0 }); + // Should be a noop await status.start({ pid: 0 }); - await status.readStatus(); expect(fs.existsSync(status.statusPath)).toBe(true); - - await status.stop({ lol: 2 }); - await sleep(1000); + expect(fs.existsSync(status.statusLockPath)).toBe(true); + await status.stop({ foo: 'bar' }); expect(fs.existsSync(status.statusPath)).toBe(true); - const state = await status.readStatus(); - expect(state?.status).toEqual('DEAD'); + expect(fs.existsSync(status.statusLockPath)).toBe(false); + let statusInfo = await status.readStatus(); + expect(statusInfo?.status).toEqual('DEAD'); + await status.start({ pid: 0 }); + statusInfo = await status.readStatus(); + expect(statusInfo?.status).toEqual('STARTING'); + await status.stop({}); }); - - test('updating data and parsing it correctly', async () => { + test('status transitions', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); await status.start({ pid: 0 }); - const lock1 = await status.readStatus(); - expect(lock1?.data.pid).toBeDefined(); - + const statusInfo1 = await status.readStatus(); + expect(statusInfo1).toBeDefined(); + 
expect(statusInfo1!.status).toBe('STARTING'); + expect(statusInfo1!.data.pid).toBe(0); await status.finishStart({ pid: 0, nodeId: 'node' as NodeId, @@ -66,181 +66,217 @@ describe('Lockfile is', () => { grpcPort: 12345, anything: 'something', }); - - const lock2 = await status.readStatus(); - if (lock2) { - expect(lock2.data.pid).toBeDefined(); - expect(lock2.data.grpcHost).toBe('localhost'); - expect(lock2.data.grpcPort).toBe(12345); - expect(lock2.data.anything).toBe('something'); - } else { - throw new Error('Lock should exist'); - } - + const statusInfo2 = await status.readStatus(); + expect(statusInfo2).toBeDefined(); + expect(statusInfo2!.status).toBe('LIVE'); + expect(statusInfo2!.data.pid).toBeDefined(); + expect(statusInfo2!.data.grpcHost).toBe('localhost'); + expect(statusInfo2!.data.grpcPort).toBe(12345); + expect(statusInfo2!.data.anything).toBe('something'); + await status.beginStop({ + pid: 1, + }); + const statusInfo3 = await status.readStatus(); + expect(statusInfo3).toBeDefined(); + expect(statusInfo3!.status).toBe('STOPPING'); + expect(statusInfo3!.data.pid).toBe(1); await status.stop({}); + const statusInfo4 = await status.readStatus(); + expect(statusInfo4).toBeDefined(); + expect(statusInfo4!.status).toBe('DEAD'); }); - - test('Working fine when a status already exists', async () => { + test('start with existing statusPath or statusLockPath', async () => { await fs.promises.writeFile( - status.statusPath, - JSON.stringify({ pid: 66666 }), + path.join(dataDir, config.defaults.statusBase), + 'hello world', ); - await status.start({ pid: 0 }); - let lock; - lock = await status.readStatus(); - if (lock) { - expect(lock.data.pid).toBeDefined(); - } else { - throw new Error('Lock should exist'); - } - - await status.finishStart({ - pid: 0, - nodeId: 'node' as NodeId, - clientHost: '::1' as Host, - clientPort: 0 as Port, - ingressHost: '127.0.0.1' as Host, - ingressPort: 0 as Port, - grpcHost: 'localhost', - grpcPort: 12345, - anything: 'something', + 
await fs.promises.writeFile( + path.join(dataDir, config.defaults.statusLockBase), + 'hello world', + ); + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, }); - - lock = await status.readStatus(); - if (lock) { - expect(lock.data.pid).toBeDefined(); - expect(lock.data.grpcHost).toBe('localhost'); - expect(lock.data.grpcPort).toBe(12345); - expect(lock.data.anything).toBe('something'); - } else { - throw new Error('Lock should exist'); - } - + await status.start({ pid: 0 }); + const statusInfo = await status.readStatus(); + expect(statusInfo).toBeDefined(); + expect(statusInfo!.status).toBe('STARTING'); + expect(statusInfo!.data.pid).toBe(0); await status.stop({}); }); - test('A running status holds a lock', async () => { - // Make sure that the status is running - await status.start({ pid: 0 }); - - // Try to start a new status. - // Creation should succeed. + test('readStatus on non-existent status', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + expect(await status.readStatus()).toBeUndefined(); + }); + test('singleton running status', async () => { + const status1 = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); const status2 = new Status({ - statusPath: path.join(dataDir, 'status'), + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), fs: fs, logger: logger, }); - - // Should be able to read the lock info. 
- const info = await status2.readStatus(); - expect(info).toBeDefined(); - expect(info?.data.pid).toBeDefined(); - - // Should fail to start a new lock. - await expect(() => status2.start({ pid: 0 })).rejects.toThrow( - statusErrors.ErrorStatusLocked, - ); + await status1.start({ pid: 1 }); + await expect(async () => { + await status2.start({ pid: 2 }); + }).rejects.toThrow(statusErrors.ErrorStatusLocked); + // Status 2 can still read the status + const statusInfo = await status2.readStatus(); + expect(statusInfo).toBeDefined(); + expect(statusInfo!.data.pid).toBe(1); + await status1.stop({}); }); - test('Lockfile has multiple states.', async () => { - // Should be starting now. + test('wait for transitions', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + let statusWaitFor = status.waitFor('STARTING'); await status.start({ pid: 0 }); - expect((await status.readStatus())?.status).toEqual('STARTING'); - - // Should be running. + const statusInfoStarting = await statusWaitFor; + expect(statusInfoStarting!.status).toBe('STARTING'); + statusWaitFor = status.waitFor('LIVE'); await status.finishStart({ clientHost: '' as Host, clientPort: 0 as Port, - nodeId: '' as NodeId, ingressHost: '127.0.0.1' as Host, ingressPort: 0 as Port, + nodeId: '' as NodeId, pid: 0, }); - expect((await status.readStatus())?.status).toEqual('LIVE'); - - // Should be stopping. + const statusInfoLive = await statusWaitFor; + expect(statusInfoLive!.status).toBe('LIVE'); + statusWaitFor = status.waitFor('STOPPING'); await status.beginStop({ pid: 0 }); - expect((await status.readStatus())?.status).toEqual('STOPPING'); - - // Should be removed now. 
+ const statusInfoStopping = await statusWaitFor; + expect(statusInfoStopping!.status).toBe('STOPPING'); + statusWaitFor = status.waitFor('DEAD'); await status.stop({}); - expect((await status.readStatus())?.status).toEqual('DEAD'); - }); - test('Status can wait for its status to be LIVE if started.', async () => { - // We want to mimic the startup procedure. - const delayedStart = async () => { - await status.start({ pid: 0 }); - await sleep(500); - await status.finishStart({ - clientHost: '' as Host, - clientPort: 0 as Port, - ingressHost: '127.0.0.1' as Host, - ingressPort: 0 as Port, - nodeId: '' as NodeId, - pid: 0, - }); - }; - const prom = delayedStart(); - - const test = await status.waitFor('LIVE', waitForTimeout); - expect(test.status).toEqual('LIVE'); - await prom; - - // Checking that we throw an error when we can't wait for RUNNING. - const delayedStop = async () => { - await status.beginStop({ pid: 0 }); - await sleep(500); - await status.stop({}); - }; - const prom2 = delayedStop(); - const test2 = status.waitFor('LIVE', waitForTimeout); - await expect(async () => { - await test2; - }).rejects.toThrow(utilsErrors.ErrorUtilsPollTimeout); - await prom2; - - // Should throw if no file was found / unlocked. - const test3 = status.waitFor('LIVE', waitForTimeout); - await expect(async () => { - await test3; - }).rejects.toThrow(utilsErrors.ErrorUtilsPollTimeout); + const statusInfoDead = await statusWaitFor; + expect(statusInfoDead!.status).toBe('DEAD'); }); - test('Status can wait for its status to be DEAD if Stopping.', async () => { - // Should succeed if not started. - const test4 = await status.waitFor('DEAD', waitForTimeout); - expect(test4.status).toEqual('DEAD'); - - // Should throw an error when starting. - await status.start({ pid: 0 }); - const test = status.waitFor('LIVE', waitForTimeout); - await expect(async () => { - await test; - }).rejects.toThrow(utilsErrors.ErrorUtilsPollTimeout); - - // Should throw an error whens started. 
- await status.start({ pid: 0 }); - const test2 = status.waitFor('DEAD', waitForTimeout); - await expect(async () => { - await test2; - }).rejects.toThrow(utilsErrors.ErrorUtilsPollTimeout); - - // Should wait and succeed when stopping. - const delayedStart = async () => { - await status.beginStop({ pid: 0 }); - await sleep(500); - await status.stop({}); - }; - const prom2 = delayedStart(); - const test3 = await status.waitFor('DEAD', waitForTimeout); - expect(test3.status).toEqual('DEAD'); - await prom2; - }); - test('should throw an error when failing to parse.', async () => { - // Creating the status file. + test('parse error when statusPath is corrupted', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); await status.start({ pid: 0 }); - // Corrupting the status file. - await fs.promises.writeFile(statusPath, '{'); - // Should throw. 
+ await fs.promises.writeFile(status.statusPath, '{'); await expect(() => status.readStatus()).rejects.toThrow( statusErrors.ErrorStatusParse, ); + await status.stop({}); + }); + test('status transitions are serialised', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + await status.start({ pid: 0 }); + // The key point here is that there are no parsing errors + // And that the status info is always defined + for (let i = 0; i < 100; i++) { + const [, statusInfo1, , , , statusInfo2] = await Promise.all([ + status.finishStart({ + clientHost: '' as Host, + clientPort: 0 as Port, + ingressHost: '127.0.0.1' as Host, + ingressPort: 3425 as Port, + nodeId: '' as NodeId, + pid: 0, + }), + status.readStatus(), + status.beginStop({ + pid: 4, + }), + status.finishStart({ + clientHost: '' as Host, + clientPort: 3445 as Port, + ingressHost: '127.0.0.1' as Host, + ingressPort: 0 as Port, + nodeId: '' as NodeId, + pid: 0, + }), + status.beginStop({ + pid: 2, + }), + status.readStatus(), + status.finishStart({ + clientHost: '' as Host, + clientPort: 0 as Port, + ingressHost: '127.0.0.1' as Host, + ingressPort: 0 as Port, + nodeId: '' as NodeId, + pid: 0, + }), + ]); + expect(statusInfo1).toBeDefined(); + expect(statusInfo2).toBeDefined(); + expect(['LIVE', 'STARTING', 'STOPPING']).toContainEqual( + statusInfo1!.status, + ); + expect(['LIVE', 'STARTING', 'STOPPING']).toContainEqual( + statusInfo2!.status, + ); + } + await status.stop({ pid: 0 }); + }); + test('wait for has at-least-once semantics', async () => { + const status = new Status({ + statusPath: path.join(dataDir, config.defaults.statusBase), + statusLockPath: path.join(dataDir, config.defaults.statusLockBase), + fs: fs, + logger: logger, + }); + await status.start({ pid: 0 }); + // `waitFor` relies on filesystem watching + // It does not guarantee exactly-once 
semantics for status events + // In this case, it is possible that upon reacting to `LIVE` status + // When it reads the status, it has already changed to `STOPPING` + // Which means the `statusWaitFor` never resolves + const statusWaitFor = status.waitFor('LIVE', 1000); + const p1 = status.finishStart({ + clientHost: '' as Host, + clientPort: 0 as Port, + ingressHost: '127.0.0.1' as Host, + ingressPort: 0 as Port, + nodeId: '' as NodeId, + pid: 0, + }); + const p2 = status.beginStop({ pid: 1 }); + try { + const statusInfo = await statusWaitFor; + expect(statusInfo!.status).toBe('LIVE'); + logger.info('Succeeds waiting for LIVE'); + } catch (e) { + expect(e).toBeInstanceOf(statusErrors.ErrorStatusTimeout); + logger.info('Times out waiting for LIVE'); + } + await Promise.all([p1, p2]); + // The last promise to be resolved might be p1 and not p2 + const statusInfo = await status.readStatus(); + expect( + statusInfo!.status === 'LIVE' || statusInfo!.status === 'STOPPING', + ).toBe(true); + await status.stop({}); }); }); diff --git a/tests/utils.ts b/tests/utils.ts index 65b6854e5..4eed7f203 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -1,91 +1,172 @@ -import type Logger from '@matrixai/logger'; -import type { NodeAddress } from '@/nodes/types'; -import os from 'os'; +import type { StatusLive } from '@/status/types'; import path from 'path'; import fs from 'fs'; +import lock from 'fd-lock'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { PolykeyAgent } from '@'; -import * as keysUtils from '@/keys/utils'; - -async function getGlobalKeyPair() { - const [publicKeyPem, privateKeyPem] = await Promise.all([ - fs.promises.readFile(path.join(global.keyPairDir, 'root.pub'), 'utf-8'), - fs.promises.readFile(path.join(global.keyPairDir, 'root.key'), 'utf-8'), - ]); - return keysUtils.keyPairFromPem({ - publicKey: publicKeyPem, - privateKey: privateKeyPem, - }); -} +import { Status } from '@/status'; +import { utils as keysUtils } from 
'@/keys'; +import { GRPCClientClient, utils as clientUtils } from '@/client'; +import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; +import { sleep } from '@/utils'; +import config from '@/config'; /** - * Helper function to create a remote keynode to contact. - * It will append a directory to options.baseDir or create it's own temp directory if not specified. - * For multiple nodes, specify a unique number. + * Setup the global keypair + * This is expected to be executed by multiple worker processes */ -async function setupRemoteKeynode({ - logger, - dataDir, -}: { - logger: Logger; - dataDir?: string; -}): Promise { - // Create and start the keynode + its temp directory - let nodeDir: string; - if (dataDir) { - // Add the directory. - nodeDir = path.join(dataDir, `remoteNode`); - await fs.promises.mkdir(nodeDir, { recursive: true }); - } else { - nodeDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-remote-'), - ); +async function setupGlobalKeypair() { + const globalKeyPairDir = path.join(globalThis.dataDir, 'keypair'); + const globalKeyPairLock = await fs.promises.open( + path.join(globalThis.dataDir, 'keypair.lock'), + fs.constants.O_WRONLY | fs.constants.O_CREAT, + ); + while (!lock(globalKeyPairLock.fd)) { + await sleep(1000); + } + try { + try { + await fs.promises.mkdir(globalKeyPairDir); + } catch (e) { + // Return key pair if the directory exists + if (e.code === 'EEXIST') { + const globalKeyPairPem = { + publicKey: fs.readFileSync( + path.join(globalKeyPairDir, 'root.pub'), + 'utf-8', + ), + privateKey: fs.readFileSync( + path.join(globalKeyPairDir, 'root.key'), + 'utf-8', + ), + }; + const globalKeyPair = keysUtils.keyPairFromPem(globalKeyPairPem); + return globalKeyPair; + } + } + const globalKeyPair = await keysUtils.generateKeyPair(4096); + const globalKeyPairPem = keysUtils.keyPairToPem(globalKeyPair); + await Promise.all([ + fs.promises.writeFile( + path.join(globalKeyPairDir, 'root.pub'), + 
globalKeyPairPem.publicKey, + 'utf-8', + ), + fs.promises.writeFile( + path.join(globalKeyPairDir, 'root.key'), + globalKeyPairPem.privateKey, + 'utf-8', + ), + ]); + return globalKeyPair; + } finally { + // Unlock when we have returned the keypair + lock.unlock(globalKeyPairLock.fd); + await globalKeyPairLock.close(); } - const agent = await PolykeyAgent.createPolykeyAgent({ - password: 'password', - nodePath: nodeDir, - logger: logger, - }); - return agent; } /** - * Helper function to stop a created remote keynode, and remove its temporary - * directory. + * Setup the global agent + * Use this in beforeAll, and use the closeGlobalAgent in afterAll + * This is expected to be executed by multiple worker processes + * Uses a references directory as a reference count + * Uses fd-lock to serialise access + * This means all test modules using this will be serialised + * Any beforeAll must use globalThis.maxTimeout + * Tips for usage: + * * Do not restart this global agent + * * Ensure client-side side-effects are removed at the end of each test + * * Ensure server-side side-effects are removed at the end of each test */ -async function cleanupRemoteKeynode(node: PolykeyAgent): Promise { - await node.stop(); - await node.destroy(); - await fs.promises.rm(node.nodePath, { - force: true, +async function setupGlobalAgent( + logger: Logger = new Logger(setupGlobalAgent.name, LogLevel.WARN, [ + new StreamHandler(), + ]), +) { + const globalAgentPassword = 'password'; + const globalAgentDir = path.join(globalThis.dataDir, 'agent'); + // The references directory will act like our reference count + await fs.promises.mkdir(path.join(globalAgentDir, 'references'), { recursive: true, }); -} - -async function addRemoteDetails( - localNode: PolykeyAgent, - remoteNode: PolykeyAgent, -) { - // Add remote node's details to local node - await localNode.nodeManager.setNode(remoteNode.nodeManager.getNodeId(), { - host: remoteNode.revProxy.ingressHost, - port: 
remoteNode.revProxy.ingressPort, - } as NodeAddress); -} - -function makeCrypto(dbKey: Buffer) { + const pid = process.pid.toString(); + // Plus 1 to the reference count + await fs.promises.writeFile(path.join(globalAgentDir, 'references', pid), ''); + const globalAgentLock = await fs.promises.open( + path.join(globalThis.dataDir, 'agent.lock'), + fs.constants.O_WRONLY | fs.constants.O_CREAT, + ); + while (!lock(globalAgentLock.fd)) { + await sleep(1000); + } + const status = new Status({ + statusPath: path.join(globalAgentDir, config.defaults.statusBase), + statusLockPath: path.join(globalAgentDir, config.defaults.statusLockBase), + fs, + }); + let statusInfo = await status.readStatus(); + if (statusInfo == null || statusInfo.status === 'DEAD') { + await PolykeyAgent.createPolykeyAgent({ + password: globalAgentPassword, + nodePath: globalAgentDir, + keysConfig: { + rootKeyPairBits: 2048, + }, + seedNodes: {}, // Explicitly no seed nodes on startup + logger, + }); + statusInfo = await status.readStatus(); + } return { - key: dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, + globalAgentDir, + globalAgentPassword, + globalAgentStatus: statusInfo as StatusLive, + globalAgentClose: async () => { + // Closing the global agent cannot be done in the globalTeardown + // This is due to a sequence of reasons: + // 1. The global agent is not started as a separate process + // 2. Because we need to be able to mock dependencies + // 3. This means it is part of a jest worker process + // 4. Which will block termination of the jest worker process + // 5. Therefore globalTeardown will never get to execute + // 6. The global agent is not part of globalSetup + // 7. Because not all tests need the global agent + // 8. 
Therefore setupGlobalAgent is lazy and executed by jest worker processes + try { + await fs.promises.rm(path.join(globalAgentDir, 'references', pid)); + // If the references directory is not empty + // there are other processes still using the global agent + try { + await fs.promises.rmdir(path.join(globalAgentDir, 'references')); + } catch (e) { + if (e.code === 'ENOTEMPTY') { + return; + } + throw e; + } + // Stopping may occur in a different jest worker process + // therefore we cannot rely on pkAgent, but instead use GRPC + const statusInfo = (await status.readStatus()) as StatusLive; + const grpcClient = await GRPCClientClient.createGRPCClientClient({ + nodeId: statusInfo.data.nodeId, + host: statusInfo.data.clientHost, + port: statusInfo.data.clientPort, + tlsConfig: { keyPrivatePem: undefined, certChainPem: undefined }, + logger, + }); + const emptyMessage = new utilsPB.EmptyMessage(); + const meta = clientUtils.encodeAuthFromPassword(globalAgentPassword); + // This is asynchronous + await grpcClient.agentStop(emptyMessage, meta); + await grpcClient.destroy(); + await status.waitFor('DEAD'); + } finally { + lock.unlock(globalAgentLock.fd); + await globalAgentLock.close(); + } }, }; } -export { - getGlobalKeyPair, - setupRemoteKeynode, - cleanupRemoteKeynode, - addRemoteDetails, - makeCrypto, -}; +export { setupGlobalKeypair, setupGlobalAgent }; diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index f7db6f8a1..5db37e4c4 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -10,7 +10,7 @@ import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; import { utils as idUtils } from '@matrixai/id'; -import { KeyManager } from '@/keys'; +import { KeyManager, utils as keysUtils } from '@/keys'; import { NodeManager } from '@/nodes'; import { Sigchain } from '@/sigchain'; import { VaultManager, vaultOps } from '@/vaults'; @@ -24,7 +24,6 @@ import { 
NotificationsManager } from '@/notifications'; import { errors as vaultErrors } from '@/vaults'; import { utils as vaultUtils } from '@/vaults'; import { makeVaultId } from '@/vaults/utils'; -import { makeCrypto } from '../utils'; // Mocks. jest.mock('@/keys/utils', () => ({ @@ -107,7 +106,13 @@ describe('VaultManager', () => { db = await DB.createDB({ dbPath: dbPath, logger: logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); sigchain = await Sigchain.createSigchain({ @@ -590,7 +595,13 @@ describe('VaultManager', () => { targetDb = await DB.createDB({ dbPath: path.join(targetDataDir, 'db'), logger: logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); targetSigchain = await Sigchain.createSigchain({ keyManager: targetKeyManager, @@ -681,7 +692,13 @@ describe('VaultManager', () => { altDb = await DB.createDB({ dbPath: path.join(altDataDir, 'db'), logger: logger, - crypto: makeCrypto(keyManager.dbKey), + crypto: { + key: keyManager.dbKey, + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, }); altSigchain = await Sigchain.createSigchain({ keyManager: altKeyManager, @@ -763,10 +780,13 @@ describe('VaultManager', () => { await revProxy.closeConnection(altHost, altPort); await revProxy.closeConnection(sourceHost, sourcePort); await altRevProxy.closeConnection(sourceHost, sourcePort); - await fwdProxy.closeConnection(fwdProxy.egressHost, fwdProxy.egressPort); + await fwdProxy.closeConnection( + fwdProxy.getEgressHost(), + fwdProxy.getEgressPort(), + ); await altFwdProxy.closeConnection( - altFwdProxy.egressHost, - altFwdProxy.egressPort, + altFwdProxy.getEgressHost(), + altFwdProxy.getEgressPort(), ); await revProxy.stop(); await altRevProxy.stop(); diff --git 
a/tests/vaults/utils.test.ts b/tests/vaults/utils.test.ts index 242431e18..6cb6d6280 100644 --- a/tests/vaults/utils.test.ts +++ b/tests/vaults/utils.test.ts @@ -3,14 +3,16 @@ import os from 'os'; import path from 'path'; import { EncryptedFS } from 'encryptedfs'; -import Logger, { LogLevel } from '@matrixai/logger'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { IdRandom } from '@matrixai/id'; import * as utils from '@/utils'; import * as vaultsUtils from '@/vaults/utils'; import { isVaultId } from '@/vaults/utils'; describe('Vaults utils', () => { - const logger = new Logger('Vaults utils tests', LogLevel.WARN); + const logger = new Logger('Vaults utils tests', LogLevel.WARN, [ + new StreamHandler(), + ]); let dataDir: string; beforeEach(async () => {