From a64fa9a4950794b0fd4ed4a1aa40c7f30f499da1 Mon Sep 17 00:00:00 2001 From: Will Anderson Date: Fri, 20 Oct 2023 08:59:36 -0700 Subject: [PATCH] Merging latest from internal repo --- Dockerfile | 2 +- README.md | 3 +- docs/development.md | 14 +- docs/examples/commands-output.yaml | 20 + docs/examples/conditional.yaml | 96 +++- docs/examples/environment-built-ins.yaml | 14 + docs/examples/environment.yaml | 15 +- docs/examples/import/imports.yaml | 10 +- docs/examples/loop.yaml | 12 +- docs/examples/phased-exec.yaml | 2 +- docs/examples/secrets.yaml | 6 +- docs/examples/user.yaml | 21 + docs/getting-started/concepts.md | 13 +- docs/reference/cli.md | 145 ++++-- docs/reference/environment.md | 37 +- docs/reference/yaml.md | 154 ++++-- docs/release-notes.md | 291 ++++++++++- docs/shared/conditionals.md | 14 +- docs/shared/environment.md | 25 +- docs/tutorials/commands.md | 5 + docs/tutorials/environment.md | 4 +- docs/tutorials/flow.md | 9 +- docs/tutorials/import.md | 12 +- docs/tutorials/phased-exec.md | 2 +- docs/tutorials/secrets.md | 2 +- package.json | 79 +-- scripts/cix.sh | 13 +- src/cli/Application.js | 72 +-- src/cli/Application.test.js | 13 +- src/cli/commands/AbstractCommand.js | 85 +--- src/cli/commands/AbstractCommand.test.js | 12 +- src/cli/commands/AbstractRemoteCommand.js | 55 +-- src/cli/commands/Describe.js | 17 +- src/cli/commands/Describe.test.js | 2 +- src/cli/commands/Exec.js | 15 +- src/cli/commands/Exec.test.js | 33 +- src/cli/commands/Install.js | 14 +- src/cli/commands/Install.test.js | 2 +- src/cli/commands/Kill.js | 10 +- src/cli/commands/Kill.test.js | 2 +- src/cli/commands/Load.js | 56 +-- src/cli/commands/Load.test.js | 2 +- src/cli/commands/Pipelines.js | 26 +- src/cli/commands/Pipelines.test.js | 13 +- src/cli/commands/Resume.js | 25 +- src/cli/commands/Resume.test.js | 2 +- src/cli/commands/Server.js | 16 +- src/cli/commands/Server.test.js | 2 +- src/cli/commands/Update.js | 6 +- src/cli/commands/Update.test.js | 2 +- src/cli/commands/Validate.js | 16 +- src/cli/commands/Validate.test.js | 3 +- src/cli/commands/index.js | 2 +- src/cli/config/ConfigImporter.js | 15 +- src/cli/config/ConfigImporter.test.js | 7 +- src/cli/index.js | 2 +- src/common/ContainerLogger.js | 151 ------ src/common/ContainerLogger.test.js | 103 ---- src/common/Errors.js | 15 +- src/common/NodeProvider.js | 4 +- src/common/Provider.js | 8 +- src/common/Provider.test.js | 8 +- src/common/index.js | 5 +- src/common/loaders/AbstractLoader.js | 4 +- src/common/loaders/AbstractLoader.test.js | 7 +- src/common/loaders/HTTPLoader.js | 35 +- src/common/loaders/HTTPLoader.test.js | 18 +- src/common/loaders/LocalFileLoader.js | 15 +- src/common/loaders/LocalFileLoader.test.js | 8 +- src/common/loaders/Protocols.js | 4 +- src/common/loaders/Protocols.test.js | 2 +- src/common/loaders/index.js | 11 +- src/common/lodash.js | 23 +- src/common/lodash.test.js | 2 +- .../logging/ContainerStreamTransform.js | 51 ++ src/common/logging/Logger.js | 154 ++++++ src/common/logging/Logger.test.js | 54 +++ src/common/logging/LoggerTransportFactory.js | 160 ++++++ .../logging/LoggerTransportFactory.test.js | 50 ++ src/common/logging/PipelineLogger.js | 179 +++++++ src/common/logging/PipelineLogger.test.js | 59 +++ src/docker/DockerContainer.js | 302 +++++++----- src/docker/DockerContainer.test.js | 285 +++++++++-- src/docker/DockerExec.js | 236 +++++---- src/docker/DockerExec.test.js | 165 ++++--- src/docker/index.js | 2 +- src/engine/EnvironmentService.js | 22 +- src/engine/EnvironmentService.test.js | 6 +- 
src/engine/PipelineService.js | 86 ++-- src/engine/PipelineService.test.js | 2 +- src/engine/PluginService.js | 13 +- src/engine/PluginService.test.js | 2 +- src/engine/ValidateService.js | 6 +- src/engine/ValidateService.test.js | 2 +- src/engine/environment/Environment.js | 3 +- src/engine/environment/Environment.test.js | 26 +- src/engine/index.js | 2 +- src/engine/pipeline/Pipeline.js | 232 +++------ src/engine/pipeline/Pipeline.test.js | 36 +- src/engine/pipeline/PipelineImporter.js | 54 ++- src/engine/pipeline/PipelineImporter.test.js | 101 +++- src/engine/pipeline/PipelineNode.js | 14 +- src/engine/pipeline/Step.js | 154 +++++- src/engine/pipeline/Step.test.js | 244 +++++++++- src/engine/pipeline/Steps.js | 38 +- src/engine/pipeline/Steps.test.js | 28 +- .../pipeline/decorators/ConditionDecorator.js | 141 ++++-- .../decorators/ConditionDecorator.test.js | 163 ++++++- .../decorators/ContinueOnFailDecorator.js | 23 + .../ContinueOnFailDecorator.test.js | 71 +++ .../pipeline/decorators/LoopDecorator.js | 24 - .../pipeline/decorators/LoopDecorator.test.js | 48 -- .../pipeline/decorators/ModifyDecorator.js | 14 +- .../decorators/ModifyDecorator.test.js | 36 +- .../pipeline/decorators/RetryDecorator.js | 9 +- .../decorators/RetryDecorator.test.js | 2 +- .../pipeline/decorators/StepDecorators.js | 6 +- .../pipeline/decorators/StepsDecorators.js | 4 +- .../pipeline/decorators/TimeoutDecorator.js | 10 +- .../decorators/TimeoutDecorator.test.js | 15 +- .../pipeline/decorators/ValidateDecorator.js | 2 +- .../decorators/ValidateDecorator.test.js | 2 +- src/engine/plugin/Plugin.js | 17 +- src/engine/plugin/Plugin.test.js | 9 +- src/engine/runtime/AbstractExec.js | 2 +- src/engine/runtime/ExecFactory.js | 2 +- src/engine/validate/JsonSchemas.js | 42 +- src/engine/validate/Validate.js | 12 +- src/engine/validate/Validate.test.js | 22 +- src/index.js | 1 + src/server/Server.js | 31 +- src/server/Server.test.js | 4 +- src/server/index.js | 2 +- src/server/routes/api/asyncLogHandler.js | 18 +- src/server/routes/api/environment.js | 2 +- src/server/routes/api/environment.test.js | 6 +- src/server/routes/api/pipeline.js | 42 +- src/server/routes/api/pipeline.test.js | 4 +- src/server/routes/api/plugin.js | 2 +- src/server/routes/api/plugin.test.js | 4 +- src/server/routes/api/validate.js | 2 +- src/server/routes/api/validate.test.js | 10 +- src/server/routes/index.js | 2 +- src/server/routes/swagger.js | 2 +- test/integration/color.bats | 51 ++ test/integration/general.bats | 454 +++++++++++++++++- test/integration/secrets.bats | 4 +- test/integration/server.bats | 111 ++++- test/performance/README.md | 26 +- test/performance/heavy_logging.yaml | 1 - 150 files changed, 4392 insertions(+), 1801 deletions(-) create mode 100644 docs/examples/commands-output.yaml create mode 100644 docs/examples/user.yaml delete mode 100644 src/common/ContainerLogger.js delete mode 100644 src/common/ContainerLogger.test.js create mode 100644 src/common/logging/ContainerStreamTransform.js create mode 100644 src/common/logging/Logger.js create mode 100644 src/common/logging/Logger.test.js create mode 100644 src/common/logging/LoggerTransportFactory.js create mode 100644 src/common/logging/LoggerTransportFactory.test.js create mode 100644 src/common/logging/PipelineLogger.js create mode 100644 src/common/logging/PipelineLogger.test.js create mode 100644 src/engine/pipeline/decorators/ContinueOnFailDecorator.js create mode 100644 src/engine/pipeline/decorators/ContinueOnFailDecorator.test.js delete mode 100644 
src/engine/pipeline/decorators/LoopDecorator.js delete mode 100644 src/engine/pipeline/decorators/LoopDecorator.test.js create mode 100644 test/integration/color.bats diff --git a/Dockerfile b/Dockerfile index 49b912b1..6dc1ad51 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM node:12.20.1-alpine +FROM node:16-alpine LABEL SCM_URL https://github.com/salesforce/cix USER root diff --git a/README.md b/README.md index 1e16aef2..92fb0971 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,4 @@ -CIX - CI Executor -====== +# CIX - CI Executor **[Official Documentation](https://opensource.salesforce.com/cix/)** 👈 Check out our Doc Site diff --git a/docs/development.md b/docs/development.md index c1352499..59e5e5bf 100644 --- a/docs/development.md +++ b/docs/development.md @@ -3,13 +3,17 @@ ## Set up your local development environment - Install required system dependencies - [Install Docker on your workstation](install-docker.md) - - Install Node.js v12 - - Using Brew + - Install volta (Warning: remove existing node if facing issues installing volta) + - install and link volta ```bash - brew install node@12 - brew link node + curl https://get.volta.sh | bash + export VOLTA_HOME=~/.volta + export PATH=$VOLTA_HOME/bin:$PATH + ``` + - install correct node version + ```bash + volta install node@16.20.0 (Warning: you might have to get off Cisco Anyconnect VPN for this step. you can reconnect after this step) ``` - - Or download the appropriate package from the [Node.js web site](https://nodejs.org/en/download/). - If you haven't already, [setup](https://confluence.internal.salesforce.com/pages/viewpage.action?spaceKey=NEXUS&title=Nexus+NPM+Repositories) access to the Nexus npm repositories - Clone the CIX repo ```bash diff --git a/docs/examples/commands-output.yaml b/docs/examples/commands-output.yaml new file mode 100644 index 00000000..e4441fb5 --- /dev/null +++ b/docs/examples/commands-output.yaml @@ -0,0 +1,20 @@ +version: 2.1 +pipeline: + - step: + name: minimal + image: alpine:3.9 + commands-output: minimal + commands: + - echo "You will only see stdout/stderr (minimal)" + - step: + name: echo + image: alpine:3.9 + commands-output: echo + commands: + - echo "This echo command will be echo'd (echo without a timestamp)" + - step: + name: timestamp + image: alpine:3.9 + commands-output: timestamp + commands: + - echo "This echo command will be echo'd (echo with a timestamp)" diff --git a/docs/examples/conditional.yaml b/docs/examples/conditional.yaml index 94801897..6098d8b8 100644 --- a/docs/examples/conditional.yaml +++ b/docs/examples/conditional.yaml @@ -325,9 +325,32 @@ pipeline: when: - operator: INCLUDES value: $$FOO_INCLUDES # example value FOO_INCLUDES=foo,bar,baz + value-default: foo,bar,baz other: foo commands: - echo "yay 14" + - step: + name: INCLUDESFail + image: alpine:3.9 + when: + - operator: INCLUDES + value: $$FOO_INCLUDES # example value FOO_INCLUDES=foo,bar,baz + value-default: foo,bar,baz + other: '' + commands: + - echo "shouldn't run" + - exit 1 + - step: + name: INCLUDESFail_2 + image: alpine:3.9 + when: + - operator: INCLUDES + value: $$FOO_INCLUDES # example value FOO_INCLUDES=foo,bar,baz + value-default: foo,bar,baz + other: ' ' + commands: + - echo "shouldn't run" + - exit 1 - step: name: NOT_INCLUDESPass image: alpine:3.9 @@ -378,4 +401,75 @@ pipeline: other: foo commands: - echo "shouldn't run" - - exit 1 \ No newline at end of file + - exit 1 + - step: + name: ANDPass + image: alpine:3.9 + when: + - operator: AND + conditions: + - operator: ENDS_WITH + 
value: $$FOO_ENDS_WITH # example value FOO_ENDS_WITH=foobar + value-default: foobar + other: bar + - operator: EQ + value: $$FOO_CONDITION_VALUE + value-default: 1 + other: $$BAR_CONDITION_VALUE + other-default: 1 + commands: + - echo "yay 18" + - step: + name: ANDFail_1 + image: alpine:3.9 + when: + - operator: AND + conditions: + - operator: ENDS_WITH + value: $$FOO_ENDS_WITH # example value FOO_ENDS_WITH=foobar + value-default: foobar + other: foo + - operator: EQ + value: $$FOO_CONDITION_VALUE + value-default: 1 + other: $$BAR_CONDITION_VALUE + other-default: 2 + commands: + - echo "shouldn't run" + - exit 1 + - step: + name: ANDFail_2 + image: alpine:3.9 + when: + - operator: AND + conditions: + - operator: ENDS_WITH + value: $$FOO_ENDS_WITH # example value FOO_ENDS_WITH=foobar + value-default: foobar + other: bar + - operator: EQ + value: $$FOO_CONDITION_VALUE + value-default: 1 + other: $$BAR_CONDITION_VALUE + other-default: 2 + commands: + - echo "shouldn't run" + - exit 1 + - step: + name: ANDFail_3 + image: alpine:3.9 + when: + - operator: AND + conditions: + - operator: ENDS_WITH + value: $$FOO_ENDS_WITH # example value FOO_ENDS_WITH=foobar + value-default: foobar + other: foo + - operator: EQ + value: $$FOO_CONDITION_VALUE + value-default: 2 + other: $$BAR_CONDITION_VALUE + other-default: 2 + commands: + - echo "shouldn't run" + - exit 1 diff --git a/docs/examples/environment-built-ins.yaml b/docs/examples/environment-built-ins.yaml index c85f952e..0b3a7c0d 100644 --- a/docs/examples/environment-built-ins.yaml +++ b/docs/examples/environment-built-ins.yaml @@ -11,3 +11,17 @@ pipeline: - echo $CIX_NETWORK_NAME - echo $CIX_CONTAINER_NAME - echo $CIX_EXECUTION_ID + - echo $CIX_SERVER_PORT + - pwd + - echo $CIX_WORKDIR + - echo $CIX_WORKSPACE + - echo $CIX_TEMP + - step: + name: override-work-dir + image: alpine:3.9 + working-dir: /bin + workspace-mount-point: /tmp + commands: + - pwd + - echo $CIX_WORKDIR + - echo $CIX_WORKSPACE diff --git a/docs/examples/environment.yaml b/docs/examples/environment.yaml index a6251d3e..2d99fbda 100644 --- a/docs/examples/environment.yaml +++ b/docs/examples/environment.yaml @@ -5,13 +5,18 @@ pipeline: image: alpine:3.9 environment: - name: FOO - value: $$FOO_FROM_COMMAND # if passed with -e option, $$ literals will be replaced - default: foo_default + # value passed in via '-e FOO=foo' + value: $$FOO - name: BAR - value: $$BAR_FROM_COMMAND + # value passed in via '-e BAR=bar' + value: $$BAR + # default passed in via '-e BAR_DEFAULT=bar-default' default: $$BAR_DEFAULT - # not passing these properties will not cause any failures - # the environment variables would be the $$ literals during exec, or the default, if specified + - name: BAZ + # when value is omitted, value == $$BAZ, + # passed in via '-e BAZ=baz' + default: baz-default commands: - echo $FOO - echo $BAR + - echo $BAZ diff --git a/docs/examples/import/imports.yaml b/docs/examples/import/imports.yaml index 56d1833f..e1fc3288 100644 --- a/docs/examples/import/imports.yaml +++ b/docs/examples/import/imports.yaml @@ -5,9 +5,15 @@ imports: library: src: ./library.yaml # - # Following is how http(s) based imports can be specified. The token is optional if needed, e.g., - # needed in order to fetch from git.soma + # Following is how Git based http(s) imports can be specified. 
A personal access token is required + # with Git, there are two options 1) use the special HTTP_AUTHORIZATION_TOKEN secret 2) specifiy the + # YAML attribute http-authorization-token and feed in any environment variable # + # (Option 1: HTTP_AUTHORIZATION_TOKEN secret) + # library: + # src: https://github.com/raw/ci/cix/master/docs/examples/import/library.yaml + # + # (Option 2: Use any other secret with YAML) # library: # src: https://github.com/raw/ci/cix/master/docs/examples/import/library.yaml # http_authorization_token: $$CIX_GIT_TOKEN # must be passed via -s diff --git a/docs/examples/loop.yaml b/docs/examples/loop.yaml index fa04d5ed..6950419f 100644 --- a/docs/examples/loop.yaml +++ b/docs/examples/loop.yaml @@ -1,9 +1,11 @@ version: 2.1 pipeline: - step: - name: loop - image: alpine:3.9 + name: for-each-empty + image: alpine:3.9 + for-each: $$FOO + element-variable: ELEMENT + parallel: true + commands-output: minimal commands: - - "[ $((RANDOM % 10)) -ge 8 ] && exit 1 || exit 0" # occasionally fail the command - # entire pipeline will fail, if any command in any of the loops fails (unlike retry) - loop: $$LOOPS + - echo $ELEMENT diff --git a/docs/examples/phased-exec.yaml b/docs/examples/phased-exec.yaml index c94564d2..e21ea3ce 100644 --- a/docs/examples/phased-exec.yaml +++ b/docs/examples/phased-exec.yaml @@ -27,4 +27,4 @@ pipeline: name: fifth image: alpine:3.9 commands: - - hostname \ No newline at end of file + - hostname diff --git a/docs/examples/secrets.yaml b/docs/examples/secrets.yaml index c0bc23fd..6953b61a 100644 --- a/docs/examples/secrets.yaml +++ b/docs/examples/secrets.yaml @@ -5,10 +5,10 @@ pipeline: image: alpine:3.9 environment: - name: FOO - value: $$SECRET_FOO_FROM_COMMAND + value: $$SECRET_FOO - name: BAR - value: $$SECRET_BAR_FROM_COMMAND + value: $$SECRET_BAR commands: - echo $FOO - echo $BAR - - echo Multiple secrets such as $FOO, $FOO, and $BAR on the same line should be masked + - echo Multiple secrets such as $FOO, $FOO, and $BAR on the same line will be masked diff --git a/docs/examples/user.yaml b/docs/examples/user.yaml new file mode 100644 index 00000000..dfd3b066 --- /dev/null +++ b/docs/examples/user.yaml @@ -0,0 +1,21 @@ +version: 2.1 +pipeline: + - step: + name: set-user + image: alpine:3.9 + user: 12300 + commands: + - id + - step: + name: set-uid-and-gid + image: alpine:3.9 + user: 12300:12300 + commands: + - id + - step: + name: set-user-string + image: alpine:3.9 + # 'games' is a user listed in the alpine image's /etc/passwd file + user: games + commands: + - id diff --git a/docs/getting-started/concepts.md b/docs/getting-started/concepts.md index 5ab796c2..d6cac2cf 100644 --- a/docs/getting-started/concepts.md +++ b/docs/getting-started/concepts.md @@ -10,7 +10,7 @@ Your pipeline definition (YAML) files are also expected to be accessible from un ## Container Network -In Docker mode, CIX creates a private network for the containers of a pipeline. This allows the containers to communicate +In Docker mode, CIX creates an ephemeral private network for the containers of a pipeline. This allows the containers to communicate freely with each other, while being isolated from other containers on the host. It also allows for private DNS resolution, so that containers may refer to each other by their simple step names and @@ -19,6 +19,17 @@ It also allows for private DNS resolution, so that containers may refer to each Containers in the same pipeline can also communicate with each other without need to export or specify ports. 
Containers can connect to other concurrent containers on any port. +You can provide your own network or switch to `host` networking by exporting the environment variable `DOCKER_NETWORK` before running CIX. This will skip creating the ephemeral network. + +>! For `host` networking, DNS resolution and container communication may not have full feature parity with an ephemeral docker network. + +```sh +DOCKER_NETWORK=host cix exec +# OR +docker network create my-network +DOCKER_NETWORK=my-network cix exec +``` + These topics are covered in more detail in the [Background Services](tutorials/background) tutorial. --- diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 02928d8e..ce76e3a9 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -20,6 +20,8 @@ The `-l --logging ` option takes one of the following modes to set the log * Each container's output is logged to its own separate file, in the directory specified by the `-p --logging-path ` option. CIX's general output is printed to the standard output streams as well as the file `cix-execution.log`. +* `file` + * Logs are written to a single file: `cix-execution.log`, in the directory specified by the `-p --logging-path ` option. ## Logging Levels Logging is performed by the `winston` module, and the verbosity level can be specified with the @@ -50,25 +52,32 @@ Usage: cix exec [options] Executes a pipeline. Options: - -c, --configfile Specify a cix configuration file to load. - -e, --env Specify environment variable. Mapping should be in the form of KEY (value taken from environment) or KEY=VALUE. - -s, --secret Specify a secret (insecure). Mapping should be in the form of KEY (secret taken from environment) or KEY=SECRET. - -w, --workspace Specify the workspace path. Default path is the current working directory. - -l, --logging Specify container output logging mode: console, or files (separate files). - -p, --logging-path Path where logs created by the 'files' logging mode will be stored. (default: "logs") - -y, --yaml Path to pipeline definition YAML file. May be repeated. - --plugin Path to plugin YAML file. May be repeated. - --setup Path to setup pipeline YAML file. - --teardown Path to teardown pipeline YAML file. - --secret-prompt CIX will prompt you for the value of the key specified. May be repeated. - --secret-stdin CIX will assign a value passed via stdin to the key specified. Cannot be used with --secrets-stdin. - --secrets-stdin CIX will accept a map of key/value pairs in JSON format via stdin. Cannot be used with --secret-stdin. - --host CIX Server to connect to (default: "127.0.0.1") - --port CIX Server to connect to (default: "10030") - --remote CIX will execute against a remote CIX Server. - --non-blocking Disables the blocking wait until a remote execution is complete. - --no-remote-logs Disables streaming logs from Server. - -h, --help display help for command + -a, --pipeline-alias Assign an alias for the pipeline. + -c, --configfile Specify a cix configuration file to load. + -e, --env Specify environment variable. Mapping should be in the form of KEY (value taken from environment) or KEY=VALUE. + -l, --logging Specify container output logging mode: console, file (single file), or files (separate files for each step)). + -L, --logname Specify a custom name for the CIX application log file. + -p, --logging-path Path where logs created by the 'files' logging mode will be stored. 
(default: "logs") + -P, --pull-policy Overrides the pull-policy for Docker pulls, when it is not specified in the pipeline definition. (choices: "Always", "Default", + "IfNotPresent", "Never", default: "Default") + -s, --secret Specify a secret (insecure). Mapping should be in the form of KEY (secret taken from environment) or KEY=SECRET. + -w, --workspace Specify the workspace path. Default path is the current working directory. + -y, --yaml Path to pipeline definition YAML file. May be repeated. + --plugin Path to plugin YAML file. May be repeated. + --setup Path to setup pipeline YAML file. + --teardown Path to teardown pipeline YAML file. + --secret-prompt CIX will prompt you for the value of the key specified. May be repeated. + --secret-stdin CIX will assign a value passed via stdin to the key specified. Cannot be used with --secrets-stdin. + --secrets-stdin CIX will accept a map of key/value pairs in JSON format via stdin. Cannot be used with --secret-stdin. + --host CIX Server to connect to (default: "127.0.0.1") + --port CIX Server to connect to (default: "10030") + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging + --remote CIX will execute against a remote CIX Server. + --non-blocking Disables the blocking wait until a remote execution is complete. + --no-remote-logs Disables streaming logs from Server. + -h, --help display help for command ``` ## cix validate @@ -92,6 +101,9 @@ Options: --remote CIX will execute against a remote CIX Server. --host CIX Server to connect to (default: "127.0.0.1") --port CIX Server to connect to (default: "10030") + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging -h, --help display help for command ``` @@ -107,10 +119,14 @@ Starts a local CIX server. Options: -c, --configfile Specify a cix configuration file to load. -d, --detach Shell command only: detach the server container from the console. - -w, --workspace Specify the workspace path. Default path is the current working directory. - --port CIX Server Port (default: "10030") - -l, --logging Specify container output logging mode: console, or files (separate files) (default: "files") + -l, --logging Specify container output logging mode: console, file (single file), or files (separate files for each step)) + -L, --logname Specify a custom name for the CIX application log file. -p, --logging-path Path where logs created by the 'files' logging mode will be stored. (default: "logs") + --port CIX Server Port (default: "10030") + -w, --workspace Specify the workspace path. Default path is the current working directory. + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging -h, --help display help for command ``` @@ -126,22 +142,29 @@ Usage: cix load [options] Loads a pipeline onto a remote CIX Server. Options: - -c, --configfile Specify a cix configuration file to load. - -e, --env Specify environment variable. Mapping should be in the form of KEY (value taken from environment) or KEY=VALUE. - -s, --secret Specify a secret (insecure). Mapping should be in the form of KEY (secret taken from environment) or KEY=SECRET. - -w, --workspace Specify the workspace path. Default path is the current working directory. 
- -l, --logging Specify container output logging mode: console, or files (separate files). - -p, --logging-path Path where logs created by the 'files' logging mode will be stored. (default: "logs") - -y, --yaml Path to pipeline definition YAML file. May be repeated. - --plugin Path to plugin YAML file. May be repeated. - --setup Path to setup pipeline YAML file. - --teardown Path to teardown pipeline YAML file. - --secret-prompt CIX will prompt you for the value of the key specified. May be repeated. - --secret-stdin CIX will assign a value passed via stdin to the key specified. Cannot be used with --secrets-stdin. - --secrets-stdin CIX will accept a map of key/value pairs in JSON format via stdin. Cannot be used with --secret-stdin. - --host CIX Server to connect to (default: "127.0.0.1") - --port CIX Server to connect to (default: "10030") - -h, --help display help for command + -a, --pipeline-alias Assign an alias for the pipeline. + -c, --configfile Specify a cix configuration file to load. + -e, --env Specify environment variable. Mapping should be in the form of KEY (value taken from environment) or KEY=VALUE. + -l, --logging Specify container output logging mode: console, file (single file), or files (separate files for each step)). + -L, --logname Specify a custom name for the CIX application log file. + -p, --logging-path Path where logs created by the 'files' logging mode will be stored. (default: "logs") + -P, --pull-policy Overrides the pull-policy for Docker pulls, when it is not specified in the pipeline definition. (choices: "Always", "Default", + "IfNotPresent", "Never", default: "Default") + -s, --secret Specify a secret (insecure). Mapping should be in the form of KEY (secret taken from environment) or KEY=SECRET. + -w, --workspace Specify the workspace path. Default path is the current working directory. + -y, --yaml Path to pipeline definition YAML file. May be repeated. + --plugin Path to plugin YAML file. May be repeated. + --setup Path to setup pipeline YAML file. + --teardown Path to teardown pipeline YAML file. + --secret-prompt CIX will prompt you for the value of the key specified. May be repeated. + --secret-stdin CIX will assign a value passed via stdin to the key specified. Cannot be used with --secrets-stdin. + --secrets-stdin CIX will accept a map of key/value pairs in JSON format via stdin. Cannot be used with --secret-stdin. + --host CIX Server to connect to (default: "127.0.0.1") + --port CIX Server to connect to (default: "10030") + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging + -h, --help display help for command ``` ## cix pipelines @@ -156,12 +179,17 @@ Usage: cix pipelines [options] Lists information about pipelines. Options: - --default Displays default pipeline. - --set-default Sets the default pipeline. - --status [pipeline-id] Displays the status of a pipeline. - --host CIX Server to connect to (default: "127.0.0.1") - --port CIX Server to connect to (default: "10030") - -h, --help display help for command + --get-alias Gets the pipeline for an alias. + --set-alias Sets the alias to a pipeline (--pipeline-id required). + --pipeline-id ID for setting alias. + --pipeline-alias Alias for checking status. + --status Displays the status of a pipeline (--pipeline-id optional). 
+ --host CIX Server to connect to (default: "127.0.0.1") + --port CIX Server to connect to (default: "10030") + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging + -h, --help display help for command ``` ## cix resume @@ -184,6 +212,9 @@ Options: --no-remote-logs Disables streaming logs from Server. --host CIX Server to connect to (default: "127.0.0.1") --port CIX Server to connect to (default: "10030") + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging -h, --help display help for command ``` @@ -200,12 +231,17 @@ Usage: cix describe [options] Displays the sequence of the steps. Options: - --file Write the Pipeline steps to a file. - --stdout Write the Pipeline steps to standard output. - --stderr Write the Pipeline steps to standard error. - --host CIX Server to connect to (default: "127.0.0.1") - --port CIX Server to connect to (default: "10030") - -h, --help display help for command + --pipeline-id Pipeline ID to resume (default: "latest" alias) + --pipeline-alias Pipeline Alias to resume. (default: "latest" alias) + --file Write the Pipeline steps to a file. + --stdout Write the Pipeline steps to standard output. + --stderr Write the Pipeline steps to standard error. + --host CIX Server to connect to (default: "127.0.0.1") + --port CIX Server to connect to (default: "10030") + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging + -h, --help display help for command ``` ## cix kill @@ -222,6 +258,9 @@ Kills a running pipeline. Options: --host CIX Server to connect to (default: "127.0.0.1") --port CIX Server to connect to (default: "10030") + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging -h, --help display help for command ``` @@ -243,6 +282,9 @@ Starts the pipeline server component for phased execution. Options: --location Installation location (default: "/usr/local/bin") + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging -h, --help display help for command ``` @@ -253,5 +295,8 @@ Usage: cix update [options] Shell command only: updates the CIX docker image from the registry. Options: + --color Force console logs to include ANSI color codes + --no-color Force console logs to not include ANSI color codes (overrides --color) + --silent Silence logging -h, --help display help for command ``` diff --git a/docs/reference/environment.md b/docs/reference/environment.md index 85edf0a5..49fe9653 100644 --- a/docs/reference/environment.md +++ b/docs/reference/environment.md @@ -2,32 +2,51 @@ These environment variables will automatically be set for each CIX execution. +`CIX_CONTAINER_NAME` + +The fully-qualified name of the container on the CIX host, composed of the unique network name +and the step name (e.g., `cix-XYZab-stepname`). This name should not be used for network connections. + + +`CIX_EXECUTION_ID` + +The ID of the running pipeline. This is mostly used for interfacing with the Server API. + `CIX_HOSTNAME` The name of the host running CIX (e.g., your workstation or server). 
This variable is defined only when cix is started via the wrapper shell script (`cix`, refer to the [Getting Started](../getting-started.md) document for installation instructions). +`CIX_NETWORK_NAME` + +The name of the isolated network created for this pipeline (e.g., `cix-XYZab`). Especially useful +when using docker-outside-of-docker (the host's `docker.sock` is mounted into the container) to attach new containers +to the Docker network used within a CIX execution. + +`CIX_SERVER_PORT` + +The port number on which the CIX server is listening. This variable defaults to 10030. + `CIX_STEP_NAME` The name of the step for which this container is being run (from the YAML `name:` attribute). It will be the container's hostname, unless the optional `hostname:` attribute is supplied. Either of these names may be used to address the host on the Docker network. -`CIX_NETWORK_NAME` +`CIX_TEMP` -The name of the isolated network created for this pipeline (e.g., `cix-XYZab`). Especially useful -when using docker-outside-of-docker (the host's `docker.sock` is mounted into the container) to attach new containers -to the Docker network used within a CIX execution. +The CIX temporary directory, which persists across steps as a Docker volume. Defaults to `/cix/tmp`. -`CIX_CONTAINER_NAME` +`CIX_WORKDIR` -The fully-qualified name of the container on the CIX host, composed of the unique network name -and the step name (e.g., `cix-XYZab-stepname`). This name should not be used for network connections. +The working directory of the container, which defaults to `/cix/src`. It is almost always the same as the workspace bind volume mount (see `CIX_WORKSPACE`). + +`CIX_WORKSPACE` + +Where the workspace is mounted; defaults to `/cix/src`. -`CIX_EXECUTION_ID` -The ID of the running pipeline. This is mostly used for interfacing with the Server API. # Example * [docs/examples/environment-built-ins.yaml](https://github.com/salesforce/cix/blob/master/docs/examples/environment-built-ins.yaml) diff --git a/docs/reference/yaml.md b/docs/reference/yaml.md index eaaf6dab..db8ec1f5 100644 --- a/docs/reference/yaml.md +++ b/docs/reference/yaml.md @@ -31,13 +31,20 @@ registry: imports: import-name: src: path-to-yaml-file - http_authorization_token: "optional-auth-token-on-http(s)-imports" + http-authorization-token: "optional-auth-token-on-http(s)-imports" pipeline: - step: name: string image: repo/string:tag + user: [:] pull-policy: Default | Always | IfNotPresent | Never + loop: Int + counter-variable: COUNTER + for-each: + - list + for-each: c,s,v + element-variable: ELEMENT ports: - "port:port" - port @@ -53,7 +60,7 @@ pipeline: value: string default: string when: - - operator: EQ | NEQ | IS_SET | IS_NOT_SET | GTE | GT | LTE | LT | EXISTS | NOT_EXISTS | OR | INCLUDES | NOT_INCLUDES | STARTS_WITH | ENDS_WITH + - operator: EQ | NEQ | IS_SET | IS_NOT_SET | GTE | GT | LTE | LT | EXISTS | NOT_EXISTS | OR | INCLUDES | NOT_INCLUDES | AND | STARTS_WITH | ENDS_WITH value: $$VALUE value-default: default-value other: $$OTHER @@ -65,6 +72,10 @@ pipeline: default: something conditions: # When Operator is OR - operator: ... + - operator: MATCHES | NOT_MATCHES + value: $$VALUE + expressions: $$REGEXP + delimiter: ',' # delimiter is optional; defaults to a comma ',' arguments: - arg1 - arg2 @@ -103,11 +114,12 @@ pipeline: ## Basic YAML Attributes -### version {docsify-ignore} +### version Number. Required. -The CIX YAML file must specify a numeric version. Currently, that is `2.1` (as a number, the value should not be quoted).
+The CIX YAML file must specify a numeric version. Currently, that is `2.1` (as a number, the value should not be +quoted). ```yaml version: 2.1 @@ -115,46 +127,57 @@ version: 2.1 [Error loading section, please refresh](../shared/registry.md ':include') -### pipeline {docsify-ignore} +### pipeline Begins the pipeline definition, which is a collection of steps, [imports](#imports), and [step groups](#steps). -### step {docsify-ignore} +### step Defines a pipeline step - that is, a container. Supported attributes are described below, only `name` and `image` are required. -#### arguments {docsify-ignore} +#### arguments List of strings. -A list of command arguments to pass to the image's default ENTRYPOINT executable. Mutually exclusive with the `commands` -attribute. +A list of command arguments to pass to the image's default ENTRYPOINT executable. Mutually exclusive with the +`commands` attribute. As a special case, YAML boolean (`true`, `false`) and number (`123`, `3.14`) literals +can be supplied without quotes, but be sure to quote other special YAML syntax that can be interpreted as +structures or objects. ```yaml arguments: - --work-dir - /cix/src + - --timeout + - 120 + - --enable-check + - true + - --data + - '{}' - start ``` -#### background {docsify-ignore} +#### background Boolean, default `false`. When `true`, the step will be executed asynchronously from the remaining pipeline. Therefore, this can be used to -start "services" which are required by subsequent steps. +start "services" which are required by subsequent steps. The exit status of background steps is ignored, and a failure +will not abort the pipeline. Containers terminate when there is no longer a process with PID 1, so if your process detaches from the standard file -descriptors (stdin, stdout, and stderr) and starts a new process group ("daemonizes"), then you will need to otherwise -prevent your container from exiting (such as a while loop which waits for the desired process to exit). If possible, -your process should remain in the foreground. +descriptors (stdin, stdout, and stderr) and starts a new process group (known as "daemonizing"), then you will need to +otherwise prevent your container from exiting (such as a while loop which waits for the desired process to exit). If +possible, your process should remain in the foreground. -#### commands {docsify-ignore} +#### commands A list of shell command strings to be executed after the container is started. These commands are added to a script which is injected into the container and replaces the container's default ENTRYPOINT. Mutually exclusive with -the `arguments` attribute. +the `arguments` attribute. As a special case, YAML boolean (`true`, `false`) and number (`123`, `3.14`) literals +can be supplied without quotes, but be sure to quote other special YAML syntax that can be interpreted as +structures or objects. ```yaml commands: @@ -162,10 +185,10 @@ commands: - cp /src/file /tmp ``` -##### Tips & Caveats for `commands` {docsify-ignore} +##### Tips & Caveats for `commands` -The YAML "literal block scalar" operator is great for specifying multi-line expressions. It avoids having to escape special -YAML characters or cram a complex shell statement into one long line with semicolons. It looks like this: +The YAML "literal block scalar" operator is great for specifying multi-line expressions. It avoids having to escape +special YAML characters or cram a complex shell statement into one long line with semicolons. 
It looks like this: ``` commands: @@ -190,7 +213,18 @@ if it is a strictly POSIX-conforming Bourne shell, you will be prevented from us An excellent reference to shell scripting can be found here: [Advanced Bash Scripting Guide](http://tldp.org/LDP/abs/html/index.html). -#### commands-shell {docsify-ignore} +#### commands-output + +String, default `timestamp`. + +Changes the default step output behavior, allowing users to simplify the output of their steps without wrapping them in +a script. Allowed options are: +* `timestamp` echoes the commands before their stdout/stderr is displayed, with a timestamp. This is the default +behavior. +* `echo` echoes the commands before their stdout/stderr is displayed, but does not provide a timestamp. +* `minimal` steps will only convey the stdout/stderr of the commands run. + +#### commands-shell String, default `/bin/sh`. @@ -201,7 +235,7 @@ some images may also provide a separate `/bin/bash`, or another language entirel commands-shell: /bin/bash ``` -#### continue-on-fail {docsify-ignore} +#### continue-on-fail Boolean, default `false`. @@ -214,7 +248,7 @@ last command's status will be used to determine the outcome of the step. [Error loading section, please refresh](../shared/environment.md ':include') -#### hostname {docsify-ignore} +#### hostname String. @@ -223,7 +257,7 @@ simple word (“db” or “web”) to a fully-qualified hostname such as “www will resolve this name to the container's address for all other containers on the same network (that is, within the same pipeline). -##### More On Container Names {docsify-ignore} +##### More On Container Names A container can be referenced using several names. The first of course is the step name as defined by the `name:` attribute in the YAML file. This is the preferred name. @@ -239,27 +273,65 @@ Docker will provide an internal DNS service for your containers. This enables it addresses. It's not advisable to manipulate the /etc/resolv.conf file, as this will remove the ability to refer to and connect with other containers by name. -#### image {docsify-ignore} +#### image String. Required. The name of the image for this step. If the image resides on an authenticated registry, you must specify the `registry` entry for the registry and provide authentication credentials. -#### loop {docsify-ignore} +#### loop Integer. Causes the step to be repeated the number of times specified. -#### name {docsify-ignore} +#### counter-variable + +String. Optional for `loop` or `for-each`. + +Exports a counter (starting at 1) as an environment variable to the current step iteration. + +#### for-each + +YAML List or CSV. + +Causes the step to repeated for each element in the list or CSV. + +#### element-variable + +String. Required for `for-each`. + +Exports the element as an environment variable to the step iteration. + +#### name String. Required. The container will be given this name as its hostname on the Docker network. Other containers can refer to it using this name. See the `hostname` attribute for more details on container hostnames. -#### ports {docsify-ignore} +#### user + +String or Integer. Optional. + +A string value specifying the user or integer value specifying user-id (uid) inside the container. The user field +accepts a username or UID as well as optional group or GID (format: [:]). +The user value can also be supplied via an environment variable with the `$$` notation. 
+ +Default value of uid and gid is 0 (root) + +See the [Docker documentation](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#user) on how +to add usernames in images using Dockerfile + +```yaml +pipeline: + - step: + name: example + user: '$$MY_USER' +``` + +#### ports List of ports to expose to the external host. @@ -277,7 +349,7 @@ ports: - 9000 ``` -#### privileged {docsify-ignore} +#### privileged Boolean, default `false`. @@ -288,7 +360,7 @@ See the [Docker documentation on the --privileged option](https://docs.docker.com/engine/reference/commandline/run/#full-container-capabilities---privileged) for more details. -#### pull-policy {docsify-ignore} +#### pull-policy String. Permissible values: `Default`, `Always`, `IfNotPresent`, `Never` @@ -312,7 +384,7 @@ image: redis pull-policy: IfNotPresent ``` -#### retry {docsify-ignore} +#### retry Must specify `iterations:` and `backoff:` values, which are both integers. @@ -325,13 +397,13 @@ retry: backoff: 30 ``` -#### timeout {docsify-ignore} +#### timeout Integer, in seconds. Causes the step to fail if it takes longer than the specified number of seconds. -#### volumes {docsify-ignore} +#### volumes List of volumes to mount in the container. @@ -343,13 +415,13 @@ volumes: [Error loading section, please refresh](../shared/conditionals.md ':include') -#### working-dir {docsify-ignore} +#### working-dir String, default `/cix/src`. Specifies an alternative current-working-directory when the container starts. -#### workspace-mount-point {docsify-ignore} +#### workspace-mount-point String, default `/cix/src`. @@ -358,7 +430,7 @@ provided to `cix` with the `-w --workspace` option) will be mounted in the conta ## Advanced Topics -### steps {docsify-ignore} +### steps Begins a nested group of pipeline steps and imports. @@ -374,26 +446,26 @@ pipeline: . . . ``` -#### name {docsify-ignore} +#### name String. Required. Provides a name for the group of steps. -#### parallel {docsify-ignore} +#### parallel Boolean, default `false`. If set to `true`, the steps in the step group will be run concurrently. -#### pipeline {docsify-ignore} +#### pipeline Begins the declaration of sub-steps and step groups which will be grouped as a unit. Accepts all the attributes available to the top level `pipeline` directive. Step groups may contain nested groups. -### imports {docsify-ignore} +### imports Top-level directive which introduces an imported set of steps or entire pipeline for use later in the current file. @@ -409,13 +481,13 @@ imports: src: another-file.yaml remote-import: src: http://my-awesome-remote-server/foo.yaml - http_authorization_token: my-awesome-optional-http-authorization-token + http-authorization-token: my-awesome-optional-http-authorization-token pipeline: . . . ``` -#### Using Imported Steps and Pipelines {docsify-ignore} +#### Using Imported Steps and Pipelines Use imported steps and pipelines in your main YAML file, with `import:` followed by a list of the imported steps. Values are passed into the imported step(s) via environment variables. diff --git a/docs/release-notes.md b/docs/release-notes.md index 48596f44..689f4161 100644 --- a/docs/release-notes.md +++ b/docs/release-notes.md @@ -1,3 +1,290 @@ # CIX Release Notes -### 3.0.0-Beta -* Setting up Open Source copy of project. 
+ +### 2.7.4 +* Bug-fix: Add type 'boolean' to environmentSchema for value and default + +### 2.7.2 +* Added support for using images with sha256 digests instead of tags + +### 2.7.1 +* Added new Operators (MATCHES, NOT_MATCHES) for regular expression checks. + +### 2.7.0 +* Updated CIX to run on Node.js 16 +* Updated npm dependencies +* Bug-fix: refactor loop, for-each, so they work with continue-on-fail and retry + +### 2.6.9 +* Bug-fix: Added the right docker network for the CIX container +* Bug-fix: Increased the remote stream readable watermark to 10MB +* Bug-fix: Fixed the privileged access mode +* Updated npm dependencies + +### 2.6.8 +* Bug-fix: chained pipelines showing as successful in some cases where they fail + +### 2.6.7 +* Bug-fix: exec --remote can hang on failed pipelines + +### 2.6.6 +* Removed logging version within install command. +* Upgraded node packages to the latest versions. +* Can replace the ephemeral network creation with your own network: + * Can switch to experimental host networking by exporting `DOCKER_NETWORK=host` before running CIX + * Can use an existing network by providing its name by exporting `DOCKER_NETWORK=my-network-name` before running CIX + +### 2.6.5 +* Added `-L, --logname`, a way to specify a different log file name than cix-execution.log. +* Added `--silent`, a way to suppress all output. +* Added environment variables `CIX_SERVER_PORT`, `CIX_WORKSPACE`, `CIX_WORKDIR` and `CIX_TEMP`. Detailed [here](/reference/environment). + +### 2.6.4 +* Fix logging to background steps + +### 2.6.3 +* Add new logger that writes to a single file. +* Now multiplexing logs from different pipelines (no longer all sharing a single global logger). +* Fixed bug where container logger would not log the last line if it didn't contain a newline. +* Fixed a bug where we were logging container output in color for log files. + +### 2.6.2 +* Allow `for-each` to be a single element, have trailing commas, or be empty. + +### 2.6.1 +* Allow the pipeline to continue if a step has `continue-on-fail: true` and its timeout is reached. +* Allow $$ENV variables in `for-each`. + +### 2.6.0 +* Enhanced `for-each` loop for `step`. See [this page for more](/tutorials/flow?id=loops). +* Fixed teardown being unable to remove resources when background containers were still running. +* Improved validation of step names. +* As a special case, allow unquoted boolean and number types in `arguments` and `commands` properties. +* Updated npm dependencies. +* Updated `when:` to support `AND` and also fixed `INCLUDES` w.r.t. empty strings causing a match. + +### 2.5.3 +* Users now have the ability to override the default pull-policy on an entire pipeline when using `load` or `exec`. See `cix help load` and `cix help exec` for more details. +* `step` now supports a `commands-output` [property](/reference/yaml?id=commands-output) with the following values: + * `timestamp` echoes all of the commands (w/ timestamp). Note: this is the default mode and the previous behavior of CIX. + * `echo` echoes all of the commands (no timestamp). + * `minimal` CIX will only display the stdout/stderr generated by the step's commands. +* Bugfix: Now able to see SILLY and DEBUG logs when streaming logs from server to a client. + +### 2.5.1 +* If an environment variable is not provided for variable `user` definitions in a step, the step will now run as root. + +### 2.5.0 +* Added the ability to pass environment to a step while only providing the name of the environment variable.
+ * For example: + ```yaml + version: 2.1 + pipeline: + - step: + name: environment + image: alpine:3.9 + environment: + - name: FOO # value == $$FOO + - name: BAR # value == $$BAR + default: bar-default # default works the same regardless of how value is defined + commands: + - echo $FOO + - echo $BAR + ``` +* Added the ability to force color mode in non-TTY CIX console logging mode, or force no color mode in TTY mode with `--color` and `--no-color`. +* Added the ability to specify a user on a `step`. The user field accepts username or UID (format: [:]). + * Example: + ```yaml + version: 2.1 + pipeline: + - step: + name: example + user: '$$MY_USER' + ``` +* Fixed an issue where log stream between a client and server would close after no activity for 120 seconds. +* Fixed an issue where console output would not show when changing the CIX logging level below INFO. +* Updated the CIX install script to set a more restrictive umask. +* Updated the Dockerfile base image to latest CentOS image. +* Fixed an issue where Docker pull retries would be stripped of the user credentials, leading to the retry mechanism exiting and failing the pipeline. +* Clarified some CIX logging messages to make the cause of a pipeline failure more clear. +* When a preprocessor is defined, it is now run on imports. + +### 2.4.2 +* Fixed a bug with GIT imports, where you couldn't mix local and GIT imports. We're also now able to correctly use http-authorization-token YAML attribute. +* Fixed a bug with timeouts, where if a timeout was specified and a step fails, it would hang until timeout was reached. +* Do not expose CIX API port when CIX is overridden to run on a preconfigured docker network. +* Added `pipelines`, `plugins`, and `prompted-secrets` to the configuration file. +* Added the ability to prompt for multiple secrets. + +### 2.4.1 +* `exec --remote`, `resume` and `next` block execution of the client until either the pipeline is paused or becomes terminal. + * Blocking executions can be disabled with the flag --non-blocking. +* `exec --remote`, `resume` and `next` can now stream logs from the server and display them on the client. + * Remote log streams can be disabled with --no-remote-logs. +* Updated `--env` and `--secret` with the ability to pass environment variables by name only, using their values from the calling environment. +* Added the `arguments` step attribute so additional parameters can be provided to an image's default ENTRYPOINT command. +* Detach from the console when starting the CIX server (`cix server` command). +* Migrated from custom nodejs base image to new common Strata nodejs base image. +* NPM dependency cleanup and upgrades. +* Added the `workspace-mount-point` step attribute to specify a different target to mount the workspace directory in the container. + +### 2.4.0 +* Added a plugin framework to CIX. The first feature to take advantage of the new framework is "preprocessors." A preprocessor allows a non-native pipeline to be transformed into a valid CIX pipeline before execution. +* Fixed a bug where teardown was not run after a pipeline failure. +* `cix describe` now displays environment variables defined within a pipeline. +* `cix resume --to step` now runs to and including the specified step. + +### 2.3.1 +* Restored CIX to loading chained YAML files to just before they get executed ("just-in-time"), rather than at the start of the first pipeline execution. This allows users to dynamically add or sync pipelines from earlier ones at runtime. 
+* Fixed issues with reading secrets from stdin with and without a TTY. +* Added the ability for users to define a .cixconfig file for some flags which would previously have to be specified on each CIX invocation. See [documentation](/reference/local-configuration.md) for more information. + +### 2.3.0 +* Added new CLI commands and server endpoints to support phased execution. Please check out [the documentation](/tutorials/phased-exec) for more information. +* Added support for passing in secrets through standard input rather than command line. Please check out [the documentation](/tutorials/secrets#standard-input) for more information. +* Added support to POST pipelines as JSON though the REST API, which removes the need to have the YAML pipeline files available on the server. +* Preliminary support for launching pipeline steps to a Kubernetes cluster. +* Return to using `:latest` as the default image tag in the `cix` wrapper script. The default can be overridden by [setting the CIX_IMAGE environment variable](/reference/cli.md#specifying-the-cix-docker-image-to-use). + +### 2.2.2 +* Perform a clean shutdown when the INT (Ctrl-C) and TERM signals are received. + +### 2.2.1 +* When executed by the shell, step `commands` now defaults to `set -e` or 'fail on error' mode, such that any failed +command will abort the pipeline. A step's failure mode can be manually overridden to 'ignore errors' by calling +`set +e` in your `commands` list. +* Refactored loggers to take care of the bug where containers log to multiple files. + +### 2.2.0 +* Added new Rest API, the API starts up at http://localhost:10030/api-docs and has the following functionality: + * Adding new pipelines to execute + * Chaining together pipeline executions + * Checking the status of a pipeline execution + * Adding new environment variables at runtime + * Ability to start CIX in a server mode with `cix server` +* Refactored much of the code, including upgrading all npm dependencies. + +### 2.1.19 +* Use `:latest` as the default image version in the wrapper script until renamed CIX is more widespread. + +### 2.1.18 +* It's time to make it official! The command to start CIX is now `cix`, while `cix2` is deprecated but will continue +to be supported for the foreseeable future. To update your image and wrapper script run `cix2 update` followed by +`cix2 install | sudo sh`. You may then use the command `cix` to run CIX. +* Added ability to use $$ environment format within the environment variables and registry host name `default` key. +* Default to console logging and remove Chalk colors if logging in files mode. +* Move from the jsonschema package to the [ajv](https://www.npmjs.com/package/ajv) package for schema validation. +* Allow the use of alternative interpreters for `commands`. Write your step commands in Python, NodeJS, etc. as long +as it is supported by the image. +* Bug fix: in shell scripts the `$?` variable was being overwritten before it could be used by a subsequent command. +* Add timestamp to container commands echo lines. +* To ensure the shell wrapper script is in-sync with the cix image, a version comparison is now performed and warning +emitted if it is outdated. + +### 2.1.17 +* Added ability to retry the image pull operation, to be more resilient to registry connection issues. Retries +are configured via the new `retry` attribute of the `registry` definition. + +### 2.1.16 +* Bug fix: fix obfuscation bug in maskSecrets function. 
+ +### 2.1.15 +* Bug fix: avoid using the "function" shell keyword in the cixrc shell script to support older Bourne shells. + +### 2.1.14 +* [Registries](/reference/yaml.md#registry) may now be defined by alias, with optional default value. This allows the registry +host to be overridden from the command-line. Alternatively, multiple registries may be defined and one selected from the +set using a command line variable. +* Allow `ports` to be defined with command-line variables. +* Fixed a bug where secrets passed with empty strings or regex control characters caused issues during the secret +masking process. +* Fixed a bug where the unquoted words `true` and `false` were not treated as strings in `when` condition operands. +* When using file logging, step numbers are now padded to two digits for easier sorting. +* Added `INCLUDES` (a.k.a. contains) and `NOT INCLUDES` to available simple conditional operators. + +### 2.1.13 +* Added check to ensure `exec` will not attempt to process invalid YAML. + +### 2.1.12 +* Addition of the "OR" [conditional operator](/reference/yaml.md#when). +* Fixed a bug where the Teardown pipeline was not executed when the main pipeline failed. + +### 2.1.11 +* Steps can be executed based on conditional evaluation of variables with the [when](/reference/yaml.md#when) attribute. +* Imports can now be nested within other imports. +* Multiple YAML files may be specified on the command line using repeated `--yaml` (`-y`) options. Each one will be +executed in order. Files are parsed when encountered so they do not need to exist prior to starting the pipeline. +This allows YAML files to be generated by previous files in the chain. +* Pipelines can be fetched from GitHub, which creates the ability to run "managed pipelines." Refer to the +[cix exec managed](/reference/cli.md#cix-exec-managed) command in the CLI reference. +* A new attribute [commands-shell](/reference/yaml.md#commands-shell) has been introduced to specify the shell binary under +which to run `commands`. This is useful when the default /bin/sh in an image is not the one you would like to run +commands with, but there is a separate shell installed (typically /bin/bash). The replacement shell must be +Bourne-compatible. +* The [privileged](/reference/yaml.md#privileged) attribute has been added to enable Docker privileged mode for containers, +primarily for Docker-in-Docker (running the full Docker engine in a container). +* Fix for `cix install` so it doesn't output CR-LF and break the newly installed file name and contents. Because +this bug is present in the existing script, use the `docker run` instructions from the +[Getting Started guide](getting-started.md) to get the updated version. Future updates can be performed by simply running +`cix install | sudo sh`. +* The 'cix' wrapper script has been improved to allow spaces in `-e` and `-s` parameters. +* When the logging mode is 'files' (`--logging files`), general CIX output is now sent to the console as well. +* Containers are deleted immediately after they are finished, which helps to avoid naming conflicts for repeated steps. +* Fixed erroneous `$$` variable expansion when two overlapping names were defined. + +### 2.1.10 +* Added ability to specify defaults for environment variables when they are not defined by `-e`, `-s` arguments on the +command line. +* Added ability to validate schema command in cix-cli `cix validate -y your.yaml`. +* Added two new volume mounts to each step: /cix/bin and /cix/libexec. 
+* Changed the GitHub docs theme to Cayman.
+
+### 2.1.9
+* When the 'files' logging mode is enabled (with `--logging files`), general output is saved to `cix-execution.log` as in CIX 1.
+* The default logging level has been changed from WARN to INFO.
+
+### 2.1.2
+* Bug fix: fixed the failing Payload test, and pipeline creation is now skipped if schema validation fails.
+
+### 2.1.1
+* __USER ACTION REQUIRED__ - YAML changes are required to upgrade from CIX 2.0 to 2.1.
+  * The `version` attribute in your YAML definition must be changed from `2.0` to `2.1`.
+  * Attributes of `step` must be indented an additional 2 spaces.
+  * 2.0 docs/examples/basic.yaml
+
+    ```yaml
+    version: 2
+    pipeline:
+      - step:
+        name: basic
+        image: alpine:3.8
+        commands:
+          - hostname
+    ```
+
+  * 2.1 docs/examples/basic.yaml
+
+    ```yaml
+    version: 2.1
+    pipeline:
+      - step:
+          name: basic
+          image: alpine:3.8
+          commands:
+            - hostname
+    ```
+
+  * The `pullpolicy` attribute has been renamed to `pull-policy`.
+  * The `workingdir` attribute has been renamed to `working-dir`.
+* Added the ability to import local files into your pipeline, as full pipelines or as libraries. More info at docs/examples/import/imports.yaml in the [Getting Started guide](getting-started.md).
+* Steps can now be grouped, and grouped steps can be parallelized. More info at docs/examples/steps.yaml in the [Getting Started guide](getting-started.md).
+* Steps and step-groups can be nested. More info at docs/examples/nested_steps.yaml in the [Getting Started guide](getting-started.md).
+* Fixed an issue where older version 2.0 YAML files would cause an `UncaughtException "TypeError: Cannot read property 'name' of null"` when run with CIX 2.1.
+* Fixed an issue where the request npm module was not included in the CIX2 docker image.
+* Removed the `root-` prefix from first-level nested steps.
+* Retry `backoff` value is specified in seconds instead of milliseconds.
diff --git a/docs/shared/conditionals.md b/docs/shared/conditionals.md
index e397ff33..5a2cf8b0 100644
--- a/docs/shared/conditionals.md
+++ b/docs/shared/conditionals.md
@@ -31,6 +31,14 @@ when: value-default: 2 other: $$BAR_CONDITION_VALUE other-default: 1 + - operator: MATCHES + value: 'main-244' + expressions: 'freeze,main,main-\d+' + delimiter: ',' + - operator: NOT_MATCHES + value: 'patch_' + expressions: 'freeze,main,patch-\d+' + ``` ##### Operators
@@ -41,10 +49,12 @@ The following operators are defined:
| `IS_SET`, `IS_NOT_SET` | Emptiness check of 'value' | value |
| `EQ`, `NEQ` | Equality/Non-equality of 'value' to 'other' | (1) value, other, value-default, other-default |
| `GTE`, `GT`, `LTE`, `LT` | Numeric or String (lexical) comparison of 'value' to 'other' | (1) value, other, value-default, other-default |
-| `INCLUDES`, `NON_INCLUDES` | Substring comparison -- does 'value' include the string 'other' | (1) value, other, value-default, other-default |
+| `INCLUDES`, `NOT_INCLUDES` | Substring comparison -- does 'value' include the string 'other'. Evaluates to false if 'other' is an empty string and 'value' is not an empty string | (1) value, other, value-default, other-default |
| `STARTS_WITH`, `ENDS_WITH` | String prefix and suffix comparison -- does 'value' start with or end with the string 'other' | (1) value, other, value-default, other-default |
-| `EXISTS`, `NON_EXISTS` | List membership -- is 'value' among 'values' | (2) value, value-default, values |
+| `EXISTS`, `NOT_EXISTS` | List membership -- is 'value' among 'values' | (2) value, value-default, values |
| `OR` | Alternation | `conditions`, a list of operator/value attributes as above |
+| `AND` | Conjunction | `conditions`, a list of operator/value attributes as above |
+| `MATCHES`, `NOT_MATCHES` | Regular expression comparison -- does any one of the 'expressions' match the string 'value'. 'expressions' may contain multiple expressions separated by a delimiter character; the optional 'delimiter' attribute controls how 'expressions' is split, and defaults to a comma (','). | (1) value, expressions |
(1) Operator expects attributes named `value` and `other`, with optional `value-default` and `other-default` alternatives which supply values when the former attributes are not defined or refer to variables which cannot
diff --git a/docs/shared/environment.md b/docs/shared/environment.md
index 7806a3b7..74d87c53 100644
--- a/docs/shared/environment.md
+++ b/docs/shared/environment.md
@@ -1,15 +1,30 @@ #### environment
-Defines a list of key-value pairs in the step definition representing environment variables which will be accessible from within a step's container. In order to access a value passed to CIX, either in the value field or the default field, the `$$` notation must be used. A default value may be defined in the default field, which will be used in the case where a value is not provided via the value field (either statically or passed to CIX).
+Defines a list of key-value pairs in the step definition representing environment variables which will be accessible from within a step's container.
-By default, environment variables and secrets passed to CIX are not provided to any step which does not explicitly define them within an environment block. Below is an example of these two concepts:
+In order to assign a value passed to CIX to the environment of a step, the `$$` notation can be used in either the `value` field or the `default` field. For example, `value: $$BAR` and `default: $$BAR_DEFAULT` will use the values from `-e BAR=bar` and `-e BAR_DEFAULT=bar-default`, respectively.
+
+If the `value` field is omitted from the YAML environment item, it is automatically defined as `$$` prepended to the `name` field. For example, `name: BAZ` will infer `value: $$BAZ` if the `value` field is left undefined in the YAML definition.
+
+A default environment value may be defined in the `default` field, which will be used in the case where a resolvable value is not provided to CIX. In the example below, `default: baz-default` will cause `BAZ` to equal `baz-default` within the step's environment if `-e BAZ=baz` is not provided to CIX, because CIX was not passed a value for `$$BAZ`.
+
+!> **Important** For security purposes, environment variables and secrets passed to CIX are not provided to any step which does not explicitly define them in a YAML environment block.
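To connect the `$$` resolution above with the command line, a hypothetical invocation that supplies the values referenced in this section via the documented `-e` flag (the values themselves are placeholders) could look like:

```bash
# Supplies BAR, BAR_DEFAULT, and BAZ so that $$BAR, $$BAR_DEFAULT, and $$BAZ resolve inside the step
cix exec -y docs/examples/environment.yaml -e BAR=bar -e BAR_DEFAULT=bar-default -e BAZ=baz
```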
+
+Below is an example of these concepts:
 ```yaml environment: - name: FOO - value: foo # statically defined + # value statically defined + value: foo - name: BAR - value: $$BAR_FROM_COMMAND # value passed in via '-e BAR_FROM_COMMAND=bar' - default: bar # $$BAR_FROM_COMMAND can also be used here + # value passed in via '-e BAR=bar' + value: $$BAR + # default passed in via '-e BAR_DEFAULT=bar-default' + default: $$BAR_DEFAULT + - name: BAZ + # when value is omitted, value == $$BAZ, + # passed in via '-e BAZ=baz' + default: baz-default ```
 CIX provides some helpful predefined environment variables to every container, documented [here](/reference/environment.md).
\ No newline at end of file
diff --git a/docs/tutorials/commands.md b/docs/tutorials/commands.md
index f3715969..3f7abd26 100644
--- a/docs/tutorials/commands.md
+++ b/docs/tutorials/commands.md
@@ -34,3 +34,8 @@ By default, the shell used by a step is `/bin/sh`. The behavior of `/bin/sh` is More information on this YAML property is available [here](reference/yaml.md#continue-on-fail) In the [next section](tutorials/environment.md) we'll learn how environment variables work.
+
+### Run pipeline as a non-root user
+
+By default, commands inside a step run as the root user. This user can be overridden per step.
+More information on this is available [here](reference/yaml.md#user).
diff --git a/docs/tutorials/environment.md b/docs/tutorials/environment.md
index 2bce23a6..68f08bb8 100644
--- a/docs/tutorials/environment.md
+++ b/docs/tutorials/environment.md
@@ -19,7 +19,7 @@ var='foo!' # single quotes must be used for certain special characters: https:// # it is generally a good idea to surround the substitution with double quotes to ensure # the returned value is not interpreted directly by bash (and whitespace preserved), # but instead is passed as a string to CIX
-cix exec -y docs/examples/environment.yaml -e FOO_FROM_COMMAND="$(curl www.google.com)"
+cix exec -y docs/examples/environment.yaml -e FOO="$(curl www.google.com)"
@@ -33,7 +33,7 @@ cix exec -y docs/examples/environment.yaml -e FOO_FROM_COMMAND="$(curl www.googl To launch this example, run the following command in your terminal:
 ```bash
-cix exec -y docs/examples/environment.yaml -e FOO_FROM_COMMAND=foo -e BAR_FROM_COMMAND=bar
+cix exec -y docs/examples/environment.yaml -e FOO=foo -e BAR=bar -e BAZ=baz
 ```
 ---
diff --git a/docs/tutorials/flow.md b/docs/tutorials/flow.md
index 9c930da8..2d54b4fc 100644
--- a/docs/tutorials/flow.md
+++ b/docs/tutorials/flow.md
@@ -34,7 +34,12 @@ pipeline: ```
 ### Loops
-Learn how to use CIX's looping mechanism. Any loop iteration with a non-0 exit code will cause a pipeline failure.
+Learn how to use CIX's looping mechanism. There are two types of loops:
+1. A basic `loop` with an integer to represent how many iterations to run. The loop may optionally include a `counter-variable` attribute, which adds a counter environment variable to the step.
+2. A `for-each` style loop. The `for-each` can take either a `yaml` list or a CSV list. An `element-variable` attribute is required for this style of loop; it adds an environment variable to the step containing the element currently being iterated over. The `for-each` may also optionally include a `counter-variable` attribute, just like the basic `loop`.
+
+Any loop iteration with a non-zero exit code will cause a pipeline failure. There is also a `parallel` attribute to run the looped steps in parallel. A sketch of both loop styles is shown below.
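As a sketch of the two loop styles described above (the step names, image, and list values are placeholders, and the exact attribute nesting should be checked against docs/examples/loop.yaml):

```yaml
version: 2.1
pipeline:
  - step:
      name: basic-loop                # hypothetical step name
      image: alpine:3.8
      loop: 3                         # run the commands three times
      counter-variable: COUNTER       # hypothetical counter variable name
      commands:
        - echo "iteration $COUNTER"
  - step:
      name: for-each-loop             # hypothetical step name
      image: alpine:3.8
      for-each:                       # a YAML list; a CSV string is also described above
        - alice
        - bob
      element-variable: NAME          # hypothetical element variable name
      commands:
        - echo "hello $NAME"
```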
+
 * [docs/examples/loop.yaml](https://github.com/salesforce/cix/blob/master/docs/examples/loop.yaml)
@@ -43,7 +48,7 @@ Learn how to use CIX's looping mechanism. Any loop iteration with a non-0 exit c To launch this example, run the following command in your terminal:
 ```bash
-cix exec -y docs/examples/loop.yaml -e LOOPS=3
+cix exec -y docs/examples/loop.yaml
 ```
 ### Retry
diff --git a/docs/tutorials/import.md b/docs/tutorials/import.md
index 26fb6b52..75bca6be 100644
--- a/docs/tutorials/import.md
+++ b/docs/tutorials/import.md
@@ -98,13 +98,15 @@ pipeline: ``` To import a section of the library file, simply use `.` notation for the qualified name of the step.
-## Importing from GIT
-!> Experimental feature.
-!> To use GIT, the user must supply a GIT Auth Token environment variable called HTTP_AUTHORIZATION_TOKEN
+## Using GIT
-There are two ways to use GIT.
+> GIT requires the user to provide a GIT PAT (personal access token).
-The first way is you can load a git URL from the command line:
+There are three ways to use GIT with the PAT:
+
+1. You can load a GIT URL from the command line, but you must supply the secret `HTTP_AUTHORIZATION_TOKEN`:
+```sh
+cix exec -y https://github.com/raw/ci/cix/master/docs/examples/import/imports.yaml --secret-prompt HTTP_AUTHORIZATION_TOKEN
 ```
 Example:
diff --git a/docs/tutorials/phased-exec.md b/docs/tutorials/phased-exec.md
index df9e104c..04ba4571 100644
--- a/docs/tutorials/phased-exec.md
+++ b/docs/tutorials/phased-exec.md
@@ -27,7 +27,7 @@ cix load -y docs/examples/phased-exec.yaml Load works just like `cix exec`; you can provide environment variables and secrets.
 ```shell
-cix load -y docs/examples/phased-exec.yaml -e FOO='foo' -s P4_PASSWORD='password'
+cix load -y docs/examples/phased-exec.yaml -e FOO='foo' -s P4_PASSWORD='********'
 ```
 If the CIX server resides on another host or is running on an alternate port, you can specify either --host or --port.
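To make the `--host`/`--port` note above concrete, a hypothetical invocation that points `cix load` at a remote CIX server might look like the following (the host name and port are placeholders):

```bash
# Load the pipeline on a CIX server running on another host and a non-default port
cix load -y docs/examples/phased-exec.yaml --host cix-server.example.com --port 8080
```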
diff --git a/docs/tutorials/secrets.md b/docs/tutorials/secrets.md index 5e588d81..3b0377ca 100644 --- a/docs/tutorials/secrets.md +++ b/docs/tutorials/secrets.md @@ -15,7 +15,7 @@ This method of passing secrets is convenient, but should only be used in select [Error loading section, please refresh](../examples/secrets.yaml ':include :type=code') ```bash - cix exec -y docs/examples/secrets.yaml -s SECRET_FOO_FROM_COMMAND=foo -s SECRET_BAR_FROM_COMMAND=bar + cix exec -y docs/examples/secrets.yaml -s SECRET_FOO=foo -s SECRET_BAR=bar ``` ### Secrets passed via stdin diff --git a/package.json b/package.json index 1fc2f0bd..6a23eff2 100644 --- a/package.json +++ b/package.json @@ -1,17 +1,24 @@ { "name": "cix", - "version": "3.0.0", + "version": "2.7.4", "private": true, "license": "BSD-3-Clause", "type": "module", "main": "./src/index.js", + "overrides": { + "bats-support": "0.3.0", + "marked": "4.3.0", + "got": "12.6.0", + "qs": "6.11.1" + }, "scripts": { "itest": "bats test/integration", - "clean": "rm -rf .scannerwork coverage node_modules package-lock.json logs test-report.xml", + "clean": "rm -rf .scannerwork coverage node_modules logs test-report.xml", "lint": "eslint src", "lint-fix": "eslint src --fix", "preview-docs": "docsify serve docs", - "test": "jest --silent src" + "test": "jest --silent src", + "prepare": "husky install" }, "jest": { "collectCoverage": true, @@ -26,44 +33,50 @@ ] }, "dependencies": { - "ajv": "7.0.3", - "axios": "0.21.1", - "chalk": "4.1.0", - "commander": "7.0.0", - "deepdash": "5.3.5", - "dockerode": "3.2.1", - "ejs": "3.1.5", - "express": "4.17.1", - "helmet": "4.2.0", - "js-yaml": "4.0.0", + "ajv": "8.12.0", + "axios": "1.3.4", + "chalk": "4.1.2", + "commander": "10.0.0", + "deepdash": "5.3.9", + "dockerode": "3.3.5", + "ejs": "3.1.9", + "express": "4.18.2", + "js-yaml": "4.1.0", "lodash": "4.17.21", + "qs": "6.11.1", "readline-sync": "1.4.10", - "swagger-client": "3.12.2", - "swagger-jsdoc": "6.0.1", - "swagger-ui-express": "4.1.6", - "tar-stream": "2.2.0", - "uuid": "8.3.2", - "winston": "3.3.3" + "swagger-client": "3.18.5", + "swagger-jsdoc": "6.2.8", + "swagger-ui-express": "4.6.2", + "tar-stream": "3.0.0", + "uuid": "9.0.0", + "winston": "3.8.2" }, "devDependencies": { - "@babel/core": "7.12.10", - "@babel/plugin-proposal-class-properties": "7.12.1", + "@babel/core": "7.21.3", + "@babel/eslint-parser": "7.21.3", + "@babel/plugin-proposal-class-properties": "7.18.6", "@babel/plugin-syntax-import-meta": "7.10.4", - "@babel/preset-env": "7.12.11", - "babel-eslint": "10.1.0", - "babel-jest": "26.6.3", - "bats": "1.2.1", + "@babel/preset-env": "7.20.2", + "babel-jest": "29.5.0", + "bats": "1.9.0", "bats-assert": "2.0.0", "bats-support": "0.3.0", - "docsify-cli": "4.4.2", - "eslint": "7.18.0", + "docsify-cli": "4.4.4", + "eslint": "8.36.0", "eslint-config-google": "0.14.0", - "eslint-plugin-jsdoc": "31.4.0", - "jest": "26.6.3", - "jest-cli": "26.6.3", + "eslint-plugin-jsdoc": "^46.4.4", + "got": "12.6.0", + "husky": "^8.0.3", + "jest": "29.3.1", + "jest-cli": "29.3.1", "jest-sonar-reporter": "2.0.0", - "jsdoc": "3.6.6", - "strip-ansi": "6.0.0", - "supertest": "6.1.3" + "jsdoc": "4.0.2", + "marked": "4.3.0", + "strip-ansi": "6.0.1", + "supertest": "6.3.3" + }, + "volta": { + "node": "16.20.0" } } diff --git a/scripts/cix.sh b/scripts/cix.sh index 6c40e822..13bb3f69 100755 --- a/scripts/cix.sh +++ b/scripts/cix.sh @@ -1,5 +1,5 @@ #!/bin/bash -CIX_WRAPPER_VERSION=8 +CIX_WRAPPER_VERSION=9 # # Run and update the CIX (CI Executor) container. 
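Reading the wrapper-script changes above and below, the networking behavior appears to split into two variables: `CIX_DOCKER_NETWORK` attaches the CIX container itself to an existing Docker network, while `DOCKER_NETWORK` is passed into the container (apparently for the runtime step containers). A hypothetical invocation exercising these variables, together with the documented `CIX_IMAGE` override, might look like this; the image tag and network names are placeholders, and the exact semantics should be confirmed against scripts/cix.sh:

```bash
# Placeholders: adjust the image tag and network names for your environment
export CIX_IMAGE=salesforce/cix:2.7.4        # wrapper uses this instead of the :latest default
export CIX_DOCKER_NETWORK=cix-control-net    # network for the CIX container itself
export DOCKER_NETWORK=cix-steps-net          # forwarded into the container as DOCKER_NETWORK
cix exec -y cix.yaml
```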
@@ -52,7 +52,7 @@ fi # The default port setting is 10030, we need to expose this for the server command, as well as the exec # command if the --remote option is not set -PORT=$DEFAULT_CIX_PORT +PORT=$DEFAULT_CIX_SERVER_PORT # Extract --port from args. for (( i = 1; i <= $#; i++ )); do @@ -120,9 +120,11 @@ if [[ $DOCKER_HOST ]]; then SET_DOCKER_HOST="-e DOCKER_HOST=$DOCKER_HOST" fi +if [[ $DOCKER_NETWORK ]]; then + SET_RUNTIME_CONTAINER_NETWORK="-e DOCKER_NETWORK=$DOCKER_NETWORK" +fi if [[ $CIX_DOCKER_NETWORK ]]; then - unset EXPOSE_PORT - SET_DOCKER_NETWORK="--network $CIX_DOCKER_NETWORK" + SET_CIX_CONTAINER_NETWORK="--network $CIX_DOCKER_NETWORK" fi # Detect ENV passthrough options @@ -153,7 +155,8 @@ $EXEC docker run \ $USE_TTY \ $DETACH \ $SET_NAME \ - $SET_DOCKER_NETWORK \ + $SET_CIX_CONTAINER_NETWORK \ + $SET_RUNTIME_CONTAINER_NETWORK \ -e CIX_WRAPPER_VERSION=$CIX_WRAPPER_VERSION \ -e CIX_WRAPPER_IMAGE=$CIX_IMAGE \ $ENV_PASSTHROUGH_ARGS \ diff --git a/src/cli/Application.js b/src/cli/Application.js index 4d60127e..192ba8ee 100644 --- a/src/cli/Application.js +++ b/src/cli/Application.js @@ -1,15 +1,14 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {CIXError, CLIError, ExecutionError, NodeProvider, ServerError, ValidateError, _} from '../common/index.js'; -import commander from 'commander'; +import {CIXError, CLIError, ExecutionError, Logger, NodeProvider, ServerError, ValidateError, _} from '../common/index.js'; +import {program as commander} from 'commander'; import commands from './commands/index.js'; import dirname from './dirname.cjs'; import fs from 'fs'; -import log from 'winston'; import path from 'path'; // import.meta.url jest workaround... Fix once Jest supports ES Modules: https://github.com/facebook/jest/issues/9430 @@ -18,12 +17,9 @@ const {__dirname} = dirname; export class Application { /** * @class - * * @description CIX CLI. - * * @param {object} config - configuration options; aka., package.json * @param {Array} args - list of arguments for CIX execution - * * @see {@link https://www.npmjs.com/package/commander|commander} */ constructor(config, args) { @@ -31,41 +27,12 @@ export class Application { this._args = args; this.stdin = ''; this.insecureSecretWarned = false; - this.initLogger(); this.program = new commander.Command(); } - /** - * @function module:cli.Application#initLogger - * @description Initializes the winston default logger. - * - * @returns {undefined} - */ - initLogger() { - log.configure({ - transports: [ - new log.transports.Console({ - level: (NodeProvider.getProcess().env.LOG_LEVEL || 'info').toLowerCase(), - silent: NodeProvider.getProcess().argv.includes('--silent'), // silent for utests - format: log.format.combine( - log.format.timestamp({format: 'YYYY-MM-DDTHH:mm:ssZ'}), - log.format((info) => { - info.level = info.level.toUpperCase(); - return info; - })(), - log.format.colorize(), - log.format.align(), - log.format.printf((info) => `${info.timestamp} ${info.level} ${info.message}`), - ), - }), - ], - }); - } - /** * @function module:cli.Application#getProgram * @description Returns the commander object for better testability. 
- * * @returns {commander.Command} commander object */ getProgram() { @@ -75,7 +42,6 @@ export class Application { /** * @function module:cli.Application#validateWrapperScript * @description Check the shell wrapper script's version is not out of date. - * * @returns {undefined} */ validateWrapperScript() { @@ -87,21 +53,20 @@ export class Application { if (script.split('\n')[1].startsWith('CIX_WRAPPER_VERSION')) { expectedVersion = script.split('\n')[1].split('=')[1]; } else { - log.debug('Strange, the shell wrapper script\'s expected version cannot be determined.'); + Logger.debug('Strange, the shell wrapper script\'s expected version cannot be determined.'); expectedVersion = 0; } if (_.toInteger(callingVersion) < expectedVersion) { - log.warn('The shell wrapper script used to call cix is out of date. Run \'cix install | sudo sh\' to update it.'); + Logger.warn('The shell wrapper script used to call cix is out of date. Run \'cix install | sudo sh\' to update it.'); } else if (_.toInteger(callingVersion) > expectedVersion) { - log.warn('The shell wrapper script used to call cix is newer than this image. If you experience unexpected errors run \'cix install | sudo sh\' to downgrade it.'); + Logger.warn('The shell wrapper script used to call cix is newer than this image. If you experience unexpected errors run \'cix install | sudo sh\' to downgrade it.'); } } /** * @function module:cli.Application#getWrapperScript * @description returns the contents of the wrapper script. - * * @returns {string} the wrapper script as a string. */ getWrapperScript() { @@ -111,7 +76,6 @@ export class Application { /** * @function module:cli.Application#executeSubCommands * @description executes commands with error handling. - * * @returns {undefined} */ async executeSubCommands() { @@ -120,37 +84,37 @@ export class Application { await this.getProgram().parseAsync(this._args); } catch (error) { if (error instanceof CLIError) { - log.error(`${error.message}`); + Logger.error(`${error.message}`); exitCode = 1; } else if (error instanceof ValidateError) { - log.error(`${error.message}`); + Logger.error(`${error.message}`); exitCode = 2; } else if (error instanceof ExecutionError) { - log.error(`${error.message}`); + Logger.error(`${error.message}`); exitCode = 3; } else if (error instanceof ServerError) { - log.error(`${error.message}`); + Logger.error(`${error.message}`); exitCode = 4; } else if (error instanceof CIXError) { - log.error(`${error.message}`); + Logger.error(`${error.message}`); exitCode = 5; } else { // Catch swagger-client errors here if (error.response && error.response.obj && error.response.obj.message) { - log.debug(`Client Error: ${error.response.url} ${error.response.obj.status}`); - log.error(`${error.response.obj.message}`); + Logger.debug(`Client Error: ${error.response.url} ${error.response.obj.status}`); + Logger.error(`${error.response.obj.message}`); exitCode = 6; } else { - log.error(`Caught unexpected error: ${error.name}`); + Logger.error(`Caught unexpected error: ${error.name}`); exitCode = 100; } } if (error.stack) { - log.debug(`${error.stack}`); + Logger.debug(`${error.stack}`); } } finally { - log.debug('Exiting with status ' + exitCode); + Logger.debug('Exiting with status ' + exitCode); NodeProvider.getProcess().exit(exitCode); } } @@ -158,7 +122,6 @@ export class Application { /** * @function module:cli.Application#execute * @description Processes the request leveraging {@link https://www.npmjs.com/package/commander|commander} - * * @returns {undefined} */ execute() { @@ 
-177,10 +140,9 @@ export class Application { if (this._args[2] !== 'install') { this.validateWrapperScript(); + Logger.info(`This is CIX version ${this._config.version} ${NodeProvider.getProcess().env.CIX_WRAPPER_IMAGE ? '(from ' + NodeProvider.getProcess().env.CIX_WRAPPER_IMAGE + ')' : ''}`); } - log.debug(`This is CIX version ${this._config.version} ${NodeProvider.getProcess().env.CIX_WRAPPER_IMAGE ? '(from ' + NodeProvider.getProcess().env.CIX_WRAPPER_IMAGE + ')' : ''}`); - this.executeSubCommands(); // Get list of names from commander and join them with a regex OR, then remove first OR with a substr diff --git a/src/cli/Application.test.js b/src/cli/Application.test.js index 868baf97..198122a2 100644 --- a/src/cli/Application.test.js +++ b/src/cli/Application.test.js @@ -1,13 +1,12 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global describe, expect */ -import {CLIError, NodeProvider} from '../common/index.js'; +import {CLIError, Logger, NodeProvider} from '../common/index.js'; import {Application} from './Application'; -import log from 'winston'; describe('Application.execute', () => { let app; let processMock; @@ -41,28 +40,28 @@ describe('Application.execute', () => { test('validateWrapperScript: no version prints "out of date"', () => { delete processMock.env['CIX_WRAPPER_VERSION']; - const logWarn = jest.spyOn(log, 'warn').mockImplementation(); + const logWarn = jest.spyOn(Logger, 'warn').mockImplementation(); app.validateWrapperScript(); expect(logWarn.mock.calls[0][0]).toEqual(expect.stringMatching(/out of date/)); }); test('validateWrapperScript: lower version warns "out of date"', () => { - const logWarn = jest.spyOn(log, 'warn').mockImplementation(); + const logWarn = jest.spyOn(Logger, 'warn').mockImplementation(); processMock.env['CIX_WRAPPER_VERSION'] = 1; app.validateWrapperScript(); expect(logWarn.mock.calls[0][0]).toEqual(expect.stringMatching(/out of date/)); }); test('validateWrapperScript: higher version warns "newer than"', () => { - const logWarn = jest.spyOn(log, 'warn').mockImplementation(); + const logWarn = jest.spyOn(Logger, 'warn').mockImplementation(); processMock.env['CIX_WRAPPER_VERSION'] = 666; app.validateWrapperScript(); expect(logWarn.mock.calls[0][0]).toEqual(expect.stringMatching(/newer than/)); }); test('validateWrapperScript: no warn if version is exact', () => { - const logWarn = jest.spyOn(log, 'warn').mockImplementation(); + const logWarn = jest.spyOn(Logger, 'warn').mockImplementation(); jest.spyOn(app, 'getWrapperScript').mockImplementation(() => '#!/bin/bash\nCIX_WRAPPER_VERSION=3'); app.validateWrapperScript(); expect(logWarn).not.toHaveBeenCalled(); diff --git a/src/cli/commands/AbstractCommand.js b/src/cli/commands/AbstractCommand.js index 5d9a4841..9b5d37e9 100644 --- a/src/cli/commands/AbstractCommand.js +++ b/src/cli/commands/AbstractCommand.js @@ -1,23 +1,19 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {NodeProvider, _} from '../../common/index.js'; +import {Logger, _} from '../../common/index.js'; import {PipelineService, PluginService, ValidateService} from '../../engine/index.js'; import ConfigImporter from '../config/ConfigImporter.js'; import {Server} from '../../server/index.js'; -import commander from 'commander'; -import log from 'winston'; -import path from 'path'; +import {program as commander} from 'commander'; export default class AbstractCommand { /** * @class - * * @description Abstract Command Class. - * * @param {string} name - name of the concrete class. */ constructor(name) { @@ -39,9 +35,7 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#registerProgram * @description Registers the command's name with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerProgram(program) { @@ -51,21 +45,20 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerOptions(program) { + program.option('--color', 'Force console logs to include ANSI color codes'); + program.option('--no-color', 'Force console logs to not include ANSI color codes (overrides --color)'); + program.option('--silent', 'Silence logging'); return program; } /** * @function module:cli.AbstractCommand#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -75,7 +68,6 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#register * @description Registers the command with Commander. - * * @returns {undefined} */ register() { @@ -90,7 +82,6 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#action * @description The command's action. - * * @returns {undefined} */ action() { @@ -100,7 +91,6 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#getPipelineService * @description Provides the PipelineService reference for easier testability. - * * @returns {PipelineService} PipelineService reference */ getPipelineService() { @@ -110,7 +100,6 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#getValidateService * @description Provides the ValidateService reference for easier testability. - * * @returns {ValidateService} ValidateService reference */ getValidateService() { @@ -120,7 +109,6 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#getPluginService * @description Provides the PluginService reference for easier testability. - * * @returns {PipelineService} PipelineService reference */ getPluginService() { @@ -130,9 +118,7 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#initServer * @description Starts the pipeline in a server. 
- * * @param {object} options - map of options set on command line - * * @returns {undefined} */ initServer(options) { @@ -142,10 +128,8 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#collectValues * @description Using this method one can collect a series of values into a list - * * @param {string} value - value to be added to the list * @param {object} accumulator - existing values - * * @returns {object} updated values */ collectValues(value, accumulator = []) { @@ -160,10 +144,8 @@ export default class AbstractCommand { * @example * cix -e var1=Foo -e var2=Bar # Accumulates to {'var1=foo', 'var2=bar'} * cix -e ENV1 -e ENV2 # Accumulates to {'ENV1=foo', 'ENV2=bar'}, if they exist in the process ENV - * * @param {string} input - key or key/value pair to be added to the list * @param {object} accumulator - existing key/value pairs - * * @returns {object} updated key/value accumulator object */ collectKeyValue(input, accumulator = {}) { @@ -193,15 +175,13 @@ export default class AbstractCommand { * @description Using this function one can collect a series of entries with values and warn users to use different method * @example * cix -e var1=Foo -e var2=Bar # Leads to E: ['var1=Foo', 'var2=Bar'] - * * @param {string} value - value to be added to the list * @param {object} accumulator - existing values - * * @returns {object} updated values */ collectKeyValueAndWarn(value, accumulator = {}) { if (value.includes('=') && !this.insecureSecretWarned) { // only warn once per invocation - log.warn('Passing secrets with -s is not secure, instead use --secret-stdin, --secrets-stdin, or --secret-prompt'); + Logger.warn('Passing secrets with -s is not secure, instead use --secret-stdin, --secrets-stdin, or --secret-prompt'); } this.insecureSecretWarned = true; @@ -211,9 +191,7 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#loadCixConfig * @description Reads .cixconfig files from disk and sets options appropriately - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ loadCixConfig(options) { @@ -225,14 +203,12 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#validateOptions * @description Check command line YAML parameters. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ validateOptions(options) { if (_.isNil(options.yaml) || _.isEmpty(options.yaml)) { - log.info('No pipeline YAML file specified, using cix.yaml'); + Logger.info('No pipeline YAML file specified, using cix.yaml'); options.yaml = ['cix.yaml']; } } @@ -240,55 +216,22 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#configureLogger * @description Generates a logger class for the execution. - * * @param {object} options - Map of options set on command line. - * * @returns {undefined} */ configureLogger(options) { // enable logging to files - if (!_.isNil(options.logging) && options.logging === 'files') { + // PipelineService.set + if (options.logging === 'files' || options.logging === 'file') { // handle default here for loggingPath, in order to allow loadCixConfig to override it - this.enableFileLogging(options.loggingPath || 'logs'); + Logger.enableFileLogging(options.logging, options.loggingPath || 'logs', options.logname || 'cix-execution.log'); } } - /** - * @function module:cli.AbstractCommand#enableFileLogging - * @description Starts the pipeline in a server. 
- * - * @param {object} loggingPath - path to log cix-execution to - * - * @returns {undefined} - */ - enableFileLogging(loggingPath) { - log.info('Switching over to file logging.'); - log.add( - new log.transports.File({ - level: (NodeProvider.getProcess().env.LOG_LEVEL || 'info').toLowerCase(), - filename: path.join(loggingPath, 'cix-execution.log'), - silent: NodeProvider.getProcess().argv.includes('--silent'), // silent for utests - format: log.format.combine( - log.format.timestamp({format: 'YYYY-MM-DDTHH:mm:ssZ'}), - log.format((info) => { - info.level = info.level.toUpperCase(); - return info; - })(), - log.format.align(), - log.format.printf((info) => `${info.timestamp} ${info.level} ${info.message}`), - ), - }), - ); - // used for future container logging - log.fileLogging = {enabled: true, path: loggingPath}; - } - /** * @function module:cli.Application#generateListOfPipelines * @description Generates a list of pipelines to execute. - * * @param {object} options - Map of options set on command line. - * * @returns {undefined} */ generateListOfPipelines(options) { @@ -312,9 +255,7 @@ export default class AbstractCommand { /** * @function module:cli.AbstractCommand#generateEnvironmentList * @description Generates a engine compatible map of environment variables. - * * @param {object} options - Map of options set on command line. - * * @returns {undefined} */ generateEnvironmentList(options) { @@ -322,14 +263,14 @@ export default class AbstractCommand { _.forEach(options.env, (value, name) => { if (_.isNil(name) || _.isNil(value)) { - log.warn('This environment variable is invalid, skipping it: name: ' + name + ' value: ' + value); + Logger.warn('This environment variable is invalid, skipping it: name: ' + name + ' value: ' + value); } else { environmentList.push({name: name, value: value, type: 'environment'}); } }); _.forEach(options.secret, (value, name) => { if (_.isNil(name) || _.isNil(value)) { - log.warn('This environment variable is invalid, skipping it: name: ' + name + ' value: ' + value); + Logger.warn('This environment variable is invalid, skipping it: name: ' + name + ' value: ' + value); } else { environmentList.push({name: name, value: value, type: 'secret'}); } diff --git a/src/cli/commands/AbstractCommand.test.js b/src/cli/commands/AbstractCommand.test.js index 5b465192..85a0ad95 100644 --- a/src/cli/commands/AbstractCommand.test.js +++ b/src/cli/commands/AbstractCommand.test.js @@ -1,12 +1,12 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global beforeEach, describe, expect */ import AbstractCommand from './AbstractCommand.js'; -import log from 'winston'; +import {Logger} from '../../common/index.js'; class ImplementedCommand extends AbstractCommand { constructor() { @@ -62,16 +62,16 @@ describe('Application.configureLogger', () => { }); afterEach(() => { - if (log.fileLogging) { - delete log.fileLogging; + if (Logger.fileLogging) { + delete Logger.fileLogging; } }); // TODO: leaked file handle... 
test.skip('We add a file logger...', () => { - const logAdd = jest.spyOn(log, 'add').mockImplementation(() => {}); + const logAdd = jest.spyOn(Logger, 'add').mockImplementation(() => {}); implCommand.configureLogger({logging: 'files', loggingPath: '/dev/null'}); expect(logAdd).toHaveBeenCalled(); - expect(log.fileLogging).toEqual({'enabled': true, 'path': '/dev/null'}); + expect(Logger.fileLogging).toEqual({'enabled': true, 'path': '/dev/null'}); }); }); diff --git a/src/cli/commands/AbstractRemoteCommand.js b/src/cli/commands/AbstractRemoteCommand.js index f0c55fbf..0728db12 100644 --- a/src/cli/commands/AbstractRemoteCommand.js +++ b/src/cli/commands/AbstractRemoteCommand.js @@ -1,21 +1,19 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {ContainerLogger, ExecutionError, ServerError} from '../../common/index.js'; +import {ExecutionError, Logger, ServerError} from '../../common/index.js'; import AbstractCommand from './AbstractCommand.js'; +import Pipeline from '../../engine/pipeline/Pipeline.js'; import Swagger from 'swagger-client'; import axios from 'axios'; -import log from 'winston'; export default class AbstractRemoteCommand extends AbstractCommand { /** * @class - * * @description AbstractRemoteCommand Command. - * * @param {string} name - name of the concrete class. * @see {@link https://www.npmjs.com/package/swagger-client|swagger-client} */ @@ -26,9 +24,7 @@ export default class AbstractRemoteCommand extends AbstractCommand { /** * @function module:cli.AbstractRemoteCommand#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerOptions(program) { @@ -40,9 +36,7 @@ export default class AbstractRemoteCommand extends AbstractCommand { /** * @function module:cli.AbstractRemoteCommand#generateSwaggerClient * @description Generates Swagger client. - * * @param {object} options - map of options set on command line - * * @returns {object} The reference to the client. */ async generateSwaggerClient(options) { @@ -50,7 +44,7 @@ export default class AbstractRemoteCommand extends AbstractCommand { const port = options.port || '10030'; const host = options.host || '127.0.0.1'; const url = `http://${host}:${port}/api-docs/swagger.json`; - log.debug(`Creating Swagger Client from ${url}`); + Logger.debug(`Creating Swagger Client from ${url}`); // Timeout of 3seconds trying to contact CIX Server... this.client = await new Promise( async (resolve, reject) => { @@ -66,9 +60,7 @@ export default class AbstractRemoteCommand extends AbstractCommand { /** * @function module:cli.AbstractRemoteCommand#getPipelineApi * @description generates a pipeline API client. - * * @param {object} options - map of options set on command line - * * @returns {object} The reference to the pipeline client. */ async getPipelineApi(options) { @@ -79,9 +71,7 @@ export default class AbstractRemoteCommand extends AbstractCommand { /** * @function module:cli.AbstractRemoteCommand#getValidateApi * @description generates a validate API client. - * * @param {object} options - map of options set on command line - * * @returns {object} The reference to the validate client. 
*/ async getValidateApi(options) { @@ -92,9 +82,7 @@ export default class AbstractRemoteCommand extends AbstractCommand { /** * @function module:cli.AbstractRemoteCommand#getPluginApi * @description generates a plugin API client. - * * @param {object} options - map of options set on command line - * * @returns {object} The reference to the plugin client. */ async getPluginApi(options) { @@ -103,19 +91,25 @@ export default class AbstractRemoteCommand extends AbstractCommand { } - async checkStatusPostRun(pipelineId) { - const pipelineApi = await this.getPipelineApi(); - const response = await pipelineApi.getPipelineStatus({pipelineId: pipelineId}); - const status = response.obj.status; + async checkStatusPostRun(pipelineId, remote) { + let status; + if (remote) { + const pipelineApi = await this.getPipelineApi(); + const response = await pipelineApi.getPipelineStatus({pipelineId: pipelineId, chainedStatus: true}); + status = response.obj.status; + } else { + status = this.getPipelineService().getPipeline(pipelineId).getStatus(true); + } + switch (status) { - case 'paused': - log.info('Step(s) completed successfully.'); + case Pipeline.STATUS.paused: + Logger.info('Step(s) completed successfully.'); break; - case 'successful': - log.info('Pipeline completed successfully.'); + case Pipeline.STATUS.successful: + Logger.info('Pipeline completed successfully.'); break; - case 'failed': - throw new ExecutionError('Pipeline has failed.'); + case Pipeline.STATUS.failed: + throw new ExecutionError(`Pipeline '${pipelineId}' failed.`); default: throw new ExecutionError(`Pipeline is in an unexpected state: ${status}`); } @@ -124,15 +118,12 @@ export default class AbstractRemoteCommand extends AbstractCommand { /** * @function module:cli.AbstractRemoteCommand#logStreamingFetch * @description generates a compatible axios request for swagger-client so we can stream the logs. - * * @returns {object} A swagger-client request object. */ logStreamingFetch() { return { userFetch: async (url, req) => { - const containerLogger = new ContainerLogger(); let axiosResponse; - try { axiosResponse = await axios({ ...req, @@ -179,16 +170,16 @@ export default class AbstractRemoteCommand extends AbstractCommand { // demux the two log streams if (obj.containerNames) { // container log - containerLogger.createClientStream().write(obj); + Logger.getPipelineLogger().clientContainerLogger(obj); } else { // cix application log obj.message = `Server: ${obj.message}`; - log.log(obj); + Logger.log(obj); } } } } catch (err) { - log.error(`Error parsing log stream: ${err}`); + Logger.error(`Error parsing log stream: ${err}`); } // remove the data from the response, could be big... diff --git a/src/cli/commands/Describe.js b/src/cli/commands/Describe.js index b091a1b5..c2874607 100644 --- a/src/cli/commands/Describe.js +++ b/src/cli/commands/Describe.js @@ -1,18 +1,16 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ +import {CIXError, Logger} from '../../common/index.js'; import AbstractRemoteCommand from './AbstractRemoteCommand.js'; -import {CIXError} from '../../common/index.js'; import fs from 'fs'; -import log from 'winston'; export default class Describe extends AbstractRemoteCommand { /** * @class - * * @description Describe Command. 
*/ constructor() { @@ -22,9 +20,7 @@ export default class Describe extends AbstractRemoteCommand { /** * @function module:cli.Describe#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerOptions(program) { @@ -39,9 +35,7 @@ export default class Describe extends AbstractRemoteCommand { /** * @function module:cli.Describe#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -51,7 +45,6 @@ export default class Describe extends AbstractRemoteCommand { /** * @function module:cli.Describe#writeSequenceToFile * @description Writes the step sequence as JSON to a file. - * * @param {object} path - A path the the file to save. * @param {object} sequence - The JSON content to write. */ @@ -62,9 +55,7 @@ export default class Describe extends AbstractRemoteCommand { /** * @function module:cli.Describe#action * @description Runs the Exec sub command. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ async action(options) { @@ -93,7 +84,7 @@ export default class Describe extends AbstractRemoteCommand { } else if (options.stderr) { console.error(sequenceString); } else if (options.file) { - log.info(`Writing description to '${options.file}'`); + Logger.info(`Writing description to '${options.file}'`); try { this.writeSequenceToFile(options.file, sequenceString); } catch (error) { @@ -106,7 +97,7 @@ export default class Describe extends AbstractRemoteCommand { throw error; } } else { - log.info(`Sequence: \n${sequenceString}`); + Logger.info(`Sequence: \n${sequenceString}`); } } } diff --git a/src/cli/commands/Describe.test.js b/src/cli/commands/Describe.test.js index 72b2e111..18df344e 100644 --- a/src/cli/commands/Describe.test.js +++ b/src/cli/commands/Describe.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/cli/commands/Exec.js b/src/cli/commands/Exec.js index 7098956c..0a66219a 100644 --- a/src/cli/commands/Exec.js +++ b/src/cli/commands/Exec.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -9,7 +9,6 @@ import Load from './Load.js'; export default class Exec extends Load { /** * @class - * * @description Exec command. */ constructor() { @@ -19,24 +18,21 @@ export default class Exec extends Load { /** * @function module:cli.Exec#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). 
*/ registerOptions(program) { program = super.registerOptions(program); program.option('--remote', 'CIX will execute against a remote CIX Server.'); program.option('--non-blocking', 'Disables the blocking wait until a remote execution is complete.'); - return program.option('--no-remote-logs', 'Disables streaming logs from Server.', false); + // options starting with --no are inverted by commander, thus the default here is really false + return program.option('--no-remote-logs', 'Disables streaming logs from Server.', true); } /** * @function module:cli.Exec#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -46,9 +42,7 @@ export default class Exec extends Load { /** * @function module:cli.Install#action * @description Runs the Exec sub command. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ async action(options) { @@ -77,10 +71,11 @@ export default class Exec extends Load { await pipelineApi.startPipeline(postBody); } if (postBody['blocking']) { - await this.checkStatusPostRun(pipelineId); + await this.checkStatusPostRun(pipelineId, true); } } else { await this.getPipelineService().startPipeline(pipelineId); + await this.checkStatusPostRun(pipelineId, false); } } } diff --git a/src/cli/commands/Exec.test.js b/src/cli/commands/Exec.test.js index e3f1584f..ad338648 100644 --- a/src/cli/commands/Exec.test.js +++ b/src/cli/commands/Exec.test.js @@ -1,11 +1,12 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global beforeEach, describe, expect */ import Exec from './Exec.js'; +import {ExecutionError} from '../../common/index.js'; describe('Exec.action', () => { let exec; @@ -66,13 +67,41 @@ describe('Exec.action', () => { const addPipeline = jest.fn().mockImplementation(() => { return {id: 'PIPELINE_ID'}; }); + const getPipeline = jest.fn().mockImplementation(() => { + return {getStatus: function() { + return 'successful'; + }}; + }); const startPipeline = jest.fn().mockImplementation(() => {}); jest.spyOn(exec, 'getPipelineService').mockImplementation(() => { - return {addPipeline: addPipeline, startPipeline: startPipeline}; + return {addPipeline: addPipeline, getPipeline: getPipeline, startPipeline: startPipeline}; }); await exec.action({}); + expect(startPipeline).toHaveBeenCalledWith('PIPELINE_ID'); }); + + test('when executing a local pipeline, and it fails, throw exception.', async () => { + jest.spyOn(exec, 'generateListOfPipelines').mockImplementation(() => [{yaml: 'test.yaml'}]); + const addPipeline = jest.fn().mockImplementation(() => { + return {id: 'PIPELINE_ID'}; + }); + const getPipeline = jest.fn().mockImplementation(() => { + return {getStatus: function() { + return 'failed'; + }}; + }); + const startPipeline = jest.fn().mockImplementation(() => {}); + jest.spyOn(exec, 'getPipelineService').mockImplementation(() => { + return {addPipeline: addPipeline, getPipeline: getPipeline, startPipeline: startPipeline}; + }); + expect.assertions(1); + try { + await exec.action({}); + } catch (e) { + expect(e).toBeInstanceOf(ExecutionError); + } + }); }); diff --git a/src/cli/commands/Install.js b/src/cli/commands/Install.js index 8d8cfe86..6e2d52fa 100644 --- a/src/cli/commands/Install.js +++ b/src/cli/commands/Install.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -16,7 +16,6 @@ const {__dirname} = dirname; export default class Install extends AbstractCommand { /** * @class - * * @description Install Command. */ constructor() { @@ -26,9 +25,7 @@ export default class Install extends AbstractCommand { /** * @function module:cli.Install#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerOptions(program) { @@ -39,9 +36,7 @@ export default class Install extends AbstractCommand { /** * @function module:cli.Install#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -51,7 +46,6 @@ export default class Install extends AbstractCommand { /** * @function module:cli.Describe#readScript * @description Reads the install script off the disk. - * * @returns {string} The contents of the install script. */ readScript() { @@ -61,9 +55,7 @@ export default class Install extends AbstractCommand { /** * @function module:cli.Install#action * @description Runs the install sub command. 
- * * @param {object} options - map of options set on command line - * * @returns {undefined} */ action(options) { @@ -79,10 +71,10 @@ export default class Install extends AbstractCommand { ' echo "Installation failed, try again with \'sudo $0\'."', ' exit 1', 'fi', - `chmod +x ${location}/cix`, + `chmod a+rx ${location}/cix`, `rm -f ${location}/cix2`, `ln -s cix ${location}/cix2`, - `echo 'Installed cix at ${location}/cix, start by cix -h.'`, + `echo 'Installed cix at ${location}/cix, start with cix -h.'`, ], '\n')); } } diff --git a/src/cli/commands/Install.test.js b/src/cli/commands/Install.test.js index dc2f5fc0..9b46997a 100644 --- a/src/cli/commands/Install.test.js +++ b/src/cli/commands/Install.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/cli/commands/Kill.js b/src/cli/commands/Kill.js index f8277aee..eddd3441 100644 --- a/src/cli/commands/Kill.js +++ b/src/cli/commands/Kill.js @@ -1,16 +1,15 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ import AbstractRemoteCommand from './AbstractRemoteCommand.js'; -import log from 'winston'; +import {Logger} from '../../common/index.js'; export default class Kill extends AbstractRemoteCommand { /** * @class - * * @description Kill Command. */ constructor() { @@ -20,9 +19,7 @@ export default class Kill extends AbstractRemoteCommand { /** * @function module:cli.Kill#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -32,12 +29,11 @@ export default class Kill extends AbstractRemoteCommand { /** * @function module:cli.Kill#action * @description Runs the Exec sub command. - * * @param {object} options - map of options set on command line */ async action(options) { const pipelineApi = await this.getPipelineApi(options); - log.info('Kill: killing pipeline....'); + Logger.info('Kill: killing pipeline....'); await pipelineApi.killPipeline(); } } diff --git a/src/cli/commands/Kill.test.js b/src/cli/commands/Kill.test.js index 3758a429..ea1a1fb0 100644 --- a/src/cli/commands/Kill.test.js +++ b/src/cli/commands/Kill.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/cli/commands/Load.js b/src/cli/commands/Load.js index ebaed215..955e391d 100644 --- a/src/cli/commands/Load.js +++ b/src/cli/commands/Load.js @@ -1,20 +1,18 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {CIXError, CLIError, NodeProvider, PluginError, _} from '../../common/index.js'; +import {CIXError, CLIError, Logger, NodeProvider, PluginError, _} from '../../common/index.js'; import AbstractRemoteCommand from './AbstractRemoteCommand.js'; -import log from 'winston'; +import {program as commander} from 'commander'; import readlineSync from 'readline-sync'; export default class Load extends AbstractRemoteCommand { /** * @class - * * @description Load Command. - * * @param {string} name - name of the concrete class. */ constructor(name) { @@ -26,9 +24,7 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.Load#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerOptions(program) { @@ -36,11 +32,17 @@ export default class Load extends AbstractRemoteCommand { program.option('-c, --configfile ', 'Specify a cix configuration file to load.'); program.option('-e, --env ', 'Specify environment variable. Mapping should be in the form of KEY ' + '(value taken from environment) or KEY=VALUE.', this.collectKeyValue); + program.option('-l, --logging ', 'Specify container output logging mode: console, file (single file), or files (separate files for each step)).'); + program.option('-L, --logname ', 'Specify a custom name for the CIX application log file.'); + program.option('-p, --logging-path ', 'Path where logs created by the \'files\' logging mode will be stored. (default: "logs")'); + program.addOption( + new commander.Option('-P, --pull-policy ', 'Overrides the pull-policy for Docker pulls, when it is not specified in the pipeline definition.') + .choices(['Always', 'Default', 'IfNotPresent', 'Never']) + .default('Default'), + ); program.option('-s, --secret ', 'Specify a secret (insecure). Mapping should be in the form of KEY ' + '(secret taken from environment) or KEY=SECRET.', this.collectKeyValueAndWarn.bind(this)); program.option('-w, --workspace ', 'Specify the workspace path. Default path is the current working directory.'); - program.option('-l, --logging ', 'Specify container output logging mode: console, or files (separate files).'); - program.option('-p, --logging-path ', 'Path where logs created by the \'files\' logging mode will be stored. (default: "logs")'); program.option('-y, --yaml ', 'Path to pipeline definition YAML file. May be repeated.', this.collectValues.bind(this)); program.option('--plugin ', 'Path to plugin YAML file. May be repeated.', this.collectValues.bind(this)); program.option('--setup ', 'Path to setup pipeline YAML file.'); @@ -54,9 +56,7 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.Load#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -66,9 +66,7 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.Load#validateOptions * @description Check command line parameters. 
- * * @param {object} options - map of options set on command line - * * @returns {undefined} */ validateOptions(options) { @@ -98,9 +96,7 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.Load#action * @description Runs the Load sub command. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ async action(options) { @@ -118,7 +114,6 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.Load#getStdin * @description Mockable method which returns the stdin stream. - * * @returns {object} a readable stream */ getStdin() { @@ -128,9 +123,7 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.Load#setInput * @description Provides a way to override input from the CLI. - * * @param {string} input - Data representing input to the program (stdin). - * * @returns {undefined} */ setInput(input) { @@ -140,9 +133,7 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.AbstractCommand#acceptInput * @description Sets up the command to accept input from stdin. - * * @param {Function} onEnd - Called when all input has been collected - * * @returns {undefined} */ acceptInput(onEnd) { @@ -162,9 +153,7 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.Load#actionCallback * @description Runs the remainder of the Load sub command, as a callback when input is required. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ async actionCallback(options) { @@ -173,17 +162,19 @@ export default class Load extends AbstractRemoteCommand { const plugins = await this.loadPlugin(options); const pipelineList = this.generateListOfPipelines(options); - log.silly(`Adding new pipelines: ${JSON.stringify(pipelineList)}`); + Logger.silly(`Adding new pipelines: ${JSON.stringify(pipelineList)}`); // Add and link all chained pipelines for (let i = 0; i < pipelineList.length; i++) { - log.silly(`Adding Pipeline ${i + 1} of ${pipelineList.length}: ${JSON.stringify(pipelineList[i])}`); + Logger.silly(`Adding Pipeline ${i + 1} of ${pipelineList.length}: ${JSON.stringify(pipelineList[i])}`); const pipelineSpec = {}; if (!_.isEmpty(plugins)) { pipelineSpec.plugins = plugins; } + pipelineSpec.defaultPullPolicy = options.pullPolicy; + pipelineSpec.yamlPath = pipelineList[i].yaml; if (!_.isNil(pipelineList[i].type) && pipelineList[i].type == 'teardown') { pipelineSpec.type = 'teardown'; @@ -212,9 +203,10 @@ export default class Load extends AbstractRemoteCommand { pipelineSpec.environment = []; } pipelineSpec.environment.push({name: 'CIX_HOSTNAME', value: options.host, type: 'internal'}); + pipelineSpec.environment.push({name: 'CIX_SERVER_PORT', value: options.port, type: 'internal'}); response = await this.getPipelineService().addPipeline(pipelineSpec); } - log.silly(`Got back Pipeline ${JSON.stringify(response)}`); + Logger.silly(`Got back Pipeline ${JSON.stringify(response)}`); if (_.isNil(response) || _.isNil(response.id)) { throw new CIXError('Response invalid'); @@ -235,7 +227,7 @@ export default class Load extends AbstractRemoteCommand { } if (this.name === 'load') { - log.info('INFO: Pipeline(s) loaded, use \'cix resume\' to continue.'); + Logger.info('INFO: Pipeline(s) loaded, use \'cix resume\' to continue.'); } // return the first Pipeline ID, so caller can kick off the pipeline chain @@ -245,23 +237,21 @@ export default class Load extends AbstractRemoteCommand { /** 
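// Editor's note: actionCallback above assembles one pipelineSpec per chained YAML before
// calling addPipeline. A hedged sketch of the shape implied by this hunk -- the field
// names are taken from the diff, the values are purely illustrative.
const pipelineSpec = {
  yamlPath: 'ci/pipeline.yaml',
  defaultPullPolicy: 'Default',            // new: carried over from --pull-policy
  plugins: [],                             // only set when --plugin was supplied
  type: 'teardown',                        // only for teardown pipelines
  environment: [
    {name: 'CIX_HOSTNAME', value: 'localhost', type: 'internal'},
    {name: 'CIX_SERVER_PORT', value: '10030', type: 'internal'},   // new in this change
  ],
};
// response = await this.getPipelineService().addPipeline(pipelineSpec);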
* @function module:cli.Load#loadPlugin * @description Loads any plugins defined in options. - * * @param {object} options - map of options set on command line - * * @returns {Array} list of pipeline ids. */ async loadPlugin(options) { const ids = []; if (options.plugin) { - log.info(`Plugins to load: ${options.plugin}`); + Logger.info(`Plugins to load: ${options.plugin}`); let pluginApi; if (this.name === 'load' || options.remote) { pluginApi = await this.getPluginApi(options); for (const plugin in options.plugin) { - log.silly(`Adding plugin ${options.plugin[plugin]}`); + Logger.silly(`Adding plugin ${options.plugin[plugin]}`); const response = await pluginApi.addPlugin({'pluginSpec': {'pluginPath': options.plugin[plugin]}}); if (response.body && response.body.id) { - log.silly(`Created plugin ${response.body.id}`); + Logger.silly(`Created plugin ${response.body.id}`); ids.push(response.body); } else { throw new PluginError('Did not get back ID from server.'); @@ -270,7 +260,7 @@ export default class Load extends AbstractRemoteCommand { } else { pluginApi = this.getPluginService(); for (const plugin in options.plugin) { - log.silly(`Adding plugin ${options.plugin[plugin]}`); + Logger.silly(`Adding plugin ${options.plugin[plugin]}`); ids.push(await pluginApi.addPlugin({'pluginPath': options.plugin[plugin]})); } } @@ -281,9 +271,7 @@ export default class Load extends AbstractRemoteCommand { /** * @function module:cli.Load#handleSecrets * @description Check options and intake secrets - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ handleSecrets(options) { diff --git a/src/cli/commands/Load.test.js b/src/cli/commands/Load.test.js index cf86103f..724edcb8 100644 --- a/src/cli/commands/Load.test.js +++ b/src/cli/commands/Load.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/cli/commands/Pipelines.js b/src/cli/commands/Pipelines.js index 3a76257c..1083470e 100644 --- a/src/cli/commands/Pipelines.js +++ b/src/cli/commands/Pipelines.js @@ -1,17 +1,15 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {CLIError, ServerError} from '../../common/index.js'; +import {CLIError, Logger, ServerError} from '../../common/index.js'; import AbstractRemoteCommand from './AbstractRemoteCommand.js'; -import log from 'winston'; export default class Pipelines extends AbstractRemoteCommand { /** * @class - * * @description Pipelines Command. */ constructor() { @@ -21,9 +19,7 @@ export default class Pipelines extends AbstractRemoteCommand { /** * @function module:cli.Pipelines#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerOptions(program) { @@ -38,9 +34,7 @@ export default class Pipelines extends AbstractRemoteCommand { /** * @function module:cli.Pipelines#registerDescription * @description Registers the command's description with Commander. 
- * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -50,9 +44,7 @@ export default class Pipelines extends AbstractRemoteCommand { /** * @function module:cli.Pipelines#action * @description Runs the Exec sub command. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ async action(options) { @@ -65,7 +57,7 @@ export default class Pipelines extends AbstractRemoteCommand { if (options.getAlias) { const pipeline = await pipelineApi.getPipelineForAlias({pipelineAlias: options.getAlias}); - log.info(`${options.getAlias} -> ${pipeline.obj.id}`); + Logger.info(`${options.getAlias} -> ${pipeline.obj.id}`); } else if (options.setAlias) { if (options.pipelineId) { await pipelineApi.setAliasForPipeline({pipelineAlias: options.setAlias, pipelineId: options.pipelineId}); @@ -81,28 +73,28 @@ export default class Pipelines extends AbstractRemoteCommand { if (options.pipelineAlias) { alias = options.pipelineAlias; } else { - log.warn('--pipeline-id not supplied using "latest" alias.'); + Logger.warn('--pipeline-id not supplied using "latest" alias.'); alias = 'latest'; } let pipeline; try { pipeline = await pipelineApi.getPipelineForAlias({pipelineAlias: alias}); } catch (error) { - log.debug(error); + Logger.debug(error); throw new ServerError('No pipelines exist on the server.'); } status = await pipelineApi.getPipelineStatus({pipelineId: pipeline.obj.id}); } - log.info(`Pipeline Status: ${JSON.stringify(status.obj.status)}`); + Logger.info(`Pipeline Status: ${JSON.stringify(status.obj.status)}`); } else { // List pipelines with aliases shown const list = await pipelineApi.getPipelineList(); - log.info('List of pipelines:'); + Logger.info('List of pipelines:'); for (const pipelineId of list.obj) { - log.info(` - ${pipelineId}`); + Logger.info(` - ${pipelineId}`); const aliases = await pipelineApi.getAliasesForPipeline({pipelineId: pipelineId}); for (const alias of aliases.obj) { - log.info(` - alias: ${alias}`); + Logger.info(` - alias: ${alias}`); } } } diff --git a/src/cli/commands/Pipelines.test.js b/src/cli/commands/Pipelines.test.js index 80f6adaa..07a53556 100644 --- a/src/cli/commands/Pipelines.test.js +++ b/src/cli/commands/Pipelines.test.js @@ -1,13 +1,12 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
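// Editor's note: condensed sketch of the --status fallback shown above. `pipelineApi`
// and `ServerError` are stand-ins for the real client and error class; the control flow
// (default to the "latest" alias, wrap lookup failures) mirrors the hunk.
class ServerError extends Error {}

async function resolveStatus(pipelineApi, options) {
  const alias = options.pipelineAlias ?? 'latest';   // the real code also warns on fallback
  let pipeline;
  try {
    pipeline = await pipelineApi.getPipelineForAlias({pipelineAlias: alias});
  } catch (error) {
    throw new ServerError('No pipelines exist on the server.');
  }
  return pipelineApi.getPipelineStatus({pipelineId: pipeline.obj.id});
}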
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global beforeEach, describe, expect */ -import {CLIError} from '../../common/index.js'; +import {CLIError, Logger} from '../../common/index.js'; import Pipelines from './Pipelines.js'; -import log from 'winston'; describe('Pipelines.action', () => { let pipelines; @@ -43,7 +42,7 @@ describe('Pipelines.action', () => { }); test('getting --status without pipelineID will warn and get latest', async () => { - const logWarn = jest.spyOn(log, 'warn').mockImplementation(() => {}); + const logWarn = jest.spyOn(Logger, 'warn').mockImplementation(() => {}); jest.spyOn(pipelines, 'getPipelineApi').mockImplementation(() => { return {getPipelineForAlias: getPipelineForAlias, getPipelineStatus: getPipelineStatus}; }); @@ -52,7 +51,7 @@ describe('Pipelines.action', () => { }); test('getting --status with pipelineID will print status ', async () => { - const logInfo = jest.spyOn(log, 'info').mockImplementation(() => {}); + const logInfo = jest.spyOn(Logger, 'info').mockImplementation(() => {}); jest.spyOn(pipelines, 'getPipelineApi').mockImplementation(() => { return {getPipelineForAlias: getPipelineForAlias, getPipelineStatus: getPipelineStatus}; }); @@ -80,7 +79,7 @@ describe('Pipelines.action', () => { }); test('getting alias test', async () => { - const logInfo = jest.spyOn(log, 'info').mockImplementation(() => {}); + const logInfo = jest.spyOn(Logger, 'info').mockImplementation(() => {}); jest.spyOn(pipelines, 'getPipelineApi').mockImplementation(() => { return {getPipelineForAlias: getPipelineForAlias}; }); @@ -90,7 +89,7 @@ describe('Pipelines.action', () => { }); test('list pipelines with aliases', async () => { - const logInfo = jest.spyOn(log, 'info').mockImplementation(() => {}); + const logInfo = jest.spyOn(Logger, 'info').mockImplementation(() => {}); const getAliasesForPipeline = jest.fn().mockImplementation(() => { return { obj: ['latest', 'test'], diff --git a/src/cli/commands/Resume.js b/src/cli/commands/Resume.js index c1547b49..961b5e3d 100644 --- a/src/cli/commands/Resume.js +++ b/src/cli/commands/Resume.js @@ -1,17 +1,15 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ +import {CLIError, Logger} from '../../common/index.js'; import AbstractRemoteCommand from './AbstractRemoteCommand.js'; -import {CLIError} from '../../common/index.js'; -import log from 'winston'; export default class Resume extends AbstractRemoteCommand { /** * @class - * * @description Resume Command. */ constructor() { @@ -21,9 +19,7 @@ export default class Resume extends AbstractRemoteCommand { /** * @function module:cli.Resume#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). 
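// Editor's note: because Logger is now a singleton object rather than the winston module,
// the tests above spy on its methods directly. Minimal jest pattern; the import path
// mirrors the hunks and the assertion is illustrative, not copied from the suite.
/* global jest, test, expect */
import {Logger} from '../../common/index.js';

test('warns when --pipeline-id is missing', async () => {
  const logWarn = jest.spyOn(Logger, 'warn').mockImplementation(() => {});
  // ...exercise the command under test...
  expect(logWarn).toHaveBeenCalledWith(expect.stringContaining('--pipeline-id'));
  logWarn.mockRestore();
});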
*/ registerOptions(program) { @@ -32,16 +28,15 @@ export default class Resume extends AbstractRemoteCommand { program.option('--to ', 'Run to step name, then pause before the next step.'); program.option('--next', 'Run one step, then pause.'); program.option('--non-blocking', 'Disables the blocking wait until a remote execution is complete.'); - program.option('--no-remote-logs', 'Disables streaming logs from Server.', false); + // options starting with --no are inverted by commander, thus the default here is really false + program.option('--no-remote-logs', 'Disables streaming logs from Server.', true); return program = super.registerOptions(program); } /** * @function module:cli.Resume#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -51,9 +46,7 @@ export default class Resume extends AbstractRemoteCommand { /** * @function module:cli.Resume#action * @description Runs the Exec sub command. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ async action(options) { @@ -78,13 +71,13 @@ export default class Resume extends AbstractRemoteCommand { const resp = await pipelineApi.getPipelineForAlias({pipelineAlias: options.pipelineAlias}); postBody['pipelineId'] = resp.body.id; } else { - log.info('No pipeline ID or alias provided, using "latest" alias.'); + Logger.info('No pipeline ID or alias provided, using "latest" alias.'); const resp = await pipelineApi.getPipelineForAlias({pipelineAlias: 'latest'}); postBody['pipelineId'] = resp.body.id; } if (options.next) { - log.info('Resume: running pipeline to next step.'); + Logger.info('Resume: running pipeline to next step.'); if (options.remoteLogs) { await pipelineApi.nextPipelineStep(postBody, this.logStreamingFetch()); } else { @@ -92,7 +85,7 @@ export default class Resume extends AbstractRemoteCommand { await pipelineApi.nextPipelineStep(postBody); } } else if (options.to) { - log.info(`Resume: running pipeline to (and including) step named '${options.to}'.`); + Logger.info(`Resume: running pipeline to (and including) step named '${options.to}'.`); postBody['step'] = options.to; if (options.remoteLogs) { await pipelineApi.resumePipeline(postBody, this.logStreamingFetch()); @@ -101,7 +94,7 @@ export default class Resume extends AbstractRemoteCommand { await pipelineApi.resumePipeline(postBody); } } else { - log.info('Resume: running pipeline to completion.'); + Logger.info('Resume: running pipeline to completion.'); if (options.remoteLogs) { await pipelineApi.resumePipeline(postBody, this.logStreamingFetch()); } else { @@ -110,7 +103,7 @@ export default class Resume extends AbstractRemoteCommand { } } if (postBody['blocking']) { - await this.checkStatusPostRun(postBody['pipelineId']); + await this.checkStatusPostRun(postBody['pipelineId'], true); } } } diff --git a/src/cli/commands/Resume.test.js b/src/cli/commands/Resume.test.js index a46e56a1..40518534 100644 --- a/src/cli/commands/Resume.test.js +++ b/src/cli/commands/Resume.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
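// Editor's note: sketch of the negated-option behaviour commander applies to
// `--no-remote-logs` above: the parsed key is `remoteLogs`, it is true unless the flag
// is passed, and passing the flag flips it to false.
import {Command} from 'commander';

const program = new Command();
program.option('--no-remote-logs', 'Disables streaming logs from Server.');

program.parse(['node', 'cix', '--no-remote-logs']);
console.log(program.opts().remoteLogs); // false; omit the flag and it defaults to true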
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/cli/commands/Server.js b/src/cli/commands/Server.js index 046f5fb4..c22e7b64 100644 --- a/src/cli/commands/Server.js +++ b/src/cli/commands/Server.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -9,7 +9,6 @@ import AbstractCommand from './AbstractCommand.js'; export default class Server extends AbstractCommand { /** * @class - * * @description Server Command. */ constructor() { @@ -19,27 +18,24 @@ export default class Server extends AbstractCommand { /** * @function module:cli.Server#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerOptions(program) { program.option('-c, --configfile ', 'Specify a cix configuration file to load.'); program.option('-d, --detach', 'Shell command only: detach the server container from the console.'); - program.option('-w, --workspace ', 'Specify the workspace path. Default path is the current working directory.'); - program.option('--port ', 'CIX Server Port', '10030'); - program.option('-l, --logging ', 'Specify container output logging mode: console, or files (separate files)', 'files'); + program.option('-l, --logging ', 'Specify container output logging mode: console, file (single file), or files (separate files for each step))'); + program.option('-L, --logname ', 'Specify a custom name for the CIX application log file.'); program.option('-p, --logging-path ', 'Path where logs created by the \'files\' logging mode will be stored. (default: "logs")'); + program.option('--port ', 'CIX Server Port', '10030'); + program.option('-w, --workspace ', 'Specify the workspace path. Default path is the current working directory.'); return super.registerOptions(program); } /** * @function module:cli.Server#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -49,9 +45,7 @@ export default class Server extends AbstractCommand { /** * @function module:cli.Server#action * @description Starts the Server. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ async action(options) { diff --git a/src/cli/commands/Server.test.js b/src/cli/commands/Server.test.js index 7b859447..0cc8868d 100644 --- a/src/cli/commands/Server.test.js +++ b/src/cli/commands/Server.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/cli/commands/Update.js b/src/cli/commands/Update.js index 41f7deec..29684527 100644 --- a/src/cli/commands/Update.js +++ b/src/cli/commands/Update.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. 
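// Editor's note: the server options above default `--port` to the string '10030';
// commander keeps option values as strings unless a coercion callback is supplied.
// Hedged sketch of coercing it at the point of use (the parseInt call is illustrative,
// not necessarily how CIX consumes the value).
import {Command} from 'commander';

const program = new Command();
program.option('--port <port>', 'CIX Server Port', '10030');
program.parse(['node', 'cix']);

const port = parseInt(program.opts().port, 10); // 10030 as a number
console.log(port);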
* All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -10,7 +10,6 @@ import {CLIError} from '../../common/index.js'; export default class Update extends AbstractCommand { /** * @class - * * @description Update Command. */ constructor() { @@ -20,9 +19,7 @@ export default class Update extends AbstractCommand { /** * @function module:cli.Update#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -32,7 +29,6 @@ export default class Update extends AbstractCommand { /** * @function module:cli.Update#action * @description Runs the install sub command. - * * @returns {undefined} */ action() { diff --git a/src/cli/commands/Update.test.js b/src/cli/commands/Update.test.js index 0cbfa2cd..52ae12ad 100644 --- a/src/cli/commands/Update.test.js +++ b/src/cli/commands/Update.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/cli/commands/Validate.js b/src/cli/commands/Validate.js index 6cd4af34..19ea7b53 100644 --- a/src/cli/commands/Validate.js +++ b/src/cli/commands/Validate.js @@ -1,16 +1,15 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ import AbstractRemoteCommand from './AbstractRemoteCommand.js'; -import log from 'winston'; +import {Logger} from '../../common/index.js'; export default class Validate extends AbstractRemoteCommand { /** * @class - * * @description Validate Command. */ constructor() { @@ -20,9 +19,7 @@ export default class Validate extends AbstractRemoteCommand { /** * @function module:cli.Validate#registerOptions * @description Registers the command's options with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerOptions(program) { @@ -39,9 +36,7 @@ export default class Validate extends AbstractRemoteCommand { /** * @function module:cli.Validate#registerDescription * @description Registers the command's description with Commander. - * * @param {object} program - A reference to the Commander program. - * * @returns {object} The reference to the Commander program (used in builder pattern). */ registerDescription(program) { @@ -51,20 +46,17 @@ export default class Validate extends AbstractRemoteCommand { /** * @function module:cli.Validate#action * @description Runs the install sub command. 
- * * @param {object} options - map of options set on command line - * * @returns {undefined} */ async action(options) { this.validateOptions(options); - this.configureLogger(options); const pipelineList = this.generateListOfPipelines(options); - log.silly(`Validating pipelines: ${JSON.stringify(pipelineList)}`); + Logger.silly(`Validating pipelines: ${JSON.stringify(pipelineList)}`); for (let i = 0; i < pipelineList.length; i++) { - log.silly(`Validating Pipeline ${i + 1} of ${pipelineList.length}: ${JSON.stringify(pipelineList[i])}`); + Logger.silly(`Validating Pipeline ${i + 1} of ${pipelineList.length}: ${JSON.stringify(pipelineList[i])}`); const pipelineSpec = {}; pipelineSpec.yamlPath = pipelineList[i].yaml; diff --git a/src/cli/commands/Validate.test.js b/src/cli/commands/Validate.test.js index fa8af565..9babc473 100644 --- a/src/cli/commands/Validate.test.js +++ b/src/cli/commands/Validate.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -15,7 +15,6 @@ describe('Validate.action', () => { validate = new Validate(); // we test these in the abstract class... jest.spyOn(validate, 'validateOptions').mockImplementation(() => {}); - jest.spyOn(validate, 'configureLogger').mockImplementation(() => {}); jest.spyOn(validate, 'generateEnvironmentList').mockImplementation(() => {}); }); diff --git a/src/cli/commands/index.js b/src/cli/commands/index.js index 2d885b23..374fc7ec 100644 --- a/src/cli/commands/index.js +++ b/src/cli/commands/index.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/cli/config/ConfigImporter.js b/src/cli/config/ConfigImporter.js index 5882f208..499ea6a7 100644 --- a/src/cli/config/ConfigImporter.js +++ b/src/cli/config/ConfigImporter.js @@ -1,12 +1,11 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {NodeProvider, _} from '../../common/index.js'; +import {Logger, NodeProvider, _} from '../../common/index.js'; import Path from 'path'; -import log from 'winston'; import yaml from 'js-yaml'; export default class ConfigImporter { @@ -15,9 +14,7 @@ export default class ConfigImporter { /** * @function module:cli.config.ConfigImporter#load * @description Loads the content of one or all config files into memory. 
- * * @param {string} [path] - path to .cixconfig file - * * @returns {undefined} */ load(path) { @@ -34,7 +31,7 @@ export default class ConfigImporter { const pathToLoad = Path.resolve(`${Path.dirname(filename)}/${Path.basename(filename)}`); const loadedConfigs = loadConfigFile(pathToLoad); if (loadedConfigs) { - log.debug(`Successfully loaded CIX user configuration from ${pathToLoad}`); + Logger.debug(`Successfully loaded CIX user configuration from ${pathToLoad}`); configs = _.concat(configs, loadedConfigs); } }); @@ -45,9 +42,7 @@ export default class ConfigImporter { /** * @function module:cli.config.ConfigImporter#updateOptions * @description Modifies the options object to use imported configs where applicable. - * * @param {object} options - map of options set on command line - * * @returns {undefined} */ updateOptions(options) { @@ -89,9 +84,7 @@ export default class ConfigImporter { /** * @function module:cli.config.ConfigImporter#loadConfigFile * @description Loads options from the given path. - * * @param {object} path - location of config file to load - * * @returns {object} representation of the loaded configurations */ export function loadConfigFile(path) { @@ -110,7 +103,7 @@ export function loadConfigFile(path) { try { return JSON.parse(file); } catch (err) { - log.warn(`Could not parse ${path} as either YAML or JSON, skipping.`); + Logger.warn(`Could not parse ${path} as either YAML or JSON, skipping.`); } } } diff --git a/src/cli/config/ConfigImporter.test.js b/src/cli/config/ConfigImporter.test.js index 8b9d8081..795a1c8c 100644 --- a/src/cli/config/ConfigImporter.test.js +++ b/src/cli/config/ConfigImporter.test.js @@ -1,13 +1,12 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global jest, describe, beforeEach, expect */ import ConfigImporter, {loadConfigFile} from './ConfigImporter.js'; -import {NodeProvider} from '../../common/index.js'; -import log from 'winston'; +import {Logger, NodeProvider} from '../../common/index.js'; jest.autoMockOff(); @@ -140,7 +139,7 @@ describe('ConfigImporter.updateOptions', () => { describe('ConfigImporter.loadConfigFile', () => { test('invalid parsing should result in log warning', () => { - const warnMock = jest.spyOn(log, 'warn').mockImplementation(); + const warnMock = jest.spyOn(Logger, 'warn').mockImplementation(); const content = '{}{{}}}}}}'; jest.spyOn(NodeProvider, 'getFs').mockImplementation(() => { return {readFileSync: () => content}; diff --git a/src/cli/index.js b/src/cli/index.js index b2384b4a..cf7de4de 100755 --- a/src/cli/index.js +++ b/src/cli/index.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/common/ContainerLogger.js b/src/common/ContainerLogger.js deleted file mode 100644 index 62597c9b..00000000 --- a/src/common/ContainerLogger.js +++ /dev/null @@ -1,151 +0,0 @@ -/* -* Copyright (c) 2020, salesforce.com, inc. -* All rights reserved. 
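// Editor's note: loadConfigFile above tries YAML first and falls back to JSON, warning
// and skipping the file when neither parses. Self-contained sketch of that fallback
// (js-yaml API as used in the hunk; the sample config keys are illustrative only).
import yaml from 'js-yaml';

function parseConfig(text) {
  try {
    return yaml.load(text);              // YAML is a superset of JSON, so this usually wins
  } catch (ignored) {
    try {
      return JSON.parse(text);
    } catch (err) {
      console.warn('Could not parse config as either YAML or JSON, skipping.');
      return undefined;
    }
  }
}

console.log(parseConfig('logging: files\nlogging-path: logs'));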
-* SPDX-License-Identifier: BSD-3-Clause -* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause -*/ -import {NodeProvider, _} from './index.js'; -import chalk from 'chalk'; -import path from 'path'; -import stream from 'stream'; -import winston from 'winston'; - -export default class ContainerLogger { - constructor() { - if (!_.isNil(winston.fileLogging) && winston.fileLogging.enabled) { - this.fileLogging = winston.fileLogging; - } else { - this.fileLogging = {enabled: false}; - } - - // Default to the current directory if no path specified. - if (_.isNil(this.fileLogging.path)) { - this.fileLogging.path = path.resolve(); - } - - this.consoleLogger = winston.createLogger({ - level: (NodeProvider.getProcess().env.LOG_LEVEL || 'info').toLowerCase(), - format: winston.format.printf(this.colorizedConsoleFormatter), - transports: [ - new winston.transports.Console({silent: NodeProvider.getProcess().argv.includes('--silent')}), - ], - }); - } - - colorizedConsoleFormatter(info) { - const COLORS = [chalk.blueBright, chalk.greenBright, chalk.magentaBright, chalk.yellowBright, - chalk.cyanBright, chalk.green, chalk.magenta, chalk.yellow, chalk.cyan]; - const ERROR_COLOR = chalk.redBright; - const MAX_NAME_LENGTH = 20; - - const containerId = info.containerNames.containerId; - const containerName = info.containerNames.shortName; - - /** - * @function module:common/ContainerLogger#elideName - * - * @param {string} name - name to shorten - * - * @returns {string} shortened name - */ - function elideName(name) { - if (name.length > MAX_NAME_LENGTH) { - return name.slice(0, MAX_NAME_LENGTH - 1) + '~'; - } - return name; - } - - if (this.containerColor === undefined) { - this.containerCount = 0; - this.containerColor = []; - } - - if (!this.containerColor[containerId]) { - this.containerColor[containerId] = COLORS[this.containerCount++ % COLORS.length]; - } - - const colorFn = this.containerColor[containerId]; - const formattedName = elideName(containerName).padEnd(MAX_NAME_LENGTH, ' ') + ' |'; - const output = info.message.toString().split('\n'); - - // sometimes we get more than one line in a stream - for (let i = 0; i < output.length; i++) { - if (info.isErrorOutput) { - output[i] = `${colorFn(formattedName)} ${ERROR_COLOR(output[i])}`; - } else { - output[i] = `${colorFn(formattedName)} ${output[i]}`; - } - } - - return output.join('\n'); - } - - createServerOutputStream(pipeline, containerNames, isErrorOutput = false) { - let environment = null; - let logStream = null; - - if (!_.isNil(pipeline)) { - environment = pipeline.getEnvironment(); - logStream = pipeline.getLogStream(); - } - const transformStream = new DockerOutputStream(environment, containerNames, isErrorOutput); - - if (this.fileLogging.enabled) { - const containerId = containerNames.containerId; - const containerName = containerNames.qualifiedName; - - if (this.containerFileLogger === undefined) { - this.containerCount = 0; - this.containerFileLogger = {}; - } - - if (!this.containerFileLogger[containerId]) { - const filename = path.join(this.fileLogging.path, - `${(++this.containerCount).toString().padStart(2, '0')}-${containerName}.log`); - this.containerFileLogger[containerId] = winston.createLogger({ - level: 'info', - format: winston.format.printf((item) => `${item.message}`), - transports: [ - new winston.transports.File({filename: filename}), - ], - }); - } - transformStream.pipe(this.containerFileLogger[containerId]); - } else { - 
transformStream.pipe(this.consoleLogger); - } - if (!_.isNil(logStream)) { - transformStream.pipe(logStream); - } - return transformStream; - } - - createClientStream() { - return this.consoleLogger; - } -} - -class DockerOutputStream extends stream.Transform { - constructor(environment, containerNames, isErrorOutput) { - super({ - readableObjectMode: true, - writableObjectMode: true, - }); - this.environment = environment; - this.containerNames = containerNames; - this.isErrorOutput = isErrorOutput; - } - - _transform(chunk, _encoding, callback) { - const message = this.environment.redactSecrets(chunk.toString()); - - this.push({ - level: 'info', - message: Buffer.from(message.trimRight()), - containerNames: this.containerNames, - isErrorOutput: this.isErrorOutput, - }); - - callback(); - } -} diff --git a/src/common/ContainerLogger.test.js b/src/common/ContainerLogger.test.js deleted file mode 100644 index 18f8c10d..00000000 --- a/src/common/ContainerLogger.test.js +++ /dev/null @@ -1,103 +0,0 @@ -/* -* Copyright (c) 2020, salesforce.com, inc. -* All rights reserved. -* SPDX-License-Identifier: BSD-3-Clause -* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause -*/ -/* global describe, test, expect, beforeEach */ -import ContainerLogger from './ContainerLogger.js'; -import {_} from '../common/index.js'; -import stripAnsi from 'strip-ansi'; -import winston from 'winston'; - -describe('ContainerLogger colorizedConsoleFormatter tests', () => { - let containerLogger; - - beforeEach(() => { - containerLogger = new ContainerLogger(); - }); - - test('Console output should be after the pipe.', () => { - const message = {containerNames: {shortName: 'test'}, message: 'should be tabbed'}; - const output = containerLogger.colorizedConsoleFormatter(message); - expect(stripAnsi(output)).toEqual('test | should be tabbed'); - }); - - test('Long container names should be truncated.', () => { - const message = {containerNames: {shortName: 'TheQuickBrownFoxJumpsOverTheLazyDog'}, message: 'should be truncated'}; - const output = containerLogger.colorizedConsoleFormatter(message); - expect(stripAnsi(output)).toEqual('TheQuickBrownFoxJum~ | should be truncated'); - }); - - test('Multiline messages should each have the container name prepended.', () => { - const message = {containerNames: {shortName: 'multiline'}, message: 'first\nsecond'}; - const output = containerLogger.colorizedConsoleFormatter(message); - expect(output.match(/multiline/g).length).toStrictEqual(2); - }); - - test('Should use different colors for different containers.', () => { - containerLogger.colorizedConsoleFormatter({containerNames: {containerId: 'abc', shortName: 'container1'}, message: 'test'}); - containerLogger.colorizedConsoleFormatter({containerNames: {containerId: '123', shortName: 'container2'}, message: 'test'}); - expect(containerLogger.containerColor['abc']).not.toStrictEqual(containerLogger.containerColor['123']); - }); - - test('Should use different colors for containers with same name.', () => { - containerLogger.colorizedConsoleFormatter({containerNames: {containerId: 'abc', shortName: 'container1'}, message: 'test'}); - containerLogger.colorizedConsoleFormatter({containerNames: {containerId: '123', shortName: 'container1'}, message: 'test'}); - expect(containerLogger.containerColor['abc']).not.toStrictEqual(containerLogger.containerColor['123']); - }); -}); - -describe('ContainerLogger createServerOutputStream tests', () => { - test('container console logging 
pipes to the console logger', () => { - const containerLogger = new ContainerLogger(); - const stream = containerLogger.createServerOutputStream(null, {containerNames: {qualifiedName: 'container1'}}); - expect(_.castArray(stream._readableState.pipes)).toEqual([containerLogger.consoleLogger]); - }); - - test('container file logging does not pipe to the console logger', () => { - winston.fileLogging = {enabled: true, path: '/tmp'}; - const containerLogger = new ContainerLogger(); - const stream = containerLogger.createServerOutputStream(null, {containerNames: {qualifiedName: 'container1'}}); - expect(_.castArray(stream._readableState.pipes)).not.toEqual([containerLogger.consoleLogger]); - }); - - test('file logging is stored and cached', () => { - winston.fileLogging = {enabled: true, path: '/tmp'}; - const containerLogger = new ContainerLogger(); - const stream = containerLogger.createServerOutputStream(null, {qualifiedName: 'container1'}); - const stream2 = containerLogger.createServerOutputStream(null, {qualifiedName: 'container1'}); - expect(stream._readableState.pipes).toEqual(stream2._readableState.pipes); - }); - - test('each container gets its own file logger', () => { - winston.fileLogging = {enabled: true, path: '/tmp'}; - const containerLogger = new ContainerLogger(); - const stream = containerLogger.createServerOutputStream(null, {containerId: 'abc', qualifiedName: 'container1'}); - const stream2 = containerLogger.createServerOutputStream(null, {containerId: '123', qualifiedName: 'container2'}); - expect(stream._readableState.pipes).not.toBe(stream2._readableState.pipes); - }); - - test('container count increments', () => { - winston.fileLogging = {enabled: true, path: '/tmp'}; - const containerLogger = new ContainerLogger(); - containerLogger.createServerOutputStream(null, {containerId: 'abc', qualifiedName: 'container1'}); - containerLogger.createServerOutputStream(null, {containerId: '123', qualifiedName: 'container2'}); - expect(containerLogger.containerCount).toBe(2); - }); - - test('container count increments, even when reusing name', () => { - winston.fileLogging = {enabled: true, path: '/tmp'}; - const containerLogger = new ContainerLogger(); - containerLogger.createServerOutputStream(null, {containerId: 'abc', qualifiedName: 'container1'}); - containerLogger.createServerOutputStream(null, {containerId: '123', qualifiedName: 'container1'}); - expect(containerLogger.containerCount).toBe(2); - }); - - afterEach(() => { - // remove the global state :( - if (winston.fileLogging) { - delete winston.fileLogging; - } - }); -}); diff --git a/src/common/Errors.js b/src/common/Errors.js index a16ba462..68761d98 100644 --- a/src/common/Errors.js +++ b/src/common/Errors.js @@ -1,11 +1,11 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ +import {Logger} from './index.js'; import NodeProvider from './NodeProvider.js'; -import log from 'winston'; export class CIXError extends Error { constructor(message, stack) { @@ -54,6 +54,13 @@ export class CLIError extends CIXError { } } +export class PipelineError extends CIXError { + constructor(message, stack) { + super(message, undefined, stack); + this.name = 'PipelineError'; + } +} + export class ServerError extends CIXError { constructor(message, errorCode, stack) { super(message, stack); @@ -64,9 +71,9 @@ export class ServerError extends CIXError { NodeProvider.getProcess().on('uncaughtException', function(err) { if (err.stack) { - log.error(err.stack); + Logger.error(err.stack); } else if (err.message) { - log.error(`${err.name}: ${err.message}`); + Logger.error(`${err.name}: ${err.message}`); } NodeProvider.getProcess().exit(127); }); diff --git a/src/common/NodeProvider.js b/src/common/NodeProvider.js index da12ef21..8e8f5a31 100644 --- a/src/common/NodeProvider.js +++ b/src/common/NodeProvider.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -9,7 +9,6 @@ class NodeProvider { /** * @function module:common.NodeProvider#readScript * @description Provides process object for easier testability. - * * @returns {process} process object. */ getProcess() { @@ -19,7 +18,6 @@ class NodeProvider { /** * @function module:cix-common.NodeProvider#readScript * @description Provides process object for easier testability. - * * @returns {process} process object. */ getFs() { diff --git a/src/common/Provider.js b/src/common/Provider.js index ae9cce7c..a2c7d0af 100644 --- a/src/common/Provider.js +++ b/src/common/Provider.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -34,9 +34,7 @@ export class ProviderFromFunction { /** * @function module:common.Provider#fromFunction - * * @param {Function} func - function to create Provider for - * * @returns {Function} provider */ export function fromFunction(func) { @@ -45,9 +43,7 @@ export function fromFunction(func) { /** * @function module:common.Provider#fromObject - * * @param {object} obj - object to create Provider for - * * @returns {Function} provider */ export function fromObject(obj) { @@ -56,9 +52,7 @@ export function fromObject(obj) { /** * @function module:common.Provider#get - * * @param {object} obj - object to get - * * @returns {object} object */ export function get(obj) { diff --git a/src/common/Provider.test.js b/src/common/Provider.test.js index 9b6993d2..bdde9202 100644 --- a/src/common/Provider.test.js +++ b/src/common/Provider.test.js @@ -1,10 +1,10 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
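// Editor's note: the hunk above introduces PipelineError alongside the existing CIXError
// subclasses. A minimal sketch of the pattern (CIXError is simplified here; the real one
// also accepts an optional stack and error code).
class CIXError extends Error {
  constructor(message) {
    super(message);
    this.name = 'CIXError';
  }
}

class PipelineError extends CIXError {
  constructor(message) {
    super(message);
    this.name = 'PipelineError';
  }
}

try {
  throw new PipelineError('Response invalid');
} catch (err) {
  console.error(`${err.name}: ${err.message}`); // mirrors the uncaughtException log format above
}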
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -/* global jest, describe, it, beforeEach, expect */ +/* global jest, describe, beforeEach, expect */ jest.autoMockOff(); @@ -15,14 +15,14 @@ describe('Provider', () => { Provider = require('./Provider'); }); - it('Provider.get should return correct value from object', () => { + test('Provider.get should return correct value from object', () => { const value = {foo: 1, bar: 2}; const provider = Provider.fromObject(value); expect(provider.get()).toBe(value); }); - it('Provider.get should return correct value from function', () => { + test('Provider.get should return correct value from function', () => { const value = {foo: 1, bar: 2}; const provider = Provider.fromFunction(() => { return value; diff --git a/src/common/index.js b/src/common/index.js index fc9d33be..f249a3be 100644 --- a/src/common/index.js +++ b/src/common/index.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -9,5 +9,4 @@ export {default as _} from './lodash.js'; import * as Provider from './Provider.js'; export {Provider}; export {default as NodeProvider} from './NodeProvider.js'; -export {default as ContainerLogger} from './ContainerLogger.js'; - +export {default as Logger} from './logging/Logger.js'; diff --git a/src/common/loaders/AbstractLoader.js b/src/common/loaders/AbstractLoader.js index 88e72158..229e4217 100644 --- a/src/common/loaders/AbstractLoader.js +++ b/src/common/loaders/AbstractLoader.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -20,7 +20,7 @@ export default class AbstractLoader { } // eslint-disable-next-line no-unused-vars - fetch(path, environment) { + fetch(path, environment, httpAuthToken) { throw new ExecutionError('Not Implemented.'); } } diff --git a/src/common/loaders/AbstractLoader.test.js b/src/common/loaders/AbstractLoader.test.js index 208de0f9..cc3b186f 100644 --- a/src/common/loaders/AbstractLoader.test.js +++ b/src/common/loaders/AbstractLoader.test.js @@ -1,13 +1,14 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global beforeEach, describe, expect */ -import AbstractLoader from './AbstractLoader.js'; import {ExecutionError} from '../Errors.js'; - +// disable eslint for next line, some weird issue with jest loading order +// eslint-disable-next-line sort-imports +import AbstractLoader from './AbstractLoader.js'; class ImplementedLoader extends AbstractLoader { constructor() { diff --git a/src/common/loaders/HTTPLoader.js b/src/common/loaders/HTTPLoader.js index e0a83487..a123d171 100644 --- a/src/common/loaders/HTTPLoader.js +++ b/src/common/loaders/HTTPLoader.js @@ -1,16 +1,16 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
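// Editor's note: AbstractLoader#fetch above now accepts a third httpAuthToken argument.
// Hedged sketch of a concrete loader honouring the widened signature (the class bodies
// are illustrative; only the method shape comes from the hunk).
class AbstractLoader {
  // eslint-disable-next-line no-unused-vars
  fetch(path, environment, httpAuthToken) {
    throw new Error('Not Implemented.');
  }
}

class InMemoryLoader extends AbstractLoader {
  fetch(path, environment, httpAuthToken) {
    // a real loader would use httpAuthToken for authenticated fetches; here we just echo it
    return `fetched ${path} (auth: ${httpAuthToken ? 'yes' : 'no'})`;
  }
}

console.log(new InMemoryLoader().fetch('docs/examples/loop.yaml', {}, undefined));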
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ import {PROTOCOLS, getProtocol} from './Protocols.js'; import AbstractLoader from './AbstractLoader.js'; +import {ExecutionError} from '../Errors.js'; +import {Logger} from '../index.js'; import Path from 'path'; -import {ValidateError} from '../Errors.js'; import axios from 'axios'; import https from 'https'; -import log from 'winston'; export default class HTTPLoader extends AbstractLoader { constructor() { @@ -30,25 +30,28 @@ export default class HTTPLoader extends AbstractLoader { } } - async fetch(path, environment) { + async fetch(path, environment, httpAuthToken) { const headers = { 'User-Agent': 'CIX', }; - let token = environment.getEnvironmentVariable('__IMPORT_YAML_DEFINITION__.http_authorization_token') || - environment.getEnvironmentVariable('HTTP_AUTHORIZATION_TOKEN'); + const envHttpAuthToken = environment.getEnvironmentVariable('HTTP_AUTHORIZATION_TOKEN'); - if (token && token.value) { - token = environment.replace$$Values(token.value); - } else { - token = undefined; + // warn if user supplies both YAML based and environment based tokens + if (httpAuthToken && envHttpAuthToken && envHttpAuthToken.value) { + Logger.warn('Both the secret HTTP_AUTHORIZATION_TOKEN and the YAML attribute http-authorization-token were supplied.'); + Logger.warn('Using the YAML attribute http-authorization-token as the auth token.'); } - if (token) { - headers['Authorization'] = `token ${token}`; - log.debug(`Fetching ${path} with authorization tokens.`); + // httpAuthToken (from YAML) has precedence over envHttpAuthToken (from environment) + if (httpAuthToken) { + headers['Authorization'] = `token ${environment.replace$$Values(httpAuthToken)}`; + Logger.debug(`Fetching ${path} with authorization token set in the YAML.`); + } else if (envHttpAuthToken && envHttpAuthToken.value) { + headers['Authorization'] = `token ${environment.replace$$Values(envHttpAuthToken.value)}`; + Logger.debug(`Fetching ${path} with authorization token set in the Environment.`); } else { - log.debug(`Fetching ${path} without authorization tokens.`); + Logger.warn(`Fetching ${path} without authorization tokens.`); } try { @@ -62,8 +65,8 @@ export default class HTTPLoader extends AbstractLoader { }); return resp.data; } catch (error) { - log.error(`${error}`); - throw new ValidateError(`Unable to import HTTPS path ${path}`); + Logger.error(`${error}`); + throw new ExecutionError(`Unable to import HTTPS path ${path}`); } } } diff --git a/src/common/loaders/HTTPLoader.test.js b/src/common/loaders/HTTPLoader.test.js index cecb4043..a3d9c6bd 100644 --- a/src/common/loaders/HTTPLoader.test.js +++ b/src/common/loaders/HTTPLoader.test.js @@ -1,12 +1,12 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
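// Editor's note: the fetch rewrite above gives the YAML-supplied token precedence over
// the HTTP_AUTHORIZATION_TOKEN environment secret and warns when both are present. A
// stand-alone sketch of just that precedence decision (header format copied from the
// hunk; the environment object is a stub).
function buildAuthHeader(environment, httpAuthToken) {
  const envToken = environment.getEnvironmentVariable('HTTP_AUTHORIZATION_TOKEN');
  if (httpAuthToken && envToken && envToken.value) {
    console.warn('Both the secret HTTP_AUTHORIZATION_TOKEN and the YAML attribute http-authorization-token were supplied.');
  }
  if (httpAuthToken) {
    return {Authorization: `token ${environment.replace$$Values(httpAuthToken)}`};
  }
  if (envToken && envToken.value) {
    return {Authorization: `token ${environment.replace$$Values(envToken.value)}`};
  }
  return {}; // fetched without authorization tokens
}

const env = {
  getEnvironmentVariable: () => ({value: '5678'}),
  replace$$Values: (value) => value,
};
console.log(buildAuthHeader(env, '1234')); // { Authorization: 'token 1234' }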
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global beforeEach, describe, expect */ +import {ExecutionError} from '../index.js'; import HTTPLoader from './HTTPLoader.js'; -import {ValidateError} from '../Errors.js'; import axios from 'axios'; jest.mock('axios'); @@ -52,6 +52,18 @@ describe('HTTPLoader.fetch', () => { environmentMock.getEnvironmentVariable = jest.fn(); environmentMock.getEnvironmentVariable.mockReturnValue(undefined); // no token - await expect(loader.fetch('http://test.com/path', environmentMock)).rejects.toThrow(ValidateError); + await expect(loader.fetch('http://test.com/path', environmentMock)).rejects.toThrow(ExecutionError); + }); + + test('expect fetch to use YAML authToken over Environment authToken', async () => { + const expectedResp = {data: 'test_data'}; + axios.mockImplementation(() => Promise.resolve(expectedResp)); + const environmentMock = {}; + environmentMock.getEnvironmentVariable = jest.fn(); + environmentMock.replace$$Values = (input) => `${input}`; + environmentMock.getEnvironmentVariable.mockReturnValue({value: '5678'}); + + await loader.fetch('http://test.com/path', environmentMock, '1234'); + expect(axios.mock.calls[0][0].headers.Authorization).toBe('token 1234'); }); }); diff --git a/src/common/loaders/LocalFileLoader.js b/src/common/loaders/LocalFileLoader.js index dd3dae40..939280c4 100644 --- a/src/common/loaders/LocalFileLoader.js +++ b/src/common/loaders/LocalFileLoader.js @@ -1,40 +1,39 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ import {PROTOCOLS, getProtocol} from './Protocols.js'; import AbstractLoader from './AbstractLoader.js'; +import HTTPLoader from './HTTPLoader.js'; +import {Logger} from '../index.js'; import NodeProvider from '../NodeProvider.js'; import Path from 'path'; import {ValidateError} from '../Errors.js'; -import log from 'winston'; - export default class LocalFileLoader extends AbstractLoader { constructor() { super([PROTOCOLS.DEFAULT]); + this.httpLoader = new HTTPLoader(); } relativePath(path, environment, definition) { const targetProtocol = getProtocol(definition.src); - if (targetProtocol === PROTOCOLS.DEFAULT) { return Path.normalize(`${Path.dirname(path)}/${definition.src}`); } else { - return LOADERS[targetProtocol].relativePath(path, environment, definition); + return this.httpLoader.relativePath(path, environment, definition); } } // eslint-disable-next-line no-unused-vars - fetch(path, environment) { + fetch(path, environment, httpAuthToken) { try { return NodeProvider.getFs().readFileSync(path); } catch (error) { - log.debug(`${error}`); + Logger.debug(`${error}`); throw new ValidateError(`Failed to load file: ${path}`); } } } - diff --git a/src/common/loaders/LocalFileLoader.test.js b/src/common/loaders/LocalFileLoader.test.js index 0f5051ef..115afe24 100644 --- a/src/common/loaders/LocalFileLoader.test.js +++ b/src/common/loaders/LocalFileLoader.test.js @@ -1,13 +1,15 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
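// Editor's note: LocalFileLoader#relativePath above resolves plain `src` imports against
// the directory of the importing file and hands http(s) sources to HTTPLoader. Minimal
// sketch of the local branch using Node's path module (the paths are illustrative).
import Path from 'path';

function relativePath(importingFile, definition) {
  return Path.normalize(`${Path.dirname(importingFile)}/${definition.src}`);
}

console.log(relativePath('docs/examples/import/imports.yaml', {src: '../loop.yaml'}));
// -> 'docs/examples/loop.yaml'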
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global beforeEach, describe, expect */ -import LocalFileLoader from './LocalFileLoader.js'; import Path from 'path'; -import {ValidateError} from '../Errors.js'; +import {ValidateError} from '../index.js'; +// disable eslint for next line, some weird issue with jest loading order +// eslint-disable-next-line sort-imports +import LocalFileLoader from './LocalFileLoader.js'; describe('LocalFileLoader.relativePath', () => { let loader; diff --git a/src/common/loaders/Protocols.js b/src/common/loaders/Protocols.js index 7b1751c0..254af11e 100644 --- a/src/common/loaders/Protocols.js +++ b/src/common/loaders/Protocols.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -14,9 +14,7 @@ export const PROTOCOLS = { /** * @function module:common.Loader#getProtocol - * * @param {string} path - url being accessed - * * @returns {string} Representation of the protocol being used */ export function getProtocol(path) { diff --git a/src/common/loaders/Protocols.test.js b/src/common/loaders/Protocols.test.js index 7380aeeb..4b34738e 100644 --- a/src/common/loaders/Protocols.test.js +++ b/src/common/loaders/Protocols.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/common/loaders/index.js b/src/common/loaders/index.js index c21c7ee0..6431e9e8 100644 --- a/src/common/loaders/index.js +++ b/src/common/loaders/index.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -11,11 +11,9 @@ import {getProtocol} from './Protocols.js'; /** * @function module:common.Loader#relativePath - * * @param {string} path - path being accessed * @param {object} environment - representation of the environment passed * @param {object} definition - representation of the pipeline - * * @returns {string} relative path */ export function relativePath(path, environment, definition) { @@ -25,14 +23,13 @@ export function relativePath(path, environment, definition) { // eslint-disable-next-line no-unused-vars /** * @function module:common.Loader#fetch - * * @param {string} path - path being accessed * @param {object} environment - representation of the environment passed - * + * @param {string} httpAuthToken - (optional) provides authentication for fetch * @returns {object} yaml */ -export function fetch(path, environment) { - return LOADERS[getProtocol(path)].fetch(path, environment); +export function fetch(path, environment, httpAuthToken) { + return LOADERS[getProtocol(path)].fetch(path, environment, httpAuthToken); } const LOADERS = {}; diff --git a/src/common/lodash.js b/src/common/lodash.js index d76c8620..39199334 100644 --- a/src/common/lodash.js +++ b/src/common/lodash.js @@ -1,13 +1,13 @@ /* -* Copyright (c) 2020, salesforce.com, inc. 
+* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ import {CIXError, ValidateError} from './Errors.js'; import {fetch, relativePath} from './loaders/index.js'; +import {Logger} from './index.js'; import _ from 'lodash'; -import log from 'winston'; import yaml from 'js-yaml'; /** @@ -16,7 +16,6 @@ import yaml from 'js-yaml'; _.map({ /** * @function module:common.lodash#check - * * @param {boolean} value value to check * @param {string} message message */ @@ -32,9 +31,7 @@ _.map({ /** * @function module:common.lodash#emptyToNull - * * @param {string} value value to check - * * @returns {string} value or null if value is empty */ emptyToNull(value) { @@ -45,9 +42,7 @@ _.map({ /** * @function module:common.lodash#getOnlyElement - * * @param {Array} collection collection to get the first element of - * * @returns {object} first element of the array * @throws {Error} If collection is not an array and doesn't only have a single element */ @@ -69,7 +64,6 @@ _.map({ /** * @function module:common.lodash#getUniqueDuplicates - * * @param {Array} collection collection to get all unique duplicates * @returns {Array} array of unique objects in the collection */ @@ -79,11 +73,9 @@ _.map({ /** * @function module:common.lodash#relativePath - * * @param {string} path - path being accessed * @param {object} environment - representation of the environment passed * @param {object} definition - representation of the pipeline - * * @returns {string} relative path */ relativePath(path, environment, definition) { @@ -93,21 +85,19 @@ _.map({ // eslint-disable-next-line no-unused-vars /** * @function module:common.lodash#fetch - * * @param {string} path - path being accessed * @param {object} environment - representation of the environment passed - * + * @param {string} httpAuthToken - (optional) provides authentication for fetch * @returns {object} yaml */ - fetch(path, environment) { - return fetch(path, environment); + fetch(path, environment, httpAuthToken) { + return fetch(path, environment, httpAuthToken); }, /** * @function module:common.Loader#loadYamlOrJson * @param {string} input - relative or absolute path * @param {string} optionalEncoding - optional encoding by which to read the yaml file, defaults to 'utf-8' - * * @returns {object|undefined} the json representation of the yaml */ loadYamlOrJson(input, optionalEncoding) { @@ -115,14 +105,13 @@ _.map({ try { return yaml.load(input, encoding); } catch (error) { - log.warn(`${error}`); + Logger.warn(`${error}`); throw new ValidateError('Failed to parse YAML/JSON.'); } }, /** * Returns a string of random characters consisting if 0-9, A-Z, a-z. - * * @function module:common.lodash#randomString * @param {number} len - desired length of the string * @returns {string} the random string diff --git a/src/common/lodash.test.js b/src/common/lodash.test.js index bb8115ec..110cab24 100644 --- a/src/common/lodash.test.js +++ b/src/common/lodash.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
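// Editor's note: `_.loadYamlOrJson` above wraps js-yaml and raises ValidateError when the
// input parses as neither format. Hedged usage sketch with plain js-yaml standing in for
// the lodash mixin (same parse call; the error type here is generic).
import yaml from 'js-yaml';

function loadYamlOrJson(input) {
  try {
    return yaml.load(input);
  } catch (error) {
    throw new Error('Failed to parse YAML/JSON.');
  }
}

console.log(loadYamlOrJson('foo: bar\nlist:\n  - 1\n  - 2'));
// -> { foo: 'bar', list: [ 1, 2 ] }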
 * SPDX-License-Identifier: BSD-3-Clause
 * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
diff --git a/src/common/logging/ContainerStreamTransform.js b/src/common/logging/ContainerStreamTransform.js
new file mode 100644
index 00000000..921aa58b
--- /dev/null
+++ b/src/common/logging/ContainerStreamTransform.js
@@ -0,0 +1,51 @@
+import stream from 'stream';
+
+/**
+ * @class ContainerStreamTransform
+ * @description Converts container output to a winston object stream.
+ */
+export default class ContainerStreamTransform extends stream.Transform {
+  constructor(environment, containerNames, isErrorOutput) {
+    super({
+      readableObjectMode: true,
+      writableObjectMode: true,
+    });
+    this.environment = environment;
+    this.containerNames = containerNames;
+    this.isErrorOutput = isErrorOutput;
+    this.previousIncompleteChunk = '';
+  }
+
+  _transform(chunk, _encoding, callback) {
+    let message;
+    if (this.environment) {
+      message = this.previousIncompleteChunk + this.environment.redactSecrets(chunk.toString());
+    } else {
+      message = this.previousIncompleteChunk + chunk.toString();
+    }
+
+    if (!message.includes('\n')) {
+      this.previousIncompleteChunk = message;
+    } else {
+      this.previousIncompleteChunk = '';
+      this.push({
+        level: 'info',
+        message: Buffer.from(message.trimEnd()),
+        containerNames: this.containerNames,
+        isErrorOutput: this.isErrorOutput,
+      });
+    }
+    callback();
+  }
+
+  _final() {
+    if (this.previousIncompleteChunk != '') {
+      this.push({
+        level: 'info',
+        message: Buffer.from(this.previousIncompleteChunk.trimEnd()),
+        containerNames: this.containerNames,
+        isErrorOutput: this.isErrorOutput,
+      });
+    }
+  }
+}
diff --git a/src/common/logging/Logger.js b/src/common/logging/Logger.js
new file mode 100644
index 00000000..f2fce10c
--- /dev/null
+++ b/src/common/logging/Logger.js
@@ -0,0 +1,154 @@
+/*
+* Copyright (c) 2022, salesforce.com, inc.
+* All rights reserved.
+* SPDX-License-Identifier: BSD-3-Clause
+* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
+*/
+import LoggerTransportFactory from './LoggerTransportFactory.js';
+import PipelineLogger from './PipelineLogger.js';
+import path from 'path';
+import winston from 'winston';
+
+
+class Logger {
+  constructor() {
+    // Create a global logger
+    this.globalLogger = winston.createLogger();
+
+    // Add the console transport
+    this.globalLogger.add(LoggerTransportFactory.createApplicationConsoleTransport());
+    // create a dictionary of pipeline loggers
+    this.pipelineLoggers = {};
+  }
+
+  /**
+   * @function module:common/logging/Logger#enableFileLogging
+   * @description Enables file logging globally.
+   * @param {string} loggingType - type of logging to enable
+   * @param {object} loggingPath - path to write the log file to
+   * @param {string} logname - name of the logfile (default is cix-execution.log)
+   */
+  enableFileLogging(loggingType, loggingPath, logname) {
+    this.loggingType = loggingType;
+    this.loggingPath = loggingPath;
+    this.logname = logname;
+    this.info(`Activating file logging to: "${this.loggingPath}"`);
+    this.globalLogger.add(LoggerTransportFactory.createApplicationFileTransport(path.join(this.loggingPath, this.logname)));
+  }
+
+  /**
+   * @function module:common/logging/Logger#getLogger
+   * @description Getter for the global logger.
+ * @returns {winston.Logger} - the global logger + */ + getLogger() { + return this.globalLogger; + } + + /** + * @function module:common/logging/Logger#silly + * @description silly logger + * @param {string} message - the message to log + * @param {string} pipelineId - (optional) the pipeline logger to log to + */ + silly(message, pipelineId) { + this.globalLogger.silly(message); + if (pipelineId) { + this.getPipelineLogger(pipelineId)?.getRemoteLogger().silly(message); + } + } + + /** + * @function module:common/logging/Logger#debug + * @description debug logger + * @param {string} message - the message to log + * @param {string} pipelineId - (optional) the pipeline logger to log to + */ + debug(message, pipelineId) { + this.globalLogger.debug(message); + if (pipelineId) { + this.getPipelineLogger(pipelineId)?.getRemoteLogger().debug(message); + } + } + + /** + * @function module:common/logging/Logger#info + * @description info logger + * @param {string} message - the message to log + * @param {string} pipelineId - (optional) the pipeline logger to log to + */ + info(message, pipelineId) { + this.globalLogger.info(message); + if (pipelineId) { + this.getPipelineLogger(pipelineId)?.getRemoteLogger().info(message); + } + } + + /** + * @function module:common/logging/Logger#warn + * @description warn logger + * @param {string} message - the message to log + * @param {string} pipelineId - (optional) the pipeline logger to log to + */ + warn(message, pipelineId) { + this.globalLogger.warn(message); + if (pipelineId) { + this.getPipelineLogger(pipelineId)?.getRemoteLogger().warn(message); + } + } + + /** + * @function module:common/logging/Logger#error + * @description error logger + * @param {string} message - the message to log + * @param {string} pipelineId - (optional) the pipeline logger to log to + */ + error(message, pipelineId) { + this.globalLogger.error(message); + if (pipelineId) { + this.getPipelineLogger(pipelineId)?.getRemoteLogger().error(message); + } + } + + /** + * @function module:common/logging/Logger#log + * @description logs a winston info object + * @param {object} obj - the message to log + * @param {string} pipelineId - (optional) the pipeline logger to log to + */ + log(obj, pipelineId) { + this.globalLogger.log(obj); + if (pipelineId) { + this.getPipelineLogger(pipelineId)?.getRemoteLogger().log(obj); + } + } + + /** + * @function module:common/logging/Logger#createPipelineLogger + * @description generates a new PipelineLogger for a pipeline + * @param {object} pipeline (optional) - the pipeline to generate a logger for + */ + createPipelineLogger(pipeline) { + // pipeline, loggingType and loggingPath may be undefiend, but that's ok + // "default" is used within client execution when we don't have the ID of the pipeline + this.pipelineLoggers[pipeline?.getId() ?? 
'default'] = new PipelineLogger(pipeline, this.loggingType, this.loggingPath, this.logname); + } + + /** + * @function module:common/logging/Logger#getPipelineLogger + * @description gets a PipelineLogger for a pipeline + * @param {object} pipelineId (optional) - the pipeline to get a logger for + * @returns {PipelineLogger} - the pipeline logger + */ + getPipelineLogger(pipelineId = 'default') { + // "default" is used within client execution when we don't have the ID of the pipeline + if (pipelineId === 'default' && this.pipelineLoggers[pipelineId] === undefined ) { + this.createPipelineLogger(); + } + return this.pipelineLoggers[pipelineId]; + } +} + +// Singleton instance +const instance = new Logger(); +export {instance as default}; diff --git a/src/common/logging/Logger.test.js b/src/common/logging/Logger.test.js new file mode 100644 index 00000000..4f433f38 --- /dev/null +++ b/src/common/logging/Logger.test.js @@ -0,0 +1,54 @@ +/* +* Copyright (c) 2022, salesforce.com, inc. +* All rights reserved. +* SPDX-License-Identifier: BSD-3-Clause +* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause +*/ +/* global describe, test, expect */ +import {Logger} from '../index.js'; + +describe('Logger tests', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + test('Logs should pass through to global logger.', () => { + const warnMock = jest.spyOn(Logger.globalLogger, 'warn').mockImplementation(); + Logger.warn('This is a test'); + expect(warnMock.mock.calls.length).toBe(1); + }); + + test('Logs should not pass through to pipeline logger if pipeline ID is not set.', () => { + const warnMock = jest.spyOn(Logger.globalLogger, 'warn').mockImplementation(); + const getPipeline = jest.spyOn(Logger, 'getPipelineLogger').mockImplementation(); + Logger.warn('This is a test'); + expect(warnMock.mock.calls.length).toBe(1); + expect(getPipeline.mock.calls.length).toBe(0); + }); + + test('Logs should pass through to pipeline logger if pipeline ID is set.', () => { + const warnMock = jest.spyOn(Logger.globalLogger, 'warn').mockImplementation(); + const getPipeline = jest.spyOn(Logger, 'getPipelineLogger').mockImplementation(); + Logger.warn('This is a test', '123'); + expect(warnMock.mock.calls.length).toBe(1); + expect(getPipeline.mock.calls.length).toBe(1); + }); + + test('createPipelineLogger will default to \'default\' if ID is not available.', () => { + Logger.createPipelineLogger(); + expect(Object.keys(Logger.pipelineLoggers)).toContain('default'); + delete Logger.pipelineLoggers['default']; + }); + + test('createPipelineLogger will not use default to \'default\' if ID is available.', () => { + Logger.createPipelineLogger({getId: () => '123'}); + expect(Object.keys(Logger.pipelineLoggers)).not.toContain('default'); + delete Logger.pipelineLoggers['123']; + }); + + test('getPipelineLogger will return the pipeline.', () => { + Logger.createPipelineLogger({getId: () => '123'}); + expect(Logger.getPipelineLogger('123')).not.toBeNull(); + delete Logger.pipelineLoggers['123']; + }); +}); diff --git a/src/common/logging/LoggerTransportFactory.js b/src/common/logging/LoggerTransportFactory.js new file mode 100644 index 00000000..38b00d10 --- /dev/null +++ b/src/common/logging/LoggerTransportFactory.js @@ -0,0 +1,160 @@ +/* +* Copyright (c) 2022, salesforce.com, inc. +* All rights reserved. 
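// Editor's note: illustrative sketch only, not part of the patch. It shows how
// the Logger singleton introduced above might be used, based on the methods and
// tests shown in this diff; the log path and pipeline ID are hypothetical values.
import {Logger} from '../index.js';

Logger.enableFileLogging('file', '/tmp/cix-logs', 'cix-execution.log');
Logger.info('pipeline starting');                 // global logger only
Logger.createPipelineLogger({getId: () => 'pipeline-1'});
Logger.info('step finished', 'pipeline-1');       // also mirrored to that pipeline's remote logger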
+* SPDX-License-Identifier: BSD-3-Clause +* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause +*/ +import {NodeProvider} from '../index.js'; +import chalk from 'chalk'; +import winston from 'winston'; + +class LoggerTransportFactory { + constructor() { + // Check to see if the TTY can support colors + this.supportsColor = !NodeProvider.getProcess().argv.includes('--no-color') && + (chalk.supportsColor.hasBasic || NodeProvider.getProcess().argv.includes('--color')); + // Get Log Level from the environment variable, default to info + this.logLevel = (NodeProvider.getProcess().env.LOG_LEVEL || 'info').toLowerCase(); + // option to silence logs for utests + this.silent = NodeProvider.getProcess().argv.includes('--silent'); + } + + /** + * @function module:common/logging/LoggerTransportFactory#createContainerConsoleTransport + * @description Creates a file transport for the console logs + * @returns {winston.transports} - the console transport + */ + createContainerConsoleTransport() { + return new winston.transports.Console({ + level: 'info', + silent: this.silent, + format: winston.format.combine( + winston.format.label({label: 'color'}), + winston.format.printf(this.composeStyleFormatter), + ), + }); + } + + /** + * @function module:common/logging/LoggerTransportFactory#createContainerFilesTransport + * @param {string} filename - the filename to write the logs to + * @param {boolean} composeStyle - whether to use compose style formatting + * @description Creates a file transport for the console logs + * @returns {winston.transports} - the console transport + */ + createContainerFileTransport(filename, composeStyle = false) { + let format = winston.format.printf((item) => `${item.message}`); + if (composeStyle) { + format = winston.format.combine( + winston.format.label({label: 'no-color'}), + winston.format.printf(this.composeStyleFormatter), + ); + } + return new winston.transports.File({ + level: 'info', + silent: this.silent, + filename: filename, + format: format, + }); + } + + /** + * @function module:common/logging/LoggerTransportFactory#createApplicationConsoleTransport + * @description Creates a console transport for the application logs. + * @returns {winston.transports} - the console transport + */ + createApplicationConsoleTransport() { + return new winston.transports.Console({ + level: this.logLevel, + silent: this.silent, + format: winston.format.combine( + winston.format.timestamp({format: 'YYYY-MM-DDTHH:mm:ssZ'}), + winston.format(this.uppercaseLogLevelFormatter)(), + this.supportsColor ? winston.format.colorize() : winston.format.printf(() => { }), + winston.format.align(), + winston.format.printf((info) => `${info.timestamp} ${info.level} ${info.message}`), + ), + }); + } + + /** + * @function module:common/logging/LoggerTransportFactory#createApplicationFileTransport + * @description Creates a file transport for the application logs. 
+ * @param {string} filename - the filename to write the logs to + * @returns {winston.transports} - the file transport + */ + createApplicationFileTransport(filename) { + return new winston.transports.File({ + level: this.logLevel, + silent: this.silent, + filename: filename, + format: winston.format.combine( + winston.format.timestamp({format: 'YYYY-MM-DDTHH:mm:ssZ'}), + winston.format(this.uppercaseLogLevelFormatter)(), + winston.format.align(), + winston.format.printf((info) => `${info.timestamp} ${info.level} ${info.message}`), + ), + }); + } + + /** + * @function module:common/logging/LoggerTransportFactory#uppercaseLogLevelFormatter + * @description Uppercases the log level in the log message. + * @param {object} info - the winston info object + * @returns {string} - log string + */ + uppercaseLogLevelFormatter(info) { + info.level = info.level.toUpperCase(); + return info; + } + + /** + * @function module:common/logging/LoggerTransportFactory#composeStyleFormatter + * @description Creates a winston formatter that colors the container output. + * @param {object} info - the winston info object + * @returns {string} - log string + */ + composeStyleFormatter(info) { + const COLORS = [chalk.blue, chalk.green, chalk.magenta, chalk.yellow, chalk.cyan]; + const ERROR_COLOR = chalk.red; + const MAX_NAME_LENGTH = 20; + + const containerId = info.containerNames.containerId; + let formattedName = info.containerNames.shortName; + if (formattedName.length > MAX_NAME_LENGTH) { + formattedName = formattedName.substring(0, MAX_NAME_LENGTH - 1) + '~'; + } + formattedName = formattedName.padEnd(MAX_NAME_LENGTH, ' ') + ' |'; + + if (this.containerColor === undefined) { + this.containerCount = 0; + this.containerColor = []; + } + + if (!this.containerColor[containerId]) { + this.containerColor[containerId] = COLORS[this.containerCount++ % COLORS.length]; + } + + const colorFn = this.containerColor[containerId]; + const output = info.message.toString().split('\n'); + + // sometimes we get more than one line in a stream + for (let i = 0; i < output.length; i++) { + if (info.label === 'color') { + if (info.isErrorOutput) { + output[i] = `${colorFn(formattedName)} ${ERROR_COLOR(output[i])}`; + } else { + output[i] = `${colorFn(formattedName)} ${output[i]}`; + } + } else { + output[i] = `${formattedName} ${output[i]}`; + } + } + + return output.join('\n'); + } +} + +// Singleton instance +const instance = new LoggerTransportFactory(); +export {instance as default}; diff --git a/src/common/logging/LoggerTransportFactory.test.js b/src/common/logging/LoggerTransportFactory.test.js new file mode 100644 index 00000000..5e1deee5 --- /dev/null +++ b/src/common/logging/LoggerTransportFactory.test.js @@ -0,0 +1,50 @@ +/* +* Copyright (c) 2022, salesforce.com, inc. +* All rights reserved. 
+* SPDX-License-Identifier: BSD-3-Clause +* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause +*/ +/* global describe, test, expect, beforeEach */ +import LoggerTransportFactory from './LoggerTransportFactory.js'; +import stripAnsi from 'strip-ansi'; + +jest.mock('./Logger.js'); + +describe('LoggerTransportFactory composeStyleFormatter tests', () => { + let pipelineLogger; + + beforeEach(() => { + jest.restoreAllMocks(); + pipelineLogger = LoggerTransportFactory; + }); + + test('Console output should be after the pipe.', () => { + const message = {containerNames: {shortName: 'test'}, message: 'should be tabbed'}; + const output = pipelineLogger.composeStyleFormatter(message); + expect(stripAnsi(output)).toEqual('test | should be tabbed'); + }); + + test('Long container names should be truncated.', () => { + const message = {containerNames: {shortName: 'TheQuickBrownFoxJumpsOverTheLazyDog'}, message: 'should be truncated'}; + const output = pipelineLogger.composeStyleFormatter(message); + expect(stripAnsi(output)).toEqual('TheQuickBrownFoxJum~ | should be truncated'); + }); + + test('Multiline messages should each have the container name prepended.', () => { + const message = {containerNames: {shortName: 'multiline'}, message: 'first\nsecond'}; + const output = pipelineLogger.composeStyleFormatter(message); + expect(output.match(/multiline/g).length).toStrictEqual(2); + }); + + test('Should use different colors for different containers.', () => { + pipelineLogger.composeStyleFormatter({containerNames: {containerId: 'abc', shortName: 'container1'}, message: 'test'}); + pipelineLogger.composeStyleFormatter({containerNames: {containerId: '123', shortName: 'container2'}, message: 'test'}); + expect(pipelineLogger.containerColor['abc']).not.toStrictEqual(pipelineLogger.containerColor['123']); + }); + + test('Should use different colors for containers with same name.', () => { + pipelineLogger.composeStyleFormatter({containerNames: {containerId: 'abc', shortName: 'container1'}, message: 'test'}); + pipelineLogger.composeStyleFormatter({containerNames: {containerId: '123', shortName: 'container1'}, message: 'test'}); + expect(pipelineLogger.containerColor['abc']).not.toStrictEqual(pipelineLogger.containerColor['123']); + }); +}); diff --git a/src/common/logging/PipelineLogger.js b/src/common/logging/PipelineLogger.js new file mode 100644 index 00000000..6c153b07 --- /dev/null +++ b/src/common/logging/PipelineLogger.js @@ -0,0 +1,179 @@ +/* +* Copyright (c) 2022, salesforce.com, inc. +* All rights reserved. 
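// Editor's note: a minimal sketch (not part of the diff) of how the factory's
// container console transport could be attached to a winston logger, mirroring
// what PipelineLogger does below; the containerNames values are hypothetical.
import winston from 'winston';
import LoggerTransportFactory from './LoggerTransportFactory.js';

const containerLogger = winston.createLogger();
containerLogger.add(LoggerTransportFactory.createContainerConsoleTransport());
containerLogger.log({
  level: 'info',
  message: 'hello from a container',
  containerNames: {containerId: 'abc123', shortName: 'build'},
  isErrorOutput: false,
});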
+* SPDX-License-Identifier: BSD-3-Clause +* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause +*/ + +import ContainerStreamTransform from './ContainerStreamTransform.js'; +import LoggerTransportFactory from './LoggerTransportFactory.js'; +import _ from 'lodash'; +import path from 'path'; +import stream from 'stream'; +import winston from 'winston'; + + +export default class PipelineLogger { + constructor(pipeline, loggingType, loggingPath, logname) { + this.pipeline = pipeline; + this.loggingType = loggingType; + this.loggingPath = loggingPath; + this.logname = logname; + + // Creates a console logger for container output + this.containerConsoleLogger = winston.createLogger(); + this.containerConsoleLogger.add(LoggerTransportFactory.createContainerConsoleTransport()); + + if (this.loggingType === 'file') { + // Creates a file logger for single file logging + this.containerFileLogger = winston.createLogger(); + this.containerFileLogger.add(LoggerTransportFactory.createContainerFileTransport(path.join(this.loggingPath, this.logname), true)); + } else if (this.loggingType === 'files') { + // A unique 'files' logger is generated for each container on demand, see createContainerFilesLogger + this.containerFilesLogger = {}; + this.filenameCount = 0; + } + + // The client (which does not need a remote stream) will not provide a pipeline object + if (!_.isNil(pipeline)) { + // Create a remote stream for remote clinets to hook into + this.remoteStream = this.createRemoteStream(); + // Create a log interface for the remote stream + this.remoteLogger = this.createRemoteLogger(); + // Start a heartbeat to keep the remote stream alive + this.startHeartbeat(); + } + } + + /** + * @function module:common/logging/PipelineLogger#createContainerFilesLogger + * @description Generates a unique file logger for each container. + * @param {object} containerNames - container names object + * @returns {object} - log stream + */ + createContainerFilesLogger(containerNames) { + const containerId = containerNames.containerId; + const containerName = containerNames.qualifiedName; + + if (!this.containerFilesLogger[containerId]) { + const filename = path.join(this.loggingPath, `${(++this.filenameCount).toString().padStart(2, '0')}-${containerName}.log`); + this.containerFilesLogger[containerId] = winston.createLogger(); + this.containerFilesLogger[containerId].add(LoggerTransportFactory.createContainerFileTransport(filename)); + } + return this.containerFilesLogger[containerId]; + } + + /** + * @function module:common/logging/PipelineLogger#createRemoteStream + * @description Generates a remote stream for the pipeline. 
+ * @returns {object} - log stream + */ + createRemoteStream() { + const backPressureMessage = JSON.stringify({'level': 'warn', 'message': 'Not able to keep up with server log streaming, having to drop packets....'}); + const backPressureMessageByteSize = Buffer.byteLength(backPressureMessage); + + return new stream.Transform({ + writableObjectMode: true, + readableHighWaterMark: 26214400, // 25MB + transform(chunk, _encoding, callback) { + chunk.message = chunk.message.toString('utf8'); + const strMessage = JSON.stringify(chunk) + '\n'; + const strMessageByteSize = Buffer.byteLength(strMessage); + // drops any message if stream is full, prevents backpressure and memory consumption + // see: https://nodejs.org/en/docs/guides/backpressuring-in-streams/ + if (this.readableLength + strMessageByteSize + backPressureMessageByteSize < this.readableHighWaterMark) { + this.push(strMessage); + this.backpressureWarning = false; + } else { + // send a warning if there is space, and we are not already sending one + if (!this.backpressureWarning && this.readableLength + backPressureMessageByteSize < this.readableHighWaterMark) { + this.push(backPressureMessage); + this.backpressureWarning = true; + } + } + callback(); + }, + }); + } + + /** + * @function module:common/logging/PipelineLogger#createRemoteLogger + * @description Generates a remote stream for the pipeline. + * @returns {object} - log stream + */ + createRemoteLogger() { + return winston.createLogger({ + level: 'silly', + transports: [ + new winston.transports.Stream({stream: this.remoteStream}), + ], + }); + } + + /** + * @function module:common/logging/PipelineLogger#createContainerOutputStream + * @description Creates a stream for dockerrode to use, pipe that stream into correct logger. + * @param {object} containerNames - container names object + * @param {boolean} isErrorOutput - is error output + * @returns {object} - log stream + */ + createContainerOutputStream(containerNames, isErrorOutput = false) { + const transformStream = new ContainerStreamTransform(this.pipeline?.getEnvironment(), containerNames, isErrorOutput); + stream.pipeline(transformStream, this.containerConsoleLogger, () => { }); + if (this.remoteStream) { + stream.pipeline(transformStream, this.remoteStream, () => { }); + } + if (this.loggingType === 'file') { + stream.pipeline(transformStream, this.containerFileLogger, () => { }); + } else if (this.loggingType === 'files') { + stream.pipeline(transformStream, this.createContainerFilesLogger(containerNames), () => { }); + } + return transformStream; + } + + /** + * @function module:common/logging/PipelineLogger#clientContainerLogger + * @description Used by a client to log container output. + * @param {object} info - winston info object + */ + clientContainerLogger(info) { + this.containerConsoleLogger.log(info); + if (this.loggingType === 'file') { + this.containerFileLogger.log(info); + } else if (this.loggingType === 'files') { + if (!this.containerFilesLogger[info.containerNames?.containerId]) { + this.containerFilesLogger[info.containerNames.containerId] = this.createContainerFilesLogger(info.containerNames); + } + this.containerFilesLogger[info.containerNames?.containerId].log(info); + } + } + + /** + * @function module:common/logging/PipelineLogger#startHeartbeat + * @description Starts a heartbeat to keep the remote stream alive. 
+ */ + startHeartbeat() { + if (this.remoteStream != null) { + this.remoteStream.write({message: 'Heartbeat.', level: 'silly'}); + setTimeout(this.startHeartbeat.bind(this), 60000); + } + } + + /** + * @function module:common/logging/PipelineLogger#getRemoteLogger + * @description getter for the logger to log to the remote stream + * @returns {object} - winston logger + */ + getRemoteLogger() { + return this.remoteLogger; + } + + /** + * @function module:common/logging/PipelineLogger#getRemoteReadableStream + * @description getter for the readable rate-limited stream + * @returns {stream} - readable stream + */ + getRemoteReadableStream() { + return this.remoteStream; + } +} diff --git a/src/common/logging/PipelineLogger.test.js b/src/common/logging/PipelineLogger.test.js new file mode 100644 index 00000000..c6e2e54d --- /dev/null +++ b/src/common/logging/PipelineLogger.test.js @@ -0,0 +1,59 @@ +/* +* Copyright (c) 2022, salesforce.com, inc. +* All rights reserved. +* SPDX-License-Identifier: BSD-3-Clause +* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause +*/ +/* global describe, test, expect */ +import Environment from '../../engine/environment/Environment.js'; +import PipelineLogger from './PipelineLogger.js'; + +describe('PipelineLogger createContainerOutputStream tests', () => { + test('by default, their should be two pipes (containerConsoleLogger and remoteStream)', () => { + const pipelineLogger = new PipelineLogger(); + const stream = pipelineLogger.createContainerOutputStream( {containerNames: {shortName: 'container1'}}); + expect(stream._readableState.pipes.length).toBe(1); + }); + + test('file logging is stored and cached', () => { + const pipelineLogger = new PipelineLogger(); + const stream = pipelineLogger.createContainerOutputStream( {shortName: 'container1'}); + const stream2 = pipelineLogger.createContainerOutputStream( {shortName: 'container1'}); + expect(stream._readableState.pipes).toEqual(stream2._readableState.pipes); + }); + + test('each container gets its own file logger', () => { + const pipelineLogger = new PipelineLogger(); + const stream = pipelineLogger.createContainerOutputStream( {containerId: 'abc', shortName: 'container1'}); + const stream2 = pipelineLogger.createContainerOutputStream( {containerId: '123', shortName: 'container2'}); + expect(stream._readableState.pipes).not.toBe(stream2._readableState.pipes); + }); + + test.skip('container count increments', () => { + // Manually run this test because it actually creates a file... + const pipelineLogger = new PipelineLogger(undefined, 'files', 'logs'); + pipelineLogger.createContainerOutputStream( {containerId: 'abc', shortName: 'container1'}); + pipelineLogger.createContainerOutputStream( {containerId: '123', shortName: 'container2'}); + expect(pipelineLogger.filenameCount).toBe(2); + }); + + test.skip('container count increments, even when reusing name', () => { + // Manually run this test because it actually creates a file... 
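// Editor's note: illustrative sketch, not part of the patch. It shows one way
// the pieces above could be combined to route container output; the actual
// wiring lives in DockerExec/DockerContainer and is not shown in this hunk.
// The containerNames values are hypothetical.
import {Logger} from '../index.js';

const pipelineLogger = Logger.getPipelineLogger();   // falls back to the 'default' client logger
const names = {containerId: 'abc123', shortName: 'build', qualifiedName: 'net-build'};
const stdout = pipelineLogger.createContainerOutputStream(names);
const stderr = pipelineLogger.createContainerOutputStream(names, true);
// stdout/stderr could then be passed to DockerContainer#start(outputStream, errorStream).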
+ const pipelineLogger = new PipelineLogger(undefined); + pipelineLogger.createContainerOutputStream({containerId: 'abc', shortName: 'container1'}); + pipelineLogger.createContainerOutputStream({containerId: '123', shortName: 'container1'}); + expect(pipelineLogger.filenameCount).toBe(2); + }); + + test('ContainerStreamTransform buffers input with no newline', () => { + const emptyEnv = new Environment(); + const pipelineLogger = new PipelineLogger({getEnvironment: () => emptyEnv}); + + const dockerStream = pipelineLogger.createContainerOutputStream({shortName: 'container1'}); + const noNewline = 'There is no newline...'; + dockerStream.write(noNewline); + expect(dockerStream.previousIncompleteChunk).toEqual(noNewline); + dockerStream.write('\n'); + expect(dockerStream.previousIncompleteChunk).toEqual(''); + }); +}); diff --git a/src/docker/DockerContainer.js b/src/docker/DockerContainer.js index 28ae7088..f83ae038 100644 --- a/src/docker/DockerContainer.js +++ b/src/docker/DockerContainer.js @@ -1,65 +1,17 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {DockerError, _} from '../common/index.js'; +import {DockerError, Logger, PipelineError, _} from '../common/index.js'; import DockerUtil from 'dockerode/lib/util.js'; -import log from 'winston'; import stream from 'stream'; import tar from 'tar-stream'; import util from 'util'; - -/** - * Creates the shell script used to run multiple commands in the container. - * - * @param {Array} commands - list of commands the script shall run - * @param {string} shell - path to the interpreter with which to run the script (default: /bin/sh) - * @returns {object} the tar-stream 'pack' data for the generated file - */ -function createScript(commands, shell = '/bin/sh') { - let cmd; let escapedCmd; - const scriptText = []; - - scriptText.push('#!' + shell + '\n'); - - if (shell.split('/').reverse()[0] === 'sh' || shell.split('/').reverse()[0] === 'bash') { - scriptText.push('PATH=/cix/bin:$PATH'); - scriptText.push('cix_echo() {'); - scriptText.push(' cix_prev_status=$?'); - scriptText.push(' if [ -x "$(command -v date 2>/dev/null)" ]; then'); - scriptText.push(' echo "+ [$(TZ=GMT+8 date +"%Y-%m-%dT%H:%M:%S%z")] $*"'); - scriptText.push(' else'); - scriptText.push(' echo "+ $*"'); - scriptText.push(' fi'); - scriptText.push(' return $cix_prev_status'); - scriptText.push('}'); - scriptText.push('set -e\n'); - - for (cmd of commands) { - escapedCmd = cmd.replace(/'/g, '\'\\\'\''); - escapedCmd = '\'' + escapedCmd + '\''; - scriptText.push(`cix_echo ${escapedCmd}`); - scriptText.push(cmd + '\n'); - } - } else { - for (cmd of commands) { - scriptText.push(cmd + '\n'); - } - } - - return { - name: 'usr/bin/cixrc', - data: scriptText.join('\n'), - mode: 0o755, - }; -} - /** * Packs up files into a buffer representing a tar archive, for copying to a container using the putArchive API. - * * @param {Array} files - array of file objects * @returns {Promise} a promise which resolves to a Buffer containing the tar data */ @@ -81,14 +33,14 @@ async function prepareFilesForPut(files) { export default class DockerContainer { /** * Initializes a DockerContainer object. 
- * * @param {object} dockerApi - Dockerode API object * @param {string} uniquePrefix - a unique prefix to use for container name * @param {object} [environment] - Environment object * @param {string} [networkName] - network name to attach the container to * @param {Array} [additionalVolumes] - list of volume objects (name, path) to attach to the container + * @param {string} defaultPullPolicy - override the default pull policy (step definition will take precedence) */ - constructor(dockerApi, uniquePrefix, environment, networkName, additionalVolumes) { + constructor(dockerApi, uniquePrefix, environment, networkName, additionalVolumes, defaultPullPolicy = 'Default') { this.config = {}; this.dockerApi = dockerApi; @@ -97,16 +49,76 @@ export default class DockerContainer { this.environment = environment ? environment : null; this.networkName = networkName ? networkName : null; this.volumes = additionalVolumes ? additionalVolumes : []; + this.defaultPullPolicy = defaultPullPolicy; this.inputString = null; this.outputStream = null; this.errorStream = null; } + /** + * Creates the shell script used to run multiple commands in the container. + * @param {string} stepName - name of the step for which the script will be created + * @param {Array} commands - list of commands the script shall run + * @param {string} commandsOutput - logging mode for the commands in the script (default: timestamp-echo) + * @param {string} shell - path to the interpreter with which to run the script (default: /bin/sh) + * @returns {object} the tar-stream 'pack' data for the generated file + */ + static createScript(stepName, commands, commandsOutput = 'timestamp-echo', shell = '/bin/sh') { + let cmd; let escapedCmd; + const scriptText = []; + + scriptText.push('#!' + shell + '\n'); + + if (shell.split('/').reverse()[0] === 'sh' || shell.split('/').reverse()[0] === 'bash') { + scriptText.push('PATH=/cix/bin:$PATH'); + scriptText.push('cix_timestamp_echo() {'); + scriptText.push(' cix_prev_status=$?'); + scriptText.push(' if [ -x "$(command -v date 2>/dev/null)" ]; then'); + scriptText.push(' echo "+ [$(TZ=GMT+8 date +"%Y-%m-%dT%H:%M:%S%z")] $*"'); + scriptText.push(' else'); + scriptText.push(' echo "+ $*"'); + scriptText.push(' fi'); + scriptText.push(' return $cix_prev_status'); + scriptText.push('}'); + scriptText.push('cix_echo() {'); + scriptText.push(' cix_prev_status=$?'); + scriptText.push(' echo "+ $*"'); + scriptText.push(' return $cix_prev_status'); + scriptText.push('}'); + scriptText.push('set -e\n'); + + Logger.silly(`Step ${stepName} commands-output: ${commandsOutput}`); + + for (cmd of commands) { + cmd = _.toString(cmd); + escapedCmd = cmd.replace(/'/g, '\'\\\'\''); + escapedCmd = '\'' + escapedCmd + '\''; + + if (commandsOutput == 'echo') { + scriptText.push(`cix_echo ${escapedCmd}`); + } else if (commandsOutput != 'minimal') { + scriptText.push(`cix_timestamp_echo ${escapedCmd}`); + } + + scriptText.push(cmd + '\n'); + } + } else { + for (cmd of commands) { + scriptText.push(cmd + '\n'); + } + } + + return { + name: 'usr/bin/cixrc', + data: scriptText.join('\n'), + mode: 0o755, + }; + } + /** * - * Produces a Docker API container specification object from a CIX Step definition object. - * + *Produces a Docker API container specification object from a CIX Step definition object. 
* @param {object} definition - the CIX step definition structure * @returns {object} an object representing the Docker API container structure */ @@ -133,8 +145,21 @@ export default class DockerContainer { containerSpec.WorkingDir = definition['working-dir']; } + if (!_.isNil(definition['user']) && definition['user'] != '') { + let user = `${definition['user']}`; + if (!_.isNil(this.environment)) { + user = this.environment.replace$$Values(user); + } + + if (user.startsWith('$$')) { + Logger.info(`${user} not found in environment, starting step ${definition.name} as root`); + } else { + containerSpec.User = user; + } + } + if (definition.privileged) { - containerSpec.Privileged = true; + containerSpec.HostConfig.Privileged = true; } // The container receives a fully-qualified name (network name + step name) so that it's unique on the Docker host. @@ -144,23 +169,27 @@ export default class DockerContainer { containerSpec.name = `${this.uniquePrefix}-${definition.name}`; } - const aliases = [definition.name]; + if (this.networkName !== 'host') { + const aliases = [definition.name]; - // The hostname internally (among containers on this pipeline's network) is the step name or provided hostname. - if (!_.isNil(definition.hostname)) { - containerSpec.Hostname = definition.hostname; - aliases.push(definition.hostname); - } else { - containerSpec.Hostname = definition.name; - } + // The hostname internally (among containers on this pipeline's network) is the step name or provided hostname. + if (!_.isNil(definition.hostname)) { + containerSpec.Hostname = definition.hostname; + aliases.push(definition.hostname); + } else { + containerSpec.Hostname = definition.name; + } - containerSpec.NetworkingConfig = { - EndpointsConfig: {}, - }; - if (this.networkName) { - containerSpec.NetworkingConfig.EndpointsConfig[this.networkName] = { - Aliases: aliases, + containerSpec.NetworkingConfig = { + EndpointsConfig: {}, }; + if (this.networkName) { + containerSpec.NetworkingConfig.EndpointsConfig[this.networkName] = { + Aliases: aliases, + }; + } + } else { + Logger.warn('Running on experimental \'host\' network, container hostname aliases will not be available.'); } if (!_.isNil(definition.ports)) { @@ -213,7 +242,6 @@ export default class DockerContainer { /** * Produces objects representing the Docker API container and associated configuration from a CIX step definition. 
- * * @param {object} definition - the CIX step (container) definition structure */ fromStep(definition) { @@ -221,9 +249,9 @@ export default class DockerContainer { if (!_.isNil(definition.commands)) { if (definition['commands-shell']) { - this.config.files = [createScript(definition.commands, definition['commands-shell'])]; + this.config.files = [DockerContainer.createScript(definition.name, definition.commands, definition['commands-output'], definition['commands-shell'])]; } else { - this.config.files = [createScript(definition.commands)]; + this.config.files = [DockerContainer.createScript(definition.name, definition.commands, definition['commands-output'])]; } this.containerSpec.Entrypoint = '/usr/bin/cixrc'; @@ -235,6 +263,9 @@ export default class DockerContainer { this.containerSpec.Env = [ `CIX_CONTAINER_NAME=${this.containerSpec.name}`, `CIX_STEP_NAME=${definition.name}`, + `CIX_WORKDIR=${definition['working-dir'] || '/cix/src'}`, + `CIX_WORKSPACE=${definition['workspace-mount-point'] || '/cix/src'}`, + 'CIX_TEMP=/cix/tmp', ]; if (!_.isNull(this.environment)) { @@ -251,17 +282,6 @@ export default class DockerContainer { } } - /* - * Populate all the special non-Docker fields that are used to convey configuration and metadata. - */ - - if (definition['pull-policy']) { - log.debug('Pull-policy for this image is ' + definition['pull-policy']); - this.config.pullPolicy = definition['pull-policy']; - } else { - this.config.pullPolicy = 'Default'; - } - // Parse the image name for registry information, expand if aliased, and populate auth credentials. const imageParts = DockerUtil.parseRepositoryTag(definition.image); @@ -278,16 +298,16 @@ export default class DockerContainer { if (imageRegistry && definition.registry[imageRegistry]) { if (imageRegistry.includes('.')) { - log.debug(`Registry ${imageRegistry} is a literal host name`); + Logger.debug(`Registry ${imageRegistry} is a literal host name`); } else if (definition.registry[imageRegistry].host && definition.registry[imageRegistry].host.name) { - log.debug(`Registry ${imageRegistry} is an alias for ${definition.registry[imageRegistry].host.name}`); + Logger.debug(`Registry ${imageRegistry} is an alias for ${definition.registry[imageRegistry].host.name}`); this.config.imageRepository = this.config.imageRepository.replace(imageRegistry, definition.registry[imageRegistry].host.name); } if (definition.registry[imageRegistry].username && definition.registry[imageRegistry].password) { if (definition.registry[imageRegistry].username.startsWith('$$')) { - log.warn('Username not set, logging in anonymously...'); + Logger.debug('Registry username variable unset, access will be unauthenticated'); } else { this.config.authConfig = { username: definition.registry[imageRegistry].username, @@ -302,18 +322,31 @@ export default class DockerContainer { } } - this.containerSpec.Image = this.config.imageRepository + ':' + this.config.imageTag; + if (this.config.imageTag.includes('sha256:')) { + this.containerSpec.Image = this.config.imageRepository + '@' + this.config.imageTag; + } else { + this.containerSpec.Image = this.config.imageRepository + ':' + this.config.imageTag; + } + + /* + * Populate all the special non-Docker fields that are used to convey configuration and metadata. 
+ */ + if (definition['pull-policy']) { + this.config.pullPolicy = definition['pull-policy']; + Logger.debug(`Pull-policy for ${this.containerSpec.Image} is ${this.config.pullPolicy} from step definition.`); + } else { + this.config.pullPolicy = this.defaultPullPolicy; + Logger.debug(`Pull-policy for ${this.containerSpec.Image} is ${this.config.pullPolicy} from default pull-policy.`); + } this.config.shortName = definition.name; this.config.qualifiedName = this.containerSpec.name; - this.config.continueOnFail = Boolean(definition['continue-on-fail']); this.config.background = Boolean(definition.background); } /** * Return a shortened version of a Docker container ID. - * * @returns {string} the short container ID (12 hex digits) */ getId() { @@ -322,7 +355,6 @@ export default class DockerContainer { /** * Return the container's short, friendly name, which is the CIX step name. - * * @returns {string} the container's short name */ getShortName() { @@ -331,7 +363,6 @@ export default class DockerContainer { /** * Return the Docker engine's name for the container (network name '-' short name). - * * @returns {string} the container name qualified with unique network name */ getQualifiedName() { @@ -340,7 +371,6 @@ export default class DockerContainer { /** * Transfer files into a container. - * * @param {Array} files files to put in container * @returns {Promise} a promise which resolves to a Dockerode container object */ @@ -364,18 +394,26 @@ export default class DockerContainer { for (let i = 0; i < iterations; i++) { try { - await this.pullImageFromRegistry(imagePullSpec); + // dockerode may modify imagePullSpec + const imagePullSpecClone = _.cloneDeep(imagePullSpec); + + Logger.debug(`Starting attempt ${i + 1}/${iterations} to pull ${imagePullSpecClone.fromImage}:${imagePullSpecClone.tag} ` + + `(username: ${imagePullSpecClone.authconfig ? imagePullSpecClone.authconfig.username : 'not provided'})`); + await this.pullImageFromRegistry(imagePullSpecClone); break; } catch (err) { if (err.statusCode && err.statusCode === 500) { - // Registry server errors (timeout, bad auth, etc) will be retried. + Logger.warn(`Error while attempting docker pull for step ${this.getShortName()}:`); + Logger.warn(` ${err}`); + // registry server errors (timeout, bad auth, etc) will be retried. 
if (i + 1 < iterations) { - log.warn(`DockerPullImage: received error communicating with registry, retrying (attempt ${i + 1}/${iterations})`); await new Promise((resolve) => setTimeout(resolve, backoff * 1000)); + continue; } - } else { - throw new DockerError(err); + } else if (err.statusCode && err.statusCode === 404) { + Logger.warn('Encountered 404 error while attempting docker pull, did you provide the correct credentials for the Docker registry?'); } + throw err; } } } @@ -383,7 +421,7 @@ export default class DockerContainer { async pullImageFromRegistry(imagePullSpec) { const imageStream = await this.dockerApi.createImage(imagePullSpec); const output = await util.promisify(this.dockerApi.modem.followProgress)(imageStream); - log.debug(`DockerPullFromRegistry '${this.getQualifiedName()}': ${output[output.length - 1].status}`); + Logger.debug(`DockerPullFromRegistry '${this.getQualifiedName()}': ${output[output.length - 1].status}`); } /** @@ -398,14 +436,13 @@ export default class DockerContainer { username: this.config.authConfig.username, password: this.config.authConfig.password, }; - - log.debug(`DockerPullImage '${this.getQualifiedName()}': using authentication (username ${imagePullSpec.authconfig.username})`); } imagePullSpec.fromImage = this.config.imageRepository; imagePullSpec.tag = this.config.imageTag; - log.debug(`DockerPullImage '${this.getQualifiedName()}': pulling image ${imagePullSpec.fromImage}:${imagePullSpec.tag}`); + Logger.debug(`Pulling image ${imagePullSpec.fromImage}:${imagePullSpec.tag} ` + + `(username: ${imagePullSpec.authconfig ? imagePullSpec.authconfig.username : 'not provided'})`); if (this.config.pullRetryConfig) { await this.pullRetryDecorator(imagePullSpec); @@ -414,13 +451,12 @@ export default class DockerContainer { await this.pullImageFromRegistry(imagePullSpec); } } catch (err) { - throw new DockerError(`DockerPullImage '${this.getQualifiedName()}': failed to pull image ${this.config.imageRepository}:${this.config.imageTag}: \n\t${err}`); + throw new DockerError(`Failed to pull image ${this.config.imageRepository}:${this.config.imageTag}: ${err.message}`); } } /** * Create container from this object. - * * @param {object} definition - the step container definition. * @param {string} [input] - string data which will be provided to the container on its stdin. */ @@ -437,19 +473,19 @@ export default class DockerContainer { this.fromStep(definition); try { - log.debug(`Creating ${this.getShortName()} (docker container ${this.getQualifiedName()})`); + Logger.debug(`Creating container for step ${this.getShortName()} (docker container ${this.getQualifiedName()})`); if (this.config.imageTag === 'latest' && - this.config.pullPolicy !== 'IfNotPresent' && - this.config.pullPolicy !== 'Never' || - this.config.pullPolicy === 'Always') { + this.config.pullPolicy !== 'IfNotPresent' && + this.config.pullPolicy !== 'Never' || + this.config.pullPolicy === 'Always') { try { await this.pullImage(); } catch (err) { if (this.config.pullPolicy === 'Always') { throw err; } - log.warn('Failed to update image tagged \'latest\', image may be out of date'); + Logger.warn('Failed to update image tagged \'latest\', image may be out of date'); } } @@ -458,17 +494,21 @@ export default class DockerContainer { if (err.statusCode && err.statusCode === 404) { // Docker Engine reports image is not found locally, attempt to pull it from a registry (unless policy is Never). 
if (this.config.pullPolicy === 'Never') { - throw new DockerError(`DockerCreate: could not create container '${this.getQualifiedName()}', image is not present and the pull policy is 'Never': \n\t${err}`); + throw new DockerError(`Could not create container '${this.getQualifiedName()}', image is not present and the pull policy is 'Never': ${err}`); } try { await this.pullImage(); container = await this.dockerApi.createContainer(this.containerSpec); } catch (errNested) { - throw new DockerError(`DockerCreate: could not create container '${this.getQualifiedName()}': \n\t${errNested}`); + throw new DockerError(`Could not create container '${this.getQualifiedName()}': ${errNested}`); } } else { - throw new DockerError(err); + if (err.constructor == DockerError) { + throw err; + } else { + throw new DockerError(err); + } } } @@ -481,13 +521,12 @@ export default class DockerContainer { /** * Start the container. - * * @param {object} [outputStream] - stream to which the container will print standard output. * @param {object} [errorStream] - stream to which the container will print error output. * @returns {number} the container's exit status */ async start(outputStream, errorStream) { - log.debug(`Starting ${this.getShortName()} (docker container ${this.getQualifiedName()})`); + Logger.debug(`Starting ${this.getShortName()} (docker container ${this.getQualifiedName()})`); if (outputStream) { this.outputStream = outputStream; @@ -517,7 +556,7 @@ export default class DockerContainer { stream: true, }, (err, muxedStream) => { if (err) { - throw new DockerError(`Container '${this.getQualifiedName()}' attach failed: \n\t${err}`); + throw new DockerError(`Container '${this.getQualifiedName()}' attach failed: ${err}`); } if (attachStdin) { @@ -529,41 +568,38 @@ export default class DockerContainer { }); if (this.config.background) { - log.debug(`Background container '${this.getQualifiedName()}' (${this.getId()}) started`); + Logger.debug(`Background container '${this.getQualifiedName()}' (${this.getId()}) started`); return; } let waitResult; try { - log.debug(`Waiting for container ${this.getQualifiedName()} (${this.getId()})`); + Logger.debug(`Waiting for container ${this.getQualifiedName()} (${this.getId()})`); waitResult = await this.container.wait(); } catch (e) { - log.debug(`Exception while waiting for container ${this.getQualifiedName()}: \n\t${e}`); + Logger.debug(`Exception while waiting for container ${this.getQualifiedName()}:`); + Logger.debug(` ${e}`); } try { await this.remove(); } catch (e) { - log.debug(`Exception while removing container ${this.getQualifiedName()}`); + Logger.debug(`Exception while removing container ${this.getQualifiedName()}`); } if (waitResult && waitResult.StatusCode) { - if (this.config.continueOnFail) { - log.debug(`Container '${this.getQualifiedName()}' exited with non-zero status ${waitResult.StatusCode} (ignored)`); - } else { - throw new DockerError(`Container '${this.getQualifiedName()}' failed with non-zero exit status ${waitResult.StatusCode}`); - } + throw new PipelineError(`Step ${this.getShortName()} failed with non-zero exit status ${waitResult.StatusCode}`); } else { - log.debug(`Container '${this.getQualifiedName()}' exited cleanly`); + Logger.debug(`Container '${this.getQualifiedName()}' exited cleanly`); } + this.endConsoleLog(); return waitResult.StatusCode; } /** * Stop the container. - * * @param {object} options - Dockerode stop API options, e.g. 
t: timeout before killing the container * @returns {object} this DockerContainer object, which allows chaining */ @@ -574,11 +610,25 @@ export default class DockerContainer { /** * Remove the container. - * * @returns {object} this DockerContainer object, which allows chaining */ async remove() { await this.container.remove(); return this; } + + /** + * Ends the logger to flush any truncated output. + */ + async endConsoleLog() { + await new Promise((resolve) => { + this.outputStream.on('finish', resolve); + this.errorStream.on('error', resolve); + + this.outputStream.end(); + this.errorStream.end(); + // don't wait longer than 1s for stream to close.. + setTimeout(resolve, 1000); + }); + } } diff --git a/src/docker/DockerContainer.test.js b/src/docker/DockerContainer.test.js index be1856e2..b2465001 100644 --- a/src/docker/DockerContainer.test.js +++ b/src/docker/DockerContainer.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -87,6 +87,9 @@ describe('Step definition transformation', () => { Env: [ 'CIX_CONTAINER_NAME=testnetwork-test', 'CIX_STEP_NAME=test', + 'CIX_WORKDIR=/cix/src', + 'CIX_WORKSPACE=/cix/src', + 'CIX_TEMP=/cix/tmp', ], HostConfig: { Binds: [], @@ -109,11 +112,10 @@ describe('Step definition transformation', () => { }); expect(dockerContainer.config).toEqual({ background: false, - continueOnFail: false, files: [ { data: '#!/bin/sh\n\nPATH=/cix/bin:$PATH\n' + - 'cix_echo() {\n' + + 'cix_timestamp_echo() {\n' + ' cix_prev_status=$?\n' + ' if [ -x \"$(command -v date 2>/dev/null)\" ]; then\n' + ' echo \"+ [$(TZ=GMT+8 date +\"%Y-%m-%dT%H:%M:%S%z\")] $*\"\n' + @@ -122,8 +124,13 @@ describe('Step definition transformation', () => { ' fi\n' + ' return $cix_prev_status\n' + '}\n' + + 'cix_echo() {\n' + + ' cix_prev_status=$?\n' + + ' echo \"+ $*\"\n' + + ' return $cix_prev_status\n' + + '}\n' + 'set -e\n\n' + - 'cix_echo \'echo hello world\'\n' + + 'cix_timestamp_echo \'echo hello world\'\n' + 'echo hello world\n', mode: 0o755, name: 'usr/bin/cixrc', @@ -151,6 +158,9 @@ describe('Step definition transformation', () => { expect(dockerContainer.containerSpec.Env).toEqual([ 'CIX_CONTAINER_NAME=testnetwork-test', 'CIX_STEP_NAME=test', + 'CIX_WORKDIR=/cix/src', + 'CIX_WORKSPACE=/cix/src', + 'CIX_TEMP=/cix/tmp', 'SYSVAR=SYSVALUE', 'VAR=VALUE', ]); @@ -217,6 +227,44 @@ describe('Step definition transformation', () => { expect(dockerContainer.config.imageRepository).toEqual('path/image'); }); + test('fromStep() will parse image names with sha256 digest', () => { + const step = { + name: 'test', + image: 'path/image@sha256:1234', + }; + dockerContainer.fromStep(step); + expect(dockerContainer.containerSpec.Image).toEqual('path/image@sha256:1234'); + }); + + + test('fromStep() will parse image names with tag', () => { + const step = { + name: 'test', + image: 'path/image:1234', + }; + dockerContainer.fromStep(step); + expect(dockerContainer.containerSpec.Image).toEqual('path/image:1234'); + }); + + test('fromStep() will parse image names with tag and sha256 digest', () => { + const step = { + name: 'test', + image: 'path/image:1234@sha256:1234', + }; + dockerContainer.fromStep(step); + expect(dockerContainer.containerSpec.Image).toEqual('path/image:1234@sha256:1234'); + }); + + + test('fromStep() will parse image names with no tag as latest', () => { 
+ const step = { + name: 'test', + image: 'path/image', + }; + dockerContainer.fromStep(step); + expect(dockerContainer.containerSpec.Image).toEqual('path/image:latest'); + }); + test('createContainerSpec() configures system and step volumes', () => { dockerContainer.volumes = [{name: 'stepvol', path: '/stepvol'}]; const step = { @@ -248,13 +296,67 @@ describe('Step definition transformation', () => { expect(newSpec.WorkingDir).toEqual('/cix/src'); }); + test('createContainerSpec() sets user when passed as string', () => { + const step = { + 'name': 'step-name', + 'user': '123', + }; + const newSpec = dockerContainer.createContainerSpec(step); + expect(newSpec.User).toEqual('123'); + }); + + test('createContainerSpec() sets user when passed as int', () => { + const step = { + 'name': 'step-name', + 'user': 123, + }; + const newSpec = dockerContainer.createContainerSpec(step); + expect(newSpec.User).toEqual('123'); + }); + + test('createContainerSpec() picks user from environment variable', () => { + dockerContainer.environment.addEnvironmentVariable({name: 'ug-id', value: '123:456'}); + const step = { + 'name': 'step-name', + 'user': '$$ug-id', + }; + const newSpec = dockerContainer.createContainerSpec(step); + expect(newSpec.User).toEqual('123:456'); + }); + + test('createContainerSpec() skips setting user if starts with $$', () => { + const step = { + 'name': 'step-name', + 'user': '$$ug-id', + }; + const newSpec = dockerContainer.createContainerSpec(step); + expect(newSpec.User).toEqual(undefined); + }); + + test('createContainerSpec() user is undefined by default', () => { + const step = { + 'name': 'step-name', + }; + const newSpec = dockerContainer.createContainerSpec(step); + expect(newSpec.User).toEqual(undefined); + }); + + test('createContainerSpec() user is undefined when user-id is empty', () => { + const step = { + 'name': 'step-name', + 'user': '', + }; + const newSpec = dockerContainer.createContainerSpec(step); + expect(newSpec.User).toEqual(undefined); + }); + test('createContainerSpec() sets Privileged mode', () => { const step = { name: 'step-name', privileged: true, }; const newSpec = dockerContainer.createContainerSpec(step); - expect(newSpec.Privileged).toEqual(true); + expect(newSpec.HostConfig.Privileged).toEqual(true); }); test('createContainerSpec() configures unattached container', () => { @@ -386,12 +488,6 @@ describe('Basic container operations (start, stop, remove)', () => { await expect(dockerContainer.start()).rejects.toThrow('failed with non-zero exit status'); }); - test('start() with continue-on-fail does not throw when the container fails', async () => { - mockContainer.wait = jest.fn().mockResolvedValue({StatusCode: 1}); - dockerContainer.config.continueOnFail = true; - await expect(dockerContainer.start()).resolves.toEqual(1); - }); - test('stop() calls Dockerode stop', async () => { await expect(dockerContainer.stop()).resolves.toEqual(dockerContainer); expect(mockContainer.stop).toHaveBeenCalled(); @@ -413,7 +509,7 @@ describe('Pulling Images', () => { dockerContainer = new DockerContainer(dockerApi, 'testprefix', environment, 'testnetwork'); }); - test('pullImage() calls pullImageFromRegistry', () => { + test('pullImage() calls pullImageFromRegistry', async () => { dockerContainer.config = { authConfig: { username: 'user', @@ -424,7 +520,7 @@ describe('Pulling Images', () => { }; dockerContainer.pullImageFromRegistry = jest.fn(); - expect(dockerContainer.pullImage()).resolves.toBeUndefined(); + expect(await 
dockerContainer.pullImage()).toBeUndefined(); expect(dockerContainer.pullImageFromRegistry).toHaveBeenCalledWith({ authconfig: { password: 'pass', @@ -435,7 +531,7 @@ describe('Pulling Images', () => { }); }); - test('pullImage() with Retry calls pullRetryDecorator', () => { + test('pullImage() with retry calls pullRetryDecorator', async () => { dockerContainer.config = { imageRepository: 'test', imageTag: 'latest', @@ -447,7 +543,7 @@ describe('Pulling Images', () => { dockerContainer.pullImageFromRegistry = jest.fn(); dockerContainer.pullRetryDecorator = jest.fn(); - expect(dockerContainer.pullImage()).resolves.toBeUndefined(); + expect(await dockerContainer.pullImage()).toBeUndefined(); expect(dockerContainer.pullImageFromRegistry).not.toHaveBeenCalled(); expect(dockerContainer.pullRetryDecorator).toHaveBeenCalledWith({ fromImage: 'test', @@ -455,35 +551,44 @@ describe('Pulling Images', () => { }); }); - test.skip('pullRetryDecorator()', () => { + test.skip('pullRetryDecorator()', async () => { + const imagePullSpec = { + 'fromImage': 'does-not-exist', + 'tag': 'latest', + }; + dockerContainer.config = { - pullRetryConfig: { - backoff: 1, - iterations: 2, + 'authConfig': { + 'username': 'test', + 'password': 'test-pass', + }, + 'pullRetryConfig': { + 'backoff': 0, + 'iterations': 3, }, }; - let count = 1; - dockerContainer.pullImageFromRegistry = jest.fn().mockImplementation(() => { - console.log(`Retry test count is ${count}`); - if (count++ === 3) { - console.log('Retry test returning'); + + let count = 0; + dockerContainer.pullImageFromRegistry = jest.fn().mockImplementation((imagePullSpec) => { + count++; + + expect(imagePullSpec.authconfig.username).toBe('test'); + expect(imagePullSpec.authconfig.password).toBe('test-pass'); + + // dockerode currently strips credentials from imagePullSpec during a request + imagePullSpec.authconfig = undefined; + + if (count === 3) { return; } else { - console.log('Retry test throwing'); const e = new Error(); e.statusCode = 500; throw e; } }); - jest.useFakeTimers(); - expect(dockerContainer.pullRetryDecorator()).resolves.toBeUndefined(); - jest.runAllTimers(); + await dockerContainer.pullRetryDecorator(imagePullSpec); expect(dockerContainer.pullImageFromRegistry).toHaveBeenCalledTimes(3); - // Is it only counting the non-Thrown calls? Maybe there's a way to count all regardless - }); - - test.skip('pullImageFromRegistry', () => { }); }); @@ -511,4 +616,120 @@ describe('Copying files to the container', () => { // expect(dockerContainer.container.putArchive).toHaveBeenCalled(); // OK we're not really testing anything here... 
}); + + describe('createScript', () => { + test('should support the echo commands-output', () => { + const result = DockerContainer.createScript('test-echo', ['ls', 'pwd'], 'echo'); + const expectedResult = { + data: '#!/bin/sh\n\nPATH=/cix/bin:$PATH\n' + + 'cix_timestamp_echo() {\n' + + ' cix_prev_status=$?\n' + + ' if [ -x \"$(command -v date 2>/dev/null)\" ]; then\n' + + ' echo \"+ [$(TZ=GMT+8 date +\"%Y-%m-%dT%H:%M:%S%z\")] $*\"\n' + + ' else\n' + + ' echo \"+ $*\"\n' + + ' fi\n' + + ' return $cix_prev_status\n' + + '}\n' + + 'cix_echo() {\n' + + ' cix_prev_status=$?\n' + + ' echo \"+ $*\"\n' + + ' return $cix_prev_status\n' + + '}\n' + + 'set -e\n\n' + + 'cix_echo \'ls\'\n' + + 'ls\n\n' + + 'cix_echo \'pwd\'\n' + + 'pwd\n', + mode: 0o755, + name: 'usr/bin/cixrc', + }; + expect(result).toEqual(expectedResult); + }); + + test('should support the minimal commands-output', () => { + const result = DockerContainer.createScript('test-minimal', ['ls', 'pwd'], 'minimal'); + const expectedResult = { + data: '#!/bin/sh\n\nPATH=/cix/bin:$PATH\n' + + 'cix_timestamp_echo() {\n' + + ' cix_prev_status=$?\n' + + ' if [ -x \"$(command -v date 2>/dev/null)\" ]; then\n' + + ' echo \"+ [$(TZ=GMT+8 date +\"%Y-%m-%dT%H:%M:%S%z\")] $*\"\n' + + ' else\n' + + ' echo \"+ $*\"\n' + + ' fi\n' + + ' return $cix_prev_status\n' + + '}\n' + + 'cix_echo() {\n' + + ' cix_prev_status=$?\n' + + ' echo \"+ $*\"\n' + + ' return $cix_prev_status\n' + + '}\n' + + 'set -e\n\n' + + 'ls\n\n' + + 'pwd\n', + mode: 0o755, + name: 'usr/bin/cixrc', + }; + expect(result).toEqual(expectedResult); + }); + + test('should support the timestamp commands-output', () => { + const result = DockerContainer.createScript('test-timestamp', ['ls', 'pwd'], 'timestamp'); + const expectedResult = { + data: '#!/bin/sh\n\nPATH=/cix/bin:$PATH\n' + + 'cix_timestamp_echo() {\n' + + ' cix_prev_status=$?\n' + + ' if [ -x \"$(command -v date 2>/dev/null)\" ]; then\n' + + ' echo \"+ [$(TZ=GMT+8 date +\"%Y-%m-%dT%H:%M:%S%z\")] $*\"\n' + + ' else\n' + + ' echo \"+ $*\"\n' + + ' fi\n' + + ' return $cix_prev_status\n' + + '}\n' + + 'cix_echo() {\n' + + ' cix_prev_status=$?\n' + + ' echo \"+ $*\"\n' + + ' return $cix_prev_status\n' + + '}\n' + + 'set -e\n\n' + + 'cix_timestamp_echo \'ls\'\n' + + 'ls\n\n' + + 'cix_timestamp_echo \'pwd\'\n' + + 'pwd\n', + mode: 0o755, + name: 'usr/bin/cixrc', + }; + expect(result).toEqual(expectedResult); + }); + + test('should default to the timestamp commands-output', () => { + const result = DockerContainer.createScript('test-default', ['ls', 'pwd']); + const expectedResult = { + data: '#!/bin/sh\n\nPATH=/cix/bin:$PATH\n' + + 'cix_timestamp_echo() {\n' + + ' cix_prev_status=$?\n' + + ' if [ -x \"$(command -v date 2>/dev/null)\" ]; then\n' + + ' echo \"+ [$(TZ=GMT+8 date +\"%Y-%m-%dT%H:%M:%S%z\")] $*\"\n' + + ' else\n' + + ' echo \"+ $*\"\n' + + ' fi\n' + + ' return $cix_prev_status\n' + + '}\n' + + 'cix_echo() {\n' + + ' cix_prev_status=$?\n' + + ' echo \"+ $*\"\n' + + ' return $cix_prev_status\n' + + '}\n' + + 'set -e\n\n' + + 'cix_timestamp_echo \'ls\'\n' + + 'ls\n\n' + + 'cix_timestamp_echo \'pwd\'\n' + + 'pwd\n', + mode: 0o755, + name: 'usr/bin/cixrc', + }; + expect(result).toEqual(expectedResult); + }); + }); }); diff --git a/src/docker/DockerExec.js b/src/docker/DockerExec.js index 944df244..837b0cc8 100644 --- a/src/docker/DockerExec.js +++ b/src/docker/DockerExec.js @@ -1,13 +1,12 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
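// Illustrative sketch: the three commands-output modes exercised by the
// createScript() tests above, called with the same static signature
// (name, commands, outputMode) those tests use.
import DockerContainer from './DockerContainer.js';

// 'minimal'   - run each command as-is
// 'echo'      - print "+ <command>" via cix_echo() before each command
// 'timestamp' - like 'echo', but prefixed with a GMT+8 timestamp when `date`
//               is available; this is the default when no mode is given
const script = DockerContainer.createScript('example', ['ls', 'pwd'], 'minimal');
console.log(script.name);              // 'usr/bin/cixrc'
console.log(script.mode.toString(8));  // '755'
console.log(script.data);              // '#!/bin/sh' preamble followed by the commands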
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {ContainerLogger, DockerError, NodeProvider, _} from '../common/index.js'; +import {DockerError, Logger, NodeProvider, _} from '../common/index.js'; import Docker from 'dockerode'; import DockerContainer from './DockerContainer.js'; -import log from 'winston'; import stream from 'stream'; import url from 'url'; import util from 'util'; @@ -27,21 +26,27 @@ export default class DockerExec { this.dockerApi = new Docker(options); + if (!_.isNil(_.emptyToNull(NodeProvider.getProcess().env.DOCKER_NETWORK))) { + this.networkName = NodeProvider.getProcess().env.DOCKER_NETWORK; + Logger.warn(`Using provided DOCKER_NETWORK=${this.networkName} override.`); + this.networkProvided = true; + } else { + this.networkProvided = false; + } + this.containers = []; } /** - * Return the ContainerLogger to use. - * - * @returns {object} the docker logger for this execution + * Return the default Docker pull-policy for the pipeline + * @returns {string} the default pull-policy */ - getContainerLogger() { - return this.containerLogger; + getDefaultPullPolicy() { + return this.pipeline.defaultPullPolicy; } /** * Return the Dockerode object. - * * @returns {object} Dockerode object */ getDockerApi() { @@ -50,7 +55,6 @@ export default class DockerExec { /** * Return the environment object. - * * @returns {object} environment object */ getEnvironment() { @@ -59,7 +63,6 @@ export default class DockerExec { /** * Return the Docker network name created upon initialization. - * * @returns {string} the network name */ getNetworkName() { @@ -68,7 +71,6 @@ export default class DockerExec { /** * Return the unique prefix assigned to this Exec. - * * @returns {string} the unique prefix string */ getUniquePrefix() { @@ -77,7 +79,6 @@ export default class DockerExec { /** * Return the Docker volumes to mount in each container. - * * @returns {Array} list of volume objects */ getVolumes() { @@ -86,7 +87,6 @@ export default class DockerExec { /** * Adds container to the list of tracked containers. - * * @param {object} container - DockerContainer object */ trackContainer(container) { @@ -95,16 +95,22 @@ export default class DockerExec { /** * Return the list of containers that have been created by this Exec. - * * @returns {Array} list of container objects */ getTrackedContainers() { return this.containers; } + /** + * Is the docker network provided. + * @returns {boolean} true if the network is provided + */ + isNetworkProvided() { + return this.networkProvided; + } + /** * Find the most recent CIX image on the host. - * * @returns {string} latest CIX Image on the host */ async getLatestCixImage() { @@ -122,84 +128,87 @@ export default class DockerExec { * Copy CIX-furnished utility commands into the cix-bin volume. 
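// Illustrative sketch: the DOCKER_NETWORK override added to the constructor
// above attaches step containers to an existing Docker network instead of a
// per-pipeline one. Assumes NodeProvider.getProcess() returns the Node process;
// the network name is only an example.
import DockerExec from './DockerExec.js';

process.env.DOCKER_NETWORK = 'ci-shared-net';
const exec = new DockerExec();  // warns: "Using provided DOCKER_NETWORK=ci-shared-net override."
exec.isNetworkProvided();       // => true
// init() then skips dockerApi.createNetwork(), and tearDownNetwork() later
// leaves the provided network in place during cleanup.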
*/ async installUtilities() { - log.debug('Installing utilities'); + Logger.debug('Installing utilities'); const cixImageId = (await this.getLatestCixImage()).Id; const cixBinPath = _.find(this.getVolumes(), ['path', '/cix/bin']); const cixBinMount = cixBinPath.name + ':' + cixBinPath.path; const definition = { - name: 'cix-utils', - image: cixImageId, - commands: ['cp scripts/cix-bin/* /cix/bin'], - volumes: [cixBinMount], + 'name': 'cix-utils', + 'image': cixImageId, + 'commands': ['cp scripts/cix-bin/* /cix/bin'], + 'pull-policy': 'Never', + 'volumes': [cixBinMount], }; const container = new DockerContainer(this.getDockerApi(), this.getUniquePrefix()); await container.create(definition); this.trackContainer(container); - // TODO: log output using ContainerLogger (need an Environment object, not from Pipeline) + // TODO: log output using PipelineLogger (need an Environment object, not from Pipeline) await container.start(null, NodeProvider.getProcess().stderr); } /** * Prepares Docker to run containers by creating the network, volumes, and any other dependencies. - * * @param {object} pipeline - Pipeline reference. */ async init(pipeline) { this.pipeline = pipeline; - if (!_.isNil(this.getNetworkName())) { - throw new DockerError(`Network ${this.getNetworkName()} has already been created`); - } - this.uniquePrefix = `cix-${this.pipeline.getShortId()}`; - try { - this.networkName = this.uniquePrefix; - - this.getEnvironment().addEnvironmentVariable({name: 'CIX_NETWORK_NAME', value: this.networkName, type: 'internal'}); + if (!this.isNetworkProvided()) { + try { + this.networkName = this.uniquePrefix; - await this.dockerApi.createNetwork({ - Name: this.networkName, - Driver: 'bridge', - }); - } catch (err) { - throw new DockerError(`Failed to create the network ${this.networkName} (host might be out of address ranges - run 'docker system prune' to remove cruft):\n\t${err}`); + await this.dockerApi.createNetwork({ + Name: this.networkName, + Driver: 'bridge', + }); + } catch (err) { + throw new DockerError(`Failed to create the network ${this.networkName} (host might be out of address ranges - run 'docker system prune' to remove cruft): ${err}`); + } } + this.getEnvironment().addEnvironmentVariable({name: 'CIX_NETWORK_NAME', value: this.networkName, type: 'internal'}); + this.volumes = _.cloneDeep(DEFAULT_VOLUMES); _.map(this.volumes, (volume) => volume.name = `${this.uniquePrefix}-${volume.name}`); - _.each(this.volumes, async (volume) => { + _.forEach(this.volumes, async (volume) => { try { await this.dockerApi.createVolume({ Name: volume.name, }); - log.debug(`Successfully created volume ${volume.name}`); + Logger.debug(`Successfully created volume ${volume.name}`); } catch (err) { - throw new DockerError(`Failed to create the volume ${volume.name}:\n\t${err}`); + throw new DockerError(`Failed to create the volume ${volume.name}: ${err}`); } }); - this.containerLogger = new ContainerLogger(); - await this.installUtilities(); } /** * Starts a container, and unless `step.background` is true waits for it to exit. 
- * * @param {object} step - the CIX step definition object * @returns {object} the original step object, updated with container exit status code if available */ async runStep(step) { - if (_.isNil(this.getNetworkName())) { - throw new DockerError('DockerExec has not been initialized'); + if (!this.isNetworkProvided() && _.isNil(this.getNetworkName())) { + throw new DockerError('DockerExec network has not been initialized'); } - const container = new DockerContainer(this.dockerApi, this.uniquePrefix, this.getEnvironment(), this.getNetworkName(), this.getVolumes()); + const container = new DockerContainer( + this.dockerApi, + this.uniquePrefix, + this.getEnvironment(), + this.getNetworkName(), + this.getVolumes(), + this.getDefaultPullPolicy(), + ); + await container.create(step); this.trackContainer(container); @@ -210,9 +219,11 @@ export default class DockerExec { qualifiedName: container.getQualifiedName(), }; + const pipelineLogger = Logger.getPipelineLogger(this.pipeline?.getId()); + const statusCode = await container.start( - this.getContainerLogger().createServerOutputStream(this.pipeline, containerNames), - this.getContainerLogger().createServerOutputStream(this.pipeline, containerNames, true), + pipelineLogger.createContainerOutputStream(containerNames), + pipelineLogger.createContainerOutputStream(containerNames, true), ); step.Finished = { @@ -224,7 +235,6 @@ export default class DockerExec { /** * Execute a preprocessor container. - * * @param {string} image - name of the preprocessor image * @param {string} data - string data to pass to the container on stdin * @returns {string} transformed data @@ -235,7 +245,6 @@ export default class DockerExec { 'name': 'cix-prepro', 'image': image, 'pull-policy': 'IfNotPresent', - 'continue-on-fail': true, }; const container = new DockerContainer(this.dockerApi, `cix-${_.randomString(8)}`); @@ -268,65 +277,108 @@ export default class DockerExec { } /** - * Cleans up the Docker containers and network used by the pipeline. + * Stop and remove one step container. + * @param {object} container - container to remove */ - async tearDown() { - if (_.isEmpty(this.getTrackedContainers())) { - log.debug('No containers to stop'); - } else { - log.debug('Ensuring all containers are stopped and removed'); - - await _.each(this.getTrackedContainers(), async (container) => { - let containerToRemove; - - log.debug(`Removing container ${container.getId()}`); - - try { - containerToRemove = await container.stop(); - } catch (err) { - if (err.message && err.message.includes('container already stopped')) { - containerToRemove = container; - } else if (err.message && err.message.includes('no such container')) { - // This isn't strictly necessary but we want to ignore the exception as no container needs to be removed. 
- containerToRemove = null; - } else { - log.error(`Failed to stop container ${container.getId()}: \n\t${err}\n`); - } - } - - if (containerToRemove) { - try { - await containerToRemove.remove(); - } catch (err) { - log.error(`Failed to remove container ${containerToRemove.getId()}: \n\t${err}\n`); - } - } - }); + async tearDownOneContainer(container) { + let containerToRemove; + + Logger.debug(`Removing container ${container.getId()} (${container.getQualifiedName()})`); + + try { + containerToRemove = await container.stop(); + } catch (err) { + if (err.message && err.message.includes('container already stopped')) { + containerToRemove = container; + } else if (err.message && err.message.includes('no such container')) { + // This isn't strictly necessary but we want to ignore the exception as no container needs to be removed. + containerToRemove = null; + } else { + Logger.error(`Failed to stop container ${container.getQualifiedName()}:`); + Logger.error(` ${err}`); + } } - if (this.getNetworkName() != undefined) { - log.debug(`Removing network ${this.getNetworkName()}`); + if (containerToRemove) { + try { + await containerToRemove.remove(); + } catch (err) { + Logger.error(`Failed to remove container ${containerToRemove.getQualifiedName()}:`); + Logger.error(` ${err}`); + } + } + } + /** + * Ensure all step containers are stopped and removed. + */ + async tearDownContainers() { + Logger.info('Ensuring all containers are stopped and removed'); + _.forEach(this.getTrackedContainers(), async (container) => await this.tearDownOneContainer(container)); + + // Wait for background containers to stop. Should take a maximum of 15 seconds, the default stop() timeout. + // If we don't wait, the network and volumes can't be removed. We don't know if there are actually background + // containers in the pipeline... + for (let i = 0; i < 15; i++) { + const onlineContainers = await this.dockerApi.listContainers({filters: JSON.stringify({network: [this.getNetworkName()], status: ['running', 'removing']})}); + Logger.silly(`Online containers: ${JSON.stringify(onlineContainers)}`); + if (_.isEmpty(onlineContainers)) { + break; + } + Logger.debug(`Waiting for any stubborn containers to be stopped. ${15 - i} seconds to timeout.`); + await new Promise((r) => setTimeout(r, 1000)); + } + } + + /** + * Remove the Docker network created for the pipeline. + */ + async tearDownNetwork() { + if (this.getNetworkName() != undefined && !this.isNetworkProvided()) { + Logger.debug(`Removing network ${this.getNetworkName()}`); const network = this.dockerApi.getNetwork(this.getNetworkName()); try { await network.remove(this.getNetworkName()); } catch (err) { - log.error(`Failed to remove network ${this.getNetworkName()}: \n\t${err}\n`); + Logger.error(`Failed to remove network ${this.getNetworkName()}:`); + Logger.error(` ${err}`); } } + } - await _.each(this.getVolumes(), async (volume) => { + /** + * Remove the utility volumes create for the pipeline. + */ + async tearDownVolumes() { + _.forEach(this.getVolumes(), async (volume) => { const dockerVolume = this.dockerApi.getVolume(volume.name); - - log.debug(`Removing volume ${volume.name}`); - + Logger.debug(`Removing volume ${volume.name}`); try { await dockerVolume.remove(volume.name); } catch (err) { - log.error(`Failed to remove volume ${volume.name}: \n\t${err}\n`); + Logger.error(`Failed to remove volume ${volume.name}:`); + Logger.error(` ${err}`); } }); - log.debug('DockerExec teardown complete'); + // Node isn't waiting for the remove()s. 
If we don't wait, they won't be removed. + Logger.info('Waiting for volumes to be removed'); + await new Promise((r) => setTimeout(r, 3000)); + } + + /** + * Cleans up the Docker containers and network used by the pipeline. + */ + async tearDown() { + if (_.isEmpty(this.getTrackedContainers())) { + Logger.debug('No containers to stop'); + } else { + await this.tearDownContainers(); + } + + await this.tearDownNetwork(); + await this.tearDownVolumes(); + + Logger.debug('DockerExec teardown complete'); } } diff --git a/src/docker/DockerExec.test.js b/src/docker/DockerExec.test.js index e454c9b3..8397bab8 100644 --- a/src/docker/DockerExec.test.js +++ b/src/docker/DockerExec.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -7,6 +7,7 @@ /* global jest, describe, test, expect, beforeEach */ import DockerContainer from './DockerContainer'; import DockerExec from './DockerExec'; +import {NodeProvider} from '../common/index.js'; // eslint-disable-next-line no-unused-vars import dockerode from 'dockerode'; // eslint-disable-next-line no-unused-vars @@ -18,12 +19,43 @@ jest.mock('dockerode'); const environmentMock = {'addEnvironmentVariable': () => {}}; const pipelineMock = {'getEnvironment': () => environmentMock, 'getShortId': () => 'abcd1234', 'getLogStream': () => {}}; +describe('DockerExec runStep()', () => { + let dockerExec; + + beforeEach(() => { + jest.clearAllMocks(); + dockerExec = new DockerExec(); + }); + + test('cannot be called if uninitialized', async () => { + await expect(dockerExec.runStep()).rejects.toThrow('DockerExec network has not been initialized'); + }); + + test('runStep creates and starts a container', async () => { + dockerExec.networkName = 'test'; + dockerExec.getEnvironment = () => []; + dockerExec.getVolumes = () => []; + dockerExec.getDefaultPullPolicy = () => 'Default'; + + expect(dockerExec.getTrackedContainers()).toHaveLength(0); + + const step = await dockerExec.runStep({name: 'test-step', image: 'image:test'}); + + expect(DockerContainer.mock.instances[0].create).toHaveBeenCalled(); + expect(DockerContainer.mock.instances[0].start).toHaveBeenCalled(); + expect(dockerExec.getTrackedContainers()).toHaveLength(1); + expect(step).toHaveProperty('name', 'test-step'); + expect(step).toHaveProperty('Finished.StatusCode'); + }); +}); + describe('DockerExec init()', () => { const createNetworkResponse = {id: 'TEST'}; let dockerExec; let dockerApi; beforeEach(() => { + process.exit = jest.fn(); // ensure tests never exit dockerExec = new DockerExec(); jest.spyOn(dockerExec, 'installUtilities').mockImplementation(() => {}); dockerApi = dockerExec.getDockerApi(); @@ -31,46 +63,50 @@ describe('DockerExec init()', () => { dockerApi.createVolume.mockReset(); }); - test('cannot be called twice', () => { + test('cannot be called twice', async () => { dockerExec.networkName = 'test'; - return dockerExec.init(pipelineMock).catch((err) => expect(err.message).toMatch('has already been created')); + await dockerExec.init(pipelineMock).catch((err) => expect(err.message).toMatch('has already been created')); }); - test('calls createVolume twice', () => { + test('calls createVolume twice', async () => { dockerApi.createNetwork.mockResolvedValueOnce(createNetworkResponse); dockerApi.createVolume.mockResolvedValue(); - return 
dockerExec.init(pipelineMock).then(() => expect(dockerApi.createVolume.mock.calls.length).toBe(3)); + await dockerExec.init(pipelineMock).then(() => expect(dockerApi.createVolume.mock.calls.length).toBe(3)); }); - test('calls createNetwork once', () => { + test('calls createNetwork once', async () => { dockerApi.createNetwork.mockResolvedValueOnce(createNetworkResponse); dockerApi.createVolume.mockResolvedValue(); - return dockerExec.init(pipelineMock).then(() => expect(dockerApi.createNetwork.mock.calls.length).toBe(1)); + await dockerExec.init(pipelineMock).then(() => expect(dockerApi.createNetwork.mock.calls.length).toBe(1)); }); - test('skips createVolume when createNetwork fails with appropriate error', () => { + test('skips createVolume when createNetwork fails with appropriate error', async () => { dockerApi.createNetwork.mockRejectedValueOnce(); dockerApi.createVolume.mockResolvedValue(); - return dockerExec.init(pipelineMock).catch((err) => { + await dockerExec.init(pipelineMock).catch((err) => { expect(dockerApi.createVolume).not.toHaveBeenCalled(); expect(err.message).toMatch(/Failed to create the network/); expect(err.message).not.toMatch(/Failed to create the volume/); }); }); - test('fails when second createVolume fails with appropriate error', () => { - dockerApi.createNetwork.mockResolvedValueOnce(createNetworkResponse); - dockerApi.createVolume.mockResolvedValueOnce(); - dockerApi.createVolume.mockRejectedValueOnce(); - return dockerExec.init(pipelineMock).catch((err) => { - expect(dockerApi.createVolume.mock.calls.length).toBe(2); - expect(err.message).toMatch(/Failed to create the volume/); - expect(err.message).not.toMatch(/Failed to create the network/); + test('will skip network creation if one is provided', async () => { + const spy = jest.spyOn(NodeProvider, 'getProcess').mockImplementation(() => { + return { + env: {DOCKER_NETWORK: 'host'}, + }; }); + dockerExec = new DockerExec(); + spy.mockRestore(); + jest.spyOn(dockerExec, 'installUtilities').mockImplementation(() => {}); + + dockerApi = dockerExec.getDockerApi(); + + await dockerExec.init(pipelineMock).then(() => expect(dockerApi.createNetwork.mock.calls.length).toBe(0)); }); }); @@ -85,7 +121,7 @@ describe('DockerExec installUtilities()', () => { dockerApi = dockerExec.getDockerApi(); }); - test('picks latest CIX image to run', () => { + test('picks latest CIX image to run', async () => { dockerApi.createNetwork.mockResolvedValueOnce(createNetworkResponse); dockerApi.createVolume.mockResolvedValue(); dockerApi.listImages.mockResolvedValue([ @@ -94,17 +130,17 @@ describe('DockerExec installUtilities()', () => { {'Id': 3, 'Created': 1520482069, 'Labels': {'SCM_URL': 'https://github.com/salesforce/cix'}}, ]); - return dockerExec.getLatestCixImage().then((image) => expect(image.Id).toBe(2)); + await dockerExec.getLatestCixImage().then((image) => expect(image.Id).toBe(2)); }); - test('throws error when no CIX image found', () => { + test('throws error when no CIX image found', async () => { dockerApi.createNetwork.mockResolvedValueOnce(createNetworkResponse); dockerApi.createVolume.mockResolvedValue(); dockerApi.listImages.mockResolvedValue([ {'Id': 1, 'Created': 1549302571, 'Labels': {'SCM_URL': 'not-cix'}}, ]); - return dockerExec.getLatestCixImage().catch((err) => expect(err.message).toMatch(/Unable to locate CIX image/)); + await dockerExec.getLatestCixImage().catch((err) => expect(err.message).toMatch(/Unable to locate CIX image/)); }); test('installUtilities creates and starts a container', async () => { @@ 
-123,37 +159,6 @@ describe('DockerExec installUtilities()', () => { }); }); -describe('DockerExec runStep()', () => { - let dockerExec; - - beforeEach(() => { - jest.resetAllMocks(); - dockerExec = new DockerExec(); - }); - - test('cannot be called if uninitialized', async () => { - await expect(dockerExec.runStep()).rejects.toThrow('DockerExec has not been initialized'); - }); - - test('runStep creates and starts a container', async () => { - dockerExec.networkName = 'test'; - dockerExec.containerLogger = {createServerOutputStream: jest.fn()}; - dockerExec.getEnvironment = () => []; - dockerExec.getVolumes = () => []; - - expect(dockerExec.getTrackedContainers()).toHaveLength(0); - - const step = await dockerExec.runStep({name: 'test-step', image: 'image:test'}); - - expect(DockerContainer.mock.instances[0].create).toHaveBeenCalled(); - expect(DockerContainer.mock.instances[0].start).toHaveBeenCalled(); - expect(dockerExec.getTrackedContainers()).toHaveLength(1); - expect(dockerExec.containerLogger.createServerOutputStream.mock.calls).toHaveLength(2); - expect(step).toHaveProperty('name', 'test-step'); - expect(step).toHaveProperty('Finished.StatusCode'); - }); -}); - describe('DockerExec runPreprocessor()', () => { let dockerExec; @@ -194,52 +199,52 @@ describe('DockerExec tearDown()', () => { beforeEach(() => { jest.resetAllMocks(); + jest.useFakeTimers(); dockerExec = new DockerExec(); dockerApi = dockerExec.getDockerApi(); }); - - test('tearDown() stops and removes a container', async () => { + test('tearDownContainers() stops and removes a container', async () => { const container1 = new DockerContainer(); container1.stop = jest.fn().mockImplementation(() => container1); - dockerExec.getNetworkName = jest.fn().mockResolvedValue('test'); dockerExec.getTrackedContainers = jest.fn().mockImplementation(() => [container1]); - expect(dockerExec.getTrackedContainers()).toHaveLength(1); - await dockerExec.tearDown(); + dockerExec.tearDownContainers(); + jest.advanceTimersByTime(15000); + await Promise.resolve(); expect(DockerContainer.mock.instances[0].stop).toHaveBeenCalled(); expect(DockerContainer.mock.instances[0].remove).toHaveBeenCalled(); }); - test('tearDown() removes a stopped container', async () => { + test('tearDownContainers() removes a stopped container', async () => { const container1 = new DockerContainer(); container1.stop = jest.fn().mockImplementation(() => { throw new Error('container already stopped'); }); - dockerExec.getNetworkName = jest.fn().mockResolvedValue('test'); dockerExec.getTrackedContainers = jest.fn().mockImplementation(() => [container1]); - expect(dockerExec.getTrackedContainers()).toHaveLength(1); - await dockerExec.tearDown(); + dockerExec.tearDownContainers(); + jest.advanceTimersByTime(15000); + await Promise.resolve(); expect(DockerContainer.mock.instances[0].stop).toHaveBeenCalled(); expect(DockerContainer.mock.instances[0].remove).toHaveBeenCalled(); }); - test('tearDown() doesn\'t remove a non-existent container', async () => { + test('tearDownContainers() doesn\'t remove a non-existent container', async () => { const container1 = new DockerContainer(); container1.stop = jest.fn().mockImplementation(() => { throw new Error('no such container'); }); - dockerExec.getNetworkName = jest.fn().mockResolvedValue('test'); dockerExec.getTrackedContainers = jest.fn().mockImplementation(() => [container1]); - expect(dockerExec.getTrackedContainers()).toHaveLength(1); - await dockerExec.tearDown(); + dockerExec.tearDownContainers(); + 
jest.advanceTimersByTime(15000); + await Promise.resolve(); expect(DockerContainer.mock.instances[0].stop).toHaveBeenCalled(); expect(DockerContainer.mock.instances[0].remove).not.toHaveBeenCalled(); }); - test('tearDown() removes the network', async () => { + test('tearDownNetwork() removes the network', async () => { dockerExec.getNetworkName = jest.fn().mockResolvedValue('test'); const mockRemoveNetwork = jest.fn(); dockerApi.getNetwork = jest.fn().mockImplementation(() => { @@ -248,11 +253,31 @@ describe('DockerExec tearDown()', () => { }; }); - await dockerExec.tearDown(); + await dockerExec.tearDownNetwork(); expect(mockRemoveNetwork).toHaveBeenCalled(); }); - test('tearDown() removes a volume', async () => { + test('tearDownNetwork() skips teardown if DOCKER_HOSTNAME is set', async () => { + const spy = jest.spyOn(NodeProvider, 'getProcess').mockImplementation(() => { + return { + env: {DOCKER_NETWORK: 'host'}, + }; + }); + dockerExec = new DockerExec(); + spy.mockRestore(); + + const mockRemoveNetwork = jest.fn(); + dockerApi.getNetwork = jest.fn().mockImplementation(() => { + return { + remove: mockRemoveNetwork, + }; + }); + + await dockerExec.tearDownNetwork(); + expect(mockRemoveNetwork).not.toHaveBeenCalled(); + }); + + test('tearDownVolumes() removes a volume', async () => { dockerExec.getVolumes = jest.fn().mockImplementation(() => [{name: 'test'}]); const mockRemoveVolume = jest.fn(); dockerApi.getVolume = jest.fn().mockImplementation(() => { @@ -261,7 +286,9 @@ describe('DockerExec tearDown()', () => { }; }); - await dockerExec.tearDown(); + dockerExec.tearDownVolumes(); + jest.advanceTimersByTime(3000); + await Promise.resolve(); expect(mockRemoveVolume).toHaveBeenCalled(); }); }); diff --git a/src/docker/index.js b/src/docker/index.js index e14a0325..e5a3c8d0 100644 --- a/src/docker/index.js +++ b/src/docker/index.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/EnvironmentService.js b/src/engine/EnvironmentService.js index ece8e330..8ae1d2ad 100644 --- a/src/engine/EnvironmentService.js +++ b/src/engine/EnvironmentService.js @@ -1,23 +1,20 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {ExecutionError, _} from '../common/index.js'; +import {ExecutionError, Logger, _} from '../common/index.js'; import PipelineService from './PipelineService.js'; -import log from 'winston'; /** * @class - * * @description Services all engine environment calls. */ class EnvironmentService { /** * @function module:engine.EnvironmentService#getPipeline * @description Validates pipeline exists and returns it. - * * @param {string} pipelineId - The pipeline to retrieve. * @returns {object} A reference to the requested Pipeline. */ @@ -28,39 +25,37 @@ class EnvironmentService { /** * @function module:engine.EnvironmentService#setEnvironmentVar * @description Adds/Replaces an environment variable to a pipeline. - * * @param {string} pipelineId - The pipeline to add to. * @param {object} environmentVar - A representation of the environment variable. 
*/ setEnvironmentVar(pipelineId, environmentVar) { // validation of additional options if (_.isNil(environmentVar)) { - log.silly('Cannot set environment variable, parameter required.'); + Logger.silly('Cannot set environment variable, parameter required.'); throw new ExecutionError('The environmentVar parameter is required.', 400); } if (_.isNil(environmentVar.name) || _.isNil(environmentVar.value)) { - log.silly('Cannot set environment variable, parameter is malformed".'); + Logger.silly('Cannot set environment variable, parameter is malformed".'); throw new ExecutionError('The environmentVar parameter is malformed.', 400); } - log.debug(`Adding environment variable ${environmentVar.name} for Pipeline ${pipelineId}`); + Logger.debug(`Adding environment variable ${environmentVar.name} for Pipeline ${pipelineId}`); this.getPipeline(pipelineId).getEnvironment().addEnvironmentVariable(environmentVar); } /** * @function module:engine.EnvironmentService#getEnvironmentVar * @description Gets an environment variable from a pipeline. - * * @param {string} pipelineId - The pipeline to fetch from. * @param {string} name - The name of the environment variable to fetch. * @returns {object} A representation of the environment variable. */ getEnvironmentVar(pipelineId, name) { if (_.isNil(name)) { - log.silly('Cannot get environment variable, parameter required.'); + Logger.silly('Cannot get environment variable, parameter required.'); throw new ExecutionError('The name parameter is required.', 400); } - log.debug(`Getting environment variable ${name} for Pipeline ${pipelineId}`); + Logger.debug(`Getting environment variable ${name} for Pipeline ${pipelineId}`); const environment = this.getPipeline(pipelineId).getEnvironment(); return JSON.parse(environment.redactSecrets(JSON.stringify(environment.getEnvironmentVariable(name)))); } @@ -68,12 +63,11 @@ class EnvironmentService { /** * @function module:engine.EnvironmentService#listEnvironmentVar * @description Lists the environment variables for a given pipeline. - * * @param {string} pipelineId - The pipeline to fetch the list from. * @returns {Array} A list of environment variable names. */ listEnvironmentVar(pipelineId) { - log.debug(`Getting all environment variables for Pipeline ${pipelineId}`); + Logger.debug(`Getting all environment variables for Pipeline ${pipelineId}`); return this.getPipeline(pipelineId).getEnvironment().listEnvironmentVariables(); } } diff --git a/src/engine/EnvironmentService.test.js b/src/engine/EnvironmentService.test.js index 973c352b..7ad397a6 100644 --- a/src/engine/EnvironmentService.test.js +++ b/src/engine/EnvironmentService.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -14,10 +14,10 @@ describe('EnvironmentService tests', () => { // !Important! EnvironmentService is a SINGLETON type object // cannot create more than one for testing, order is important!! 
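// Illustrative sketch: typical use of the EnvironmentService calls shown above.
// The pipeline is created the same way as in these tests; 'FOO' is a
// hypothetical variable.
import EnvironmentService from './EnvironmentService.js';
import PipelineService from './PipelineService.js';

const {id} = await PipelineService.addPipeline({yamlPath: 'docs/examples/basic.yaml', type: 'standard', environment: []});
EnvironmentService.setEnvironmentVar(id, {name: 'FOO', value: 'bar'});
EnvironmentService.getEnvironmentVar(id, 'FOO');  // => {name: 'FOO', value: 'bar'}; secret values come back redacted
EnvironmentService.listEnvironmentVar(id);        // the CIX_* built-ins plus 'FOO'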
- test('listEnvironmentVar should only contain CIX_HOSTNAME and CIX_EXECUTION_ID', async () => { + test('listEnvironmentVar should only contain CIX_HOSTNAME, CIX_SERVER_PORT and CIX_EXECUTION_ID', async () => { const resp = await PipelineService.addPipeline({yamlPath: 'docs/examples/basic.yaml', type: 'standard', environment: []}); - expect(EnvironmentService.listEnvironmentVar(resp.id)).toEqual(['CIX_EXECUTION_ID', 'CIX_HOSTNAME']); + expect(EnvironmentService.listEnvironmentVar(resp.id)).toEqual(['CIX_EXECUTION_ID', 'CIX_HOSTNAME', 'CIX_SERVER_PORT']); }); test('get/setEnvironmentVar should work', async () => { diff --git a/src/engine/PipelineService.js b/src/engine/PipelineService.js index 54268ac8..0adb1eaf 100644 --- a/src/engine/PipelineService.js +++ b/src/engine/PipelineService.js @@ -1,43 +1,42 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {ExecutionError, NodeProvider, _} from '../common/index.js'; +import {ExecutionError, Logger, NodeProvider, _} from '../common/index.js'; import Pipeline from './pipeline/Pipeline.js'; -import log from 'winston'; /** * @class - * * @description Services all engine pipeline calls. */ class PipelineService { constructor() { - this.pipelines = {}; this.aliases = {}; + this.loggingFormat = 'console'; + this.pipelines = {}; } /** * @function module:engine.PipelineService#addPipeline * @description Creates a new Pipeline. - * * @async * @param {object} pipelineSpec - All parameters are sent in this object. * @returns {object} An object response containing the ID of the pipeline. */ addPipeline(pipelineSpec) { - const pipeline = new Pipeline(pipelineSpec); + Logger.silly('Creating new pipeline.'); + const pipeline = new Pipeline(pipelineSpec, this.loggingFormat); const pipelineId = pipeline.getId(); - log.debug(`Adding new pipeline to CIX: ${pipelineId}`); + Logger.debug(`Adding new pipeline to CIX: ${pipelineId}`); this.pipelines[pipelineId] = pipeline; - log.silly(`Returning new Pipeline with ID: ${pipelineId}`); + Logger.silly(`Returning new Pipeline with ID: ${pipelineId}`); - log.silly(`Setting latest pipeline ${pipelineId} to latest`); + Logger.silly(`Setting latest pipeline ${pipelineId} to latest`); this.setAliasForPipeline('latest', pipelineId); if (pipelineSpec.pipelineAlias) { @@ -50,7 +49,6 @@ class PipelineService { /** * @function module:engine.PipelineService#getPipeline * @description Validates pipeline exists and returns it. - * * @param {string} pipelineId - The pipeline to retrieve. * @returns {object} A reference to the requested Pipeline. */ @@ -67,23 +65,21 @@ class PipelineService { /** * @function module:engine.PipelineService#getAliasList * @description Gets a list of aliases that exist. - * * @returns {Array} List of alias names. */ getAliasList() { - log.silly('Getting list of aliases.'); + Logger.silly('Getting list of aliases.'); return Object.keys(this.aliases); } /** * @function module:engine.PipelineService#getAliasesForPipeline * @description Returns aliases for a given pipeline id. - * * @param {string} pipelineId - The pipeline id to lookup. * @returns {Array} The list of corresponding aliases. 
*/ getAliasesForPipeline(pipelineId) { - log.silly(`Getting list of alias for pipeline ${pipelineId}.`); + Logger.silly(`Getting list of alias for pipeline ${pipelineId}.`); // Check to see if pipeline exists this.getPipeline(pipelineId); @@ -99,12 +95,11 @@ class PipelineService { /** * @function module:engine.PipelineService#getPipelineForAlias * @description Returns the pipeline id for a given alias. - * * @param {string} pipelineAlias - The alias to lookup. * @returns {string} The id of the pipeline. */ getPipelineForAlias(pipelineAlias) { - log.silly(`Getting pipelineId for alias ${pipelineAlias}.`); + Logger.silly(`Getting pipelineId for alias ${pipelineAlias}.`); if (_.isNil(pipelineAlias)) { throw new ExecutionError('No pipelineAlias parameter(s) provided.', 400); } @@ -117,16 +112,15 @@ class PipelineService { /** * @function module:engine.PipelineService#setAliasForPipeline * @description Create a new pipeline alias or updates existing. - * * @param {string} pipelineAlias - The alias to create or update. * @param {string} pipelineId - The ID of the pipeline to set alias to. */ setAliasForPipeline(pipelineAlias, pipelineId) { - log.silly(`Setting alias ${pipelineAlias} for pipeline ${pipelineId}.`); + Logger.silly(`Setting alias ${pipelineAlias} for pipeline ${pipelineId}.`); // Check to see if pipeline exists this.getPipeline(pipelineId); if (pipelineAlias in this.aliases) { - log.info(`Overwriting existing alias ${pipelineAlias} that was pointed to ${this.aliases[pipelineAlias]}.`); + Logger.info(`Overwriting existing alias ${pipelineAlias} that was pointed to ${this.aliases[pipelineAlias]}.`); } this.aliases[pipelineAlias] = pipelineId; } @@ -134,12 +128,11 @@ class PipelineService { /** * @function module:engine.PipelineService#getPipelineSequence * @description Gets a definition of a pipeline. - * * @param {string} pipelineId - The ID of the pipeline to get. * @returns {object} A representation of the Pipeline. */ async getPipelineSequence(pipelineId) { - log.debug(`Getting pipeline ${pipelineId}`); + Logger.debug(`Getting pipeline ${pipelineId}`); const pipeline = this.getPipeline(pipelineId); return await pipeline.describeSequence(); } @@ -147,47 +140,43 @@ class PipelineService { /** * @function module:engine.PipelineService#getPipelineList * @description Gets a list of pipeline IDs that exist. - * * @returns {Array} List of pipeline ids. */ getPipelineList() { - log.silly('Getting list of pipeline IDs.'); + Logger.silly('Getting list of pipeline IDs.'); return Object.keys(this.pipelines); } /** * @function module:engine.PipelineService#linkPipeline * @description Chains one pipeline to another. - * * @async * @param {string} pipelineId - The pipeline to chain to. * @param {string} nextPipelineId - The pipeline to run next. */ async linkPipeline(pipelineId, nextPipelineId) { - log.debug(`Setting ${nextPipelineId} as next pipeline after ${pipelineId}`); + Logger.debug(`Setting ${nextPipelineId} as next pipeline after ${pipelineId}`); this.getPipeline(pipelineId).setNextPipeline(this.getPipeline(nextPipelineId)); } /** * @function module:engine.PipelineService#startPipeline * @description Starts a pipeline and it's chained pipelines. - * * @async * @param {string} pipelineId - The pipeline to start. 
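// Illustrative sketch: the alias helpers above map friendly names onto pipeline
// ids. 'nightly' is a hypothetical alias; 'latest' is pointed at the newest
// pipeline automatically by addPipeline().
import PipelineService from './PipelineService.js';

const {id} = await PipelineService.addPipeline({yamlPath: 'docs/examples/basic.yaml', type: 'standard', environment: []});
PipelineService.setAliasForPipeline('nightly', id);
PipelineService.getPipelineForAlias('nightly');  // => id
PipelineService.getAliasesForPipeline(id);       // aliases pointing at this pipeline, e.g. ['latest', 'nightly']
PipelineService.getAliasList();                  // all known aliases across pipelines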
*/ async startPipeline(pipelineId) { let pipeline = this.getPipeline(pipelineId); - let terminalState = false; let terminalPipelineId; + let terminalState = false; do { - log.info(`Starting Pipeline: ${pipeline.getId()}`); + Logger.info(`Starting Pipeline: ${pipeline.getId()}`); if (!terminalState || pipeline.getType() === 'teardown') { await pipeline.start(); if (pipeline.getStatus() === 'failed') { terminalState = true; - terminalPipelineId = pipeline.getShortId(); } } else { pipeline.setStatus(Pipeline.STATUS.skipped); @@ -195,16 +184,11 @@ class PipelineService { pipeline = pipeline.getNextPipeline(); } while (pipeline); - - if (terminalState) { - throw new ExecutionError(`Pipeline '${terminalPipelineId}' failed.`); - } } /** * @function module:engine.PipelineService#pausePipeline * @description Pauses a pipeline and it's chained pipelines. - * * @async * @param {string} pipelineId - The pipeline to pause. */ @@ -215,7 +199,6 @@ class PipelineService { /** * @function module:engine.PipelineService#resumePipeline * @description Continues a pipeline that is paused or not started. - * * @async * @param {string} pipelineId - The pipeline to move forward one step. * @param {string} step (optional) - A step name to run to and pause on. @@ -234,7 +217,6 @@ class PipelineService { /** * @function module:engine.PipelineService#killPipeline * @description Kills a pipeline and it's chained pipelines. - * * @async * @param {string} pipelineId - The pipeline to kill. */ @@ -245,7 +227,6 @@ class PipelineService { /** * @function module:engine.PipelineService#nextStepInPipeline * @description Moves a pipeline forward one step. - * * @async * @param {string} pipelineId - The pipeline to move forward one step. */ @@ -259,6 +240,24 @@ class PipelineService { throw new ExecutionError('Pipeline: This pipeline is already finished.', 400); } } + + /** + * @function module:engine.PipelineService#setLoggingFormat + * @description Sets the logging format for the pipeline service. + * @param {string} format - The logging format to use. + */ + setLoggingFormat(format) { + this.loggingFormat = format; + } + + /** + * @function module:engine.PipelineService#getLoggingFormat + * @description Gets the logging format for the pipeline service. + * @returns {string} The logging format. + */ + getLoggingFormat() { + return this.loggingFormat; + } } // make PipelineService a singleton @@ -277,27 +276,30 @@ const signals = { /** * @function module:engine.PipelineService#shutdown * @description Shuts down the CIX process and kills all pipelines - * * @param {string} signal - string representation of signal type * @param {number} value - number representation of signal type */ async function shutdown(signal, value) { // Prints out this message every 3 seconds while we shut things down. 
(function printWait() { - log.warn(`Received ${signal} shutting down CIX, please wait...`); + Logger.warn(`Received ${signal} shutting down CIX, please wait...`); setTimeout(printWait, 3000); })(); // Will force a process quit after 15seconds setTimeout(() => { - log.error('Was not able to cleanly shutdown...'); - log.error('It is highly recommended you run a \'docker system prune\' to cleanup any orphaned resources.'); + Logger.error('Was not able to cleanly shutdown...'); + Logger.error('It is highly recommended you run a \'docker system prune\' to cleanup any orphaned resources.'); NodeProvider.getProcess().exit(100 + value); }, 15000); const pipelines = instance.getPipelineList(); + Logger.warn(`list of pipelines: ${pipelines}`); for (let i = 0; i < pipelines.length; i++) { + Logger.warn(`killing pipeline ${i + 1} of ${pipelines.length}`); await instance.killPipeline(pipelines[i]); + Logger.warn(`done killing pipeline ${i}`); } + Logger.warn('done killing pipelines'); NodeProvider.getProcess().exit(100 + value); } diff --git a/src/engine/PipelineService.test.js b/src/engine/PipelineService.test.js index c4e4ddb7..7c5f6966 100644 --- a/src/engine/PipelineService.test.js +++ b/src/engine/PipelineService.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/PluginService.js b/src/engine/PluginService.js index 72788237..d414b2cc 100644 --- a/src/engine/PluginService.js +++ b/src/engine/PluginService.js @@ -1,16 +1,14 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {PluginError, _} from '../common/index.js'; +import {Logger, PluginError, _} from '../common/index.js'; import Plugin from './plugin/Plugin.js'; -import log from 'winston'; /** * @class - * * @description Services all engine calls. */ class PluginService { @@ -21,7 +19,6 @@ class PluginService { /** * @function module:engine.PluginService#addPlugin * @description Creates a new Plugin. - * * @async * @param {object} definition - A object representation of the plugin. * @returns {string} An object response containing the ID of the plugin. @@ -32,7 +29,7 @@ class PluginService { await plugin.loadAndValidate(); const pluginId = plugin.getId(); - log.debug(`Adding new plugin to CIX: ${pluginId}`); + Logger.debug(`Adding new plugin to CIX: ${pluginId}`); this.plugins[pluginId] = plugin; @@ -42,18 +39,16 @@ class PluginService { /** * @function module:engine.PluginService#getPluginList * @description Gets a list of plugin IDs that exist. - * * @returns {Array} List of pipeline ids. */ getPluginList() { - log.silly('Getting list of plugin IDs.'); + Logger.silly('Getting list of plugin IDs.'); return Object.keys(this.plugins); } /** * @function module:engine.PluginService#getPlugin * @description Validates and loads a yaml. - * * @async * @param {object} pluginId - All parameters are sent in this object. * @returns {object} The validated/loaded YAML Pipeline. 
diff --git a/src/engine/PluginService.test.js b/src/engine/PluginService.test.js index 5ff25037..112a9ea5 100644 --- a/src/engine/PluginService.test.js +++ b/src/engine/PluginService.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/ValidateService.js b/src/engine/ValidateService.js index 2bf10b47..830afad0 100644 --- a/src/engine/ValidateService.js +++ b/src/engine/ValidateService.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -9,14 +9,12 @@ import Validate from './validate/Validate.js'; /** * @class - * * @description Services all engine calls. */ class ValidateService { /** * @function module:engine.ValidateService#validatePipelineSchema * @description Validates and loads a yaml. - * * @async * @param {object} definition - A object representation of a pipeline. * @returns {Array} List of errors. @@ -28,7 +26,6 @@ class ValidateService { /** * @function module:engine.ValidateService#validatePluginSchema * @description Validates and loads a plugin yaml. - * * @async * @param {object} definition - A object representation of a plugin. * @returns {Array} List of errors. @@ -40,7 +37,6 @@ class ValidateService { /** * @function module:engine.ValidateService#validatePipeline * @description Validates and loads a yaml. - * * @async * @param {object} pipelineSpec - All parameters are sent in this object. * @returns {object} The validated/loaded YAML Pipeline. diff --git a/src/engine/ValidateService.test.js b/src/engine/ValidateService.test.js index c773affe..c1811a9c 100644 --- a/src/engine/ValidateService.test.js +++ b/src/engine/ValidateService.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/environment/Environment.js b/src/engine/environment/Environment.js index 1a043f4f..fdb37f8d 100644 --- a/src/engine/environment/Environment.js +++ b/src/engine/environment/Environment.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -43,7 +43,6 @@ export default class Environment { return _.keys(_.omitBy(this.#environmentMap, (environmentVar) => environmentVar.type !== type)); } } - replace$$Values(input) { if (_.isNil(input)) { return ''; diff --git a/src/engine/environment/Environment.test.js b/src/engine/environment/Environment.test.js index 4ca5cca6..9f353faa 100644 --- a/src/engine/environment/Environment.test.js +++ b/src/engine/environment/Environment.test.js @@ -1,10 +1,10 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -/* global jest, describe, it, beforeEach, expect */ +/* global jest, describe, test, beforeEach, expect */ import Environment from './Environment.js'; // const Environment = require('./Environment.js'); jest.autoMockOff(); @@ -16,17 +16,17 @@ describe('Environment add/remove/list', () => { env = new Environment(); }); - it('addEnvironmentVariable should add a variable.', () => { + test('addEnvironmentVariable should add a variable.', () => { const testName = 'test'; const testValue = '1234'; const testVar = {name: testName, value: testValue}; env.addEnvironmentVariable(testVar); expect(env.getEnvironmentVariable(testName)).toEqual(testVar); }); - it('getting value that does not exist returns undefined.', () => { + test('getting value that does not exist returns undefined.', () => { expect(env.getEnvironmentVariable('ghost')).toEqual(undefined); }); - it('second addEnvironmentVariable replaces.', () => { + test('second addEnvironmentVariable replaces.', () => { const testName = 'test'; const testValue = '1234'; const testVar = {name: testName, value: testValue}; const testValue2 = '5678'; const testVar2 = {name: testName, value: testValue2}; env.addEnvironmentVariable(testVar); @@ -34,7 +34,7 @@ describe('Environment add/remove/list', () => { expect(env.getEnvironmentVariable(testName)).toEqual(testVar2); }); - it('list environment variables.', () => { + test('list environment variables.', () => { const testName = 'test'; const testValue = '1234'; const testVar = {name: testName, value: testValue}; const testName2 = 'test2'; const testVar2 = {name: testName2, value: testValue}; env.addEnvironmentVariable(testVar); @@ -50,21 +50,21 @@ describe('Environment replace$$Values', () => { env = new Environment(); }); - it('should replace $$ value in string by itself', () => { + test('should replace $$ value in string by itself', () => { env.addEnvironmentVariable({name: 'FOO', value: 'foo'}); const replacement = env.replace$$Values('$$FOO'); expect(replacement).toEqual('foo'); }); - it('should replace repeated $$ values in string', () => { + test('should replace repeated $$ values in string', () => { env.addEnvironmentVariable({name: 'FOO', value: 'foo'}); const replacement = env.replace$$Values('$$FOO$$FOO'); expect(replacement).toEqual('foofoo'); }); - it('should replace multiple $$ values in string', () => { + test('should replace multiple $$ values in string', () => { env.addEnvironmentVariable({name: 'FOO', value: 'foo'}); env.addEnvironmentVariable({name: 'BAR', value: 'bar'}); const replacement = env.replace$$Values('$$FOO$$BAR'); @@ -72,21 +72,21 @@ describe('Environment replace$$Values', () => { expect(replacement).toEqual('foobar'); }); - it('should not replace $$ values when not a full match', () => { + test('should not replace $$ values when not a full match', () => { env.addEnvironmentVariable({name: 'FOO', value: 'foo'}); const replacement = env.replace$$Values('$$FOO$$FOO_BAR'); expect(replacement).toEqual('foo$$FOO_BAR'); }); - it('should not replace values when not preceded by $$', () => { + test('should not replace values when not preceded by $$', () => { env.addEnvironmentVariable({name: 'FOO', value: 'foo'}); const replacement = env.replace$$Values('FOO'); expect(replacement).toEqual('FOO'); }); - it('should not expand $FOOBAR when $FOO is defined', () => { + test('should not expand $FOOBAR when $FOO is defined', () => { 
env.addEnvironmentVariable({name: 'FOO', value: 'foo'}); const replacement = env.replace$$Values('$$FOOBAR'); @@ -102,7 +102,7 @@ describe('Environment redactSecrets', () => { env = new Environment(); }); - it('should redact a secrets', () => { + test('should redact a secrets', () => { env.addEnvironmentVariable({name: 'FOO', value: 'foo', type: 'secret'}); const replacement = env.redactSecrets('foo'); diff --git a/src/engine/index.js b/src/engine/index.js index ceeb9e06..3c160d24 100644 --- a/src/engine/index.js +++ b/src/engine/index.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/pipeline/Pipeline.js b/src/engine/pipeline/Pipeline.js index ebbaee59..34644adc 100644 --- a/src/engine/pipeline/Pipeline.js +++ b/src/engine/pipeline/Pipeline.js @@ -1,10 +1,10 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {ExecutionError, NodeProvider, ValidateError, _} from '../../common/index.js'; +import {ExecutionError, Logger, NodeProvider, ValidateError, _} from '../../common/index.js'; import Environment from '../environment/Environment.js'; import ExecFactory from '../runtime/ExecFactory.js'; import PipelineImporter from './PipelineImporter.js'; @@ -13,10 +13,7 @@ import Step from './Step.js'; import Steps from './Steps.js'; import ValidateService from '../ValidateService.js'; import events from 'events'; -import log from 'winston'; -import os from 'os'; import path from 'path'; -import stream from 'stream'; import {v4 as uuidv4} from 'uuid'; // Static constant for Pipeline Class, so that you can use Pipeline.STATUS.ready @@ -51,11 +48,23 @@ export default class Pipeline { if (_.isNil(pipelineSpec)) { throw new ValidateError('Must pass in a new Pipeline object.', 400); } + + // Create an ID + this.id = uuidv4(); + // get a logger + Logger.createPipelineLogger(this); + Logger.debug(`Generated ID for pipeline: ${this.getId()}`, this.getId()); + + this.nextPipeline = null; this.status = Pipeline.STATUS.ready; // Seed our environment object this.environment = new Environment(pipelineSpec.environment); + this.environment.addEnvironmentVariable({name: 'CIX_EXECUTION_ID', value: this.getId(), type: 'internal'}); + + // Set default pull policy + this.defaultPullPolicy = pipelineSpec.defaultPullPolicy; // Attach plugins this.plugins = []; @@ -79,14 +88,14 @@ export default class Pipeline { // Configure workspace this.workspacePath = this.resolveWorkspace(pipelineSpec.workspace); - // Create an ID - this.id = uuidv4(); - log.debug(`Generated ID for pipeline: ${this.id}`); - this.environment.addEnvironmentVariable({name: 'CIX_EXECUTION_ID', value: this.id, type: 'internal'}); - // Set Hostname if (!this.environment.getEnvironmentVariable('CIX_HOSTNAME')) { - this.environment.addEnvironmentVariable({name: 'CIX_HOSTNAME', value: os.hostname(), type: 'internal'}); + this.environment.addEnvironmentVariable({name: 'CIX_HOSTNAME', value: 'localhost', type: 'internal'}); + } + + // Set Port + if (!this.environment.getEnvironmentVariable('CIX_SERVER_PORT')) { + this.environment.addEnvironmentVariable({name: 'CIX_SERVER_PORT', value: 
'10030', type: 'internal'}); } // set Type @@ -105,17 +114,16 @@ export default class Pipeline { /** * @function module:engine.Pipeline#resolveWorkspace * @description Creates a new Pipeline. - * * @param {string} workspace - path of the workspace. * @returns {string} fully qualified */ resolveWorkspace(workspace) { if (workspace) { workspace = path.resolve(workspace); - log.debug(`Workspace supplied: ${workspace}`); + Logger.debug(`Workspace supplied: ${workspace}`, this.getId()); } else { workspace = NodeProvider.getProcess().cwd(); - log.debug(`Workspace not supplied. Using current working directory: ${workspace}`); + Logger.debug(`Workspace not supplied. Using current working directory: ${workspace}`, this.getId()); } const fs = NodeProvider.getFs(); @@ -126,8 +134,8 @@ export default class Pipeline { try { fs.accessSync(workspace, fs.constants.W_OK); } catch (error) { - log.debug(`${error}`); - log.warn(`Workspace is not writable: ${workspace}`); + Logger.debug(`${error}`, this.getId()); + Logger.warn(`Workspace is not writable: ${workspace}`, this.getId()); } return workspace; @@ -136,7 +144,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#STATUS * @description Status static constant. - * * @returns {object} A dictionary of statuses. */ static get STATUS() { @@ -146,7 +153,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#isActive * @description Indicates if a pipeline is in a state that may need cleanup. - * * @returns {boolean} The boolean stating if it is active or not. */ isActive() { @@ -161,7 +167,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#isTerminal * @description Indicates if a pipeline has finished running. - * * @returns {boolean} The boolean stating if it is finished or not. */ isTerminal() { @@ -176,7 +181,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#isStartable * @description Indicates if a pipeline can be started. - * * @returns {boolean} The boolean stating if pipeline can be started. */ isStartable() { @@ -187,80 +191,9 @@ export default class Pipeline { return false; } - /** - * @function module:engine.Pipeline#generateLogStream - * @description Generates a log stream for remote client requests. - * - * @returns {stream} The log stream. - */ - generateLogStream() { - // Cleanup after any old log streams... - this.destroyLogStream(); - - // transform the docker and winston object streams into a string stream, also deals with back pressure - this.logStream = new stream.Transform({ - writableObjectMode: true, - transform(chunk, _encoding, callback) { - chunk.message = chunk.message.toString('utf8'); - // prevent any back pressure, if the readable buffer goes over the max -16K for warnings, it starts skipping messages. - if (this.readableLength < this.readableHighWaterMark) { - this.push(JSON.stringify(chunk) + '\n'); - } else { - // send a warning if we still have room to do so. - if (!this.warningSent) { - this.push(JSON.stringify({'level': 'warn', 'message': 'Not able to keep up with server log streaming, having to drop packets....'})); - this.warningSent = true; - // wait 1000 milliseconds before allowing this to be sent again... 
- setTimeout(() => { - this.warningSent = false; - }, 1000); - } - } - callback(); - }, - }); - // hook into the global winston CIX log stream, track it so we can remove it later - // Note: this means if we're running more than one pipeline concurrently, you'll get logs from both - this.winstonStream = new log.transports.Stream({ - stream: this.logStream, - }); - log.add(this.winstonStream); - return this.logStream; - } - - /** - * @function module:engine.Pipeline#generateLogStream - * @description Returns a reference to the pipelines log stream. - * - * @returns {stream} The log stream. - */ - getLogStream() { - if (this.logStream) { - return this.logStream; - } else { - return null; - } - } - - /** - * @function module:engine.Pipeline#generateLogStream - * @description Destroys the log stream if one exists - */ - destroyLogStream() { - if (!_.isNil(this.winstonStream)) { - log.remove(this.winstonStream); - this.winstonStream = null; - } - if (!_.isNil(this.logStream)) { - this.logStream.destroy(); - this.logStream = null; - } - } - /** * @function module:engine.Pipeline#getLength * @description Returns the number of individual steps in the pipeline. - * * @returns {number} The number of steps in the pipeline. */ getLength() { @@ -270,7 +203,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getStepList * @description Returns a list of steps that can be iterated through. - * * @returns {Array} The list of Step references. */ getStepList() { @@ -280,7 +212,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getStepsList * @description Returns a list of step groups that can be iterated through - * * @returns {Array} The list of Steps references. */ getStepsList() { @@ -290,7 +221,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getRemainingSteps * @description Returns a list of steps that have not yet run. - * * @returns {Array} The list of Step references. */ getRemainingSteps() { @@ -300,7 +230,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getPipelineNodeList * @description Returns a list of step groups and steps that can be iterated through - * * @returns {Array} The list of PipelineNode references. */ getPipelineNodeList() { @@ -310,7 +239,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getPlugins * @description Returns a list of plugins enabled on this pipeline. - * * @returns {Array} The list of plugins for this pipeline. */ getPlugins() { @@ -320,7 +248,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getErrors * @description Returns the errors associated with this pipeline's individual steps. - * * @returns {Array} An array of strings representing any errors found in the payload. */ getErrors() { @@ -330,7 +257,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getEnvironment * @description Returns a reference to the environment object. - * * @returns {object} A reference to the environment object. */ getEnvironment() { @@ -340,7 +266,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#awaitStatusChange * @description Allows the caller to wait for a pipeline status change. - * * @param {Array} statuses - (optional) status to be notified on. * @returns {Promise} A promise which will resolve once status is set. 
*/ @@ -358,28 +283,44 @@ export default class Pipeline { }); } + /** + * @function module:engine.Pipeline#getLoggingFormat + * @description Returns the logging format for this pipeline. + * @returns {string} The logging format for this pipeline. + */ + getLoggingFormat() { + return PluginService.getLoggingFormat(); + } + /** * @function module:engine.Pipeline#getStatus * @description Returns the status of the pipeline. - * + * @param {boolean} chainedStatus - Returns the status of chained pipelines (default: false). * @returns {string} The status of the pipeline. */ - getStatus() { - return this.status; + getStatus(chainedStatus = false) { + let status = this.status; + if (chainedStatus) { + let nextPipeline = this.getNextPipeline(); + while (!_.isNil(nextPipeline) && status == 'successful') { + status = nextPipeline.status; + nextPipeline = nextPipeline.getNextPipeline(); + } + } + return status; } /** * @function module:engine.Pipeline#setStatus * @description Sets the status of the pipeline. - * * @param {string} status - The status of the pipeline. */ setStatus(status) { if (!_.includes(_.keys(Pipeline.STATUS), status)) { throw new ExecutionError(`'${status}' is not a valid status for a pipeline.`); } - log.silly(`Changing ${this.getId()} status to '${status}'.`); + Logger.silly(`Changing ${this.getId()} status to '${status}'.`, this.getId()); if (this.status != status) { this.status = status; this.statusEmitter.emit('status', status); @@ -389,11 +330,10 @@ export default class Pipeline { /** * @function module:engine.Pipeline#setNextPipeline * @description Sets the next pipeline to call after this one completes. - * * @param {object} nextPipeline - A reference to the next pipeline. */ setNextPipeline(nextPipeline) { - log.debug(`Linking pipeline ${this.getId()} -> ${nextPipeline.getShortId()}`); + Logger.debug(`Linking pipeline ${this.getId()} -> ${nextPipeline.getShortId()}`, this.getId()); this.nextPipeline = nextPipeline; } @@ -401,7 +341,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getNextPipeline * @description Gets the next pipeline to call after this one completes. - * * @returns {object} A reference to the next pipeline. */ getNextPipeline() { @@ -411,7 +350,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getType * @description Returns the type of the pipeline. - * * @returns {string} The type of pipeline. */ getType() { @@ -421,7 +359,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#setType * @description Sets the type of the pipeline. - * * @param {string} type - The type of pipeline. */ setType(type) { @@ -431,7 +368,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getExec * @description Gets the execution runtime. - * * @returns {object} The execution runtime. */ getExec() { @@ -441,7 +377,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getWorkspacePath * @description Gets the workspace path for the pipeline. - * * @returns {string} A string representation of the workspace path. */ getWorkspacePath() { @@ -451,7 +386,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getRegistries * @description Gets the registry information defined for the pipeline. - * * @returns {object} An object representation of the registries. */ getRegistries() { @@ -461,7 +395,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getId * @description Gets the ID of the pipeline. - * * @returns {string} An ID. 
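Editorial note on the `getStatus(chainedStatus)` change above: when `chainedStatus` is true, the pipeline walks its linked `nextPipeline` references and reports the first non-successful status it encounters. A minimal sketch of that walk, using hypothetical plain objects rather than real `Pipeline` instances:

```js
// Sketch of the chained-status walk added to Pipeline#getStatus above.
// The pipeline objects here are illustrative stand-ins, not Pipeline instances.
function chainedStatus(pipeline) {
  let status = pipeline.status;
  let next = pipeline.nextPipeline;
  // Keep walking only while the current link reports 'successful';
  // the first failed/running/paused link becomes the reported status.
  while (next != null && status === 'successful') {
    status = next.status;
    next = next.nextPipeline;
  }
  return status;
}

// Example: a successful pipeline chained to a failed one reports 'failed'.
const second = {status: 'failed', nextPipeline: null};
const first = {status: 'successful', nextPipeline: second};
console.log(chainedStatus(first)); // 'failed'
```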
*/ getId() { @@ -471,7 +404,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getShortId * @description Gets a shortened ID of the pipeline. - * * @returns {string} An truncated 5 character ID. */ getShortId() { @@ -481,7 +413,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getBreakpoint * @description Gets the current breakpoint or null if one is not set. - * * @returns {string} name of the breakpoint */ getBreakpoint() { @@ -491,11 +422,10 @@ export default class Pipeline { /** * @function module:engine.Pipeline#setBreakpoint * @description Sets a step breakpoint to pause on. - * * @param {string} breakpoint - the step which to pause on. */ setBreakpoint(breakpoint) { - log.debug(`Setting a step breakpoint on ${this.getShortId()} to ${breakpoint}.`); + Logger.debug(`Setting a step breakpoint on ${this.getShortId()} to ${breakpoint}.`, this.getId()); this.breakpoint = `${breakpoint}`; } @@ -504,7 +434,7 @@ export default class Pipeline { * @description Clears the breakpoint. */ clearBreakpoint() { - log.silly('Clearing breakpoint.'); + Logger.silly('Clearing breakpoint.', this.getId()); this.breakpoint = null; } @@ -512,7 +442,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#getPipelineTreeRoot * @description returns the root Steps node of the pipeline tree. - * * @returns {Steps} - the root Steps object. */ getPipelineTreeRoot() { @@ -528,7 +457,6 @@ export default class Pipeline { /** * @function module:engine.Pipeline#describeSequence * @description Returns a object/json representation of the step sequence. - * * @returns {object} The sequence. */ async describeSequence() { @@ -571,31 +499,15 @@ export default class Pipeline { */ async loadAndValidate() { if (this.getStatus() === Pipeline.STATUS.ready) { - log.debug(`Loading pipeline ${this.getShortId()}.`); + Logger.debug(`Loading pipeline ${this.getShortId()}.`, this.getId()); let importer; - if (!_.isEmpty(this.getPlugins())) { - log.debug('Plugins installed, running all preprocessors.'); - let pipelineDefinition; - if (this.yamlPath) { - pipelineDefinition = await _.fetch(this.yamlPath); - } else { - pipelineDefinition = this.rawPipeline; - } - pipelineDefinition = pipelineDefinition.toString(); - for (const plugin of this.getPlugins()) { - log.silly(`Pipeline definition before preprocessor ${plugin.getId()}:\n${pipelineDefinition}`); - pipelineDefinition = await plugin.runPreprocessor(this.getExec(), pipelineDefinition); - log.silly(`Pipeline definition after preprocessor ${plugin.getId()}:\n${pipelineDefinition}`); - } - pipelineDefinition = _.loadYamlOrJson(pipelineDefinition); - importer = new PipelineImporter({rawPipeline: pipelineDefinition, environment: this.environment}); - } else if (this.yamlPath) { - log.debug(`Creating Pipeline with yaml: ${this.yamlPath}`); - importer = new PipelineImporter({path: this.yamlPath, environment: this.environment}); + if (this.yamlPath) { + Logger.debug(`Creating Pipeline with yaml: ${this.yamlPath}`, this.getId()); + importer = new PipelineImporter({path: this.yamlPath, pipeline: this}); } else { - log.debug(`Creating Pipeline with JSON input: ${JSON.stringify(this.rawPipeline)}`); - importer = new PipelineImporter({rawPipeline: this.rawPipeline, environment: this.environment}); + Logger.debug(`Creating Pipeline with JSON input: ${JSON.stringify(this.rawPipeline)}`, this.getId()); + importer = new PipelineImporter({rawPipeline: this.rawPipeline, pipeline: this}); } // load the pipeline @@ -608,9 +520,9 @@ 
export default class Pipeline { if (!_.isEmpty(errors)) { _.forEach(errors, (error) => { if (error.message) { - log.warn(`Error validating pipeline schema: ${error.message}: ${(error.params) ? JSON.stringify(error.params) : ''} at ${error.dataPath}`); + Logger.warn(`Error validating pipeline schema: ${error.message}: ${(error.params) ? JSON.stringify(error.params) : ''} at ${error.dataPath}`, this.getId()); } else { - log.warn(`${JSON.stringify(error)}`); + Logger.warn(`${typeof error === 'string' ? error : JSON.stringify(error)}`, this.getId()); } }); this.setStatus(Pipeline.STATUS.failed); @@ -618,34 +530,34 @@ export default class Pipeline { } this.setStatus(STATUS.loaded); } else { - log.silly(`Pipeline ${this.getShortId()} was previously loaded, using that.`); + Logger.silly(`Pipeline ${this.getShortId()} was previously loaded, using that.`, this.getId()); } } /** * @function module:engine.Pipeline#start * @description Starts the pipeline. - * * @async */ async start() { try { await this.loadAndValidate(); - log.debug('Creating new container executor.'); + Logger.debug('Creating new container executor.', this.getId()); this.setStatus(Pipeline.STATUS.initializing); await this.getExec().init(this); this.setStatus(Pipeline.STATUS.running); await this.getPipelineTreeRoot().start(); - log.debug(`Pipeline ${this.getId()} successful.`); + Logger.debug(`Pipeline ${this.getId()} successful.`, this.getId()); this.setStatus(Pipeline.STATUS.successful); } catch (error) { - log.error(`Pipeline ${this.getId()} failed: ${error}`); + Logger.error(`Pipeline ${this.getId()} failed:`, this.getId()); + Logger.error(` ${error}`, this.getId()); if (error && error.stack) { - log.debug(`${error.stack}`); + Logger.debug(`${error.stack}`, this.getId()); } this.setStatus(Pipeline.STATUS.failed); } finally { - log.debug('Tearing down container executor.'); + Logger.debug('Tearing down container executor.', this.getId()); await this.cleanup(); } } @@ -653,18 +565,16 @@ export default class Pipeline { /** * @function module:engine.Pipeline#pause * @description Pauses the pipeline. - * * @async */ async pause() { - log.debug('Pausing pipeline.'); + Logger.debug('Pausing pipeline.', this.getId()); this.setStatus(Pipeline.STATUS.paused); } /** - * @function module:engine.PipelineService#resumePipeline + * @function module:engine.Pipeline#resumePipeline * @description Continues a pipeline that is paused or not started. - * * @async * @param {string} [step] - A step name to run to and pause on. */ @@ -680,11 +590,11 @@ export default class Pipeline { if (step) { this.setBreakpoint(step); if (step == this.getStepList().pop().name) { - log.debug(`Step '${step}' was the last step, running entire pipeline.`); + Logger.debug(`Step '${step}' was the last step, running entire pipeline.`, this.getId()); this.clearBreakpoint(); } else if (step == this.getStepsList().pop().name && this.getStepsList().pop().getDescendants().pop() === this.getStepList().pop()) { - log.debug(`Steps '${step}' was the last step, running entire pipeline.`); + Logger.debug(`Steps '${step}' was the last step, running entire pipeline.`, this.getId()); this.clearBreakpoint(); } } else { @@ -703,11 +613,9 @@ export default class Pipeline { /** * @function module:engine.Pipeline#cleanup * @description Cleans up log stream and docker. 
- * * @async */ async cleanup() { - this.destroyLogStream(); if (this.getExec()) { await this.getExec().tearDown(); } @@ -717,24 +625,22 @@ export default class Pipeline { /** * @function module:engine.Pipeline#kill * @description Kills the pipeline and runs teardown. - * * @async */ async kill() { if (this.isActive()) { - log.info(`Killing pipeline ${this.getId()} as it is in the state '${this.getStatus()}'.`); + Logger.info(`Killing pipeline ${this.getId()} as it is in the state '${this.getStatus()}'.`, this.getId()); this.setStatus(Pipeline.STATUS.paused); // paused while we tear it down await this.cleanup(); this.setStatus(Pipeline.STATUS.skipped); } else { - log.silly(`Not killing pipeline ${this.getId()} as it is in the state '${this.getStatus()}'.`); + Logger.silly(`Not killing pipeline ${this.getId()} as it is in the state '${this.getStatus()}'.`, this.getId()); } } /** * @function module:engine.Pipeline#nextStep * @description Continues pipeline to next step. - * * @async */ async nextStep() { diff --git a/src/engine/pipeline/Pipeline.test.js b/src/engine/pipeline/Pipeline.test.js index 616ee084..b43d8652 100644 --- a/src/engine/pipeline/Pipeline.test.js +++ b/src/engine/pipeline/Pipeline.test.js @@ -1,15 +1,14 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* eslint-disable key-spacing */ /* global describe, expect */ -import {ExecutionError, NodeProvider, ValidateError, _} from '../../common/index.js'; +import {ExecutionError, Logger, NodeProvider, ValidateError, _} from '../../common/index.js'; import Pipeline from './Pipeline.js'; import Step from './Step.js'; -import log from 'winston'; const mockInit = jest.fn().mockResolvedValue(); const mockRunStep = jest.fn().mockResolvedValue(); @@ -219,21 +218,28 @@ describe('Pipeline tests:', () => { expect(mockTeardown.mock.calls.length).toBe(1); }); - test('Pipeline: generate a logStream.', async () => { - basicPipeline.generateLogStream(); - expect(basicPipeline.getLogStream()).not.toBeNull(); - await basicPipeline.start(); + test('Pipeline: can get status of chained pipelines on failure.', async () => { + basicPipeline.setNextPipeline(nestedPipeline); + basicPipeline.setStatus(Pipeline.STATUS.successful); + nestedPipeline.setStatus(Pipeline.STATUS.failed); + expect(basicPipeline.getStatus()).toBe(Pipeline.STATUS.successful); + expect(basicPipeline.getStatus(true)).toBe(Pipeline.STATUS.failed); }); - test('Pipeline: don\'t generate a log stream by default.', async () => { - await basicPipeline.start(); - expect(basicPipeline.getLogStream()).toBeNull(); + test('Pipeline: can get status of chained pipelines on success.', async () => { + basicPipeline.setNextPipeline(nestedPipeline); + basicPipeline.setStatus(Pipeline.STATUS.successful); + nestedPipeline.setStatus(Pipeline.STATUS.successful); + expect(basicPipeline.getStatus()).toBe(Pipeline.STATUS.successful); + expect(basicPipeline.getStatus(true)).toBe(Pipeline.STATUS.successful); }); - test('Pipeline: destroy will set logStream to null.', async () => { - basicPipeline.generateLogStream(); - basicPipeline.destroyLogStream(); - expect(basicPipeline.getLogStream()).toBeNull(); + test('Pipeline: can get status of chained pipelines while in progress.', async () => { + basicPipeline.setNextPipeline(nestedPipeline); + 
basicPipeline.setStatus(Pipeline.STATUS.successful); + nestedPipeline.setStatus(Pipeline.STATUS.running); + expect(basicPipeline.getStatus()).toBe(Pipeline.STATUS.successful); + expect(basicPipeline.getStatus(true)).toBe(Pipeline.STATUS.running); }); test('Pipeline: setStatus throws on unrecognized status.', () => { @@ -430,7 +436,7 @@ describe('Pipeline tests:', () => { throw new Error(); }}; }); - const warnMock = jest.spyOn(log, 'warn').mockImplementation(); + const warnMock = jest.spyOn(Logger, 'warn').mockImplementation(); basicPipeline.resolveWorkspace('test'); expect(warnMock.mock.calls[0][0]).toMatch(/Workspace is not writable/); diff --git a/src/engine/pipeline/PipelineImporter.js b/src/engine/pipeline/PipelineImporter.js index a86039a5..42c8b41f 100644 --- a/src/engine/pipeline/PipelineImporter.js +++ b/src/engine/pipeline/PipelineImporter.js @@ -1,24 +1,24 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {ExecutionError, ValidateError, _} from '../../common/index.js'; -import log from 'winston'; -import paths from 'deepdash/paths.js'; +import {ExecutionError, Logger, ValidateError, _} from '../../common/index.js'; +import paths from 'deepdash/paths'; export default class PipelineImporter { - constructor({path, rawPipeline, environment}) { + constructor({path, rawPipeline, pipeline, httpAuthToken}) { this.path = _.emptyToNull(path); + this.rawPipeline = rawPipeline; + this.pipeline = pipeline; if (this.path != null) { - this.path = environment.replace$$Values(this.path); + this.path = this.pipeline.getEnvironment().replace$$Values(this.path); if (this.path.includes('$$')) { throw new ValidateError(`Import path '${this.path}' is missing an environment variable for substitution.`); } } - this.rawPipeline = rawPipeline; - this.environment = environment; + this.httpAuthToken = httpAuthToken; this.definition = {}; this.errors = []; } @@ -31,13 +31,16 @@ export default class PipelineImporter { let importPath; try { definition._yaml = new PipelineImporter({ - path: _.relativePath(this.path, this.environment, definition), - environment: this.environment, + path: _.relativePath(this.path, this.pipeline.getEnvironment(), definition), + pipeline: this.pipeline, + httpAuthToken: definition['http_authorization_token'] || // for backwards compatibility + definition['http-authorization-token'] || + this.httpAuthToken, }); importPath = definition._yaml.path; await definition._yaml.loadFromFile(); } catch (error) { - log.debug(error); + Logger.debug(error, this.pipeline.getId()); throw new ExecutionError(`Failed importing ${importPath}`); } } @@ -51,8 +54,7 @@ export default class PipelineImporter { } expandImportRequest(yaml, request) { - const - importFullPath = _.isObject(request) ? _.getOnlyElement(_.keys(request)) : request; + const importFullPath = _.isObject(request) ? _.getOnlyElement(_.keys(request)) : request; const [importPath, importSubPath] = _.split(importFullPath, '.'); const environmentOverrides = _.isObject(request) ? 
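A note on the import-token handling in the PipelineImporter changes in this hunk: each nested importer receives either the token declared on the import entry (both the `http_authorization_token` and `http-authorization-token` spellings are accepted for backwards compatibility) or, failing that, the parent importer's token. A small sketch of that precedence, with hypothetical import definitions and URL:

```js
// Sketch of the token precedence used when constructing nested importers above.
// 'definition' stands in for an entry from the imports: section of a pipeline.
function resolveAuthToken(definition, parentToken) {
  return definition['http_authorization_token'] || // legacy spelling
         definition['http-authorization-token'] ||
         parentToken;                              // inherited from the parent import
}

// The top-level import declares a token; its own nested imports inherit it.
const libraryImport = {'src': 'https://example.test/foo.yaml', 'http-authorization-token': 1234};
const nestedImport = {'src': './common.yaml'};

const libraryToken = resolveAuthToken(libraryImport, undefined);  // 1234
const nestedToken = resolveAuthToken(nestedImport, libraryToken); // 1234 (inherited)
console.log(libraryToken, nestedToken);
```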
_.getOnlyElement(_.values(request)).environment : {}; @@ -105,30 +107,46 @@ export default class PipelineImporter { async loadFromFile() { try { - let definition = await _.fetch(this.path, this.environment); + let definition = await _.fetch(this.path, this.pipeline.getEnvironment(), this.httpAuthToken); + if (!_.isEmpty(this.pipeline.getPlugins())) { + Logger.debug(`Plugins installed, running all preprocessors on ${this.path}.`, this.pipeline.getId()); + definition = await this.runPreprocessor(definition); + } definition = await _.loadYamlOrJson(definition); definition = await this.loadImports(definition); definition = await this.expandImports(definition); this.definition = definition; } catch (error) { - log.debug(`${error}`); + Logger.error(`${error}`, this.pipeline.getId()); throw new ExecutionError(`Failed loading YAML ${this.path}`); } return this.definition; } async loadFromMemory() { + if (!_.isEmpty(this.pipeline.getPlugins())) { + Logger.warn('Skipping preprocessor because of raw JSON input.', this.pipeline.getId()); + } try { let definition = await this.loadImports(this.rawPipeline); definition = await this.expandImports(definition); this.definition = definition; } catch (error) { - log.debug(`${error}`); - throw new ExecutionError('Failed loading pipeline from memory'); + Logger.debug(`${error}`, this.pipeline.getId()); + throw new ExecutionError('Failed loading pipeline from memory', this.pipeline.getId()); } return this.definition; } + async runPreprocessor(definition) { + for (const plugin of this.pipeline.getPlugins()) { + Logger.silly(`Pipeline definition before preprocessor ${plugin.getId()}:\n${definition}`, this.pipeline.getId()); + definition = await plugin.runPreprocessor(this.pipeline.getExec(), definition); + Logger.silly(`Pipeline definition after preprocessor ${plugin.getId()}:\n${definition}`, this.pipeline.getId()); + } + return definition; + } + getSteps({path, environmentOverrides}) { return { steps: applyEnvironmentOverrides( @@ -141,10 +159,8 @@ export default class PipelineImporter { /** * @function module:engine.PipelineImporter#applyEnvironmentOverrides - * * @param {object} yaml - yaml to insert environment overrides * @param {object} environmentOverrides - object representing overrides - * * @returns {object} yaml object with all overrides in place */ function applyEnvironmentOverrides(yaml, environmentOverrides) { diff --git a/src/engine/pipeline/PipelineImporter.test.js b/src/engine/pipeline/PipelineImporter.test.js index 1e7c4c8c..03dee57b 100644 --- a/src/engine/pipeline/PipelineImporter.test.js +++ b/src/engine/pipeline/PipelineImporter.test.js @@ -1,28 +1,119 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* eslint-disable key-spacing */ /* global describe, expect */ - +import {ValidateError, _} from '../../common/index.js'; import Environment from '../environment/Environment.js'; import PipelineImporter from './PipelineImporter.js'; -import {ValidateError} from '../../common/index.js'; + describe('Pipeline tests:', () => { + afterEach(() => { + jest.clearAllMocks(); + jest.restoreAllMocks(); + }); + test('PipelineImporter substitutes $$ on path imports', async () => { const env = new Environment(); + const pipeline = {getEnvironment: () => env}; env.addEnvironmentVariable({'name': 'EXISTS', 'value': 'MAGIC'}); - const importer = new PipelineImporter({'path': '$$EXISTS/path', 'environment': env}); + const importer = new PipelineImporter({'path': '$$EXISTS/path', 'pipeline': pipeline}); expect(importer.path).toEqual('MAGIC/path'); }); test('PipelineImporter throws error when $$ is not substituted.', async () => { const env = new Environment(); + const pipeline = {getEnvironment: () => env}; expect(() => { - new PipelineImporter({'path': '$$NOT_EXIST/path', 'environment': env}); + new PipelineImporter({'path': '$$NOT_EXIST/path', 'pipeline': pipeline}); }).toThrow(ValidateError); }); + + test('PipelineImporter will propagate the http-authorization-token.', async () => { + const pipelineWithImport = jest.fn().mockImplementation(() => ` + version: 2.1 + imports: + library: + src: https://github.com/raw/ci/cix/foo.yaml + http-authorization-token: 1234 + pipeline: + - import: + - library.bar + `); + const firstImportedPipeline = jest.fn().mockImplementation(() => ` + imports: + common: + src: ./common.yaml + bar: + steps: + name: bar + pipeline: + - step: + name: step + image: alpine:3.9 + commands: + - echo hi + `); + const secondImportedPipeline = jest.fn().mockImplementation(() => ` + version: 2.1 + pipeline: + - step: + name: basic + image: alpine:3.9 + commands: + - hostname + `); + const env = new Environment(); + const pipeline = {getEnvironment: () => env, getPlugins: () => []}; + const importer = new PipelineImporter({'path': 'mockPath.yaml', 'pipeline': pipeline}); + + jest.spyOn(_, 'fetch').mockImplementationOnce(pipelineWithImport); + jest.spyOn(_, 'fetch').mockImplementationOnce(firstImportedPipeline); + jest.spyOn(_, 'fetch').mockImplementationOnce(secondImportedPipeline); + await importer.load(); + + // the original pipeline will not have the authToken + expect(pipelineWithImport.mock.calls[0][2]).toBe(undefined); + // imported pipeline should have a authToken attached + expect(firstImportedPipeline.mock.calls[0][2]).toBe(1234); + // imports of imports should also get it... 
+ expect(secondImportedPipeline.mock.calls[0][2]).toBe(1234); + }); + + test('PipelineImporter will run preprocessor on all files(including imports).', async () => { + const pipelineWithImport = jest.fn().mockImplementation(() => ` + version: 2.1 + imports: + basic: + src: basic.yaml + pipeline: + - import: + - basic.bar + `); + const importedPipeline = jest.fn().mockImplementation(() => ` + bar: + steps: + name: basic-hi + pipeline: + - step: + name: basic-hi + image: alpine:3.9 + commands: + - echo hi + `); + const env = new Environment(); + const mockPlugin = {runPreprocessor: (exec, input) => input.replace(/basic/g, 'advanced'), getId: () => 'plugin-abc-123'}; + const pipeline = {getEnvironment: () => env, getPlugins: () => [mockPlugin], getExec: () => {}, getId: () => 'abc-123'}; + const importer = new PipelineImporter({'path': 'mockPath.yaml', 'pipeline': pipeline}); + + jest.spyOn(_, 'fetch').mockImplementationOnce(pipelineWithImport); + jest.spyOn(_, 'fetch').mockImplementationOnce(importedPipeline); + const result = await importer.load(); + expect(result['imports']['advanced']['src']).toBe('advanced.yaml'); + expect(result['pipeline'][0]['steps']['pipeline'][0]['steps']['name']).toBe('advanced-hi'); + }); }); diff --git a/src/engine/pipeline/PipelineNode.js b/src/engine/pipeline/PipelineNode.js index 4e11bdeb..b5b81866 100644 --- a/src/engine/pipeline/PipelineNode.js +++ b/src/engine/pipeline/PipelineNode.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -24,7 +24,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getName * @description Gets the name of the pipeline node. - * * @returns {string} The name of the node. */ getName() { @@ -34,7 +33,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getParent * @description Gets the parent of the pipeline item. - * * @returns {object} A reference to the parent. */ getParent() { @@ -44,7 +42,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getPipeline * @description Gets a pipeline reference. - * * @returns {object} A reference to the pipeline. */ getPipeline() { @@ -62,7 +59,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getType * @description Gets the type of the pipeline. - * * @returns {string} The type of pipeline. */ getType() { @@ -72,7 +68,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getDescendants * @description Returns a list of steps that can be iterated through. - * * @param {boolean} includeSelf - includes itself in the result. * @returns {Array} The list of descendants and current node. */ @@ -93,7 +88,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getErrors * @description Returns the errors associated with this pipeline's individual steps. - * * @returns {Array} An array of strings representing any errors found in the payload. */ getErrors() { @@ -103,7 +97,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getEnvironment * @description Returns a reference to the environment object. - * * @returns {object} A reference to the environment object. 
*/ getEnvironment() { @@ -113,7 +106,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getExpectedEnvironment * @description Returns an array of keys representing env the step expects. - * * @returns {Array} Keys of the environment variables the step can use. */ getExpectedEnvironment() { @@ -129,7 +121,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getExec * @description Sets a new execution runtime. - * * @returns {object} The execution runtime. */ getExec() { @@ -139,7 +130,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getWorkspacePath * @description Gets the workspace path for the pipeline. - * * @returns {string} A string representation of the workspace path. */ getWorkspacePath() { @@ -149,7 +139,6 @@ export default class PipelineNode { /** * @function module:engine.PipelineNode#getRegistries * @description Gets the registry information defined for the pipeline. - * * @returns {object} An object representation of the registries. */ getRegistries() { @@ -159,7 +148,6 @@ export default class PipelineNode { /** * @function module:engine.Step#start * @description Starts a step. - * * @async */ async start() { diff --git a/src/engine/pipeline/Step.js b/src/engine/pipeline/Step.js index 94102763..b816c5fd 100644 --- a/src/engine/pipeline/Step.js +++ b/src/engine/pipeline/Step.js @@ -1,14 +1,14 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {ExecutionError, Provider, _} from '../../common/index.js'; +import {ExecutionError, Logger, Provider, ValidateError, _} from '../../common/index.js'; import Pipeline from './Pipeline.js'; import PipelineNode from './PipelineNode.js'; import StepDecorators from './decorators/StepDecorators.js'; -import log from 'winston'; +import Steps from './Steps.js'; // Static constant for Step Status const STATUS = { @@ -32,6 +32,7 @@ export default class Step extends PipelineNode { constructor(definition, parentItem) { super(definition, parentItem); this.type = 'Step'; + this.originalDefinition = _.cloneDeep(definition); this.modifyDefinition(); this.validateDefinition(); this.status = Step.STATUS.ready; @@ -40,7 +41,6 @@ export default class Step extends PipelineNode { /** * @function module:engine.Step#getStatus * @description Returns the status of the Step. - * * @returns {string} The status of the Step. */ getStatus() { @@ -50,36 +50,63 @@ export default class Step extends PipelineNode { /** * @function module:engine.Step#setStatus * @description Sets the status of the Step. - * * @param {string} status - The status of the Step. */ setStatus(status) { if (!_.includes(_.keys(Step.STATUS), status)) { throw new ExecutionError(`'${status}' is not a valid status for a Step.`); } - log.silly(`Changing ${this.getName()} status to '${status}'.`); + Logger.silly(`Changing ${this.getName()} status to '${status}'.`, this.getPipeline().getId()); this.status = status; } /** * @function module:engine.Step#start * @description Starts a step. 
- * * @async */ async start() { - log.debug(`Preparing step '${this.definition.name}'`); + // Substitute $$ for for-each and loop + if (!_.isNil(this.definition['loop']) && String(this.definition['loop']).includes('$$')) { + this.definition['loop'] = this.getEnvironment().replace$$Values(this.definition['loop']); + if (this.definition['loop'].includes('$$')) { + Logger.debug(`Step ${this.definition.name} has un-substituted loop, skipping...`, this.getPipeline().getId()); + return; + } + } + if (!_.isNil(this.definition['for-each']) && String(this.definition['for-each']).includes('$$')) { + this.definition['for-each'] = this.getEnvironment().replace$$Values(this.definition['for-each']); + if (this.definition['for-each'].includes('$$')) { + Logger.debug(`Step ${this.definition.name} has un-substituted for-each, skipping...`, this.getPipeline().getId()); + return; + } + } + + if (!_.isNil(this.definition['loop']) || !_.isNil(this.definition['for-each']) ) { + await this.loopingStart(); + } else { + await this.regularStart(); + } + } + + /** + * @function module.engine.Steps#regularStart + * @description Transforms step into a steps for looping + * @async + */ + async regularStart() { + Logger.debug(`Preparing step ${this.definition.name}`, this.getPipeline().getId()); if (this.definition.name.includes('.')) { - log.warn(`Step name '${this.definition.name}' contains the period (.) character which can interfere with hostname lookups.`); + Logger.warn(`Step name '${this.definition.name}' contains the period (.) character which can interfere with hostname lookups.`, this.getPipeline().getId()); } while (this.getPipeline().getStatus() === Pipeline.STATUS.paused) { - log.info('Pipeline is paused, waiting for status to change...'); + Logger.info('Pipeline is paused, waiting for status to change...', this.getPipeline().getId()); await this.getPipeline().awaitStatusChange(); } let promiseProvider = Provider.fromFunction(async () => { - await this.getExec().runStep(this.definition); + await this.getExec().runStep(_.cloneDeep(this.definition)); }); try { @@ -87,16 +114,16 @@ export default class Step extends PipelineNode { promiseProvider = decorator(this.definition, this, promiseProvider); }); if (this.status === Step.STATUS.skipped) { // allow for skipped runs to remain so - log.info(`Skipping step '${this.definition.name}'`); + Logger.info(`Skipping step ${this.definition.name}`, this.getPipeline().getId()); } else { - log.info(`Starting step '${this.definition.name}'`); + Logger.info(`Starting step ${this.definition.name}`, this.getPipeline().getId()); this.setStatus(Step.STATUS.running); await promiseProvider.get(); - log.info(`Successfully completed step '${this.definition.name}'`); + Logger.info(`Successfully completed step ${this.definition.name}`, this.getPipeline().getId()); this.setStatus(Step.STATUS.successful); } } catch (error) { - log.warn(`Failed on executing step '${this.definition.name}': ${error}`); + Logger.warn(`Failed while executing step ${this.definition.name}`, this.getPipeline().getId()); this.setStatus(Step.STATUS.failed); _.each(this.getPipeline().getRemainingSteps(), (step) => { // set all remaining steps to `skipped` if a step fails if (step.getStatus() === Step.STATUS.ready) { @@ -107,12 +134,98 @@ export default class Step extends PipelineNode { } finally { // pause on the execution of the next step if (this.getPipeline().getBreakpoint() == this.definition.name) { - log.info(`Pausing pipeline after running step '${this.definition.name}'.`); + Logger.info(`Pausing pipeline 
after running step ${this.definition.name}.`, this.getPipeline().getId()); this.getPipeline().pause(); } } } + + /** + * @function module.engine.Steps#loopingStart + * @description Transforms step into a steps for looping + * @async + */ + async loopingStart() { + const isLoop = !_.isNil(this.definition['loop']); + const isForEach = !_.isNil(this.definition['for-each']); + + if ( isLoop && isForEach ) { + throw new ValidateError('Cannot use both for-each and loop within the same step.'); + } + let loopCount = 0; + let listOfElements = []; + let counterVariableName; + let elementVariableName; + + if (isLoop) { + // traditional loop with or without counter + loopCount = _.toInteger(this.definition.loop); + } else { + // for-each style loop with element variable + if (_.isArray(this.definition['for-each'])) { + listOfElements = this.definition['for-each']; + } else if (this.definition['for-each'].includes(',')) { + listOfElements = this.definition['for-each'].split(','); + } else if (!_.isEmpty(this.definition['for-each'])) { + // single element list, convert to array + listOfElements = [this.definition['for-each']]; + } else { + // empty element list, return + return; + } + // remove empty elements + listOfElements = listOfElements.filter((element) => !_.isEmpty(element)); + loopCount = listOfElements.length; + if (_.isNil(this.definition['element-variable'])) { + throw new ValidateError('for-each must include an element-variable definition.'); + } + elementVariableName = this.definition['element-variable']; + } + + // counter-variable can be used with either type of loop + if (!_.isNil(this.definition['counter-variable'])) { + counterVariableName = this.definition['counter-variable']; + } + + // build new step definition that we'll use within the loop + const newStepsDefinition = { + name: this.originalDefinition.name, + parallel: this.originalDefinition.parallel, + when: this.originalDefinition.when, + pipeline: [], + }; + + // When we loop, we convert the Step into a Steps + for (let i = 0; i < loopCount; i++) { + const newStep = { + step: _.cloneDeep(this.originalDefinition), + }; + + newStep.step.environment = _.cloneDeep(this.originalDefinition.environment); + if (_.isNil(newStep.step.environment)) { + newStep.step.environment = []; + } + if (isForEach) { + newStep.step.environment.push({'name': elementVariableName, 'value': listOfElements[i]}); + } + if (!_.isNil(counterVariableName)) { + newStep.step.environment.push({'name': counterVariableName, 'value': i + 1}); + } + // Delete these the looping from the new step so we don't recurse + delete newStep.step['loop']; + delete newStep.step['for-each']; + delete newStep.step['parallel']; + delete newStep.step['when']; + newStep.step['name'] = `${newStep.step['name']}-${i + 1}`; + newStepsDefinition.pipeline.push(newStep); + } + + const newSteps = new Steps(newStepsDefinition, this.getParent()); + this.getParent().updateDecentant(this, newSteps); + await newSteps.start(); + } + /** * @function module:engine.Step#validateDefinition */ @@ -179,6 +292,14 @@ export default class Step extends PipelineNode { this.errors.push(`Yaml: step contains invalid pull-policy '${this.definition['pull-policy']}' (allowed: Default, Always, IfNotPresent, Never): ${this.definition.name}`); } } + + if (this.definition['commands-output']) { + if (this.definition['commands-output'] !== 'echo' && + this.definition['commands-output'] !== 'minimal' && + this.definition['commands-output'] !== 'timestamp') { + this.errors.push(`Yaml: step contains invalid 
commands-output '${this.definition['commands-output']}' (allowed: minimal, echo, timestamp)`); + } + } } /** @@ -242,7 +363,6 @@ export default class Step extends PipelineNode { /** * @function module:engine.Step#STATUS * @description Allow access to STATUS statically - * * @returns {object} A dictionary of statuses. */ static get STATUS() { diff --git a/src/engine/pipeline/Step.test.js b/src/engine/pipeline/Step.test.js index fb8db2f3..c780d2b4 100644 --- a/src/engine/pipeline/Step.test.js +++ b/src/engine/pipeline/Step.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -209,3 +209,245 @@ describe('Step tests:', () => { expect(step.definition['workspace-mount-point']).toEqual('/tmp/project'); }); }); + +describe('Step looping tests:', () => { + let emptyPipeline; + let pipelineRootNode; + const mockRunStep = jest.fn().mockResolvedValue(); + + beforeEach( async () => { + emptyPipeline = new Pipeline({rawPipeline: {version: 2.1, pipeline: [{}]}}); + await emptyPipeline.loadAndValidate(); + emptyPipeline.getExec = jest.fn().mockImplementation(() => { + return {runStep: mockRunStep, constructor: {name: 'Pipeline'}}; + }); + pipelineRootNode = emptyPipeline.getPipelineTreeRoot(); + jest.clearAllMocks(); + }); + + test('Step:getParent(): returns parent.', () => { + const step = new Step({ + name: 'basic', + image: 'alpine:3.9', + commands: ['hostname'], + }, pipelineRootNode); + expect(step.getParent()).toEqual(pipelineRootNode); + }); + + test('Step:start(): loop will run step n times.', async () => { + const step = new Step({ + name: 'basic', + loop: 3, + image: 'alpine:3.9', + commands: ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(3); + }); + + test('Step:start(): loop will not run if count is 0.', async () => { + const step = new Step({ + name: 'basic', + loop: 0, + image: 'alpine:3.9', + commands: ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(0); + }); + + test('Step:start(): loop with count 1 is fine.', async () => { + const step = new Step({ + name: 'basic', + loop: 1, + image: 'alpine:3.9', + commands: ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(1); + }); + + test('Step:start(): loop only takes numeric values.', async () => { + const step = new Step({ + name: 'basic', + loop: 'a', + image: 'alpine:3.9', + commands: ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(0); + }); + + test('Step:start(): loop quoted strings work.', async () => { + const step = new Step({ + name: 'basic', + loop: '2', + image: 'alpine:3.9', + commands: ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(2); + }); + + test('Step:start(): counter-variable can be set.', async () => { + const step = new Step({ + 'name': 'basic', + 'loop': '2', + 'counter-variable': 'COUNTER', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls[0][0].environment).toEqual([{name: 'COUNTER', value: '1'}]); + expect(mockRunStep.mock.calls[1][0].environment).toEqual([{name: 'COUNTER', value: '2'}]); + }); + + 
test('Step:start(): cannot use for-each and loop.', async () => { + const step = new Step({ + 'name': 'basic', + 'loop': '2', + 'for-each': 'a,b,c,d', + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + try { + await step.start(); + } catch (err) { + expect(err.message).toEqual('Cannot use both for-each and loop within the same step.'); + } + expect(mockRunStep.mock.calls.length).toEqual(0); + expect.assertions(2); + }); + + test('Step:start(): for-each will run CSV list.', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': 'a,b,c,d', + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(4); + }); + + test('Step:start(): for-each will accept a single value', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': 'a', + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(1); + }); + + test('Step:start(): for-each with no value will not run', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': '', + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(0); + }); + + test('Step:start(): for-each with no value will not run (as YAML)', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': [''], + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(0); + }); + + test('Step:start(): for-each can use counter-variable', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': ['a', 'b'], + 'element-variable': 'e', + 'counter-variable': 'c', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls[0][0].environment).toEqual([{name: 'e', value: 'a'}, {name: 'c', value: '1'}]); + expect(mockRunStep.mock.calls[1][0].environment).toEqual([{name: 'e', value: 'b'}, {name: 'c', value: '2'}]); + expect(mockRunStep.mock.calls.length).toEqual(2); + }); + + test('Step:start(): for-each will run YAML list.', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': ['a', 'b', 'c', 'd'], + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(4); + }); + + test('Step:start(): for-each will ignore trailing comma.', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': 'a,b,c,', + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(3); + }); + + + test('Step:start(): for-each must provide element-variable.', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': 'a,b,c,d', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + try { + await step.start(); + } catch (err) { + expect(err.message).toMatch('for-each must include an element-variable definition.'); + } + expect(mockRunStep.mock.calls.length).toEqual(0); + expect.assertions(2); + }); + + test('Step:start(): for-each will substitute and environment variable.', 
async () => { + emptyPipeline.getEnvironment().addEnvironmentVariable({name: 'LOOP', value: 'a,b,c'}); + const step = new Step({ + 'name': 'basic', + 'for-each': '$$LOOP', + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(3); + }); + + test('Step:start(): for-each with un-substituted variable will skip.', async () => { + const step = new Step({ + 'name': 'basic', + 'for-each': '$$LOOP', + 'element-variable': 'i', + 'image': 'alpine:3.9', + 'commands': ['hostname'], + }, pipelineRootNode); + + await step.start(); + expect(mockRunStep.mock.calls.length).toEqual(0); + }); +}); diff --git a/src/engine/pipeline/Steps.js b/src/engine/pipeline/Steps.js index 4c8a63f4..21a62201 100644 --- a/src/engine/pipeline/Steps.js +++ b/src/engine/pipeline/Steps.js @@ -1,16 +1,14 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {Provider, _} from '../../common/index.js'; +import {Logger, Provider, _} from '../../common/index.js'; import Pipeline from './Pipeline.js'; import PipelineNode from './PipelineNode.js'; import Step from './Step.js'; import StepsDecorators from './decorators/StepsDecorators.js'; -import log from 'winston'; - /** * @namespace Steps */ @@ -18,7 +16,6 @@ export default class Steps extends PipelineNode { /** * @class module:engine.Steps#constructor * @description Steps Constructor. - * * @param {object} definition - JSON representation of the Step group. * @param {string} parentItem - A reference to it's parent. */ @@ -26,7 +23,7 @@ export default class Steps extends PipelineNode { super(definition, parentItem); this.type = 'Steps'; this.descendants = []; - log.silly(`Steps: RAW payload: ${JSON.stringify(definition, ' ', 2)}`); + Logger.silly(`Steps: RAW payload: ${JSON.stringify(definition, ' ', 2)}`, this.getPipeline().getId()); if (definition.parallel) { this.parallel = definition.parallel; } else { @@ -41,19 +38,26 @@ export default class Steps extends PipelineNode { _.forEach(definition.pipeline, (item) => { if (item.steps) { - log.silly(`Steps: Pushing new Steps item: ${JSON.stringify(item.steps, ' ', 2)}`); + Logger.silly(`Steps: Pushing new Steps item: ${JSON.stringify(item.steps, ' ', 2)}`, this.getPipeline().getId()); this.descendants.push(new Steps(item.steps, this)); } else if (item.step !== undefined) { - log.silly(`Steps: Pushing new Step item: ${JSON.stringify(item.step, ' ', 2)}`); + Logger.silly(`Steps: Pushing new Step item: ${JSON.stringify(item.step, ' ', 2)}`, this.getPipeline().getId()); this.descendants.push(new Step(item.step, this)); } }); } + updateDecentant(oldChild, newChild) { + const index = this.descendants.indexOf(oldChild); + + if (index !== -1) { + this.descendants[index] = newChild; + } + } + /** * @function module:engine.Steps#isParallel * @description Returns the parallel property. - * * @returns {boolean} Whether the Steps descendants will run in parallel. */ isParallel() { @@ -63,7 +67,6 @@ export default class Steps extends PipelineNode { /** * @function module:engine.Steps#setStatus * @description Sets the status of all Step type descendants - * * @param {string} status - The status of the Step. 
*/ setStatus(status) { @@ -75,12 +78,11 @@ export default class Steps extends PipelineNode { /** * @function module:engine.Steps#start * @description Starts a step grouping. - * * @async */ async start() { while (this.getPipeline().getStatus() === Pipeline.STATUS.paused) { - log.info('Pipeline is paused, waiting for status to change...'); + Logger.info('Pipeline is paused, waiting for status to change...', this.getPipeline().getId()); await this.getPipeline().awaitStatusChange(); } @@ -91,7 +93,7 @@ export default class Steps extends PipelineNode { const descendants = this.descendants; if (_.find(descendants, {'name': currentBreakpoint})) { // Changing breakpoint scope to be the entire step. - log.warn(`Target step '${this.getPipeline().getBreakpoint()}' within parallel grouping. Running all grouped steps.`); + Logger.warn(`Target step ${this.getPipeline().getBreakpoint()} is within a parallel group. Running all grouped steps.`, this.getPipeline().getId()); this.getPipeline().setBreakpoint(this.name); } const finished = []; @@ -113,24 +115,24 @@ export default class Steps extends PipelineNode { promiseProvider = decorator(this.definition, this, promiseProvider); }); if (!_.isEmpty(this.name)) { - log.info(`Starting ${this.parallel ? 'parallel' : 'serial'} step group '${this.name}'`); + Logger.info(`Starting ${this.parallel ? 'parallel' : 'serial'} step group ${this.name}`, this.getPipeline().getId()); } await promiseProvider.get(); if (!_.isEmpty(this.name)) { - log.info(`Successfully completed step group '${this.definition.name}'`); + Logger.info(`Successfully completed step group ${this.definition.name}`, this.getPipeline().getId()); } } catch (error) { if (!_.isEmpty(this.name)) { - log.warn(`Failed on executing step group '${this.definition.name}'`); + Logger.warn(`Failed while executing step group ${this.definition.name}`, this.getPipeline().getId()); } throw error; } // pause on the execution of the next step if (this.getPipeline().getBreakpoint() == this.definition.name) { - log.info(`Pausing pipeline after running steps '${this.definition.name}'.`); + Logger.info(`Pausing pipeline after running steps ${this.definition.name}.`, this.getPipeline().getId()); this.getPipeline().pause(); } - log.debug(`Step group '${this.name}' is complete`); + Logger.debug(`Step group ${this.name} is complete`, this.getPipeline().getId()); } } } diff --git a/src/engine/pipeline/Steps.test.js b/src/engine/pipeline/Steps.test.js index c612dc00..84dc4d12 100644 --- a/src/engine/pipeline/Steps.test.js +++ b/src/engine/pipeline/Steps.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -59,9 +59,9 @@ describe('Steps.modifyPayload', () => { pipelineRootNode = emptyPipeline.getPipelineTreeRoot(); }); - test('Parallel steps get run in parallel.', async () => { - const steps = new Steps(_.cloneDeep(parallelGroup), pipelineRootNode); - expect(steps.isParallel()).toBeTruthy(); + test('serial steps get run in serial.', async () => { + const steps = new Steps(_.cloneDeep(serialGroup), pipelineRootNode); + expect(steps.isParallel()).toBeFalsy(); const descendants = steps.getDescendants(); expect(descendants.length).toBe(2); mockRunStep.mockImplementation(() => { @@ -69,16 +69,13 @@ describe('Steps.modifyPayload', () => { setTimeout(resolve, 1000); }); }); - const result = steps.start(); - expect(descendants.filter((descendant) => descendant.getStatus() === 'running').length).toBe(2); - jest.runAllTimers(); - await result; - expect(descendants.filter((descendant) => descendant.getStatus() === 'successful').length).toBe(2); + steps.start(); + expect(descendants.filter((descendant) => descendant.getStatus() === 'running').length).toBe(1); }); - test('serial steps get run in serial.', async () => { - const steps = new Steps(_.cloneDeep(serialGroup), pipelineRootNode); - expect(steps.isParallel()).toBeFalsy(); + test('Parallel steps get run in parallel.', async () => { + const steps = new Steps(_.cloneDeep(parallelGroup), pipelineRootNode); + expect(steps.isParallel()).toBeTruthy(); const descendants = steps.getDescendants(); expect(descendants.length).toBe(2); mockRunStep.mockImplementation(() => { @@ -86,8 +83,11 @@ describe('Steps.modifyPayload', () => { setTimeout(resolve, 1000); }); }); - steps.start(); - expect(descendants.filter((descendant) => descendant.getStatus() === 'running').length).toBe(1); + const result = steps.start(); + expect(descendants.filter((descendant) => descendant.getStatus() === 'running').length).toBe(2); + jest.advanceTimersByTime(2000); + await result; + expect(descendants.filter((descendant) => descendant.getStatus() === 'successful').length).toBe(2); }); test('Breakpoints in parallel block trigger all steps within that block.', async () => { diff --git a/src/engine/pipeline/decorators/ConditionDecorator.js b/src/engine/pipeline/decorators/ConditionDecorator.js index df04dbc9..0d486863 100644 --- a/src/engine/pipeline/decorators/ConditionDecorator.js +++ b/src/engine/pipeline/decorators/ConditionDecorator.js @@ -1,24 +1,23 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {Provider, _} from '../../../common/index.js'; +import {Logger, Provider, _} from '../../../common/index.js'; import Step from '../Step.js'; import YAML from 'js-yaml'; -import log from 'winston'; export default (payload, pipelineNode, promiseProvider) => { const environment = pipelineNode.getEnvironment(); if (!_.isNil(payload.when)) { _.check(_.isArray(payload.when), `On Step ${payload.name} 'when' has to be an array.`); - log.debug(`Checking 'when' conditions for ${payload.name}.`); - const condition = new StepCondition(payload, environment); + Logger.debug(`Checking 'when' conditions for ${payload.name}.`, pipelineNode?.getPipeline?.().getId()); + const condition = new StepCondition(payload, environment, pipelineNode?.getPipeline?.().getId()); condition.evaluate(); if (!condition.isValid()) { - log.debug(`Skipping ${payload.name} with ${condition.getNumFailedConditions()} failed conditions.`); + Logger.debug(`Skipping ${payload.name} with ${condition.getNumFailedConditions()} failed conditions.`, pipelineNode?.getPipeline?.().getId()); if (pipelineNode.getType() === 'Step' || pipelineNode.getType() === 'Steps') { pipelineNode.setStatus(Step.STATUS.skipped); // Steps will set all descendants } @@ -30,13 +29,11 @@ export default (payload, pipelineNode, promiseProvider) => { }; /** - * @function module:engine.ConditionDecorator#replace$$Values - * + * @function module:engine.ConditionDecorator#replace$$ * @param {object} obj - an object containing the key/value pairs to be replaced * @param {string} key - key of the obj containing values to be replaced * @param {object} environment - an object representing the passed environment * @param {string} defaultKey - default value for a given key - * * @returns {object} new object, contained replaced values */ function replace$$(obj, key, environment, defaultKey) { @@ -55,10 +52,8 @@ function replace$$(obj, key, environment, defaultKey) { /** * @function module:engine.ConditionDecorator#defaultExpansion - * * @param {object} condition - an object representing a conditional * @param {object} environment - an object representing the passed environment - * * @returns {object} an object representing the conditional */ function defaultExpansion(condition, environment) { @@ -71,9 +66,7 @@ function defaultExpansion(condition, environment) { /** * @function module:engine.ConditionDecorator#createSimpleEvaluator - * * @param {object} func - a function representing the evaluator - * * @returns {object} an object representing the simple evaluator */ function createSimpleEvaluator(func) { @@ -87,16 +80,75 @@ function createSimpleEvaluator(func) { /** * @function module:engine.ConditionDecorator#isSet - * * @param {object} value - an object to be checked - * * @returns {boolean} a bool representing whether value is set */ function isSet(value) { return !_.isNil(_.emptyToNull(value)); } -const EXISTS = { +/** + * @function module:engine.ConditionDecorator#includesAndNotEmpty + * @param {object} value - an object to be checked if other is an item of + * @param {object} other - an object to be checked if it is an item of other + * @returns {boolean} a bool representing whether other is an item of value + */ +function includesAndNotEmpty(value, other) { + // empty string is substring of every string, overriding this behavior + // empty string will not be included in non-empty strings + return 
(_.isEmpty(other.trim()) && !_.isEmpty(value.trim())) ? false : _.includes(value, other); +} + +/** + * @function module:engine.ConditionDecorator#regexMatch + * @param {string} value - an object to be matched + * @param {string} expressions - a delimited list of regular expressions to match + * @param {string} delimiter - the delimiter to use + * @returns {boolean} a boolean value whether the other comes within the regex pattern of value + */ +function regexMatch(value, expressions, delimiter) { + const regexPatterns = _.split(expressions, delimiter); + let regxp; + let matchString; + for (const pattern of regexPatterns) { + const trimmedPattern = pattern.trim(); + if (!_.isEmpty(trimmedPattern)) { + regxp = RegExp(trimmedPattern); + matchString = value.match(regxp); + } else { + return false; + } + if ( !_.isNull(regxp) && matchString && (matchString[0] == value) ) { + return true; + } + } + return false; +} + +const MatchEvaluator = { + expand: (condition, environment) => { + if (_.isNil(condition.delimiter)) { + condition.delimiter = ','; + } + return { + operator: condition.operator, + value: replace$$(condition, 'value', environment), + expressions: replace$$(condition, 'expressions', environment), + delimiter: replace$$(condition, 'delimiter', environment), + }; + }, + + isValid: (condition) => { + return regexMatch(condition.value, condition.expressions, condition.delimiter); + }, +}; + +const NotMatchEvaluator = { + expand: MatchEvaluator.expand, + isValid: _.negate(MatchEvaluator.isValid), +}; + +const ExistEvaluator = { expand: (condition, environment) => { return { operator: condition.operator, @@ -112,18 +164,17 @@ const EXISTS = { }, }; -const NOT_EXISTS = { - expand: EXISTS.expand, - isValid: _.negate(EXISTS.isValid), +const NotExistEvaluator = { + expand: ExistEvaluator.expand, + isValid: _.negate(ExistEvaluator.isValid), }; -const OREvaluator = { +const BooleanEvaluator = { expand: (condition, environment) => { return { operator: condition.operator, conditions: _.map(condition.conditions, (subCondition) => { const evaluator = EVALUATORS[subCondition.operator]; - if (!_.isNil(evaluator)) { return evaluator.expand(subCondition, environment); } else { @@ -132,6 +183,10 @@ const OREvaluator = { }), }; }, +}; + +const OREvaluator = { + __proto__: BooleanEvaluator, isValid: (condition) => { if (_.isEmpty(condition.conditions)) { @@ -152,6 +207,28 @@ const OREvaluator = { }, }; +const ANDEvaluator = { + __proto__: BooleanEvaluator, + + isValid: (condition) => { + if (_.isEmpty(condition.conditions)) { + return false; + } + + let passed = true; + + _.map(condition.conditions, (subCondition) => { + const evaluator = EVALUATORS[subCondition.operator]; + + if (!_.isNil(evaluator) && !evaluator.isValid(subCondition)) { + passed = false; + } + }); + + return passed; + }, +}; + const EVALUATORS = { EQ: createSimpleEvaluator(_.isEqual), NEQ: createSimpleEvaluator(_.negate(_.isEqual)), @@ -161,41 +238,45 @@ const EVALUATORS = { GT: createSimpleEvaluator(_.gt), LTE: createSimpleEvaluator(_.lte), LT: createSimpleEvaluator(_.lt), - INCLUDES: createSimpleEvaluator(_.includes), + INCLUDES: createSimpleEvaluator(includesAndNotEmpty), NOT_INCLUDES: createSimpleEvaluator(_.negate(_.includes)), STARTS_WITH: createSimpleEvaluator(_.startsWith), ENDS_WITH: createSimpleEvaluator(_.endsWith), - EXISTS, - NOT_EXISTS, + MATCHES: MatchEvaluator, + NOT_MATCHES: NotMatchEvaluator, + EXISTS: ExistEvaluator, + NOT_EXISTS: NotExistEvaluator, OR: OREvaluator, + AND: ANDEvaluator, }; export class StepCondition 
{ - constructor(step, environment) { + constructor(step, environment, pipelineId) { this.step = step; this.environment = environment; this.numFailedConditions = -1; + this.pipelineId = pipelineId; } isValidCondition(condition) { if (_.isNil(condition.operator)) { - log.error(`Step ${this.step.name} has condition with no operator.`); + Logger.error(`Step ${this.step.name} has condition with no operator.`, this.pipelineId); return false; } const evaluator = EVALUATORS[condition.operator]; if (_.isNil(evaluator)) { - log.error(`Step ${this.step.name} has condition with invalid operator ${condition.operator}. Valid operators are ${_.keys(EVALUATORS)}.`); + Logger.error(`Step ${this.step.name} has condition with invalid operator ${condition.operator}. Valid operators are ${_.keys(EVALUATORS)}.`, this.pipelineId); return false; } const expandedCondition = evaluator.expand(condition, this.environment); if (!evaluator.isValid(expandedCondition)) { - log.debug(`Following condition is not met for ${this.step.name}:`); - log.debug(`Condition:\n${YAML.dump(condition)}`); - log.debug(`Expanded Condition:\n${YAML.dump(expandedCondition)}`); + Logger.debug(`Following condition is not met for ${this.step.name}:`, this.pipelineId); + Logger.debug(`Condition:\n${YAML.dump(condition)}`, this.pipelineId); + Logger.debug(`Expanded Condition:\n${YAML.dump(expandedCondition)}`, this.pipelineId); return false; } diff --git a/src/engine/pipeline/decorators/ConditionDecorator.test.js b/src/engine/pipeline/decorators/ConditionDecorator.test.js index e85be189..b4c7176f 100644 --- a/src/engine/pipeline/decorators/ConditionDecorator.test.js +++ b/src/engine/pipeline/decorators/ConditionDecorator.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
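A step sketch (step and variable names here are hypothetical, not taken from the patch) of how the new MATCHES and AND operators can be combined in a step's `when` block; `expressions` is split on `delimiter` (comma by default) and, as implemented in regexMatch, each expression must match the entire value:

- step:
    name: conditional-step            # hypothetical step name
    image: alpine:3
    when:
      - operator: AND
        conditions:
          - operator: MATCHES
            value: $$BRANCH           # hypothetical pipeline variable
            expressions: 'main,release-\d+'
          - operator: EQ
            value: $$DEPLOY           # hypothetical pipeline variable
            other: 'true'
    commands:
      - echo "conditions met"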
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -188,6 +188,61 @@ describe('ConditionDecorator:', () => { expect(response).not.toEqual(promiseProvider); }); + test('Returns provided promiseProvider on AND isValid.', () => { + const payload = { + 'when': [ + { + 'operator': 'AND', + 'conditions': [ + { + 'operator': 'EQ', // will succeed + 'value': 'foo', + 'other': 'foo', + }, + { + 'operator': 'EQ', // will succeed + 'value': 'bar', + 'other': 'bar', + }, + ], + }, + ], + }; + const parentNode = {'getEnvironment': () => new Environment()}; + const promiseProvider = 'test'; + + const response = condition(payload, parentNode, promiseProvider); + expect(response).toEqual(promiseProvider); + }); + + test('Returns SimpleProvider Promise.resolve() on AND !isValid.', () => { + const payload = { + 'when': [ + { + 'operator': 'AND', + 'conditions': [ + { + 'operator': 'EQ', // will fail + 'value': 'foo', + 'other': 'bar', + }, + { + 'operator': 'EQ', // will succeed + 'value': 'bar', + 'other': 'bar', + }, + ], + }, + ], + }; + const parentNode = {'getEnvironment': () => new Environment(), 'getType': () => 'Step', 'setStatus': () => undefined}; + const promiseProvider = 'test'; + + const response = condition(payload, parentNode, promiseProvider); + expect(response).toBeInstanceOf(Provider.SimpleProvider); + expect(response).not.toEqual(promiseProvider); + }); + test('Returns provided promiseProvider on STARTS_WITH isValid.', () => { const payload = { 'when': [ @@ -257,4 +312,110 @@ describe('ConditionDecorator:', () => { expect(response).toBeInstanceOf(Provider.SimpleProvider); expect(response).not.toEqual(promiseProvider); }); + + test('Returns provided promiseProvider on MATCHES with CSV isValid.', () => { + const payload = { + 'when': [ + { + 'operator': 'MATCHES', + 'value': 'afalko/main-244', + 'expressions': 'foo,bar,afalko/main-\\d+', + }, + ], + }; + const parentNode = {'getEnvironment': () => new Environment(), 'getType': () => 'Step', 'setStatus': () => undefined}; + const promiseProvider = 'test'; + + const response = condition(payload, parentNode, promiseProvider); + expect(response).toEqual(promiseProvider); + }); + + test('Returns provided promiseProvider on MATCHES with space delimiter isValid.', () => { + const payload = { + 'when': [ + { + 'operator': 'MATCHES', + 'value': 'afalko/main-244', + 'expressions': 'foo bar afalko/main-\\d+', + 'delimiter': ' ', + }, + ], + }; + const parentNode = {'getEnvironment': () => new Environment(), 'getType': () => 'Step', 'setStatus': () => undefined}; + const promiseProvider = 'test'; + + const response = condition(payload, parentNode, promiseProvider); + expect(response).toEqual(promiseProvider); + }); + + test('Returns provided promiseProvider on MATCHES with wrong delimiter !isValid.', () => { + const payload = { + 'when': [ + { + 'operator': 'MATCHES', + 'value': 'afalko/main-244', + 'expressions': 'foo bar afalko/main-\\d+', + 'delimiter': '|', + }, + ], + }; + const parentNode = {'getEnvironment': () => new Environment(), 'getType': () => 'Step', 'setStatus': () => undefined}; + const promiseProvider = 'test'; + + const response = condition(payload, parentNode, promiseProvider); + expect(response).not.toEqual(promiseProvider); + }); + + test('Returns provided promiseProvider on MATCHES !isValid.', () => { + const payload = { + 'when': [ + { + 'operator': 'MATCHES', + 'value': 'foo,bar,release_*', + 'expressions': 'releases', + }, + 
], + }; + const parentNode = {'getEnvironment': () => new Environment(), 'getType': () => 'Step', 'setStatus': () => undefined}; + const promiseProvider = 'test'; + + const response = condition(payload, parentNode, promiseProvider); + expect(response).toBeInstanceOf(Provider.SimpleProvider); + expect(response).not.toEqual(promiseProvider); + }); + + test('Returns promiseProvider on MATCHES when valid .', () => { + const payload = { + 'when': [ + { + 'operator': 'MATCHES', + 'value': 'test/mainmainmain', + 'expressions': 'foo,bar,test/[main]*', + }, + ], + }; + const parentNode = {'getEnvironment': () => new Environment(), 'getType': () => 'Step', 'setStatus': () => undefined}; + const promiseProvider = 'test'; + + const response = condition(payload, parentNode, promiseProvider); + expect(response).toEqual(promiseProvider); + }); + + + test('Returns promiseProvider on NOT_MATCHES when valid .', () => { + const payload = { + 'when': [ + { + 'operator': 'NOT_MATCHES', + 'value': 'test/mainmainmain', + 'expressions': 'foo,bar,test/[main]*', + }, + ], + }; + const parentNode = {'getEnvironment': () => new Environment(), 'getType': () => 'Step', 'setStatus': () => undefined}; + const promiseProvider = 'test'; + + const response = condition(payload, parentNode, promiseProvider); + expect(response).not.toEqual(promiseProvider); + }); }); diff --git a/src/engine/pipeline/decorators/ContinueOnFailDecorator.js b/src/engine/pipeline/decorators/ContinueOnFailDecorator.js new file mode 100644 index 00000000..e1b14718 --- /dev/null +++ b/src/engine/pipeline/decorators/ContinueOnFailDecorator.js @@ -0,0 +1,23 @@ +/* +* Copyright (c) 2022, salesforce.com, inc. +* All rights reserved. +* SPDX-License-Identifier: BSD-3-Clause +* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause +*/ +import {Logger, Provider, _} from '../../../common/index.js'; + + +export default (payload, pipelineNode, promiseProvider) => { + if (!_.isNil(payload['continue-on-fail']) && payload['continue-on-fail'] === true) { + return Provider.fromFunction(async () => { + try { + await promiseProvider.get(); + } catch (error) { + Logger.warn(`Step '${payload.name}' failed, but it is continue-on-fail.`, pipelineNode?.getPipeline?.().getId()); + Logger.debug(`${error}`, pipelineNode?.getPipeline?.().getId()); + } + }); + } else { + return promiseProvider; + } +}; diff --git a/src/engine/pipeline/decorators/ContinueOnFailDecorator.test.js b/src/engine/pipeline/decorators/ContinueOnFailDecorator.test.js new file mode 100644 index 00000000..e429b026 --- /dev/null +++ b/src/engine/pipeline/decorators/ContinueOnFailDecorator.test.js @@ -0,0 +1,71 @@ +/* +* Copyright (c) 2022, salesforce.com, inc. +* All rights reserved. 
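A step sketch (hypothetical step names) of the `continue-on-fail` behavior added by ContinueOnFailDecorator: when the flag is true, a step failure is logged as a warning and execution moves on; when it is absent or false, the promise provider is returned unchanged and the failure still fails the pipeline:

- step:
    name: flaky-check                 # hypothetical step name
    image: alpine:3
    continue-on-fail: true
    commands:
      - exit 1                        # logged as a warning, pipeline continues
- step:
    name: still-runs                  # hypothetical step name
    image: alpine:3
    commands:
      - echo "pipeline continued"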
+* SPDX-License-Identifier: BSD-3-Clause +* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause +*/ +/* global describe, expect */ +import ContinueOnFailDecorator from './ContinueOnFailDecorator.js'; +import {Provider} from '../../../common/index.js'; + +describe('ConditionDecorator:', () => { + let continueOnFail; + + beforeEach(() => { + jest.clearAllMocks(); + continueOnFail = ContinueOnFailDecorator; + }); + + test('Step will skip continue-on-fail logic if not set.', () => { + const payload = { + 'name': 'test', + }; + const promiseProvider = 'test'; + + const response = continueOnFail(payload, undefined, promiseProvider); + expect(response).toEqual(promiseProvider); + }); + + test('Step will skip continue-on-fail logic if set to false.', () => { + const payload = { + 'name': 'test', + 'continue-on-fail': false, + }; + const promiseProvider = 'test'; + + const response = continueOnFail(payload, undefined, promiseProvider); + expect(response).toEqual(promiseProvider); + }); + + test('Step will continue-on-fail if no failure.', () => { + const payload = { + 'name': 'test', + 'continue-on-fail': true, + }; + + const testFunc = jest.fn().mockImplementation(() => { + return new Promise((resolve) => { + resolve(); + }); + }); + + continueOnFail(payload, undefined, new Provider.fromFunction(testFunc)).get(); + expect(testFunc.mock.calls.length).toEqual(1); + }); + + test('Step will continue-on-fail if failure.', () => { + const payload = { + 'name': 'test', + 'continue-on-fail': true, + }; + + const testFunc = jest.fn().mockImplementation(() => { + return new Promise((resolve, reject) => { + reject(new Error('Some Error')); + }); + }); + + continueOnFail(payload, undefined, new Provider.fromFunction(testFunc)).get(); + expect(testFunc.mock.calls.length).toEqual(1); + }); +}); diff --git a/src/engine/pipeline/decorators/LoopDecorator.js b/src/engine/pipeline/decorators/LoopDecorator.js deleted file mode 100644 index 5a8343cc..00000000 --- a/src/engine/pipeline/decorators/LoopDecorator.js +++ /dev/null @@ -1,24 +0,0 @@ -/* -* Copyright (c) 2020, salesforce.com, inc. -* All rights reserved. -* SPDX-License-Identifier: BSD-3-Clause -* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause -*/ -import {Provider, _} from '../../../common/index.js'; -import log from 'winston'; - -export default (payload, pipelineNode, promiseProvider) => { - if (!_.isNil(payload.loop)) { - _.check(_.isNumeric(payload.loop), `On Step ${payload.name} loop has to be an integer.`); - const loop = _.toInteger(payload.loop); - - return Provider.fromFunction(async () => { - for (let i = 0; i < loop; i++) { - log.info(`Iteration ${i + 1}/${loop} of ${payload.name}.`); - await promiseProvider.get(); - } - }); - } - - return promiseProvider; -}; diff --git a/src/engine/pipeline/decorators/LoopDecorator.test.js b/src/engine/pipeline/decorators/LoopDecorator.test.js deleted file mode 100644 index c9fe89f2..00000000 --- a/src/engine/pipeline/decorators/LoopDecorator.test.js +++ /dev/null @@ -1,48 +0,0 @@ -/* -* Copyright (c) 2020, salesforce.com, inc. -* All rights reserved. 
-* SPDX-License-Identifier: BSD-3-Clause -* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause -*/ -/* global describe, expect */ -import LoopDecorator from './LoopDecorator.js'; -import {Provider} from '../../../common/index.js'; - -describe('LoopDecorator tests:', () => { - let loop; - const providedFunction = jest.fn(); - - beforeEach(() => { - jest.clearAllMocks(); - loop = LoopDecorator; - }); - - test('LoopDecorator: Loops on length.', async () => { - const response = loop({loop: 3}, undefined, Provider.fromFunction(providedFunction)); - await response.get(); - expect(providedFunction.mock.calls.length).toEqual(3); - }); - - test('LoopDecorator: Returns provider if no loop.', async () => { - const provider = Provider.fromFunction(providedFunction); - const response = loop({}, undefined, provider); - expect(provider).toBe(response); - }); - - test('LoopDecorator: No loop executes once.', async () => { - const response = loop({}, undefined, Provider.fromFunction(providedFunction)); - await response.get(); - expect(providedFunction.mock.calls.length).toEqual(1); - }); - - test('LoopDecorator: Loop only takes numeric values.', async () => { - loop({loop: 'twice'}, undefined, Provider.fromFunction(providedFunction)); - expect(providedFunction.mock.calls.length).toEqual(0); - }); - - test('LoopDecorator: Quoted strings work.', async () => { - const response = loop({loop: '2'}, undefined, Provider.fromFunction(providedFunction)); - await response.get(); - expect(providedFunction.mock.calls.length).toEqual(2); - }); -}); diff --git a/src/engine/pipeline/decorators/ModifyDecorator.js b/src/engine/pipeline/decorators/ModifyDecorator.js index acf55113..73d946e7 100644 --- a/src/engine/pipeline/decorators/ModifyDecorator.js +++ b/src/engine/pipeline/decorators/ModifyDecorator.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -14,11 +14,9 @@ export default (payload, pipelineNode, promiseProvider) => { /** * Returns a step with additional properties provided by cli - * - * @function module:cix-yaml.Payload#getPayloadPromise + * @function module:engine.ModifyDecorator#modifyStep * @param {object} payload - json Object to add properties to * @param {object} environment - object of environment variables - * * @returns {object} json Object representation of a payload */ export function modifyStep(payload, environment) { @@ -32,6 +30,10 @@ export function modifyStep(payload, environment) { } payload.environment = _.map(payload.environment, (env) => { + if (!env.value) { + // if value is undefined, set it to $$env.name + env.value = '$$'.concat(env.name); + } let replacementValue = environment.replace$$Values(env.value); if (env.value && typeof env.value === 'string' && env.value.includes('$$') && env.value === replacementValue) { @@ -67,6 +69,10 @@ export function modifyStep(payload, environment) { payload.timeout = environment.replace$$Values(payload.timeout); } + if (payload['for-each']) { + payload['for-each'] = environment.replace$$Values(payload['for-each']); + } + payload.volumes = _.map(payload.volumes, (volume) => environment.replace$$Values(volume)); return payload; diff --git a/src/engine/pipeline/decorators/ModifyDecorator.test.js b/src/engine/pipeline/decorators/ModifyDecorator.test.js index 28c76265..69d567ed 100644 --- a/src/engine/pipeline/decorators/ModifyDecorator.test.js +++ b/src/engine/pipeline/decorators/ModifyDecorator.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -64,6 +64,40 @@ describe('ModifyDecorator:', () => { expect(payload).toEqual(expected); }); + test('Payload is modified with environment if environment only specifies name', () => { + const payload = { + 'environment': [ + { + 'name': 'foo', + }, + { + 'name': 'bar', + 'default': 'default_bar', + }, + ], + }; + const expected = { + 'environment': [ + { + 'name': 'foo', + 'value': 'foo', + }, + { + 'name': 'bar', + 'value': 'default_bar', // default provided above + }, + ], + 'volumes': [], + }; + const environment = new Environment(); + environment.addEnvironmentVariable({'name': 'foo', 'value': 'foo'}); + + const parentNode = {'getEnvironment': () => environment}; + + modify(payload, parentNode, undefined); + expect(payload).toEqual(expected); + }); + test('Payload is modified with retry substitutions.', () => { const payload = { 'retry': { diff --git a/src/engine/pipeline/decorators/RetryDecorator.js b/src/engine/pipeline/decorators/RetryDecorator.js index 8361592e..ac52aae0 100644 --- a/src/engine/pipeline/decorators/RetryDecorator.js +++ b/src/engine/pipeline/decorators/RetryDecorator.js @@ -1,11 +1,10 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
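A step sketch (hypothetical variable names) of the ModifyDecorator change that lets an environment entry omit `value`: the value is then resolved as $$<name> from the pipeline environment, falling back to `default` when the variable is not set:

- step:
    name: env-passthrough             # hypothetical step name
    image: alpine:3
    environment:
      - name: FOO                     # resolves to $$FOO from the pipeline environment
      - name: BAR
        default: fallback_value       # used when BAR is not defined
    commands:
      - echo "$FOO $BAR"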
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {Provider, _} from '../../../common/index.js'; -import log from 'winston'; +import {Logger, Provider, _} from '../../../common/index.js'; export default (payload, pipelineNode, promiseProvider) => { @@ -22,8 +21,8 @@ export default (payload, pipelineNode, promiseProvider) => { await promiseProvider.get(); break; } catch (error) { - log.warn(`Iteration ${i + 1}/${iterations} for ${payload.name} failed.`); - log.debug(`${error}`); + Logger.warn(`Iteration ${i + 1}/${iterations} for ${payload.name} failed.`, pipelineNode?.getPipeline?.().getId()); + Logger.debug(`${error}`, pipelineNode?.getPipeline?.().getId()); if ((i + 1) !== iterations) { await new Promise((resolve) => setTimeout(resolve, backoff * 1000)); } else { diff --git a/src/engine/pipeline/decorators/RetryDecorator.test.js b/src/engine/pipeline/decorators/RetryDecorator.test.js index 0b8b5854..cc91b035 100644 --- a/src/engine/pipeline/decorators/RetryDecorator.test.js +++ b/src/engine/pipeline/decorators/RetryDecorator.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/pipeline/decorators/StepDecorators.js b/src/engine/pipeline/decorators/StepDecorators.js index bb9c8716..dc5cc0f6 100644 --- a/src/engine/pipeline/decorators/StepDecorators.js +++ b/src/engine/pipeline/decorators/StepDecorators.js @@ -1,11 +1,11 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ import ConditionDecorator from './ConditionDecorator.js'; -import LoopDecorator from './LoopDecorator.js'; +import ContinueOnFailDecorator from './ContinueOnFailDecorator.js'; import ModifyDecorator from './ModifyDecorator.js'; import RetryDecorator from './RetryDecorator.js'; import TimeoutDecorator from './TimeoutDecorator.js'; @@ -23,6 +23,6 @@ export default [ ValidateDecorator, TimeoutDecorator, RetryDecorator, - LoopDecorator, ConditionDecorator, + ContinueOnFailDecorator, ]; diff --git a/src/engine/pipeline/decorators/StepsDecorators.js b/src/engine/pipeline/decorators/StepsDecorators.js index bff36b25..6a9b4f20 100644 --- a/src/engine/pipeline/decorators/StepsDecorators.js +++ b/src/engine/pipeline/decorators/StepsDecorators.js @@ -1,11 +1,10 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ import ConditionDecorator from './ConditionDecorator.js'; -import LoopDecorator from './LoopDecorator.js'; import RetryDecorator from './RetryDecorator.js'; import TimeoutDecorator from './TimeoutDecorator.js'; @@ -19,6 +18,5 @@ import TimeoutDecorator from './TimeoutDecorator.js'; export default [ TimeoutDecorator, RetryDecorator, - LoopDecorator, ConditionDecorator, ]; diff --git a/src/engine/pipeline/decorators/TimeoutDecorator.js b/src/engine/pipeline/decorators/TimeoutDecorator.js index 5c6872e7..ff1b5021 100644 --- a/src/engine/pipeline/decorators/TimeoutDecorator.js +++ b/src/engine/pipeline/decorators/TimeoutDecorator.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -15,8 +15,12 @@ export default (payload, pipelineNode, promiseProvider) => { setTimeout(() => { reject(new ExecutionError(`Step ${payload.name || 'NoName'} timed out after ${payload.timeout} seconds`)); }, _.toInteger(payload.timeout) * 1000); - const result = await promiseProvider.get(); - resolve(result); + try { + const result = await promiseProvider.get(); + resolve(result); + } catch (err) { + reject(err); + } }); }); } diff --git a/src/engine/pipeline/decorators/TimeoutDecorator.test.js b/src/engine/pipeline/decorators/TimeoutDecorator.test.js index 22e4e027..45cffd99 100644 --- a/src/engine/pipeline/decorators/TimeoutDecorator.test.js +++ b/src/engine/pipeline/decorators/TimeoutDecorator.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -41,4 +41,17 @@ describe('TimeoutDecorator tests:', () => { const response = timeout(payload, undefined, promiseProvider).get(); await expect(response).rejects.toEqual(new Error('Step NoName timed out after 1 seconds')); }); + + test('Function provided terminates immediately if there is an exception.', async () => { + const payload = {'timeout': '1'}; // 1 second timeout + const timeoutFunc = () => { + return new Promise((resolve, reject) => setTimeout(() => { + reject(new Error('Broken promises!')); + }, 20)); // will resolve after 20 milliseconds + }; + const promiseProvider = new Provider.fromFunction(timeoutFunc); + + const response = timeout(payload, undefined, promiseProvider).get(); + await expect(response).rejects.not.toEqual(new Error('Step NoName timed out after 1 seconds')); + }); }); diff --git a/src/engine/pipeline/decorators/ValidateDecorator.js b/src/engine/pipeline/decorators/ValidateDecorator.js index 14acedff..b29f48ec 100644 --- a/src/engine/pipeline/decorators/ValidateDecorator.js +++ b/src/engine/pipeline/decorators/ValidateDecorator.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/pipeline/decorators/ValidateDecorator.test.js b/src/engine/pipeline/decorators/ValidateDecorator.test.js index 47be8838..a232e7d8 100644 --- a/src/engine/pipeline/decorators/ValidateDecorator.test.js +++ b/src/engine/pipeline/decorators/ValidateDecorator.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/plugin/Plugin.js b/src/engine/plugin/Plugin.js index 38c50d58..c29ebd91 100644 --- a/src/engine/plugin/Plugin.js +++ b/src/engine/plugin/Plugin.js @@ -1,12 +1,11 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {PluginError, _} from '../../common/index.js'; +import {Logger, PluginError, _} from '../../common/index.js'; import ValidateService from '../ValidateService.js'; -import log from 'winston'; import {v4 as uuidv4} from 'uuid'; /** @@ -20,7 +19,7 @@ export default class Plugin { constructor(pluginSpec) { // Create an ID this.id = uuidv4(); - log.debug(`Generated ID for plugin: ${this.id}`); + Logger.debug(`Generated ID for plugin: ${this.id}`); if (pluginSpec && pluginSpec.pluginPath) { this.path = pluginSpec.pluginPath; } else { @@ -43,9 +42,9 @@ export default class Plugin { if (!_.isEmpty(errors)) { _.forEach(errors, (error) => { if (error.message) { - log.warn(`Error validating plugin schema: ${error.message}: ${(error.params) ? JSON.stringify(error.params) : ''} at ${error.dataPath}`); + Logger.warn(`Error validating plugin schema: ${error.message}: ${(error.params) ? JSON.stringify(error.params) : ''} at ${error.dataPath}`); } else { - log.warn(`${JSON.stringify(error)}`); + Logger.warn(`${JSON.stringify(error)}`); } }); throw new PluginError('Found errors loading and validating plugin.'); @@ -70,17 +69,17 @@ export default class Plugin { */ async runPreprocessor(exec, pipelineDefinition) { if (this.preprocessor) { - log.info(`Running preprocessor ${this.preprocessor.image} for plugin ${this.getId()}`); + Logger.info(`Running preprocessor ${this.preprocessor.image} for plugin ${this.getId()}`); const result = await exec.runPreprocessor(this.preprocessor.image, pipelineDefinition); if (result.status == 0) { pipelineDefinition = result.output; } else { - log.error(`The preprocessor has returned a non-zero exit code (${result.status}):\n${result.output}`); + Logger.error(`The preprocessor has returned a non-zero exit code (${result.status}):\n${result.output}`); throw new PluginError(`The preprocessor has returned a non-zero exit code (${result.status}).`); } } else { - log.debug(`Skipping preprocessor for plugin ${this.getId()}.`); + Logger.debug(`Skipping preprocessor for plugin ${this.getId()}.`); } return pipelineDefinition; } diff --git a/src/engine/plugin/Plugin.test.js b/src/engine/plugin/Plugin.test.js index 09ae4549..fd6912a5 100644 --- a/src/engine/plugin/Plugin.test.js +++ b/src/engine/plugin/Plugin.test.js @@ -1,14 +1,13 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. 
* All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* global jest, describe, expect */ -import {PluginError, _} from '../../common/index.js'; +import {Logger, PluginError, _} from '../../common/index.js'; import {DockerExec} from '../../docker/index.js'; import Plugin from './Plugin.js'; -import log from 'winston'; jest.autoMockOff(); @@ -64,7 +63,7 @@ describe('Plugin tests', () => { jest.spyOn(_, 'fetch').mockResolvedValue('version: 2.4\nkind: Plugin'); const plugin = new Plugin({pluginPath: '/blah'}); await plugin.loadAndValidate(); - const logDebug = jest.spyOn(log, 'debug').mockImplementation(); + const logDebug = jest.spyOn(Logger, 'debug').mockImplementation(); await plugin.runPreprocessor(); expect(logDebug.mock.calls[0][0]).toEqual(expect.stringMatching(/Skipping preprocessor/)); }); @@ -84,7 +83,7 @@ describe('Plugin tests', () => { }); test('runPreprocessor throws PluginError with result.output when exec.runPreprocessor returns a non-0 status', async () => { - const logError = jest.spyOn(log, 'error').mockImplementation(); + const logError = jest.spyOn(Logger, 'error').mockImplementation(); jest.spyOn(_, 'fetch').mockResolvedValue('version: 2.4\nkind: Plugin\npreprocessor:\n image: test:latest'); const plugin = new Plugin({pluginPath: '/blah'}); await plugin.loadAndValidate(); diff --git a/src/engine/runtime/AbstractExec.js b/src/engine/runtime/AbstractExec.js index f63da0e6..18412787 100644 --- a/src/engine/runtime/AbstractExec.js +++ b/src/engine/runtime/AbstractExec.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/runtime/ExecFactory.js b/src/engine/runtime/ExecFactory.js index 8b7e5723..44a738e5 100644 --- a/src/engine/runtime/ExecFactory.js +++ b/src/engine/runtime/ExecFactory.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/engine/validate/JsonSchemas.js b/src/engine/validate/JsonSchemas.js index 9f250bb9..f05457e5 100644 --- a/src/engine/validate/JsonSchemas.js +++ b/src/engine/validate/JsonSchemas.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -8,8 +8,8 @@ * @namespace JsonSchemas */ -export const masterSchema = { - '$id': 'master.json', +export const mainSchema = { + '$id': 'main.json', 'type': 'object', 'properties': { 'version': { @@ -63,10 +63,15 @@ export const stepSchema = { 'properties': { 'name': { 'type': ['string', 'number'], + 'maxLength': 64, + 'pattern': '^[a-zA-Z0-9][a-zA-Z0-9_.-]*$', }, 'image': { 'type': 'string', }, + 'user': { + 'type': ['string', 'number'], + }, 'pull-policy': { 'type': 'string', }, @@ -79,15 +84,18 @@ export const stepSchema = { 'arguments': { 'type': 'array', 'items': { - 'type': 'string', + 'type': ['string', 'number', 'boolean'], }, }, 'commands': { 'type': 'array', 'items': { - 'type': 'string', + 'type': ['string', 'number', 'boolean'], }, }, + 'commands-output': { + 'type': 'string', + }, 'commands-shell': { 'type': 'string', }, @@ -143,6 +151,18 @@ export const stepSchema = { 'type': ['integer', 'string'], 'minimum': 0, }, + 'counter-variable': { + 'type': 'string', + }, + 'for-each': { + 'type': ['string', 'array'], + }, + 'parallel': { + 'type': 'boolean', + }, + 'element-variable': { + 'type': 'string', + }, 'when': { 'type': 'array', 'items': { @@ -182,13 +202,13 @@ export const environmentSchema = { 'type': 'string', }, 'value': { - 'type': ['string', 'number'], + 'type': ['string', 'number', 'boolean'], }, 'default': { - 'type': ['string', 'number'], + 'type': ['string', 'number', 'boolean'], }, }, - 'required': ['name', 'value'], + 'required': ['name'], }; export const importSchema = { @@ -261,6 +281,12 @@ export const whenSchema = { 'other': { 'type': ['string', 'number', 'boolean'], }, + 'expressions': { + 'type': ['string', 'number', 'boolean'], + }, + 'delimiter': { + 'type': ['string', 'number', 'boolean'], + }, 'other-default': { 'type': ['string', 'number', 'boolean'], }, diff --git a/src/engine/validate/Validate.js b/src/engine/validate/Validate.js index a9fdf331..361036f9 100644 --- a/src/engine/validate/Validate.js +++ b/src/engine/validate/Validate.js @@ -1,10 +1,10 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
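A step sketch (illustrative values only) exercising several of the schema additions above: `user` as a uid:gid pair, `commands-output`, and the `for-each`/`element-variable`/`parallel` fields; the step name keeps to the new 64-character limit and `^[a-zA-Z0-9][a-zA-Z0-9_.-]*$` pattern:

- step:
    name: fan-out                     # hypothetical step name
    image: alpine:3
    user: '1000:1000'                 # illustrative uid:gid
    commands-output: minimal
    for-each: 'a,b,c'                 # a delimited string or a YAML list
    element-variable: ELEMENT
    parallel: true
    commands:
      - echo "$ELEMENT"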
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {environmentSchema, importSchema, masterSchema, pipelineSchema, pluginSchema, preprocessorSchema, registrySchema, stepSchema, stepsSchema, whenSchema} from './JsonSchemas.js'; +import {environmentSchema, importSchema, mainSchema, pipelineSchema, pluginSchema, preprocessorSchema, registrySchema, stepSchema, stepsSchema, whenSchema} from './JsonSchemas.js'; import Ajv from 'ajv'; /** @@ -14,7 +14,6 @@ class Validate { /** * @function module:engine.Validate#validatePipelineSchema * @param {string} json - json Object to validate - * * @returns {Array} validation errors */ validatePipelineSchema(json) { @@ -22,18 +21,18 @@ class Validate { // TODO: Fix this Ajs 7.0.3 behavior when Jest supports ESModules natively without Babel if (Ajv.default) { ajv = new Ajv.default({ - schemas: [masterSchema, environmentSchema, importSchema, pipelineSchema, registrySchema, stepSchema, stepsSchema, whenSchema], + schemas: [mainSchema, environmentSchema, importSchema, pipelineSchema, registrySchema, stepSchema, stepsSchema, whenSchema], allErrors: true, allowUnionTypes: true, }); } else { ajv = new Ajv({ - schemas: [masterSchema, environmentSchema, importSchema, pipelineSchema, registrySchema, stepSchema, stepsSchema, whenSchema], + schemas: [mainSchema, environmentSchema, importSchema, pipelineSchema, registrySchema, stepSchema, stepsSchema, whenSchema], allErrors: true, allowUnionTypes: true, }); } - const validate = ajv.getSchema('master.json'); + const validate = ajv.getSchema('main.json'); validate(json); @@ -43,7 +42,6 @@ class Validate { /** * @function module:engine.Validate#validatePipelineSchema * @param {string} json - json Object to validate - * * @returns {Array} validation errors */ validatePluginSchema(json) { diff --git a/src/engine/validate/Validate.test.js b/src/engine/validate/Validate.test.js index 3a52b326..7b0438c2 100644 --- a/src/engine/validate/Validate.test.js +++ b/src/engine/validate/Validate.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -32,24 +32,24 @@ describe('Validate tests', () => { }); test('Missing parameter results in error', () => { - expect(Validate.validatePipelineSchema()).toMatchObject([{'message': 'should be object'}]); + expect(Validate.validatePipelineSchema()).toMatchObject([{'message': 'must be object'}]); }); test('Passing in a JSON string should result in an error.', () => { expect(Validate.validatePipelineSchema('{"version":2.1,"pipeline":[{"step":{"name":"basic","image":"alpine:3.9","commands":["hostname"]}}]}')) - .toMatchObject([{'message': 'should be object'}]); + .toMatchObject([{'message': 'must be object'}]); }); test('Missing version results in error', () => { const copy = _.cloneDeep(validPipeline); delete copy.version; - expect(Validate.validatePipelineSchema(copy)).toMatchObject([{'message': 'should have required property \'version\''}]); + expect(Validate.validatePipelineSchema(copy)).toMatchObject([{'message': 'must have required property \'version\''}]); }); test('Missing name results in error', () => { const copy = _.cloneDeep(validPipeline); delete copy.pipeline[0].step.name; - expect(Validate.validatePipelineSchema(copy)).toMatchObject([{'message': 'should have required property \'name\''}]); + expect(Validate.validatePipelineSchema(copy)).toMatchObject([{'message': 'must have required property \'name\''}]); }); test('Can accumulate more than one error.', () => { @@ -58,4 +58,16 @@ describe('Validate tests', () => { delete copy.pipeline[0].step.name; expect(Validate.validatePipelineSchema(copy).length).toBe(2); }); + + test('Space in name causes error.', () => { + const copy = _.cloneDeep(validPipeline); + copy.pipeline[0].step.name = 'invalid name'; + expect(Validate.validatePipelineSchema(copy)).toMatchObject([{'message': 'must match pattern \"^[a-zA-Z0-9][a-zA-Z0-9_.-]*$\"'}]); + }); + + test('Really long name causes error.', () => { + const copy = _.cloneDeep(validPipeline); + copy.pipeline[0].step.name = 'this-is-a-very-long-step-name-it-should-not-be-over-64-characters'; + expect(Validate.validatePipelineSchema(copy)).toMatchObject([{'message': 'must NOT have more than 64 characters'}]); + }); }); diff --git a/src/index.js b/src/index.js index 97adb394..1a50c93e 100755 --- a/src/index.js +++ b/src/index.js @@ -1,3 +1,4 @@ #!/usr/bin/env node --no-warnings --experimental-modules import './cli/index.js'; + diff --git a/src/server/Server.js b/src/server/Server.js index d6ada6c3..4cdc0826 100644 --- a/src/server/Server.js +++ b/src/server/Server.js @@ -1,17 +1,15 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {ServerError, _} from '../common/index.js'; +import {Logger, ServerError, _} from '../common/index.js'; import dirname from './dirname.cjs'; import express from 'express'; import fs from 'fs'; -import helmet from 'helmet'; import http from 'http'; import indexRouter from './routes/index.js'; -import log from 'winston'; import path from 'path'; import pipelineRouter from './routes/api/pipeline.js'; import pluginRouter from './routes/api/plugin.js'; @@ -27,14 +25,11 @@ export default class Server { port = 10030; } - log.debug('Starting CIX Server on port: ' + port); + Logger.debug('Starting CIX Server on port: ' + port); // Create app context this.loadRoutes(); - // secure express server - this.app.set(helmet()); - // Get port from environment and store in Express. this.port = this.normalizePort(port); this.app.set('port', this.port); @@ -51,7 +46,7 @@ export default class Server { async block() { // awaiting on a promise that will not resolve... - await new Promise(() => log.debug('Control-C to terminate...')); + await new Promise(() => Logger.debug('Control-C to terminate...')); } async close() { @@ -97,12 +92,12 @@ export default class Server { this.app.use((error, req, res, next) => { // render the error page if (res._headerSent) { - log.debug('Headers already sent. Unable to send error response to client for pipeline failure.'); + Logger.debug('Headers already sent. Unable to send error response to client for pipeline failure.'); } else { const status = error.status || error.errorCode || 500; - log.error(`${status} handling error ${error.message}`); + Logger.error(`${status} handling error ${error.message}`); if (error.stack) { - log.debug(`${error.stack}`); + Logger.debug(`${error.stack}`); } res.status(status); @@ -116,9 +111,7 @@ export default class Server { /** * @function module:server.Server#normalizePort - * * @param {object} port - port to normalize - * * @returns {number} normalized port */ normalizePort(port) { @@ -137,7 +130,6 @@ export default class Server { /** * @function module:server.Server#onError * @description Event listener for HTTP server "error" event. 
- * * @param {object} error - error object */ onError(error) { @@ -154,7 +146,6 @@ export default class Server { /** * @function module:server.Server#expressLogger - * * @param {object} req - request object * @param {object} res - response object * @param {object} next - function to be executed after logger @@ -167,10 +158,10 @@ export default class Server { if (remoteAddress == '1') { remoteAddress = '127.0.0.1'; } - log.debug(`${remoteAddress} ${req.method} ${req.originalUrl} received`); - log.silly(`Express Logger Message: ${JSON.stringify(req.body, null, 4)}`); + Logger.debug(`${remoteAddress} ${req.method} ${req.originalUrl} received`); + Logger.silly(`Express Logger Message: ${JSON.stringify(req.body, null, 4)}`); res.on('finish', function() { - log.debug(`${remoteAddress} ${req.method} ${req.originalUrl} ${res.statusCode}`); + Logger.debug(`${remoteAddress} ${req.method} ${req.originalUrl} ${res.statusCode}`); }); next(); } @@ -180,6 +171,6 @@ export default class Server { */ onListening() { const addr = this.server.address(); - log.info(`Swagger ready at http://localhost:${addr.port}/api-docs`); + Logger.info(`Swagger ready at http://localhost:${addr.port}/api-docs`); } } diff --git a/src/server/Server.test.js b/src/server/Server.test.js index e735c7d2..846ea3a3 100644 --- a/src/server/Server.test.js +++ b/src/server/Server.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -10,7 +10,7 @@ import {ServerError} from '../common/index.js'; import request from 'supertest'; // Testing only routes here which are not covered by specific route api classes -describe('Server.loadRoutes', async () => { +describe('Server.loadRoutes', () => { let server; const port = Math.floor(Math.random() * 64536) + 1000; diff --git a/src/server/index.js b/src/server/index.js index 254e0556..fe6d004e 100644 --- a/src/server/index.js +++ b/src/server/index.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/server/routes/api/asyncLogHandler.js b/src/server/routes/api/asyncLogHandler.js index 46963e82..7ccfe66d 100644 --- a/src/server/routes/api/asyncLogHandler.js +++ b/src/server/routes/api/asyncLogHandler.js @@ -4,14 +4,13 @@ * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -import {PipelineService} from '../../../engine/index.js'; +import {Logger} from '../../../common/index.js'; import {pipeline} from 'stream'; /** * This method does two things. First it wraps async functions sending errors to the * express error handler. Secondly it sets up and closes the log stream for compatible * requests. 
- * * @param {Function} callback - function to wrap with log handler * @returns {Function} - the function to return to the express router */ @@ -19,9 +18,7 @@ export default function asyncLogHandler(callback) { return function(req, res, next) { pipeLogStream(req, res); // catch(next) sends the error to the error handler - callback(req, res, next).finally(() => { - destroyLogStream(req); - }).catch(next); + callback(req, res, next).catch(next); }; } @@ -31,15 +28,6 @@ export default function asyncLogHandler(callback) { */ function pipeLogStream(req, res) { if (req.params['pipelineId'] && req.query['remoteLogs'] && req.query['remoteLogs'] == 'true') { - pipeline(PipelineService.getPipeline(req.params['pipelineId']).generateLogStream(), res, () => {}); - } -} - -/** - * @param {object} req - express request object - */ -function destroyLogStream(req) { - if (req.params['pipelineId'] && req.query['remoteLogs'] && req.query['remoteLogs'] == 'true') { - PipelineService.getPipeline(req.params['pipelineId']).destroyLogStream(); + pipeline(Logger.getPipelineLogger(req.params['pipelineId']).getRemoteReadableStream(), res, () => {}); } } diff --git a/src/server/routes/api/environment.js b/src/server/routes/api/environment.js index 138b1f52..b799a5e5 100644 --- a/src/server/routes/api/environment.js +++ b/src/server/routes/api/environment.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/server/routes/api/environment.test.js b/src/server/routes/api/environment.test.js index 5f0255ac..d75f934f 100644 --- a/src/server/routes/api/environment.test.js +++ b/src/server/routes/api/environment.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -8,7 +8,7 @@ import Server from '../../Server.js'; import request from 'supertest'; -describe('environment', async () => { +describe('environment', () => { let server; let pipelineId; const port = Math.floor(Math.random() * 64536) + 1000; @@ -76,6 +76,6 @@ describe('environment', async () => { test('getting all environment variables works as expected', async () => { const response = await request(server.app).get(`/api/pipeline/${pipelineId}/environment`); - expect(response.text).toBe('[\"test\",\"CIX_HOSTNAME\",\"CIX_EXECUTION_ID\",\"test2\",\"test3\"]'); + expect(response.text).toBe('[\"test\",\"CIX_HOSTNAME\",\"CIX_SERVER_PORT\",\"CIX_EXECUTION_ID\",\"test2\",\"test3\"]'); }); }); diff --git a/src/server/routes/api/pipeline.js b/src/server/routes/api/pipeline.js index 4285a0ae..f2dae234 100644 --- a/src/server/routes/api/pipeline.js +++ b/src/server/routes/api/pipeline.js @@ -1,15 +1,15 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ /* eslint-disable jsdoc/check-tag-names */ +import {Logger} from '../../../common/index.js'; import {PipelineService} from '../../../engine/index.js'; import asyncLogHandler from './asyncLogHandler.js'; import environment from './environment.js'; import express from 'express'; -import log from 'winston'; const pipeline = express.Router(); pipeline.use(environment); @@ -120,13 +120,18 @@ pipeline.post('/', asyncLogHandler(async (req, res) => { if (!pipelineSpec.environment) { pipelineSpec.environment = []; } - // Grab the host header, remove the port - let host = req.headers.host; - if (host.includes(':')) { - host = host.split(':')[0]; + // extract hostname and port from request + let host = 'locahost'; + let port = '10030'; + if (req.headers.host.includes(':')) { + host = req.headers.host.split(':')[0]; } - // Add the host as 'CIX_HOSTNAME' for the pipeline + if (req.headers.host.includes(':')) { + port = req.headers.host.split(':')[1]; + } + // Add the host as 'CIX_HOSTNAME' and 'CIX_SERVER_PORT' for the pipeline pipelineSpec.environment.push({name: 'CIX_HOSTNAME', value: host, type: 'internal'}); + pipelineSpec.environment.push({name: 'CIX_SERVER_PORT', value: port, type: 'internal'}); res.send(await PipelineService.addPipeline(pipelineSpec)); })); @@ -321,7 +326,10 @@ pipeline.post('/:pipelineId/alias/:pipelineAlias', asyncLogHandler(async (req, r * in: "path" * description: "Pipeline ID" * required: true - * type: string + * - name: "chainedStatus" + * in: "query" + * description: "Returns the status of chained pipeline" + * type: boolean * responses: * 200: * description: "successful operation" @@ -337,7 +345,11 @@ pipeline.post('/:pipelineId/alias/:pipelineAlias', asyncLogHandler(async (req, r * type: string */ pipeline.get('/:pipelineId/status', asyncLogHandler(async (req, res) => { - res.send({status: await PipelineService.getPipeline(req.params['pipelineId']).getStatus()}); + if (req.query.chainedStatus !== undefined && req.query.chainedStatus === 'true') { + res.send({status: await PipelineService.getPipeline(req.params['pipelineId']).getStatus(true)}); + } else { + res.send({status: await PipelineService.getPipeline(req.params['pipelineId']).getStatus()}); + } })); /** @@ -379,10 +391,10 @@ pipeline.get('/:pipelineId/status', asyncLogHandler(async (req, res) => { */ pipeline.get('/:pipelineId/start', asyncLogHandler(async (req, res) => { if (req.query.blocking === undefined || req.query.blocking === 'true') { - log.debug('Server blocking on pipeline execution.'); + Logger.debug('Server blocking on pipeline execution.'); await PipelineService.startPipeline(req.params['pipelineId']); } else { - log.debug('Server non-blocking on pipeline execution.'); + Logger.debug('Server non-blocking on pipeline execution.'); PipelineService.startPipeline(req.params['pipelineId']); } res.send(); @@ -464,10 +476,10 @@ pipeline.get('/:pipelineId/pause', asyncLogHandler(async (req, res) => { */ pipeline.get('/:pipelineId/resume', asyncLogHandler(async (req, res) => { if (req.query.blocking === undefined || req.query.blocking === 'true') { - log.debug('Server blocking on pipeline execution.'); + Logger.debug('Server blocking on pipeline execution.'); await PipelineService.resumePipeline(req.params['pipelineId'], req.query['step']); } else { - log.debug('Server non-blocking on pipeline execution.'); + Logger.debug('Server non-blocking on pipeline 
execution.'); PipelineService.resumePipeline(req.params['pipelineId'], req.query['step']); } res.send(); @@ -546,10 +558,10 @@ pipeline.get('/:pipelineId/kill', asyncLogHandler(async (req, res) => { */ pipeline.get('/:pipelineId/next-step', asyncLogHandler(async (req, res) => { if (req.query.blocking === undefined || req.query.blocking === 'true') { - log.debug('Server blocking on pipeline execution.'); + Logger.debug('Server blocking on pipeline execution.'); await PipelineService.nextStepInPipeline(req.params['pipelineId'], req.query['step']); } else { - log.debug('Server non-blocking on pipeline execution.'); + Logger.debug('Server non-blocking on pipeline execution.'); PipelineService.nextStepInPipeline(req.params['pipelineId'], req.query['step']); } res.send(); diff --git a/src/server/routes/api/pipeline.test.js b/src/server/routes/api/pipeline.test.js index 4d73d3b1..8a2ff56d 100644 --- a/src/server/routes/api/pipeline.test.js +++ b/src/server/routes/api/pipeline.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -10,7 +10,7 @@ import request from 'supertest'; jest.mock('../../../docker/DockerExec.js'); // TODO fix me - auto-mocking currently has issues with es6 modules -describe('environment', async () => { +describe('environment', () => { let server; const port = Math.floor(Math.random() * 64536) + 1000; diff --git a/src/server/routes/api/plugin.js b/src/server/routes/api/plugin.js index 0b9fca97..369994f3 100644 --- a/src/server/routes/api/plugin.js +++ b/src/server/routes/api/plugin.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/server/routes/api/plugin.test.js b/src/server/routes/api/plugin.test.js index c06236fc..8fda3ae1 100644 --- a/src/server/routes/api/plugin.test.js +++ b/src/server/routes/api/plugin.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -8,7 +8,7 @@ import Server from '../../Server.js'; import request from 'supertest'; -describe('plugin', async () => { +describe('plugin', () => { let server; const port = Math.floor(Math.random() * 64536) + 1000; diff --git a/src/server/routes/api/validate.js b/src/server/routes/api/validate.js index f2a2f479..c8956138 100644 --- a/src/server/routes/api/validate.js +++ b/src/server/routes/api/validate.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/server/routes/api/validate.test.js b/src/server/routes/api/validate.test.js index 84e3e9cf..6d97f540 100644 --- a/src/server/routes/api/validate.test.js +++ b/src/server/routes/api/validate.test.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. 
* All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause @@ -8,7 +8,7 @@ import Server from '../../Server.js'; import request from 'supertest'; -describe('validate', async () => { +describe('validate', () => { let server; const port = Math.floor(Math.random() * 64536) + 1000; @@ -105,8 +105,8 @@ describe('validate', async () => { .send(pipeline); expect(response.statusCode).toBe(400); - expect(response.text).toContain('should have required property \'name\''); - expect(response.text).toContain('should have required property \'image\''); + expect(response.text).toContain('must have required property \'name\''); + expect(response.text).toContain('must have required property \'image\''); }); test('succeeds when validating a valid plugin schema', async () => { @@ -139,6 +139,6 @@ describe('validate', async () => { .send(plugin); expect(response.statusCode).toBe(400); - expect(response.text).toContain('should NOT have additional properties'); + expect(response.text).toContain('must NOT have additional properties'); }); }); diff --git a/src/server/routes/index.js b/src/server/routes/index.js index e2d170d0..7d27a064 100644 --- a/src/server/routes/index.js +++ b/src/server/routes/index.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/src/server/routes/swagger.js b/src/server/routes/swagger.js index c95dbffd..8cec9292 100644 --- a/src/server/routes/swagger.js +++ b/src/server/routes/swagger.js @@ -1,5 +1,5 @@ /* -* Copyright (c) 2020, salesforce.com, inc. +* Copyright (c) 2022, salesforce.com, inc. * All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause diff --git a/test/integration/color.bats b/test/integration/color.bats new file mode 100644 index 00000000..d5822b0a --- /dev/null +++ b/test/integration/color.bats @@ -0,0 +1,51 @@ +#!/usr/bin/env bats +load '../../node_modules/bats-support/load' +load '../../node_modules/bats-assert/load' +load 'helper' + +function setup() { + check_port_not_used + configure_cix +} + +@test "CIX with TTY outputs color" { + run $CIX_SCRIPT_WITH_TTY exec -y docs/examples/basic.yaml + assert_success + assert_output --partial '[32mINFO' + assert_output --partial '[34mbasic' +} + +@test "CIX without TTY doesn't output color" { + run $CIX_SCRIPT exec -y docs/examples/basic.yaml + assert_success + refute_output --partial '[32mINFO' + refute_output --partial '[34mbasic' +} + +@test "CIX with TTY doesn't output color if --no-color specified" { + run $CIX_SCRIPT_WITH_TTY exec -y docs/examples/basic.yaml --no-color + assert_success + refute_output --partial '[32mINFO' + refute_output --partial '[34mbasic' +} + +@test "CIX without TTY outputs color if --color specified" { + run $CIX_SCRIPT exec -y docs/examples/basic.yaml --color + assert_success + assert_output --partial '[32mINFO' + assert_output --partial '[34mbasic' +} + +@test "CIX with TTY doesn't output color if both --color and --no-color specified" { + run $CIX_SCRIPT_WITH_TTY exec -y docs/examples/basic.yaml --color --no-color + assert_success + refute_output --partial '[32mINFO' + refute_output --partial '[34mbasic' +} + +@test "CIX without TTY doesn't output color if both --color and --no-color specified" { + run $CIX_SCRIPT exec -y docs/examples/basic.yaml --color --no-color + assert_success + refute_output --partial '[32mINFO' + refute_output --partial '[34mbasic' +} diff --git a/test/integration/general.bats b/test/integration/general.bats index 9a223cd7..1f5e5745 100644 --- a/test/integration/general.bats +++ b/test/integration/general.bats @@ -14,7 +14,6 @@ function setup() { assert_output --partial 'Successfully completed step' } - @test "CIX runs teardown on failure" { mkdir -p tmp cat << EOF > tmp/fail.yaml @@ -22,7 +21,7 @@ version: 2.1 pipeline: - step: name: basic - image: alpine:3.9 + image: alpine:3 commands: - exit 1 EOF @@ -31,7 +30,7 @@ version: 2.1 pipeline: - step: name: basic - image: alpine:3.9 + image: alpine:3 commands: - echo "Ran Teardown" EOF @@ -49,7 +48,7 @@ version: 2.1 pipeline: - step: name: basic - image: alpine:3.9 + image: alpine:3 commands: - | cat << EOAF > tmp/2.yaml @@ -57,7 +56,7 @@ pipeline: pipeline: - step: name: basic - image: alpine:3.9 + image: alpine:3 commands: - echo "hi from generated 2.yaml" EOAF @@ -76,7 +75,7 @@ version: 2.1 pipeline: - step: name: basic - image: alpine:3.9 + image: alpine:3 commands: - this IS NOT valid YAML EOF @@ -94,7 +93,7 @@ version: 2.1 pipeline: - step: name: basic - image: alpine:3.9 + image: alpine:3 commands: - this IS valid YAML EOF @@ -111,7 +110,7 @@ version: 2.1 pipeline: - step: name: basic - image: alpine:3.9 + image: alpine:3 commands: - if [ -z $CIX_HOSTNAME ] || [ -z $CIX_STEP_NAME ] || [ -z $CIX_NETWORK_NAME ] || [ -z $CIX_CONTAINER_NAME ] || [ -z $CIX_EXECUTION_ID ]; then exit 1; fi EOF @@ -124,5 +123,442 @@ EOF @test "CIX imports work" { run $CIX_SCRIPT_WITH_TTY exec -y docs/examples/import/imports.yaml assert_success - assert_output --partial "Successfully completed step group 'full-pipeline'" + assert_output
--partial "Successfully completed step group full-pipeline" +} + +@test "CIX runs pipeline as a specific user" { + mkdir -p tmp + cat << EOF > tmp/test-user.yaml +version: 2.1 +pipeline: + - step: + name: first + user: '299:924' + image: alpine:3 + commands: + - id -u + - id -g +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/test-user.yaml + assert_success + assert_output --partial '299' + assert_output --partial '924' + rm tmp/test-user.yaml + rmdir tmp || true +} + +@test "CIX backgrounded steps will log" { + mkdir -p tmp + cat << 'EOF' > tmp/background.yaml +version: 2.1 +pipeline: + - step: + name: basic + image: alpine:3.9 + background: true + commands-output: minimal + commands: + - echo "hello from background" + - step: + name: sleep + image: alpine:3.9 + commands-output: minimal + commands: + - sleep 1 + - echo "complete" +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/background.yaml + assert_output --partial 'hello from background' + rm tmp/background.yaml + rmdir tmp || true +} + +@test "CIX validate passes files with unquoted boolean and number literals for arguments and commands" { + mkdir -p tmp + cat << EOF > tmp/literals.yaml +version: 2.1 +pipeline: + - step: + name: basic-commands + image: alpine:3 + commands: + - true + - 42 + - step: + name: basic-args + image: alpine:3 + arguments: + - sh + - -c + - printf "%s\n" $* + - sh + - true + - 42 + - 3.14159 +EOF + run $CIX_SCRIPT_WITH_TTY validate -y tmp/literals.yaml + assert_success + rm tmp/literals.yaml + rmdir tmp || true +} + +@test "CIX runs pipelines with unquoted boolean and number literals for arguments" { + mkdir -p tmp + cat << 'EOF' > tmp/run-literals.yaml +version: 2.1 +pipeline: + - step: + name: basic-args + image: alpine:3 + arguments: + - sh + - -c + - printf "%s\n" $* + - sh + - true + - 42 + - 3.14159 +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/run-literals.yaml + assert_success + assert_output --partial 'true' + assert_output --partial '3.14159' + rm tmp/run-literals.yaml + rmdir tmp || true +} + +@test "CIX will loop using for-each" { + mkdir -p tmp + cat << 'EOF' > tmp/for-each.yaml +version: 2.1 +pipeline: + - step: + name: for-each + image: alpine:3.9 + for-each: + - element 1 + - element 2 + - element 3 + - element 4 + element-variable: ELEMENT + commands-output: minimal + commands: + - echo $ELEMENT +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/for-each.yaml + assert_success + assert_output --partial 'element 3' + rm tmp/for-each.yaml + rmdir tmp || true +} + +@test "CIX will for-each will contiunue-on-fail" { + mkdir -p tmp + cat << 'EOF' > tmp/for-each.yaml +version: 2.1 +pipeline: + - step: + name: for-each + image: alpine:3.9 + for-each: 'a,b,c' + element-variable: ELEMENT + commands-output: minimal + continue-on-fail: true + parallel: false + commands: + - | + if [ "$ELEMENT" == "a" ]; then + echo "fail $ELEMENT"; + exit 1; + else + echo "pass $ELEMENT" + fi +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/for-each.yaml + assert_success + assert_output --partial 'fail a' + assert_output --partial 'pass b' + assert_output --partial 'pass c' + rm tmp/for-each.yaml + rmdir tmp || true +} + +@test "CIX will for-each will retry" { + mkdir -p tmp + cat << 'EOF' > tmp/for-each.yaml +version: 2.1 +pipeline: + - step: + name: for-each + image: alpine:3.9 + for-each: 'a,b,c' + element-variable: ELEMENT + commands-output: minimal + parallel: true + retry: + iterations: 2 + backoff: 1 + commands: + - | + if [ "$ELEMENT" == "a" ]; then + echo "fail $ELEMENT"; + exit 1; + else + echo "pass $ELEMENT" + fi +EOF + run 
$CIX_SCRIPT_WITH_TTY exec -y tmp/for-each.yaml + assert_failure + assert_output --partial 'fail a' + assert_output --partial 'pass b' + assert_output --partial 'pass c' + rm tmp/for-each.yaml + rmdir tmp || true +} + +@test "CIX will continue-on-fail if timeout" { + mkdir -p tmp + cat << 'EOF' > tmp/continue-on-fail.yaml +version: 2.1 +pipeline: + - step: + name: first + image: alpine:3.9 + continue-on-fail: true + timeout: 1 + commands: + - sleep 2 + - step: + name: second + image: alpine:3.9 + commands: + - echo "hello world" +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/continue-on-fail.yaml + assert_success + assert_output --partial 'failed, but it is continue-on-fail' + assert_output --partial 'hello world' + rm tmp/continue-on-fail.yaml + rmdir tmp || true +} + +@test "CIX will print last line if no newline" { + mkdir -p tmp + cat << 'EOF' > tmp/no-newline.yaml +version: 2.1 +pipeline: + - step: + name: newline-test + image: alpine:3.9 + commands: + - echo "first" + - echo -n "second" +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/no-newline.yaml + assert_success + assert_output --partial 'second' + rm tmp/no-newline.yaml + rmdir tmp || true +} + +@test "CIX will log to files" { + mkdir -p tmp + cat << 'EOF' > tmp/files.yaml +version: 2.1 +pipeline: + - step: + name: first + image: alpine:3.9 + commands: + - echo "first" + - step: + name: second + image: alpine:3.9 + commands: + - echo "second" +EOF + run $CIX_SCRIPT_WITH_TTY exec -l files -p tmp -y tmp/files.yaml + assert [ -e 'tmp/cix-execution.log' ] + first=(tmp/*first.log) + second=(tmp/*second.log) + assert [ -e "$first" ] + assert [ -e "$second" ] + rm tmp/*.log + rm tmp/files.yaml + rmdir tmp || true +} + +@test "CIX will log to single file" { + mkdir -p tmp + cat << 'EOF' > tmp/file.yaml +version: 2.1 +pipeline: + - step: + name: first + image: alpine:3.9 + commands: + - echo "first" + - step: + name: second + image: alpine:3.9 + commands: + - echo "second" +EOF + run $CIX_SCRIPT_WITH_TTY exec -l file -p tmp -y tmp/file.yaml + assert [ -e 'tmp/cix-execution.log' ] + first=(tmp/*first.log) + second=(tmp/*second.log) + assert [ ! -e "$first" ] + assert [ ! -e "$second" ] + rm tmp/*.log + rm tmp/file.yaml + rmdir tmp || true +} + +@test "CIX can log to different filename" { + mkdir -p tmp + cat << 'EOF' > tmp/file.yaml +version: 2.1 +pipeline: + - step: + name: first + image: alpine:3.9 + commands: + - echo "first" + - step: + name: second + image: alpine:3.9 + commands: + - echo "second" +EOF + run $CIX_SCRIPT_WITH_TTY exec -l file -L basic.log -p tmp -y tmp/file.yaml + assert [ -e 'tmp/basic.log' ] + assert [ ! 
-e 'tmp/cix-execution.log' ] + rm tmp/*.log + rm tmp/file.yaml + rmdir tmp || true +} + +@test "CIX can silence logs" { + mkdir -p tmp + cat << 'EOF' > tmp/file.yaml +version: 2.1 +pipeline: + - step: + name: first + image: alpine:3.9 + commands: + - echo "first" + - step: + name: second + image: alpine:3.9 + commands: + - echo "second" +EOF + run $CIX_SCRIPT_WITH_TTY exec --silent -y tmp/file.yaml + refute_output + rm tmp/file.yaml + rmdir tmp || true +} + +@test "CIX will exit non-zero on chained failed pipeline" { + mkdir -p tmp + cat << EOF > tmp/success.yaml +version: 2.1 +pipeline: + - step: + name: success + image: alpine:3 + commands: + - exit 0 +EOF + cat << EOF > tmp/failed.yaml +version: 2.1 +pipeline: + - step: + name: fail + image: alpine:3 + commands: + - exit 1 +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/success.yaml -y tmp/failed.yaml + assert_failure + rm tmp/failed.yaml tmp/success.yaml + rmdir tmp || true +} +@test "CIX valid regular expression running step" { + mkdir -p tmp + cat << 'EOF' > tmp/regex.yaml +version: 2.1 +pipeline: + - step: + name: regex-test-invalid + image: alpine:3.9 + when: + - operator: NOT_MATCHES + value: 'freeze,test,strata' + expressions: 'strata_\d+\.\d+' + delimiter: ',' + commands: + - echo "unsuccessful check" + - step: + name: regex-test-valid + image: alpine:3.9 + when: + - operator: MATCHES + value: 'freeze,test,strata_1.2' + expressions: 'strata_\d+\.\d+' + delimiter: ',' + commands: + - echo "successful check" +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/regex.yaml + assert_success + assert_output --partial 'unsuccessful' + assert_output --partial 'successful' + rm tmp/regex.yaml + rmdir tmp || true +} +@test "CIX invalid regular expression running step" { + mkdir -p tmp + cat << 'EOF' > tmp/regex_in.yaml +version: 2.1 +pipeline: + - step: + name: regex-test + image: alpine:3.9 + when: + - operator: NOT_MATCHES + value: 'strata1.2' + expressions: 'strata_\d+\.\d+' + commands: + - echo "successful not matches" + - step: + name: regex-test2 + image: alpine:3.9 + commands: + - echo "successful run" +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/regex_in.yaml + assert_success + assert_output --partial 'successful not matches' + assert_output --partial 'successful run' + rm tmp/regex_in.yaml + rmdir tmp || true +} + + +@test "CIX exec will resolve sha256 digest" { + mkdir -p tmp + cat << EOF > tmp/digest.yaml +version: 2.1 +pipeline: + - step: + name: basic + image: alpine@sha256:414e0518bb9228d35e4cd5165567fb91d26c6a214e9c95899e1e056fcd349011 + commands: + - echo this should go +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/digest.yaml + assert_success + rm tmp/digest.yaml + rmdir tmp || true } diff --git a/test/integration/secrets.bats b/test/integration/secrets.bats index e2121cec..adc63ca6 100644 --- a/test/integration/secrets.bats +++ b/test/integration/secrets.bats @@ -9,9 +9,9 @@ function setup() { } @test "CIX able to handle secrets from the command line" { - run $CIX_SCRIPT_WITH_TTY exec -y docs/examples/secrets.yaml -s SECRET_FOO_FROM_COMMAND=secret1 -s SECRET_BAR_FROM_COMMAND=secret2 + run $CIX_SCRIPT_WITH_TTY exec -y docs/examples/secrets.yaml -s SECRET_FOO=secret1 -s SECRET_BAR=secret2 assert_success - assert_output --partial 'Multiple secrets such as ********, ********, and ******** on the same line should be masked' + assert_output --partial 'Multiple secrets such as ********, ********, and ******** on the same line will be masked' refute_output --partial 'secret1' refute_output --partial 'secret2' } diff --git a/test/integration/server.bats b/test/integration/server.bats index
5407a51f..3e9e17f3 100644 --- a/test/integration/server.bats +++ b/test/integration/server.bats @@ -20,6 +20,49 @@ function teardown() { assert_success } +@test "CIX will exit non-zero on failed pipeline" { + mkdir -p tmp + cat << EOF > tmp/failed.yaml +version: 2.1 +pipeline: + - step: + name: fail + image: alpine:3 + commands: + - exit 1 +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/failed.yaml --remote + assert_failure + rm tmp/failed.yaml + rmdir tmp || true +} + +@test "CIX will exit non-zero on chained failed pipeline" { + mkdir -p tmp + cat << EOF > tmp/success.yaml +version: 2.1 +pipeline: + - step: + name: success + image: alpine:3 + commands: + - exit 0 +EOF + cat << EOF > tmp/failed.yaml +version: 2.1 +pipeline: + - step: + name: fail + image: alpine:3 + commands: + - exit 1 +EOF + run $CIX_SCRIPT_WITH_TTY exec -y tmp/success.yaml -y tmp/failed.yaml --remote + assert_failure + rm tmp/failed.yaml tmp/success.yaml + rmdir tmp || true +} + @test "CIX able to load & continue basic.yaml" { run $CIX_SCRIPT_WITH_TTY load -y docs/examples/basic.yaml assert_success @@ -75,19 +118,19 @@ version: 2.1 pipeline: - step: name: first - image: alpine:3.9 + image: alpine:3 commands: - echo "Magic String 1" - sleep 5 - step: name: second - image: alpine:3.9 + image: alpine:3 commands: - echo "Magic String 2" - sleep 5 - step: name: third - image: alpine:3.9 + image: alpine:3 commands: - echo "Magic String 3" - sleep 5 @@ -116,14 +159,14 @@ EOF } -@test "Can disable blocking behaviour" { +@test "CIX can resume without blocking" { mkdir -p tmp cat << EOF > tmp/non-blocking.yaml version: 2.1 pipeline: - step: name: first - image: alpine:3.9 + image: alpine:3 commands: - sleep 2 - echo "Magic String 1" @@ -150,12 +193,12 @@ version: 2.1 pipeline: - step: name: first - image: alpine:3.9 + image: alpine:3 commands: - exit 1 - step: name: second - image: alpine:3.9 + image: alpine:3 commands: - echo "Magic String 2" - sleep 5 @@ -204,3 +247,57 @@ EOF assert_success assert_output --partial 'Pipeline Status: "successful"' } + +@test "CIX will log remotely to files" { + mkdir -p tmp + cat << 'EOF' > tmp/files.yaml +version: 2.1 +pipeline: + - step: + name: first + image: alpine:3.9 + commands: + - echo "first" + - step: + name: second + image: alpine:3.9 + commands: + - echo "second" +EOF + run $CIX_SCRIPT_WITH_TTY exec -l files -p tmp -y tmp/files.yaml --remote + assert [ -e 'tmp/cix-execution.log' ] + first=(tmp/*first.log) + second=(tmp/*second.log) + assert [ -e "$first" ] + assert [ -e "$second" ] + rm tmp/*.log + rm tmp/files.yaml + rmdir tmp || true +} + +@test "CIX will log remotely to single file" { + mkdir -p tmp + cat << 'EOF' > tmp/file.yaml +version: 2.1 +pipeline: + - step: + name: first + image: alpine:3.9 + commands: + - echo "first" + - step: + name: second + image: alpine:3.9 + commands: + - echo "second" +EOF + run $CIX_SCRIPT_WITH_TTY exec -l file -p tmp -y tmp/file.yaml --remote + assert [ -e 'tmp/cix-execution.log' ] + first=(tmp/*first.log) + second=(tmp/*second.log) + assert [ ! -e "$first" ] + assert [ ! -e "$second" ] + rm tmp/*.log + rm tmp/file.yaml + rmdir tmp || true +} diff --git a/test/performance/README.md b/test/performance/README.md index 615e846e..1859e7ca 100644 --- a/test/performance/README.md +++ b/test/performance/README.md @@ -1,4 +1,4 @@ -d### Performance tests +### Performance tests Below are some notes on how to performance test CIX. ## Logging @@ -6,20 +6,32 @@ Below are some notes on how to performance test CIX. 
# Confirming pipeline streams are getting destroyed. We create a log stream for each pipeline. This log stream can buffer up to 18MB and should be destroyed at the end of a pipeline execution. To test this we can output a large amount of logs repeatedly with no remote client draining the stream. -1. Run CIX repeatedly ``./cix.mjs exec -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml -y performance/heavy_logging.yaml > perf.log`` +1. Run CIX repeatedly ```./cix exec -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml -y test/performance/heavy_logging.yaml > perf.log``` 2. Watch memory usage for continual growth (Activity Monitor on macOS does the trick, or see the sampling sketch below) # Testing high throughput -1. Start a server, point console logs at a file: ``./cix.mjs server -l console > server.log`` -2. Load the heavy logging yaml: ``./cix.mjs load -y performance/heavy_logging.yaml`` -3. Start a client, point console logs at a file: ``./cix.mjs resume > client.log`` +1. Start a server, point console logs at a file: ``./cix server -l console > server.log`` +2. Load the heavy logging yaml: ``./cix load -y test/performance/heavy_logging.yaml`` +3. Start a client, point console logs at a file: ``./cix resume > client.log`` 4. Diff the files; there should only be some expected application log differences: ``diff client.log server.log`` # Testing back pressure If logs are getting generated faster than the client can read them, shed some of the logs without impacting the pipeline execution. -1. Start a CIX Server ``./cix.mjs server -l console`` -2. Run a pipeline at 64kbps throughput (about 2min to download, pipeline generates 100MB in 30sec) ``./cix.mjs load -y performance/heavy_logging.yaml && curl -s --limit-rate 64k -X GET "http://localhost:10030/api/pipeline/undefined/start" -H "accept: application/json" > client.log`` +1. Start a CIX Server ``./cix server -l console`` +2. Run a pipeline at 64kbps throughput (about 2min to download, pipeline generates 100MB in 30sec) +```sh +./cix load -y test/performance/heavy_logging.yaml +./cix pipelines # get the pipeline id for next line +curl -s --limit-rate 64k -X GET "http://localhost:10030/api/pipeline/1df34465-cb8a-4b6b-9a6c-685473bbc808/start?blocking=true&remoteLogs=true" -H "accept: application/json" > client.log +``` 3. You should see the echo "complete" at the end of the server log. This means the back pressure didn't affect the execution. +```sh +tail server.log +``` 4. You should see the size of the client log is quite a bit smaller; that's because it didn't stream all the logs. +```sh +du -sh server.log +du -sh client.log +``` diff --git a/test/performance/heavy_logging.yaml b/test/performance/heavy_logging.yaml index de7392b8..c859ed5e 100644 --- a/test/performance/heavy_logging.yaml +++ b/test/performance/heavy_logging.yaml @@ -4,6 +4,5 @@ pipeline: name: basic image: alpine:3.9 commands: - # - sleep 5 - dd if=/dev/urandom bs=64 count=1000000 | base64 - echo "complete"
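A note on step 2 of the stream-destruction check in `test/performance/README.md` above: memory growth can also be sampled from a terminal instead of Activity Monitor. The snippet below is only a sketch under the assumption that CIX is running as a local `node` process (for example via `./cix.mjs`); the `pgrep` pattern and the 5-second interval are illustrative, not part of CIX. If your `./cix` wrapper runs CIX inside a container, `docker stats` is the closer equivalent.

```sh
# Sample the resident set size (RSS, in kilobytes) of the CIX node process every 5 seconds.
# Assumes a single matching process; adjust the pgrep pattern for your environment.
while true; do
  pid=$(pgrep -f 'node .*cix' | head -n 1)
  if [ -n "$pid" ]; then
    date '+%H:%M:%S'
    ps -o rss= -p "$pid"
  fi
  sleep 5
done
```

If the sampled RSS climbs steadily across the repeated heavy_logging.yaml runs and never settles back down, that is consistent with per-pipeline log streams not being destroyed.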