From 0ac229912f4e2f61bc3e5951bd1b1c90d42b0c4d Mon Sep 17 00:00:00 2001 From: panther-bot <54194790+panther-bot@users.noreply.github.com> Date: Thu, 14 Mar 2024 16:43:27 -0500 Subject: [PATCH] [Sync] 17783 (#112) --- serverless/Makefile | 21 +- serverless/panther-preflight-tools/README.md | 48 ++++ .../src/__init__.py | 6 + .../src/app.py | 185 +++++++++++++ .../src/requirements.txt | 3 + .../test/__init__.py | 6 + .../test/test_app.py | 53 ++++ .../readiness-check/src/__init__.py | 6 + .../readiness-check/src/app.py | 148 +++++++++++ .../readiness-check/src/cfn_expander.py | 98 +++++++ .../readiness-check/src/requirements.txt | 13 + .../readiness-check/test/__init__.py | 6 + .../readiness-check/test/example_policy.json | 245 ++++++++++++++++++ .../readiness-check/test/test_cfn_expander.py | 36 +++ .../panther-preflight-tools/template.yml | 65 +++++ 15 files changed, 926 insertions(+), 13 deletions(-) create mode 100644 serverless/panther-preflight-tools/README.md create mode 100644 serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/__init__.py create mode 100644 serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/app.py create mode 100644 serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/requirements.txt create mode 100644 serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/test/__init__.py create mode 100644 serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/test/test_app.py create mode 100644 serverless/panther-preflight-tools/readiness-check/src/__init__.py create mode 100644 serverless/panther-preflight-tools/readiness-check/src/app.py create mode 100644 serverless/panther-preflight-tools/readiness-check/src/cfn_expander.py create mode 100644 serverless/panther-preflight-tools/readiness-check/src/requirements.txt create mode 100644 serverless/panther-preflight-tools/readiness-check/test/__init__.py create mode 100644 serverless/panther-preflight-tools/readiness-check/test/example_policy.json create mode 100644 serverless/panther-preflight-tools/readiness-check/test/test_cfn_expander.py create mode 100644 serverless/panther-preflight-tools/template.yml diff --git a/serverless/Makefile b/serverless/Makefile index b43367f..9ed8d9b 100644 --- a/serverless/Makefile +++ b/serverless/Makefile @@ -3,26 +3,21 @@ bucket = panther-public-sam-artifacts iter: - set -e; \ - for samapp in `ls`; do \ - if [ -d $$samapp ]; then \ - cd $$samapp; \ - make --makefile=../Makefile $(action) samdir=$$samapp; \ - cd ..; \ - fi; \ - done + find . -depth 1 -type d | xargs -I % make -C % --makefile=../Makefile $(action) samdir=% publish: - sam build --use-container --build-image public.ecr.aws/sam/build-python3.11; sam package --s3-bucket $(bucket)-$(region) --output-template-file ../../cloudformation/panther-$(samdir)-$(region).yml + sam build --use-container --build-image public.ecr.aws/sam/build-python3.11; sam package --s3-bucket $(bucket)-$(region) --output-template-file ../../cloudformation/$(samdir)-$(region).yml setup: - python3 -m venv venv; venv/bin/pip install -r src/requirements.txt; venv/bin/pip install pytest pylint + find . -depth 1 -type d | xargs -I % python3 -m venv %/venv + find . -depth 1 -type d | xargs -I % bash -c "%/venv/bin/pip install -r %/src/requirements.txt" + find . -depth 1 -type d | xargs -I % bash -c "%/venv/bin/pip install pytest pylint" clean: - rm -rf venv + rm -rf */venv test: - AWS_DEFAULT_REGION=us-west-2 venv/bin/pytest test/ + find . 
-depth 1 -type d | xargs -I % bash -c "pushd %; AWS_DEFAULT_REGION=us-west-2 venv/bin/pytest test/; popd"
 
 lint:
-	venv/bin/pylint -j 0 --max-line-length 140 --score no src/
+	find . -depth 1 -type d | xargs -I % bash -c "pushd %; venv/bin/pylint -j 0 --max-line-length 140 --score no src/; popd"
diff --git a/serverless/panther-preflight-tools/README.md b/serverless/panther-preflight-tools/README.md
new file mode 100644
index 0000000..2ac1856
--- /dev/null
+++ b/serverless/panther-preflight-tools/README.md
@@ -0,0 +1,48 @@
+# Preflight Tools User Guide
+
+## Readiness Check
+
+**Prerequisite**: A deployed "PantherDeploymentRole" in the AWS account.
+
+Invoking the readiness check is simple: it does not require a payload. It can either be run from the command line with something like this:
+
+```
+aws lambda invoke --function-name "PantherReadinessCheck" --cli-binary-format raw-in-base64-out output.json
+```
+
+In this example the result ends up in output.json:
+
+```
+[12:18] user@host $> aws lambda invoke --function-name "PantherReadinessCheck" --cli-binary-format raw-in-base64-out output.json
+[12:18] user@host $> cat output.json
+{"Message": "All evaluations were successful against the Deployment Role"}
+```
+
+Or from the function's Test tab in the Lambda console:
+https://console.aws.amazon.com/lambda/home#/functions/PantherReadinessCheck?tab=testing where the result will show up in the Details dropdown or in CloudWatch.
+
+The Lambda returns a JSON object with either a success message, or a failure message and the list of failures that were detected. Please return this result to your Panther representative.
+
+## Snowflake Credential Bootstrap
+
+After creating the PANTHERACCOUNTADMIN user and ensuring it has ACCOUNTADMIN privileges, you may invoke the Lambda to populate the initial credential secret.
+Authenticate to the AWS environment and region where the template was deployed, then run the following, filling in the host parameter with your login URL:
+
+```
+aws lambda invoke\
+  --function-name "PantherSnowflakeCredentialBootstrap"\
+  --log-type Tail\
+  --payload '{"host": "https://myaccountid.snowflakecomputing.com"}'\
+  --cli-binary-format raw-in-base64-out /dev/stderr > /dev/null
+```
+
+This invocation should yield a link and instructions for updating the newly minted secret directly with your credentials.
+After that is done, please run the validation step below as-is and return the result to your Panther representative.
+
+```
+aws lambda invoke\
+  --function-name "PantherSnowflakeCredentialBootstrap"\
+  --log-type Tail\
+  --payload '{"validate": true}'\
+  --cli-binary-format raw-in-base64-out /dev/stderr > /dev/null
+```
diff --git a/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/__init__.py b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/__init__.py
new file mode 100644
index 0000000..36447f8
--- /dev/null
+++ b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (C) 2022 Panther Labs, Inc.
+#
+# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription
+# Agreement available at https://panther.com/enterprise-subscription-agreement/.
+# All intellectual property rights in and to the Panther SaaS, including any and all
+# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement.
\ No newline at end of file diff --git a/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/app.py b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/app.py new file mode 100644 index 0000000..9506efa --- /dev/null +++ b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/app.py @@ -0,0 +1,185 @@ +# Copyright (C) 2022 Panther Labs, Inc. +# +# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription +# Agreement available at https://panther.com/enterprise-subscription-agreement/. +# All intellectual property rights in and to the Panther SaaS, including any and all +# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement. +""" +Lambda function to assist the user in setting up their AWS account +to accept a Panther deployment configured to use a connected, pre-existing +snowflake account +""" + +from dataclasses import dataclass +import json +import os +from typing import Mapping +from urllib.parse import urlparse, ParseResult + +import boto3 +from botocore.exceptions import ClientError +import snowflake.connector + +SECRETNAME = "panther-managed-accountadmin-secret" +SF_DOMAIN = ".snowflakecomputing.com" +USERNAME = "PANTHERACCOUNTADMIN" +PASSWORD_PLACEHOLDER = "PleaseReplaceMe" + +# What region are we in +AWS_REGION = os.environ.get("AWS_REGION", "") +AWS_DEFAULT_REGION = os.environ.get("AWS_DEFAULT_REGION", "") +if not AWS_REGION and not AWS_DEFAULT_REGION: + raise EnvironmentError("Could not detect region") +REGION = AWS_REGION if AWS_REGION else AWS_DEFAULT_REGION + +SECRET_URL = f"https://{REGION}.console.aws.amazon.com/secretsmanager/secret?name={SECRETNAME}®ion={REGION}" + +EDIT_SECRET_PROMPT = f"""Please navigate to {SECRET_URL} in your authenticated browser and click \ +"Retrieve secret value" then "Edit". Add your password in place of the placeholder and save \ +the secret. 
Then return to your terminal and execute the lambda again with "validate":true""" + + +@dataclass +class PantherSnowflakeCredential(): + """ + Represent the credentials used by panther to authenticate to snowflake + """ + arn: str = "" + host: str = "" + account: str = "" + user: str = "" + password: str = PASSWORD_PLACEHOLDER + port: str = "443" + + @staticmethod + def secret_exists(client: boto3.Session) -> bool: + """ + Checks for the existence of the managed accountadmin secret + return: true if exists, false if not + """ + try: + client.describe_secret(SecretId=SECRETNAME) + return True + except ClientError as error: + if error.response["Error"]["Code"] == "ResourceNotFoundException": + return False + raise + + def create_secret(self, client: boto3.Session) -> None: + """ + Json-ifies the class and writes to the secret + return: ARN of newly created secret + """ + secret_string = json.dumps({ + "account": self.account, + "host": self.host, + "port": self.port, + "user": self.user, + "password": self.password + }) + resp = client.create_secret( + Name=SECRETNAME, + Description="Panther Labs, accountadmin snowflake credentials", + SecretString=secret_string, + ) + self.arn = resp['ARN'] + + def test(self) -> None: + """ + Connects to snowflake to validate credentials + """ + snowflake.connector.connect( + user=self.user, password=self.password, account=self.account) + + +def credentials_from_secret(client: boto3.Session) -> PantherSnowflakeCredential: + """ + Populates a credential object from a known-existing secret + """ + if not PantherSnowflakeCredential.secret_exists(client): + raise ValueError( + "The snowflake credential secret was expected to exist, but does not.") + + resp = client.get_secret_value( + SecretId=SECRETNAME + ) + secret = json.loads(resp["SecretString"]) + return PantherSnowflakeCredential( + arn=resp["ARN"], + account=secret["account"], + host=secret["host"], + port=secret["port"], + user=secret["user"], + password=secret["password"] + ) + + +def parse_event_into_creds(event: Mapping[str, str]) -> PantherSnowflakeCredential: + """ + Validate, massage the input event and store it as a credential object + return: Instance of PantherSnowflakeCredentials representing the given input, save password + """ + for field in ["host"]: + if field not in event.keys(): + raise ValueError( + f"Failed validating input, missing field '{field}' in payload") + + user = event.get("user", USERNAME) + host = event["host"] + + if user != USERNAME: + raise ValueError(f"User did not match required string {USERNAME}") + + parsed: ParseResult = urlparse(host) + host = parsed.netloc + if not host: + host = parsed.path + if not host: + raise ValueError( + "Failed validating input for 'host' field: should be a hostname or uri with protocol") + if not host.endswith(SF_DOMAIN): + raise ValueError( + f"Failed validating input for 'host' field: host must end with {SF_DOMAIN}") + + return PantherSnowflakeCredential( + host=host, + account=host.split(SF_DOMAIN)[0], + user=user + # Password is later populated by the user manually in the UI + # Port always defaults to 443 + ) + + +def lambda_handler(event: Mapping[str, str], _) -> str: + """ + Lambda entrypoint + """ + client = boto3.client("secretsmanager", region_name=REGION) + # Two execution modes for the lambda. 
Seed and validate the secret + if event.get("validate", False): + print("======VALIDATION MODE======") + # Check creds are changed + creds = credentials_from_secret(client) + if creds.password == PASSWORD_PLACEHOLDER: + raise ValueError( + f"It appears the secret was not modified from its placeholder value. {EDIT_SECRET_PROMPT}") + + # Run cred test + try: + creds.test() + except: + print( + "Failed testing the snowflake credentials! Please check for correctness of host,user,password in the secret") + raise + return f"Validation succeeded for the secret. Please report back to your panther rep with this value: '{creds.arn}'" + + print("======SEED CREDS======") + # Check that secret doesn't already exist + if PantherSnowflakeCredential.secret_exists(client): + raise FileExistsError( + f"The proposed secret '{SECRETNAME}' already exists in this account/region! Refusing to overwrite it.") + # Parse the event input + creds = parse_event_into_creds(event) + # Create secret + creds.create_secret(client) + return f"Creating the initial secret was successful. {EDIT_SECRET_PROMPT}" diff --git a/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/requirements.txt b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/requirements.txt new file mode 100644 index 0000000..db81d60 --- /dev/null +++ b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/src/requirements.txt @@ -0,0 +1,3 @@ +snowflake-connector-python==3.3.1 +boto3==1.29.2 +botocore==1.32.2 diff --git a/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/test/__init__.py b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/test/__init__.py new file mode 100644 index 0000000..36447f8 --- /dev/null +++ b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/test/__init__.py @@ -0,0 +1,6 @@ +# Copyright (C) 2022 Panther Labs, Inc. +# +# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription +# Agreement available at https://panther.com/enterprise-subscription-agreement/. +# All intellectual property rights in and to the Panther SaaS, including any and all +# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement. \ No newline at end of file diff --git a/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/test/test_app.py b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/test/test_app.py new file mode 100644 index 0000000..7d7b31f --- /dev/null +++ b/serverless/panther-preflight-tools/connected-snowflake-credential-bootstrap/test/test_app.py @@ -0,0 +1,53 @@ +# Copyright (C) 2022 Panther Labs, Inc. +# +# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription +# Agreement available at https://panther.com/enterprise-subscription-agreement/. +# All intellectual property rights in and to the Panther SaaS, including any and all +# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement. 
+ +from pytest import mark +from src import app + +BASE_EVENT = { + "user": "PANTHERACCOUNTADMIN" +} + +@mark.parametrize("event,valid", [ + [{"asdfasdf": "asdfasdf"}, False], + [{"user":"test"}, False], + [{"host":"test"}, False], + [{"user":"PANTHERACCOUNTADMIN","host":"ryan.snowflakecomputing.com"}, True], +]) +def test_parse_event_into_creds_validation_exception(event, valid): + try: + app.parse_event_into_creds(event) + except: + assert not valid + return + + assert valid + return + +@mark.parametrize("event,expected_host", [ + [BASE_EVENT|{"host": "ryan.snowflakecomputing.com"}, "ryan.snowflakecomputing.com"], + [BASE_EVENT|{"host": "//ryan.snowflakecomputing.com"}, "ryan.snowflakecomputing.com"], + [BASE_EVENT|{"host": "http://ryan.snowflakecomputing.com"}, "ryan.snowflakecomputing.com"], + [BASE_EVENT|{"host": "https://ryan.snowflakecomputing.com"}, "ryan.snowflakecomputing.com"], + [BASE_EVENT|{"host": "snowflake://ryan.snowflakecomputing.com"}, "ryan.snowflakecomputing.com"], + [BASE_EVENT|{"host": "https://ryan.snowflakecomputing.com/"}, "ryan.snowflakecomputing.com"], + [BASE_EVENT|{"host": "https://ryan.snowflakecomputing.com/login"}, "ryan.snowflakecomputing.com"], + [BASE_EVENT|{"host": "pantherlabs-ryan.snowflakecomputing.com"}, "pantherlabs-ryan.snowflakecomputing.com"], +]) +def test_parse_event_into_creds_host(event, expected_host): + creds = app.parse_event_into_creds(event) + assert creds.host == expected_host + +@mark.parametrize("event,expected_account", [ + [BASE_EVENT|{"host": "ryan.snowflakecomputing.com"}, "ryan"], + [BASE_EVENT|{"host": "https://ryan.snowflakecomputing.com/login"}, "ryan"], + [BASE_EVENT|{"host": "pantherlabs-ryan.snowflakecomputing.com"}, "pantherlabs-ryan"], + [BASE_EVENT|{"host": "pantherlabs-ryan_clone.snowflakecomputing.com"}, "pantherlabs-ryan_clone"], +]) +def test_parse_event_into_creds_account(event, expected_account): + creds = app.parse_event_into_creds(event) + assert creds.account == expected_account diff --git a/serverless/panther-preflight-tools/readiness-check/src/__init__.py b/serverless/panther-preflight-tools/readiness-check/src/__init__.py new file mode 100644 index 0000000..36447f8 --- /dev/null +++ b/serverless/panther-preflight-tools/readiness-check/src/__init__.py @@ -0,0 +1,6 @@ +# Copyright (C) 2022 Panther Labs, Inc. +# +# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription +# Agreement available at https://panther.com/enterprise-subscription-agreement/. +# All intellectual property rights in and to the Panther SaaS, including any and all +# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement. \ No newline at end of file diff --git a/serverless/panther-preflight-tools/readiness-check/src/app.py b/serverless/panther-preflight-tools/readiness-check/src/app.py new file mode 100644 index 0000000..c5ee24b --- /dev/null +++ b/serverless/panther-preflight-tools/readiness-check/src/app.py @@ -0,0 +1,148 @@ +# Copyright (C) 2022 Panther Labs, Inc. +# +# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription +# Agreement available at https://panther.com/enterprise-subscription-agreement/. +# All intellectual property rights in and to the Panther SaaS, including any and all +# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement. 
+""" +Lambda function to check readiness of the current aws account to receive a Panther deployment +""" + +from collections import defaultdict +from itertools import product +from functools import reduce +import logging +from math import ceil +from typing import Dict, List + +import boto3 + +from cfn_expander import get_deployment_role_policies_with_expanded_actions # pylint: disable=import-error + + +log = logging.getLogger("readiness-checker") +log.setLevel(logging.INFO) +h = logging.StreamHandler() +h.setLevel(logging.INFO) +log.addHandler(h) + + +def simulate(client: boto3.client, policy_source_arn: str, actions: List[str], resources: List[str]): + """ + Simulate a set of actions against a set of resources given a policy using policysim + """ + return client.simulate_principal_policy( + PolicySourceArn=policy_source_arn, + ActionNames=actions, + ResourceArns=resources + ) + + +def get_aws_account() -> str: + """ + Discover the aws account ID based on sts.get_caller_identity + """ + sts_client = boto3.client("sts") + return sts_client.get_caller_identity().get("Account") + + +def lambda_handler(*_) -> str: + """ + Lambda entrypoint. Accepts no input values. The "where" of it's running is + the most important aspect. + + AWS Account ID is detected via an sts reflection call + AWS Region is detected via AWS_* environment variables + + Uses the deployed copy of the DeploymentRole + """ + # pylint: disable=too-many-locals + + # What account are we in + aws_account = get_aws_account() + + # Pull down the template-filled deployment role policy, and get an action-expanded and template-filled version of the policy + deployment_role_expanded_policies = get_deployment_role_policies_with_expanded_actions(role_name="PantherDeploymentRole") + # Set up a few views on the expanded policy statements + expanded_denies = filter(lambda p: p.get("Effect", "") == "Deny", reduce( + list.__add__, map(lambda p: p.get('Statement'), deployment_role_expanded_policies))) + expanded_allows = filter(lambda p: p.get("Effect", "") == "Allow", reduce( + list.__add__, map(lambda p: p.get('Statement'), deployment_role_expanded_policies))) + + # Denial management. 
Actively exclude simulations we detect will rightfully end in denial + denies_by_resource = defaultdict(set) # map of denies with "resource" inclusive items -> set + deny_not_resources_by_action = defaultdict(set) # map of denies with "notResource" exclusive items -> set + for d in expanded_denies: + if "Resource" in d.keys(): + for p in product(d.get("Resource"), d.get("Action")): + denies_by_resource[p[0]].add(p[1]) + if "NotResource" in d.keys(): + for p in product(d.get("NotResource"), d.get("Action")): + deny_not_resources_by_action[p[1]].add(p[0]) + + global_denies = denies_by_resource.get("*", set()) + + def _get_denies(resource: str) -> set: + """ + Returns denies that were specified by the policy for a specific resource + Union of: + - Denies that match the resource into an inclusive resource set + - Global "*" denials + - Denies defined in NotResource lists that don't include the resource + """ + return denies_by_resource.get(resource, set()) |\ + global_denies |\ + set(map(lambda u: u[0], filter(lambda t: resource not in t[1], deny_not_resources_by_action.items()))) + + def _denied_evaluation(evaluation: Dict) -> bool: + # Helper to determine if an evaluation was a success + return evaluation.get("EvalDecision", "not_found") != "allowed" or \ + not evaluation.get('OrganizationsDecisionDetail', {}).get("AllowedByOrganizations", True) + + # Evaluate all the allows, diffing out the matched denies + iam_client = boto3.client("iam") + failed_evaluations = [] + for expanded_policy in expanded_allows: + p_actions = expanded_policy.get("Action", []) + # Product of resources and actions in a request can't exceed 1000 + # Break down actions into chunks of 1k or less + # Break multiple resources out individually + # Run all those combos of resources and associated action blocks + for i in range(ceil(len(p_actions)/1000)): + p_resources = expanded_policy.get("Resource", []) + for resource in p_resources: + actions = set(p_actions[i*1000:(i+1)*1000]) + denials = _get_denies(resource) + log.debug("Denies in place for <%s>: %s", resource, ", ".join(denials)) + # Doing this if any expanded actions match an explicit deny. No sense in simulating an expected denial. 
+ denyless_actions = list(actions - denials) + if denyless_actions: + log.debug("simulating actions %s against %s", ", ".join(denyless_actions), resource) + result = simulate(iam_client, f"arn:aws:iam::{aws_account}:role/PantherDeploymentRole", denyless_actions, [resource]) + log.debug(result.get("EvaluationResults", [])) + failed_evaluations.extend( + list(filter(_denied_evaluation, result.get("EvaluationResults", []))) + ) + + # Output + if failed_evaluations: + output = {"Message": "Some evaluations were not allowed!", "Failures": []} + for evaluation in failed_evaluations: + action = evaluation['EvalActionName'] + resource = evaluation['EvalResourceName'] + decision = evaluation['EvalDecision'] + organization = evaluation.get('OrganizationsDecisionDetail', {}).get("AllowedByOrganizations", True) + msg = f"Failure: Action: {action}, Resource: {resource}, Result: {decision}, AllowedByOrganization: {organization}" + log.warning(msg) + output["Failures"].append({ + "Action": action, + "Resource": resource, + "Result": decision, + "AllowedByOrganizationPolicy": organization + }) + return output + return {"Message": "All evaluations were successful against the Deployment Role"} + + +if __name__ == "__main__": + print(lambda_handler()) diff --git a/serverless/panther-preflight-tools/readiness-check/src/cfn_expander.py b/serverless/panther-preflight-tools/readiness-check/src/cfn_expander.py new file mode 100644 index 0000000..e177816 --- /dev/null +++ b/serverless/panther-preflight-tools/readiness-check/src/cfn_expander.py @@ -0,0 +1,98 @@ +# Copyright (C) 2022 Panther Labs, Inc. +# +# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription +# Agreement available at https://panther.com/enterprise-subscription-agreement/. +# All intellectual property rights in and to the Panther SaaS, including any and all +# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement. 
+""" +Utilities for fetching, formatting, and expanding policies residing in a role +""" + +from typing import Dict, List +import boto3 + +from policyuniverse.expander_minimizer import _expand_wildcard_action + + +def resolve_policy_statement_resources(policy: Dict) -> None: + """ + Mutates a given policy; + Unifies Resource and NotResource fields to lists + """ + def _normalize_resource_value(resources) -> List[str]: + # Helper to normalize a str or list into a list + return resources if isinstance(resources, list) else [resources] if isinstance(resources, str) else [] + + for statement in policy['Statement']: + resource_key = 'Resource' if statement.get('Resource') else 'NotResource' if statement.get('NotResource') else None + + if resource_key: + statement[resource_key] = _normalize_resource_value(statement.get(resource_key)) + + +def expand_policy_statement_actions(policy: Dict) -> None: + """ + Mutates a given policy; + Expanding all wildcard actions defined in statements to a list of discrete actions + """ + for statement in policy['Statement']: + actions_list = [] + if isinstance(statement.get('Action'), list): + for action in statement.get('Action'): + expanded_actions = _expand_wildcard_action(action) + actions_list.extend(expanded_actions) + else: + actions_list.extend(_expand_wildcard_action(statement.get('Action'))) + statement['Action'] = actions_list + + +def get_deployment_role_policies(role_name: str = "PantherDeploymentRole") -> List[Dict]: + """ + Fetches the policies from the deployment role existing in the account + """ + client = boto3.client("iam") + # Collect the policy docs for inline policies + inline_policy_names = client.list_role_policies(RoleName=role_name).get("PolicyNames", []) + inline_policydocs = list( + map( + lambda p: client.get_role_policy(RoleName=role_name, PolicyName=p).get("PolicyDocument", "{}"), + inline_policy_names + ) + ) + + # Collect the policy docs for attached managed policies + attached_policy_arns = list( + map( + lambda p: p.get("PolicyArn", ""), + client.list_attached_role_policies(RoleName=role_name).get("AttachedPolicies", []) + ) + ) + attached_policydocs = list( + map( + lambda q: client.get_policy_version( + VersionId=q.get("DefaultVersionId"), + PolicyArn=q.get("Arn") + ).get("PolicyVersion", {}).get("Document"), + map( + lambda p: client.get_policy(PolicyArn=p).get("Policy", {}), + attached_policy_arns + ) + ) + ) + + # Assemble them together and unify the resource fields. 
+    policydocs = inline_policydocs + attached_policydocs
+    for policydoc in policydocs:
+        resolve_policy_statement_resources(policydoc)
+    return policydocs
+
+
+def get_deployment_role_policies_with_expanded_actions(role_name: str = "PantherDeploymentRole") -> List[Dict]:
+    """
+    Fetches the policies from the deployment role existing in the account
+    Expands actions from wildcards into discrete actions
+    """
+    policydocs = get_deployment_role_policies(role_name=role_name)
+    for policydoc in policydocs:
+        expand_policy_statement_actions(policydoc)
+    return policydocs
diff --git a/serverless/panther-preflight-tools/readiness-check/src/requirements.txt b/serverless/panther-preflight-tools/readiness-check/src/requirements.txt
new file mode 100644
index 0000000..c3a5864
--- /dev/null
+++ b/serverless/panther-preflight-tools/readiness-check/src/requirements.txt
@@ -0,0 +1,13 @@
+boto3==1.34.59
+botocore==1.34.59
+certifi==2024.2.2
+charset-normalizer==3.3.2
+idna==3.6
+jmespath==1.0.1
+policyuniverse==1.5.1.20231109
+python-dateutil==2.9.0.post0
+PyYAML==6.0.1
+requests==2.31.0
+s3transfer==0.10.0
+six==1.16.0
+urllib3==2.0.7
diff --git a/serverless/panther-preflight-tools/readiness-check/test/__init__.py b/serverless/panther-preflight-tools/readiness-check/test/__init__.py
new file mode 100644
index 0000000..36447f8
--- /dev/null
+++ b/serverless/panther-preflight-tools/readiness-check/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (C) 2022 Panther Labs, Inc.
+#
+# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription
+# Agreement available at https://panther.com/enterprise-subscription-agreement/.
+# All intellectual property rights in and to the Panther SaaS, including any and all
+# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement.
\ No newline at end of file diff --git a/serverless/panther-preflight-tools/readiness-check/test/example_policy.json b/serverless/panther-preflight-tools/readiness-check/test/example_policy.json new file mode 100644 index 0000000..33c58ba --- /dev/null +++ b/serverless/panther-preflight-tools/readiness-check/test/example_policy.json @@ -0,0 +1,245 @@ +{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "acm:*", + "apigateway:*", + "application-autoscaling:*ScalableTarget*", + "application-autoscaling:*ScalingPolicies", + "application-autoscaling:*ScalingPolicy", + "backup-storage:*", + "backup:*", + "batch:*", + "cloudformation:*", + "cloudfront:UpdateDistribution", + "cloudtrail:*", + "cloudwatch:*Alarm*", + "cloudwatch:*Dashboard*", + "cloudwatch:*Metric*", + "cloudwatch:*Tag*", + "cloudwatch:List*", + "codebuild:*", + "cognito-idp:*", + "dynamodb:*Backup*", + "dynamodb:*Stream*", + "dynamodb:*Table*", + "dynamodb:*Tag*", + "dynamodb:*TimeToLive*", + "ec2:*", + "ecr:GetAuthorizationToken", + "ecs:*Cluster*", + "ecs:*Service*", + "ecs:*Tag*", + "ecs:*Task*", + "elasticfilesystem:*", + "elasticloadbalancing:*", + "es:*", + "events:*", + "iam:*ServerCertificate", + "iam:Get*", + "iam:List*", + "kinesis:AddTagsToStream", + "kinesis:CreateStream", + "kinesis:DescribeStreamSummary", + "kinesis:EnableEnhancedMonitoring", + "kinesis:IncreaseStreamRetentionPeriod", + "kinesis:ListTagsForStream", + "kms:*", + "lambda:*EventSourceMapping", + "lambda:*LayerVersion*", + "lambda:List*", + "logs:*", + "organizations:DescribeOrganization", + "s3:*AccelerateConfiguration", + "s3:*AccountPublicAccessBlock", + "s3:*Bucket*", + "s3:*EncryptionConfiguration", + "s3:*InventoryConfiguration", + "s3:*LifecycleConfiguration", + "s3:*MetricsConfiguration", + "s3:*ReplicationConfiguration", + "s3:CreateAccessPoint", + "s3:PutObject*", + "secretsmanager:Describe*", + "secretsmanager:List*", + "servicequotas:*", + "sns:*", + "sqs:*Permission*", + "sqs:*Queue*", + "sqs:SendMessage", + "states:*", + "wafv2:*", + "wafv2:CreateRuleGroup", + "wafv2:CreateWebACL", + "wafv2:GetRuleGroup", + "wafv2:ListTagsForResource", + "wafv2:TagResource", + "wafv2:UpdateRuleGroup" + ], + "Resource": "*" + }, + { + "Effect": "Allow", + "Action": "secretsmanager:*", + "Resource": "arn:aws:secretsmanager:*:123412341234:secret:panther*" + }, + { + "Effect": "Allow", + "Action": "firehose:*", + "Resource": "arn:aws:firehose:*:123412341234:deliverystream/*" + }, + { + "Effect": "Allow", + "Action": ["dynamodb:Scan", "dynamodb:Get*"], + "Resource": [ + "arn:aws:dynamodb:*:123412341234:table/panther-analysis", + "arn:aws:dynamodb:*:123412341234:table/panther-organization" + ] + }, + { + "Effect": "Allow", + "Action": [ + "iam:*InstanceProfile*", + "iam:AttachRolePolicy", + "iam:CreateRole", + "iam:DeleteRole", + "iam:DeleteRolePolicy", + "iam:DetachRolePolicy", + "iam:Get*", + "iam:List*", + "iam:PassRole", + "iam:PutRolePolicy", + "iam:TagRole", + "iam:UpdateAssumeRolePolicy", + "iam:UpdateRole", + "iam:UpdateRoleDescription", + "iam:UntagRole" + ], + "Resource": [ + "arn:aws:iam::123412341234:role/dynamo-scaling-*", + "arn:aws:iam::123412341234:role/firehose-http-input-data-bucket-*", + "arn:aws:iam::123412341234:role/panther-*", + "arn:aws:iam::123412341234:role/Panther*", + "arn:aws:iam::123412341234:role/pip-layer-builder-codebuild-*", + "arn:aws:iam::123412341234:instance-profile/Panther*", + "arn:aws:iam::123412341234:role/datadog*", + "arn:aws:iam::123412341234:role/Datadog*" + ] + }, + { + "Effect": 
"Allow", + "Action": ["iam:*"], + "Resource": [ + "arn:aws:iam::123412341234:policy/Panther*", + "arn:aws:iam::123412341234:policy/analytics-*", + "arn:aws:iam::123412341234:policy/data-*", + "arn:aws:iam::123412341234:policy/datadog-*", + "arn:aws:iam::123412341234:policy/deny-data-access-*", + "arn:aws:iam::123412341234:policy/dynamo-scaling-*", + "arn:aws:iam::123412341234:policy/firehose-*", + "arn:aws:iam::123412341234:policy/panther-*", + "arn:aws:iam::123412341234:policy/read-*", + "arn:aws:iam::123412341234:policy/support-*", + "arn:aws:iam::123412341234:policy/write-to-firehose-*", + "arn:aws:iam::123412341234:role/aws-service-role/batch.amazonaws.com/AWSServiceRoleForBatch", + "arn:aws:iam::123412341234:role/aws-service-role/cloudtrail.amazonaws.com/AWSServiceRoleForCloudTrail", + "arn:aws:iam::123412341234:role/aws-service-role/dynamodb.application-autoscaling.amazonaws.com/AWSServiceRoleForApplicationAutoScaling_DynamoDBTable", + "arn:aws:iam::123412341234:role/aws-service-role/ecs.amazonaws.com/AWSServiceRoleForECS", + "arn:aws:iam::123412341234:role/aws-service-role/elasticloadbalancing.amazonaws.com/AWSServiceRoleForElasticLoadBalancing", + "arn:aws:iam::123412341234:role/aws-service-role/elasticfilesystem.amazonaws.com/AWSServiceRoleForAmazonElasticFileSystem", + "arn:aws:iam::123412341234:role/aws-service-role/guardduty.amazonaws.com/AWSServiceRoleForAmazonGuardDuty", + "arn:aws:iam::123412341234:role/aws-service-role/opensearchservice.amazonaws.com/AWSServiceRoleForAmazonOpenSearchService", + "arn:aws:iam::123412341234:role/aws-service-role/ops.apigateway.amazonaws.com/AWSServiceRoleForAPIGateway", + "arn:aws:iam::123412341234:role/aws-service-role/servicequotas.amazonaws.com/AWSServiceRoleForServiceQuotas" + ] + }, + { + "Effect": "Allow", + "Action": ["sts:AssumeRole"], + "Resource": "arn:aws:iam::*:role/PulumiRoute53" + }, + { + "Effect": "Allow", + "Action": "lambda:*", + "Resource": [ + "arn:aws:lambda:us-west-2:123412341234:event-source-mapping:*", + "arn:aws:lambda:us-west-2:123412341234:function:panther-*", + "arn:aws:lambda:us-west-2:123412341234:layer:panther-*", + "arn:aws:lambda:us-west-2:123412341234:function:datadog-*" + ] + }, + { + "Effect": "Allow", + "Action": "lambda:invokeFunction", + "Resource": "arn:aws:lambda:us-west-2:123412341234:function:panther-source-api" + }, + { + "Effect": "Allow", + "Action": "lambda:GetLayerVersion", + "Resource": [ + "arn:aws:lambda:us-west-2:464622532012:layer:Datadog-Extension*", + "arn:aws:lambda:us-west-2:464622532012:layer:Datadog-Python*", + "arn:aws:lambda:us-west-2:580247275435:layer:LambdaInsightsExtension*" + ] + }, + { + "Effect": "Allow", + "Action": [ + "ecr:BatchCheckLayerAvailability", + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage" + ], + "Resource": [ + "arn:aws:ecr:us-west-2:*:repository/panther-enterprise", + "arn:aws:ecr:us-west-2:*:repository/panther-internal-rc" + ] + }, + { + "Effect": "Allow", + "Action": ["s3:Get*", "s3:ListBucket"], + "Resource": [ + "arn:aws:s3:::panther-enterprise-us-west-2*", + "arn:aws:s3:::panther-internal-rc-us-west-2*" + ] + }, + { + "Effect": "Allow", + "Action": "s3:*", + "Resource": ["arn:aws:s3:::panther*-analysisversions-*", "arn:aws:s3:::analysis-versions-*"] + }, + { + "Effect": "Deny", + "Action": "elasticloadbalancing:DeleteLoadBalancer", + "NotResource": [ + "arn:aws:elasticloadbalancing:us-west-2:123412341234:loadbalancer/app/http-ingest-alb*" + ] + }, + { + "Effect": "Deny", + "Action": "dynamodb:DeleteTable", + "NotResource": [ + 
"arn:aws:dynamodb:*:123412341234:table/*alerts-risk-factors", + "arn:aws:dynamodb:*:123412341234:table/*alerts-indicators", + "arn:aws:dynamodb:*:123412341234:table/*alert-search-rehydrate-jobs" + ] + }, + { + "Effect": "Deny", + "Action": [ + "cognito-idp:DeleteUserPool*", + "dynamodb:DeleteBackup", + "dynamodb:DeleteItem", + "dynamodb:DeleteTableReplica", + "kms:DeleteAlias", + "kms:DeleteCustomKeyStore", + "kms:DeleteImportedKeyMaterial", + "kms:ScheduleKeyDeletion", + "s3:DeleteBucket", + "sns:DeleteTopic" + ], + "Resource": "*" + } + ] +} diff --git a/serverless/panther-preflight-tools/readiness-check/test/test_cfn_expander.py b/serverless/panther-preflight-tools/readiness-check/test/test_cfn_expander.py new file mode 100644 index 0000000..b1ae996 --- /dev/null +++ b/serverless/panther-preflight-tools/readiness-check/test/test_cfn_expander.py @@ -0,0 +1,36 @@ +# Copyright (C) 2022 Panther Labs, Inc. +# +# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription +# Agreement available at https://panther.com/enterprise-subscription-agreement/. +# All intellectual property rights in and to the Panther SaaS, including any and all +# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement. + +from src import cfn_expander +import json +from copy import deepcopy + +with open("test/example_policy.json", "r") as file: + test_policy = json.loads(file.read()) + + +def test_resolve_policy_statement_resources(): + local_policy = deepcopy(test_policy) + cfn_expander.resolve_policy_statement_resources(local_policy) + + for statement in local_policy.get("Statement"): + resources = statement.get("Resource", None) + if not resources: + resources = statement.get("NotResource") + assert isinstance(resources, list) + return + + +def test_expand_policy_statement_actions(): + local_policy = deepcopy(test_policy) + cfn_expander.expand_policy_statement_actions(local_policy) + + for statement in local_policy.get("Statement"): + actions = statement.get("Action") + for action in actions: + assert "*" not in action + return diff --git a/serverless/panther-preflight-tools/template.yml b/serverless/panther-preflight-tools/template.yml new file mode 100644 index 0000000..622c107 --- /dev/null +++ b/serverless/panther-preflight-tools/template.yml @@ -0,0 +1,65 @@ +# Copyright (C) 2022 Panther Labs, Inc. +# +# The Panther SaaS is licensed under the terms of the Panther Enterprise Subscription +# Agreement available at https://panther.com/enterprise-subscription-agreement/. +# All intellectual property rights in and to the Panther SaaS, including any and all +# rights to access the Panther SaaS, are governed by the Panther Enterprise Subscription Agreement. 
+ +AWSTemplateFormatVersion: 2010-09-09 +Transform: AWS::Serverless-2016-10-31 +Description: Various tools for performing actions and checks in prospective panther AWS accounts + +Resources: + SnowflakeCredentialBootstrap: + Type: AWS::Serverless::Function + Properties: + FunctionName: PantherSnowflakeCredentialBootstrap + Runtime: python3.11 + CodeUri: connected-snowflake-credential-bootstrap/src/ + Handler: app.lambda_handler + Policies: + - Statement: + - Effect: Allow + Action: + - secretsmanager:DescribeSecret + - secretsmanager:CreateSecret + Resource: + - '*' + - Statement: + - Effect: Allow + Action: + - secretsmanager:GetSecretValue + Resource: + - !Sub arn:${AWS::Partition}:secretsmanager:*:${AWS::AccountId}:secret:panther-managed-accountadmin-secret* + MemorySize: 512 + Timeout: 120 + Description: Validate and store accountadmin snowflake credentials for use by panther + ReadinessCheck: + Type: AWS::Serverless::Function + Properties: + FunctionName: PantherReadinessCheck + Runtime: python3.11 + CodeUri: readiness-check/src/ + Handler: app.lambda_handler + Policies: + - Statement: + - Effect: Allow + Action: + - sts:GetCallerIdentity + - sts:GetSessionToken + - iam:SimulatePrincipalPolicy + - iam:GetPolicy + - iam:GetPolicyVersion + Resource: + - '*' + - Effect: Allow + Action: + - iam:ListRolePolicies + - iam:ListAttachedRolePolicies + - iam:GetRolePolicy + Resource: + - !Sub 'arn:${AWS::Partition}:iam::${AWS::AccountId}:role/PantherDeploymentRole' + + MemorySize: 512 + Timeout: 120 + Description: Simulate the panther DeploymentRole policy against the current aws environment
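
For reference (outside the patch itself), a minimal sketch of driving the readiness check from Python instead of the AWS CLI. The function name comes from template.yml and the response shape ("Message" plus an optional "Failures" list) comes from readiness-check/src/app.py; the script itself is illustrative and assumes AWS credentials for the target account and region are already configured.

```
import json

import boto3


def run_readiness_check(region: str = "us-west-2") -> None:
    """Invoke PantherReadinessCheck and print any failed evaluations."""
    lambda_client = boto3.client("lambda", region_name=region)
    resp = lambda_client.invoke(FunctionName="PantherReadinessCheck")
    result = json.load(resp["Payload"])

    print(result.get("Message", result))
    for failure in result.get("Failures", []):
        print(f"  {failure['Action']} on {failure['Resource']}: {failure['Result']} "
              f"(allowed by org policy: {failure['AllowedByOrganizationPolicy']})")


if __name__ == "__main__":
    run_readiness_check()
```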
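Similarly, a sketch of the two-pass credential-bootstrap flow the README describes, again via boto3 rather than the AWS CLI. The payload keys ("host", "validate") and the function name come from the README and template.yml; the region and the example host value are placeholders. Run the seed step first, edit the secret in the console as prompted, then rerun with --validate.

```
import json
import sys

import boto3

FUNCTION_NAME = "PantherSnowflakeCredentialBootstrap"


def invoke_bootstrap(payload: dict, region: str = "us-west-2") -> str:
    """Invoke the bootstrap Lambda and return its decoded response."""
    lambda_client = boto3.client("lambda", region_name=region)
    resp = lambda_client.invoke(FunctionName=FUNCTION_NAME, Payload=json.dumps(payload))
    body = json.load(resp["Payload"])
    if resp.get("FunctionError"):
        # The handler raises on bad input or an unedited placeholder secret; surface that here.
        raise RuntimeError(body)
    return body


if __name__ == "__main__":
    if "--validate" in sys.argv:
        # Second pass: confirm the edited secret and capture the ARN for Panther.
        print(invoke_bootstrap({"validate": True}))
    else:
        # First pass: seed the placeholder secret, then edit it in the console as prompted.
        print(invoke_bootstrap({"host": "https://myaccountid.snowflakecomputing.com"}))
```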
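Finally, a standalone, toy-data sketch of the deny-exclusion step in readiness-check/src/app.py: before anything is simulated, each resource's candidate actions are reduced by the union of resource-specific denies, global "*" denies, and NotResource denies that do not exempt the resource. No AWS calls are made, and the deny entries below are invented purely for illustration.

```
from collections import defaultdict

# Toy deny data, in the same shape app.py builds from the expanded Deny statements.
denies_by_resource = defaultdict(set, {
    "*": {"s3:DeleteBucket"},
    "arn:aws:sqs:us-west-2:123412341234:panther-queue": {"sqs:DeleteQueue"},
})
deny_not_resources_by_action = defaultdict(set, {
    "dynamodb:DeleteTable": {"arn:aws:dynamodb:*:123412341234:table/*alerts-indicators"},
})


def denies_for(resource: str) -> set:
    """Union of resource-specific denies, global '*' denies, and non-exempting NotResource denies."""
    not_resource_denies = {
        action for action, exempt in deny_not_resources_by_action.items()
        if resource not in exempt
    }
    return denies_by_resource.get(resource, set()) | denies_by_resource.get("*", set()) | not_resource_denies


allowed = {"sqs:SendMessage", "sqs:DeleteQueue", "s3:DeleteBucket", "dynamodb:DeleteTable"}
resource = "arn:aws:sqs:us-west-2:123412341234:panther-queue"
print(sorted(allowed - denies_for(resource)))  # ['sqs:SendMessage'] is the only action worth simulating
```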