From 9c4040d5db94b0b4772fe0e39a2dcd82687123e9 Mon Sep 17 00:00:00 2001
From: Joshua McKiddy
Date: Wed, 21 Jul 2021 13:24:45 -0700
Subject: [PATCH 1/3] Updates for 1.3.1

---
 README.md | 26 +++++++++++++++++++-------
 1 file changed, 19 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 8d7bc919..a2e0b3de 100644
--- a/README.md
+++ b/README.md
@@ -207,7 +207,7 @@ python3 assisted_log_enabler.py --mode single_account --cloudtrail
 2. Within the AWS Console, go to AWS CloudFormation.
 3. Within AWS CloudFormation, go to StackSets.
 4. Within the StackSets screen, select Create StackSet.
-5. In Step 1, under Specify Template, selecte Upload a template file, and use the AWS CloudFormation template provided in the permissions folder. [Link to the file](https://github.com/awslabs/assisted-log-enabler-for-aws/blob/main/permissions/ALE_child_account_role.yaml)
+5. In Step 1, under Specify Template, select Upload a template file, and use the AWS CloudFormation template provided in the permissions folder. [Link to the file](https://github.com/awslabs/assisted-log-enabler-for-aws/blob/main/permissions/ALE_child_account_role.yaml)
 6. In Step 2, under StackSet Name, add a descriptive name.
 7. In Step 2, under Parameters, add the parameters required:
     * AssistedLogEnablerPolicyName: You can leave this default, but you can also change it if desired.
@@ -218,23 +218,35 @@ python3 assisted_log_enabler.py --mode single_account --cloudtrail
 9. In Step 4, under Deployment targets, select the option that fits for your AWS Organization.
     * If you Deploy to Organization, it will deploy to all AWS accounts except the root AWS account. If you want to include that one, you can either deploy the template to the root AWS account directly, or use the other option (details below).
     * If you Deploy to organizational units (OUs), you can deploy directly to OUs that you define, including the root OU.
-10. In Step 4, under Specify Regions, select US East (N.Virginia)
+10. In Step 4, under Specify Regions, select US East (N. Virginia).
     * There's no need to select multiple regions here. This template only deploys AWS IAM resources, which are Global.
 11. In Step 4, under Deployment options, leave the default settings.
 12. In Step 5, review the settings you've set in the previous steps. If all is correct, check the box that states "I acknowledge that AWS CloudFormation might create IAM resources with custom names."
     * Once this is submitted, you'll need to wait until the StackSet is fully deployed. If there are errors, please examine the error and ensure that all the information from the above steps are correct.
-13. Once the StackSet is successfully deployed, click on the icon for AWS Cloudshell next to the search bar.
+13. Within AWS CloudFormation, go to Stacks.
+14. Within the Stacks screen, go to the Create Stack dropdown, and select With new resources.
+15. In Step 1, select Upload a template file, select Choose File, and use the AWS CloudFormation template provided in the permissions folder. [Link to the file](https://github.com/awslabs/assisted-log-enabler-for-aws/blob/main/permissions/ALE_child_account_role.yaml)
+16. In Step 2, under Stack Name, add a descriptive name.
+17. In Step 2, under Parameters, add the parameters required:
+    * AssistedLogEnablerPolicyName: You can leave this default, but you can also change it if desired.
+    * OrgId: Provide the AWS Organization ID.
+    * SourceAccountNumber: Provide the source AWS account number where the Assisted Log Enabler for AWS will be running.
+18. In Step 3, add any tags that you desire, as well as any permissions options that you want to select.
+    * The service-managed permissions work just fine for Assisted Log Enabler for AWS, but you can use self-service permissions if desired.
+19. In Step 4, review the settings you've set in the previous steps. If all is correct, check the box that states "I acknowledge that AWS CloudFormation might create IAM resources with custom names."
+    * Once this is submitted, you'll need to wait until the Stack is fully deployed. If there are errors, please examine the error and ensure that all the information from the above steps is correct.
+20. Once both the StackSet and Stack are successfully deployed, click on the icon for AWS CloudShell next to the search bar.
     * Ensure that you're in a region where AWS CloudShell is currently available.
-14. Once the session begins, download the Assisted Log Enabler within the AWS CloudShell session.
+21. Once the session begins, download the Assisted Log Enabler within the AWS CloudShell session.
 ```
 git clone https://github.com/awslabs/assisted-log-enabler-for-aws.git
 ```
-15. Unzip the file, and change the directory to the unzipped folder:
+22. Unzip the file, and change the directory to the unzipped folder:
 ```
 unzip assisted-log-enabler-for-aws-main.zip
 cd assisted-log-enabler-for-aws-main
 ```
-16. Run the following command to run the Assisted Log Enabler in multi account mode, for the AWS service or services you want to check for:
+23. Run the following command to run the Assisted Log Enabler in multi-account mode, for the AWS service or services you want to check for:
 ```
 # For all services:
 python3 assisted_log_enabler.py --mode multi_account --all
@@ -292,7 +304,7 @@ For answers to cost-related questions involved with this solution, refer to the following links:
 
 ## Feedback
 
-Please use the Issues section to submit any feedback, such as features or recommendations, as well as any bugs that are encountered.
+Please use the [Issues](https://github.com/awslabs/assisted-log-enabler-for-aws/issues) section to submit any feedback, such as features or recommendations, as well as any bugs that are encountered.
 
 ## Security
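A note on the parameters requested in the steps above: both the StackSet and the Stack ask for the AWS Organization ID and the source account number. As a minimal, hedged sketch (not part of this patch set), both values can be looked up with standard boto3 calls, assuming credentials for the AWS Organizations management account:

```
# Hedged sketch (not part of this patch): look up the OrgId and
# SourceAccountNumber values the StackSet/Stack parameters ask for.
# Assumes boto3 credentials for the AWS Organizations management account.
import boto3

organizations = boto3.client('organizations')
sts = boto3.client('sts')

org_id = organizations.describe_organization()['Organization']['Id']  # OrgId parameter
source_account = sts.get_caller_identity()['Account']                 # SourceAccountNumber parameter
print('OrgId: ' + org_id + ', SourceAccountNumber: ' + source_account)
```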
From 57b132a9da75baee4743d0ce2f87f17ca575e773 Mon Sep 17 00:00:00 2001
From: Joshua McKiddy
Date: Wed, 21 Jul 2021 13:35:11 -0700
Subject: [PATCH 2/3] Updates to 1.3.1

---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index a2e0b3de..3c34252d 100644
--- a/README.md
+++ b/README.md
@@ -205,7 +205,7 @@ python3 assisted_log_enabler.py --mode single_account --cloudtrail
     * Ensure that the AWS Account you're in is the account you want to store the logs. Additionally, ensure that the AWS account you're in has access to the AWS Organizations information within your AWS environment.
     * You may have to register your AWS account as a delegated administrator within AWS CloudFormation, in order to run this code in an AWS account of your choosing. Please see the following link for more details: [Register a delegated administrator](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html)
 2. Within the AWS Console, go to AWS CloudFormation.
-3. Within AWS CloudFormation, go to StackSets.
+3. To deploy the IAM Permissions within all child accounts: within AWS CloudFormation, go to StackSets.
 4. Within the StackSets screen, select Create StackSet.
 5. In Step 1, under Specify Template, select Upload a template file, and use the AWS CloudFormation template provided in the permissions folder. [Link to the file](https://github.com/awslabs/assisted-log-enabler-for-aws/blob/main/permissions/ALE_child_account_role.yaml)
 6. In Step 2, under StackSet Name, add a descriptive name.
@@ -223,7 +223,7 @@ python3 assisted_log_enabler.py --mode single_account --cloudtrail
 11. In Step 4, under Deployment options, leave the default settings.
 12. In Step 5, review the settings you've set in the previous steps. If all is correct, check the box that states "I acknowledge that AWS CloudFormation might create IAM resources with custom names."
     * Once this is submitted, you'll need to wait until the StackSet is fully deployed. If there are errors, please examine the error and ensure that all the information from the above steps are correct.
-13. Within AWS CloudFormation, go to Stacks.
+13. To deploy the IAM Permissions within the AWS Account where Assisted Log Enabler for AWS is being run: within AWS CloudFormation, go to Stacks.
 14. Within the Stacks screen, go to the Create Stack dropdown, and select With new resources.
 15. In Step 1, select Upload a template file, select Choose File, and use the AWS CloudFormation template provided in the permissions folder. [Link to the file](https://github.com/awslabs/assisted-log-enabler-for-aws/blob/main/permissions/ALE_child_account_role.yaml)
 16. In Step 2, under Stack Name, add a descriptive name.
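The role that the StackSet above deploys into each child account is what allows the multi-account mode to operate across the AWS Organization. As a hedged illustration only, cross-account access of that kind is typically done by assuming the role with AWS STS; the role name below is a hypothetical placeholder, and the actual name comes from the ALE_child_account_role.yaml template:

```
# Hedged illustration (not from this patch): assuming a child-account role
# such as the one the StackSet deploys. The role name is a hypothetical
# placeholder; use the name actually created by ALE_child_account_role.yaml.
import boto3

def assumed_session(child_account_id, role_name='AssistedLogEnabler'):  # hypothetical name
    sts = boto3.client('sts')
    creds = sts.assume_role(
        RoleArn='arn:aws:iam::' + child_account_id + ':role/' + role_name,
        RoleSessionName='assisted-log-enabler'
    )['Credentials']
    # Build a boto3 session scoped to the child account's temporary credentials.
    return boto3.session.Session(
        aws_access_key_id=creds['AccessKeyId'],
        aws_secret_access_key=creds['SecretAccessKey'],
        aws_session_token=creds['SessionToken']
    )
```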
From 351e1af124e03ee3e5edb0a6831e045a75da99e7 Mon Sep 17 00:00:00 2001
From: Joshua McKiddy
Date: Thu, 22 Jul 2021 16:19:50 -0700
Subject: [PATCH 3/3] Updates for 1.3.1

---
 CHANGELOG.md                       | 12 +++++-
 README.md                          |  3 ++
 subfunctions/ALE_multi_account.py  | 55 ++++++++++++++++----------
 subfunctions/ALE_single_account.py | 62 ++++++++++++++++++------------
 4 files changed, 86 insertions(+), 46 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f7e62bf8..2a3db1d3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -114,4 +114,14 @@
 * Updated Cleanup section to reflect new cleanup capabilities.
 * Updated IAM Permissions examples within the README.
 * AWS CloudFormation template for deploying IAM Permissions to run cleanup code.
-* Header in files to reflect "Assisted Log Enabler for AWS", instead of "Assisted Log Enabler (ALE)".
\ No newline at end of file
+* Header in files to reflect "Assisted Log Enabler for AWS", instead of "Assisted Log Enabler (ALE)".
+
+## [1.3.1] - 2021-07-22
+
+### Added
+* Randomization to the end of the Amazon S3 bucket name in both single and multi-account modes.
+* Instructions for deploying the AWS CloudFormation Stack individually, within the AWS Organizations root account for multi-account deployment.
+* Link to the AWS Security Analytics Bootstrap within the README.
+
+### Changed
+* Feedback section within README to contain link to Issues section.
\ No newline at end of file
diff --git a/README.md b/README.md
index 3c34252d..15cd1c55 100644
--- a/README.md
+++ b/README.md
@@ -293,6 +293,9 @@ NEW! A cleanup mode is available within the Assisted Log Enabler for AWS (curren
 python3 assisted_log_enabler.py --mode cleanup --single_r53querylogs
 ```
 
+## Additional Tools
+For analyzing logs created by Assisted Log Enabler for AWS, consider taking a look at the AWS Security Analytics Bootstrap, a tool that provides an Amazon Athena analysis environment that's quick to deploy, ready to use, and easy to maintain. [Link](https://github.com/awslabs/aws-security-analytics-bootstrap)
+
 ## Costs
 
 For answers to cost-related questions involved with this solution, refer to the following links:
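As a hedged example of the analysis the new Additional Tools section points to: once logs land in the collection bucket, an Amazon Athena environment such as the AWS Security Analytics Bootstrap can query them with boto3. The database, table, and results-bucket names below are hypothetical placeholders, not names created by this project:

```
# Hedged example (not part of this patch): kick off an Athena query over
# collected VPC Flow Logs. Database, table, and output bucket names are
# hypothetical placeholders.
import boto3

athena = boto3.client('athena')
query = athena.start_query_execution(
    QueryString='SELECT srcaddr, dstaddr, action FROM vpc_flow_logs LIMIT 10;',    # hypothetical table
    QueryExecutionContext={'Database': 'security_analytics'},                      # hypothetical database
    ResultConfiguration={'OutputLocation': 's3://example-athena-results-bucket/'}  # hypothetical bucket
)
print(query['QueryExecutionId'])
```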
diff --git a/subfunctions/ALE_multi_account.py b/subfunctions/ALE_multi_account.py
index bfd74908..e6a96ade 100644
--- a/subfunctions/ALE_multi_account.py
+++ b/subfunctions/ALE_multi_account.py
@@ -12,6 +12,8 @@
 import datetime
 import argparse
 import csv
+import string
+import random
 from botocore.exceptions import ClientError
 from datetime import timezone
 
@@ -32,6 +34,14 @@
 region_list = ['af-south-1', 'ap-east-1', 'ap-south-1', 'ap-northeast-1', 'ap-northeast-2', 'ap-northeast-3', 'ap-southeast-1', 'ap-southeast-2', 'ca-central-1', 'eu-central-1', 'eu-west-1', 'eu-west-2', 'eu-west-3', 'eu-north-1', 'eu-south-1', 'me-south-1', 'sa-east-1', 'us-east-1', 'us-east-2', 'us-west-1', 'us-west-2']
 
 
+# 0. Define random string for S3 Bucket Name
+def random_string_generator():
+    lower_letters = string.ascii_lowercase
+    numbers = string.digits
+    unique_end = (''.join(random.choice(lower_letters + numbers) for char in range(6)))
+    return unique_end
+
+
 # 1. Obtain the AWS Accounts inside of AWS Organizations
 def org_account_grab():
     """Function to list accounts inside of AWS Organizations"""
@@ -59,18 +69,18 @@ def get_account_number():
 
 
 # 3. Create a Bucket and Lifecycle Policy
-def create_bucket(organization_id, account_number):
+def create_bucket(organization_id, account_number, unique_end):
     """Function to create the bucket for storing logs"""
     try:
         logging.info("Creating bucket in %s" % account_number)
         logging.info("CreateBucket API Call")
         if region == 'us-east-1':
             logging_bucket_dict = s3.create_bucket(
-                Bucket="aws-log-collection-" + account_number + "-" + region
+                Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end
             )
         else:
             logging_bucket_dict = s3.create_bucket(
-                Bucket="aws-log-collection-" + account_number + "-" + region,
+                Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end,
                 CreateBucketConfiguration={
                     'LocationConstraint': region
                 }
@@ -78,7 +88,7 @@
         logging.info("Bucket Created.")
         logging.info("Setting lifecycle policy.")
         lifecycle_policy = s3.put_bucket_lifecycle_configuration(
-            Bucket="aws-log-collection-" + account_number + "-" + region,
+            Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end,
             LifecycleConfiguration={
                 'Rules': [
                     {
"arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '"},{"Sid": "AWSCloudTrailWrite20150319","Effect": "Allow","Principal": {"Service": "cloudtrail.amazonaws.com"},"Action": "s3:PutObject","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '/cloudtrail/AWSLogs/' + account_number + '/*","Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}},{"Sid": "AWSLogDeliveryAclCheck","Effect": "Allow","Principal": {"Service": "delivery.logs.amazonaws.com"},"Action": "s3:GetBucketAcl","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '"},{"Sid": "AWSLogDeliveryWriteVPC","Effect": "Allow","Principal": {"Service": "delivery.logs.amazonaws.com"},"Action": "s3:PutObject","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '/vpcflowlogs/*","Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}},{"Sid": "AWSLogDeliveryWriteR53","Effect": "Allow","Principal": {"Service": "delivery.logs.amazonaws.com"},"Action": "s3:PutObject","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '/r53querylogs/*","Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}}]}' + Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end, + Policy='{"Version": "2012-10-17", "Statement": [{"Sid": "AWSCloudTrailAclCheck20150319","Effect": "Allow","Principal": {"Service": "cloudtrail.amazonaws.com"},"Action": "s3:GetBucketAcl","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '"},{"Sid": "AWSCloudTrailWrite20150319","Effect": "Allow","Principal": {"Service": "cloudtrail.amazonaws.com"},"Action": "s3:PutObject","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '/cloudtrail/AWSLogs/' + account_number + '/*","Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}},{"Sid": "AWSLogDeliveryAclCheck","Effect": "Allow","Principal": {"Service": "delivery.logs.amazonaws.com"},"Action": "s3:GetBucketAcl","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '"},{"Sid": "AWSLogDeliveryWriteVPC","Effect": "Allow","Principal": {"Service": "delivery.logs.amazonaws.com"},"Action": "s3:PutObject","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '/vpcflowlogs/*","Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}},{"Sid": "AWSLogDeliveryWriteR53","Effect": "Allow","Principal": {"Service": "delivery.logs.amazonaws.com"},"Action": "s3:PutObject","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '/r53querylogs/*","Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}}]}' ) logging.info("Setting the S3 bucket Public Access to Blocked") logging.info("PutPublicAccessBlock API Call") bucket_private = s3.put_public_access_block( - Bucket="aws-log-collection-" + account_number + "-" + region, + Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end, PublicAccessBlockConfiguration={ 'BlockPublicAcls': True, 'IgnorePublicAcls': True, @@ -129,7 +139,7 @@ def create_bucket(organization_id, account_number): # 4. Find VPCs and turn flow logs on if not on already. 
-def flow_log_activator(account_number, OrgAccountIdList, region_list):
+def flow_log_activator(account_number, OrgAccountIdList, region_list, unique_end):
     """Function to define the list of VPCs without logging turned on"""
     logging.info("Creating a list of VPCs without Flow Logs on.")
     for org_account in OrgAccountIdList:
@@ -176,7 +186,7 @@
                         ResourceType='VPC',
                         TrafficType='ALL',
                         LogDestinationType='s3',
-                        LogDestination='arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '/vpcflowlogs',
+                        LogDestination='arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '/vpcflowlogs',
                         LogFormat='${version} ${account-id} ${interface-id} ${srcaddr} ${dstaddr} ${srcport} ${dstport} ${protocol} ${packets} ${bytes} ${start} ${end} ${action} ${log-status} ${vpc-id} ${type} ${tcp-flags} ${subnet-id} ${sublocation-type} ${sublocation-id} ${region} ${pkt-srcaddr} ${pkt-dstaddr} ${instance-id} ${az-id} ${pkt-src-aws-service} ${pkt-dst-aws-service} ${flow-direction} ${traffic-path}'
                     )
                     logging.info("VPC Flow Logs are turned on for account " + org_account + ".")
@@ -245,7 +255,7 @@ def eks_logging(region_list, OrgAccountIdList):
 
 
 # 6. Turn on Route 53 Query Logging.
-def route_53_query_logs(region_list, account_number, OrgAccountIdList):
+def route_53_query_logs(region_list, account_number, OrgAccountIdList, unique_end):
     """Function to turn on Route 53 Query Logs for VPCs"""
     for org_account in OrgAccountIdList:
         for aws_region in region_list:
@@ -294,7 +304,7 @@
                     logging.info("CreateResolverQueryLogConfig API Call")
                     create_query_log = route53resolver_ma.create_resolver_query_log_config(
                         Name='Assisted_Log_Enabler_Query_Logs_' + aws_region,
-                        DestinationArn='arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '/r53querylogs',
+                        DestinationArn='arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '/r53querylogs',
                         CreatorRequestId=timestamp_date_string,
                         Tags=[
                             {
@@ -325,30 +335,33 @@ def run_eks():
 
 
 def run_vpc_flow_logs():
     """Function that runs the defined VPC Flow Log logging code"""
+    unique_end = random_string_generator()
     account_number = get_account_number()
     OrgAccountIdList, organization_id = org_account_grab()
-    create_bucket(organization_id, account_number)
-    flow_log_activator(account_number, OrgAccountIdList, region_list)
+    create_bucket(organization_id, account_number, unique_end)
+    flow_log_activator(account_number, OrgAccountIdList, region_list, unique_end)
     logging.info("This is the end of the script. Please feel free to validate that logs have been turned on.")
 
 
 def run_r53_query_logs():
     """Function that runs the defined R53 Query Logging code"""
+    unique_end = random_string_generator()
     account_number = get_account_number()
     OrgAccountIdList, organization_id = org_account_grab()
-    create_bucket(organization_id, account_number)
-    route_53_query_logs(region_list, account_number, OrgAccountIdList)
+    create_bucket(organization_id, account_number, unique_end)
+    route_53_query_logs(region_list, account_number, OrgAccountIdList, unique_end)
     logging.info("This is the end of the script. Please feel free to validate that logs have been turned on.")
 
 
 def lambda_handler(event, context):
     """Function that runs all of the previously defined functions"""
+    unique_end = random_string_generator()
     account_number = get_account_number()
     OrgAccountIdList, organization_id = org_account_grab()
-    create_bucket(organization_id, account_number)
-    flow_log_activator(account_number, OrgAccountIdList, region_list)
+    create_bucket(organization_id, account_number, unique_end)
+    flow_log_activator(account_number, OrgAccountIdList, region_list, unique_end)
     eks_logging(region_list, OrgAccountIdList)
-    route_53_query_logs(region_list, account_number, OrgAccountIdList)
+    route_53_query_logs(region_list, account_number, OrgAccountIdList, unique_end)
     logging.info("This is the end of the script. Please feel free to validate that logs have been turned on.")
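Both scripts in this patch build the log-collection bucket name the same way, with the new random suffix guarding against collisions under Amazon S3's globally unique bucket-name requirement. A small self-contained illustration of the naming scheme (the account ID and region below are placeholders):

```
# Illustration (not part of the patch): how the unique_end suffix composes
# into the bucket name used throughout both scripts.
import random
import string

def random_string_generator():
    lower_letters = string.ascii_lowercase
    numbers = string.digits
    return ''.join(random.choice(lower_letters + numbers) for _ in range(6))

account_number = '111122223333'  # placeholder account ID
region = 'us-east-1'             # placeholder region
bucket = 'aws-log-collection-' + account_number + '-' + region + '-' + random_string_generator()
print(bucket)  # e.g. aws-log-collection-111122223333-us-east-1-k3x9q2
```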
diff --git a/subfunctions/ALE_single_account.py b/subfunctions/ALE_single_account.py
index bfd5917a..3222715e 100644
--- a/subfunctions/ALE_single_account.py
+++ b/subfunctions/ALE_single_account.py
@@ -10,6 +10,8 @@
 import boto3
 import time
 import datetime
+import string
+import random
 from botocore.exceptions import ClientError
 from datetime import timezone
 
@@ -29,8 +31,16 @@
 region_list = ['af-south-1', 'ap-east-1', 'ap-south-1', 'ap-northeast-1', 'ap-northeast-2', 'ap-northeast-3', 'ap-southeast-1', 'ap-southeast-2', 'ca-central-1', 'eu-central-1', 'eu-west-1', 'eu-west-2', 'eu-west-3', 'eu-north-1', 'eu-south-1', 'me-south-1', 'sa-east-1', 'us-east-1', 'us-east-2', 'us-west-1', 'us-west-2']
 
 
+# 0. Define random string for S3 Bucket Name
+def random_string_generator():
+    lower_letters = string.ascii_lowercase
+    numbers = string.digits
+    unique_end = (''.join(random.choice(lower_letters + numbers) for char in range(6)))
+    return unique_end
+
+
 # 1. Create a Bucket and Lifecycle Policy
-def create_bucket():
+def create_bucket(unique_end):
     """Function to create the bucket for storing logs"""
     try:
         account_number = sts.get_caller_identity()["Account"]
@@ -38,11 +48,11 @@
         logging.info("CreateBucket API Call")
         if region == 'us-east-1':
             logging_bucket_dict = s3.create_bucket(
-                Bucket="aws-log-collection-" + account_number + "-" + region
+                Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end
             )
         else:
             logging_bucket_dict = s3.create_bucket(
-                Bucket="aws-log-collection-" + account_number + "-" + region,
+                Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end,
                 CreateBucketConfiguration={
                     'LocationConstraint': region
                 }
@@ -51,7 +61,7 @@
         logging.info("Bucket Created.")
         logging.info("Setting lifecycle policy.")
         logging.info("PutBucketLifecycleConfiguration API Call")
         lifecycle_policy = s3.put_bucket_lifecycle_configuration(
-            Bucket="aws-log-collection-" + account_number + "-" + region,
+            Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end,
             LifecycleConfiguration={
                 'Rules': [
                     {
@@ -74,17 +84,17 @@
         logging.info("Lifecycle Policy successfully set.")
         logging.info("PutObject API Call")
         create_ct_path = s3.put_object(
-            Bucket="aws-log-collection-" + account_number + "-" + region,
+            Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end,
             Key='cloudtrail/AWSLogs/' + account_number + '/')
         logging.info("PutBucketPolicy API Call")
         bucket_policy = s3.put_bucket_policy(
-            Bucket="aws-log-collection-" + account_number + "-" + region,
-            Policy='{"Version": "2012-10-17", "Statement": [{"Sid": "AWSCloudTrailAclCheck20150319","Effect": "Allow","Principal": {"Service": "cloudtrail.amazonaws.com"},"Action": "s3:GetBucketAcl","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '"},{"Sid": "AWSCloudTrailWrite20150319","Effect": "Allow","Principal": {"Service": "cloudtrail.amazonaws.com"},"Action": "s3:PutObject","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '/cloudtrail/AWSLogs/' + account_number + '/*","Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}}]}'
+            Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end,
+            Policy='{"Version": "2012-10-17", "Statement": [{"Sid": "AWSCloudTrailAclCheck20150319","Effect": "Allow","Principal": {"Service": "cloudtrail.amazonaws.com"},"Action": "s3:GetBucketAcl","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '"},{"Sid": "AWSCloudTrailWrite20150319","Effect": "Allow","Principal": {"Service": "cloudtrail.amazonaws.com"},"Action": "s3:PutObject","Resource": "arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '/cloudtrail/AWSLogs/' + account_number + '/*","Condition": {"StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"}}}]}'
         )
         logging.info("Setting the S3 bucket Public Access to Blocked")
         logging.info("PutPublicAccessBlock API Call")
         bucket_private = s3.put_public_access_block(
-            Bucket="aws-log-collection-" + account_number + "-" + region,
+            Bucket="aws-log-collection-" + account_number + "-" + region + "-" + unique_end,
             PublicAccessBlockConfiguration={
                 'BlockPublicAcls': True,
                 'IgnorePublicAcls': True,
@@ -98,7 +108,7 @@
 
 
 # 2. Find VPCs and turn flow logs on if not on already.
-def flow_log_activator(region_list, account_number):
+def flow_log_activator(region_list, account_number, unique_end):
     """Function that turns on the VPC Flow Logs, for VPCs identifed without them"""
     for aws_region in region_list:
         ec2 = boto3.client('ec2', region_name=aws_region)
@@ -129,7 +139,7 @@
                     ResourceType='VPC',
                     TrafficType='ALL',
                     LogDestinationType='s3',
-                    LogDestination='arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '/vpcflowlogs',
+                    LogDestination='arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '/vpcflowlogs',
                     LogFormat='${version} ${account-id} ${interface-id} ${srcaddr} ${dstaddr} ${srcport} ${dstport} ${protocol} ${packets} ${bytes} ${start} ${end} ${action} ${log-status} ${vpc-id} ${type} ${tcp-flags} ${subnet-id} ${sublocation-type} ${sublocation-id} ${region} ${pkt-srcaddr} ${pkt-dstaddr} ${instance-id} ${az-id} ${pkt-src-aws-service} ${pkt-dst-aws-service} ${flow-direction} ${traffic-path}'
                 )
                 logging.info("VPC Flow Logs are turned on.")
@@ -138,7 +148,7 @@
 
 
 # 3. Check to see if a CloudTrail trail is configured, and turn it on if it is not.
-def check_cloudtrail(account_number):
+def check_cloudtrail(account_number, unique_end):
     """Function to check if CloudTrail is enabled"""
     logging.info("Checking to see if CloudTrail is on, and will activate if needed.")
     try:
@@ -150,7 +160,7 @@
             logging.info("CreateTrail API Call")
             cloudtrail_activate = cloudtrail.create_trail(
                 Name='aws-cloudtrail-em-' + account_number,
-                S3BucketName="aws-log-collection-" + account_number + "-" + region,
+                S3BucketName="aws-log-collection-" + account_number + "-" + region + "-" + unique_end,
                 S3KeyPrefix='cloudtrail',
                 IsMultiRegionTrail=True,
                 EnableLogFileValidation=True
@@ -215,7 +225,7 @@
 
 
 # 5. Turn on Route 53 Query Logging.
-def route_53_query_logs(region_list, account_number):
+def route_53_query_logs(region_list, account_number, unique_end):
     """Function to turn on Route 53 Query Logs for VPCs"""
     for aws_region in region_list:
         logging.info("Turning on Route 53 Query Logging on for VPCs in region " + aws_region + ".")
@@ -243,7 +253,7 @@
                 logging.info("CreateResolverQueryLogConfig API Call")
                 create_query_log = route53resolver.create_resolver_query_log_config(
                     Name='Assisted_Log_Enabler_Query_Logs_' + aws_region,
-                    DestinationArn='arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '/r53querylogs',
+                    DestinationArn='arn:aws:s3:::aws-log-collection-' + account_number + '-' + region + '-' + unique_end + '/r53querylogs',
                     CreatorRequestId=timestamp_date_string,
                     Tags=[
                         {
@@ -273,32 +283,36 @@ def run_eks():
 
 
 def run_cloudtrail():
     """Function that runs the defined CloudTrail logging code"""
-    account_number = create_bucket()
-    check_cloudtrail(account_number)
+    unique_end = random_string_generator()
+    account_number = create_bucket(unique_end)
+    check_cloudtrail(account_number, unique_end)
     logging.info("This is the end of the script. Please feel free to validate that logs have been turned on.")
 
 
 def run_vpc_flow_logs():
     """Function that runs the defined VPC Flow Log logging code"""
-    account_number = create_bucket()
-    flow_log_activator(region_list, account_number)
+    unique_end = random_string_generator()
+    account_number = create_bucket(unique_end)
+    flow_log_activator(region_list, account_number, unique_end)
     logging.info("This is the end of the script. Please feel free to validate that logs have been turned on.")
 
 
 def run_r53_query_logs():
     """Function that runs the defined R53 Query Logging code"""
-    account_number = create_bucket()
-    route_53_query_logs(region_list, account_number)
+    unique_end = random_string_generator()
+    account_number = create_bucket(unique_end)
+    route_53_query_logs(region_list, account_number, unique_end)
     logging.info("This is the end of the script. Please feel free to validate that logs have been turned on.")
 
 
 def lambda_handler(event, context):
     """Function that runs all of the previously defined functions"""
-    account_number = create_bucket()
-    flow_log_activator(region_list, account_number)
-    check_cloudtrail(account_number)
+    unique_end = random_string_generator()
+    account_number = create_bucket(unique_end)
+    flow_log_activator(region_list, account_number, unique_end)
+    check_cloudtrail(account_number, unique_end)
     eks_logging(region_list)
-    route_53_query_logs(region_list, account_number)
+    route_53_query_logs(region_list, account_number, unique_end)
     logging.info("This is the end of the script. Please feel free to validate that logs have been turned on.")
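Since each script ends by suggesting that you validate the results, a quick hedged way to confirm the randomized bucket names after a run is to list buckets with the shared prefix (a standard boto3 call, not part of the patch):

```
# Hedged validation sketch (not part of the patch): list the log-collection
# buckets created by a run, to confirm the randomized names.
import boto3

s3 = boto3.client('s3')
log_buckets = [
    b['Name'] for b in s3.list_buckets()['Buckets']
    if b['Name'].startswith('aws-log-collection-')
]
print(log_buckets)
```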