generated from terraform-modules/template_aws_module
-
Notifications
You must be signed in to change notification settings - Fork 1
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
25 changed files
with
1,231 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,2 @@ | ||
| python | ||
| packages/python |
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,3 @@ | ||
locals {
  # Placeholder module name, expected to be filled in when a module is
  # generated from the terraform-modules/template_aws_module template.
  _module_name = ""
}
Binary file not shown.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,180 @@ | ||
| """ | ||
| DARHTS GuardDuty Move | ||
| This takes an EventBridge notification event based on the results of a GuardDuty S3 Malware scan. | ||
| The result will be NO_THREATS_FOUND, THREATS_FOUND, or other conditions. If NO_THREATS_FOUND, | ||
| we will move the file from the source bucket from the event (also defined as BUCKET_IN) to the clean | ||
| bucket (BUCKET_CLEAN), otherwise move it to the quarantine bucket (BUCKET_QUARANTINE). By move, | ||
| we mean copy it to the destination bucket and remove it from the source bucket. We need to preserve | ||
| the object tags in the copy process. | ||
| """ | ||
|
|
||
| import json | ||
| #import sys | ||
| from datetime import datetime,timezone | ||
| #import logging | ||
| import time | ||
| #import inspect | ||
| import boto3 | ||
| import os | ||
| #import zlib | ||
| #import io | ||
| #from pathlib import Path | ||
| from botocore.exceptions import ClientError | ||
| #from pprint import pformat,pprint | ||
| ##from collections import OrderedDict, defaultdict | ||
| ##from dateutil.parser import parse as date_parse | ||
|
|
||
| # powertools | ||
| from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent | ||
| from aws_lambda_powertools import Logger | ||
| from aws_lambda_powertools.utilities.typing import LambdaContext | ||
| from aws_lambda_powertools import Metrics | ||
| from aws_lambda_powertools.metrics import MetricUnit | ||
|
|
||
# jam into another file later
def boolean_env(value):
    """Interpret an environment-variable string as a boolean flag.

    :param value: raw environment value (string or None)
    :return: True for case-insensitive 'yes', 'y', 'true', or '1';
             False for anything else, including None and non-string input.
    """
    try:
        return value.lower() in ['yes', 'y', 'true', '1']
    except AttributeError:
        # value was None or not a string (no .lower()); treat as False.
        # Narrowed from a bare `except:` so real errors are not swallowed.
        return False
|
|
||
# Setting Global Variables
APPNAME = 'darhts-guardduty-move'   # service identifier used in logs and error messages
account_id = None                   # not populated in this view of the file
region = None                       # not populated in this view of the file
VERSION = '1.0.9'                   # printed at cold start below
#TF_MODULE_VERSION=os.environ.get('TF_MODULE_VERSION','')


# Powertools logger configuration comes from the Lambda environment,
# falling back to INFO and the app name when unset.
logger_level=os.environ.get('POWERTOOLS_LOG_LEVEL', 'INFO')
logger_service=os.environ.get('POWERTOOLS_SERVICE_NAME',APPNAME)
logger = Logger(service=logger_service,level=logger_level)


# boto3 session is created lazily by initialize_clients() and reused
# across warm invocations.
session = None
# disabling custom metrics for now
# metrics = Metrics()


# Behavior toggles and bucket names, all injected via environment variables.
# NOTE(review): the fallback 'None' is the *string* 'None', not the None
# object -- presumably deliberate so log output shows a value; confirm.
GUARDDUTY_MOVE_VERBOSE=boolean_env(os.environ.get('GUARDDUTY_MOVE_VERBOSE', 'False'))
GUARDDUTY_MOVE_DEBUG=os.environ.get('POWERTOOLS_LOG_LEVEL','INFO') == 'DEBUG'
GUARDDUTY_MOVE_BUCKET_IN=os.environ.get('GUARDDUTY_MOVE_BUCKET_IN', 'None')
GUARDDUTY_MOVE_BUCKET_CLEAN=os.environ.get('GUARDDUTY_MOVE_BUCKET_CLEAN', 'None')
GUARDDUTY_MOVE_BUCKET_QUARANTINE=os.environ.get('GUARDDUTY_MOVE_BUCKET_QUARANTINE', 'None')


# Emitted once per cold start, before the logger-decorated handler runs.
print(f'Loading function v{VERSION}: {datetime.now().time().isoformat()}')
|
|
||
def output_settings():
    """Log the effective runtime configuration, one `key = value` line each.

    Called from lambda_handler when GUARDDUTY_MOVE_VERBOSE is enabled.

    :return: None
    """
    # BUG FIX: the original referenced undefined names POWERTOOLS_LOG_LEVEL and
    # POWERTOOLS_SERVICE_NAME, raising NameError whenever this ran (which is why
    # the caller wraps it in a try/except). The module-level variables holding
    # those settings are logger_level and logger_service.
    settings={
        'POWERTOOLS_LOG_LEVEL':logger_level,
        'POWERTOOLS_SERVICE_NAME':logger_service,
        'GUARDDUTY_MOVE_VERBOSE':GUARDDUTY_MOVE_VERBOSE,
        'GUARDDUTY_MOVE_DEBUG':GUARDDUTY_MOVE_DEBUG,
        'GUARDDUTY_MOVE_BUCKET_IN':GUARDDUTY_MOVE_BUCKET_IN,
        'GUARDDUTY_MOVE_BUCKET_CLEAN':GUARDDUTY_MOVE_BUCKET_CLEAN,
        'GUARDDUTY_MOVE_BUCKET_QUARANTINE':GUARDDUTY_MOVE_BUCKET_QUARANTINE,
    }
    for k,v in settings.items():
        logger.info(f' {k} = {v}')
    return
|
|
||
def initialize_clients():
    """Build the shared boto3 session (once) and the S3 client it serves.

    Populates the module-level `session` and `s3` globals so the handler can
    reuse them across warm invocations.

    :return: None
    """
    global session
    global s3

    logger.info(f"initializing boto3 session and clients")

    # Only create the session on cold start; warm invocations reuse it.
    session = session or boto3.session.Session()

    logger.debug(f" boto3 client: s3")
    s3 = session.client('s3')
|
|
||
| initialize_clients() | ||
|
|
||
| # https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_classes/#s3-eventbridge-notification | ||
| # https://docs.powertools.aws.dev/lambda/python/latest/core/logger/#standard-structured-keys | ||
|
|
||
def scan_exit(time_start, status):
    """Log the end-of-run summary and finish the invocation.

    :param time_start: datetime captured at the start of processing
    :param status: truthy on success
    :return: `status` when truthy
    :raises RuntimeError: when `status` is falsy, so Lambda records a failure
    """
    time_end = datetime.now()
    tx = time_end - time_start
    logger.info(f'End processing Scan Notification at start {time_start} end {time_end} elapsed {tx} status {status}')

    # Guard-clause form: success returns, failure raises.
    if status:
        return status
    raise RuntimeError(f'Error processing {APPNAME}')
|
|
||
# @metrics.log_metrics
@logger.inject_lambda_context
@event_source(data_class=EventBridgeEvent)
def lambda_handler(event, context):
    """Route a GuardDuty S3 malware-scan result to the clean or quarantine bucket.

    Triggered by an EventBridge event carrying a GuardDuty Malware Protection
    scan result. NO_THREATS_FOUND objects are copied to
    GUARDDUTY_MOVE_BUCKET_CLEAN; any other status goes to
    GUARDDUTY_MOVE_BUCKET_QUARANTINE. The copy preserves object tags
    (TaggingDirective='COPY'); after a successful copy the source object is
    deleted, completing the "move".

    :param event: EventBridgeEvent with GuardDuty scan-result detail
    :param context: LambdaContext
    :return: True via scan_exit on success
    :raises Exception: re-raised on malformed events or failed S3 calls
    """

    global session
    global s3

    logger.debug(f'event {event} context {context}')
    if GUARDDUTY_MOVE_VERBOSE:
        try:
            output_settings()
        # Narrowed from bare `except:`; diagnostics are best-effort and must
        # never fail the move itself.
        except Exception:
            logger.error(f'Cannot execute output_settings()')

    time_start=datetime.now()
    # timestamp=time_start.astimezone(timezone.utc).isoformat(timespec='seconds')+'Z'
    # NOTE(review): timestamp is captured but currently unused downstream.
    timestamp=event.time
    logger.info(f'Start processing Scan Notification at start {time_start}')

    try:
        # Event shape per GuardDuty Malware Protection scan-result notifications.
        detail = event.detail
        s3_details = detail.get('s3ObjectDetails')
        bucket_name = s3_details.get('bucketName')
        file_key = s3_details.get('objectKey')

        scan_details = detail.get('scanResultDetails')
        scan_status = scan_details.get('scanResultStatus')
    # Narrowed from bare `except:`: log context, then re-raise as before.
    except Exception:
        logger.error('Cannot get bucket name and object key from event')
        raise
    logger.info(f'Received Scan Notification bucket={bucket_name} key={file_key} status={scan_status}')

    # select bucket based on scan_status. no threats is clean, anything else is quarantine
    not_malicious = scan_status == 'NO_THREATS_FOUND'
    dst_bucket = GUARDDUTY_MOVE_BUCKET_CLEAN if not_malicious else GUARDDUTY_MOVE_BUCKET_QUARANTINE
    # dst_key = f"{malware_name}/{key}" if is_malicious else key
    dst_key = file_key

    logger.info(f'Copying file from_bucket={bucket_name} to_bucket={dst_bucket} key={file_key} status={scan_status}')
    try:
        response = s3.copy_object(
            Bucket=dst_bucket,
            CopySource={"Bucket": bucket_name, "Key": file_key},
            Key=dst_key,
            TaggingDirective='COPY'  # carry the source object's tags to the copy
        )
    # Narrowed from bare `except:`: log context, then re-raise as before.
    except Exception:
        logger.error(f'Unable to copy from_bucket={bucket_name} to_bucket={dst_bucket} key={file_key} status={scan_status}')
        raise

    # if copied, then we remove the original
    logger.info(f'Deleting file from_bucket={bucket_name} key={file_key} status={scan_status}')
    try:
        response = s3.delete_object(Bucket=bucket_name, Key=file_key)
    # Narrowed from bare `except:`: log context, then re-raise as before.
    except Exception:
        logger.error(f'Unable to delete source bucket={bucket_name} key={file_key} status={scan_status}')
        raise

    return scan_exit(time_start,True)
Binary file not shown.
Oops, something went wrong.