diff --git a/code/.gitignore b/code/.gitignore new file mode 100644 index 0000000..e5ec78a --- /dev/null +++ b/code/.gitignore @@ -0,0 +1,2 @@ +python +packages/python diff --git a/code/.lock b/code/.lock new file mode 100755 index 0000000..e69de29 diff --git a/code/_module_name.tf b/code/_module_name.tf new file mode 100644 index 0000000..d0947b0 --- /dev/null +++ b/code/_module_name.tf @@ -0,0 +1,3 @@ +locals { + _module_name = "" +} diff --git a/code/darhts-guardduty-move.package.zip b/code/darhts-guardduty-move.package.zip new file mode 100644 index 0000000..d937ad6 Binary files /dev/null and b/code/darhts-guardduty-move.package.zip differ diff --git a/code/darhts-guardduty-move.py b/code/darhts-guardduty-move.py new file mode 100755 index 0000000..8d88cbc --- /dev/null +++ b/code/darhts-guardduty-move.py @@ -0,0 +1,180 @@ +""" +DARHTS GuardDuty Move + +This takes an EventBridge notification event based on the results of a GuardDuty S3 Malware scan. +The result will be NO_THREATS_FOUND, THREATS_FOUND, or other conditions. If NO_THREATS_FOUND, +we will move the file from the source bucket from the event (also defined as BUCKET_IN) to the clean +bucket (BUCKET_CLEAN), otherwise move it to the quarantine bucket (BUCKET_QUARANTINE). By move, +we mean copy it to the destination bucket and remove it from the source bucket. We need to preserve +the object tags in the copy process. 
+""" + +import json +#import sys +from datetime import datetime,timezone +#import logging +import time +#import inspect +import boto3 +import os +#import zlib +#import io +#from pathlib import Path +from botocore.exceptions import ClientError +#from pprint import pformat,pprint +##from collections import OrderedDict, defaultdict +##from dateutil.parser import parse as date_parse + +# powertools +from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +# jam into another file later +def boolean_env(value): + try: + return value.lower() in [ 'yes', 'y', 'true', '1' ] + except: + return False + +# Setting Global Variables +APPNAME = 'darhts-guardduty-move' +account_id = None +region = None +VERSION = '1.0.9' +#TF_MODULE_VERSION=os.environ.get('TF_MODULE_VERSION','') + +logger_level=os.environ.get('POWERTOOLS_LOG_LEVEL', 'INFO') +logger_service=os.environ.get('POWERTOOLS_SERVICE_NAME',APPNAME) +logger = Logger(service=logger_service,level=logger_level) + +session = None +# disabling custom metrics for now +# metrics = Metrics() + +GUARDDUTY_MOVE_VERBOSE=boolean_env(os.environ.get('GUARDDUTY_MOVE_VERBOSE', 'False')) +GUARDDUTY_MOVE_DEBUG=os.environ.get('POWERTOOLS_LOG_LEVEL','INFO') == 'DEBUG' +GUARDDUTY_MOVE_BUCKET_IN=os.environ.get('GUARDDUTY_MOVE_BUCKET_IN', 'None') +GUARDDUTY_MOVE_BUCKET_CLEAN=os.environ.get('GUARDDUTY_MOVE_BUCKET_CLEAN', 'None') +GUARDDUTY_MOVE_BUCKET_QUARANTINE=os.environ.get('GUARDDUTY_MOVE_BUCKET_QUARANTINE', 'None') + +print(f'Loading function v{VERSION}: {datetime.now().time().isoformat()}') + +def output_settings(): + settings={ + 'POWERTOOLS_LOG_LEVEL':POWERTOOLS_LOG_LEVEL, + 'POWERTOOLS_SERVICE_NAME':POWERTOOLS_SERVICE_NAME, + 'GUARDDUTY_MOVE_VERBOSE':GUARDDUTY_MOVE_VERBOSE, + 
'GUARDDUTY_MOVE_DEBUG':GUARDDUTY_MOVE_DEBUG, + 'GUARDDUTY_MOVE_BUCKET_IN':GUARDDUTY_MOVE_BUCKET_IN, + 'GUARDDUTY_MOVE_BUCKET_CLEAN':GUARDDUTY_MOVE_BUCKET_CLEAN, + 'GUARDDUTY_MOVE_BUCKET_QUARANTINE':GUARDDUTY_MOVE_BUCKET_QUARANTINE, + } + for k,v in settings.items(): + logger.info(f' {k} = {v}') + return + +def initialize_clients(): + """ + Set up all of the API clients from the main session, done before invocation of the handlers + :return: + """ + + global session + global s3 + + logger.info(f"initializing boto3 session and clients") + if session is None: + session = boto3.session.Session() + + logger.debug(f" boto3 client: s3") + s3 = session.client('s3') + + return + +initialize_clients() + +# https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_classes/#s3-eventbridge-notification +# https://docs.powertools.aws.dev/lambda/python/latest/core/logger/#standard-structured-keys + +def scan_exit(time_start,status): + global APPNAME + time_end=datetime.now() + tx=time_end-time_start + logger.info(f'End processing Scan Notification at start {time_start} end {time_end} elapsed {tx} status {status}') + if not status: + raise RuntimeError(f'Error processing {APPNAME}') + return status + +# @metrics.log_metrics +@logger.inject_lambda_context +@event_source(data_class=EventBridgeEvent) +def lambda_handler(event, context): + """ + :param event: + :param context: + :return: + """ + + global session + global s3 + + logger.debug(f'event {event} context {context}') + if GUARDDUTY_MOVE_VERBOSE: + try: + output_settings() + except: + logger.error(f'Cannot execute output_settings()') + pass + + time_start=datetime.now() +# timestamp=time_start.astimezone(timezone.utc).isoformat(timespec='seconds')+'Z' + timestamp=event.time + logger.info(f'Start processing Scan Notification at start {time_start}') + + try: + detail = event.detail + s3_details = detail.get('s3ObjectDetails') + bucket_name = s3_details.get('bucketName') + file_key = s3_details.get('objectKey') + + 
scan_details = detail.get('scanResultDetails') + scan_status = scan_details.get('scanResultStatus') + except: + logger.error('Cannot get bucket name and object key from event') + raise + logger.info(f'Received Scan Notification bucket={bucket_name} key={file_key} status={scan_status}') + +# select bucket based on scan_status. no threats is clean, anything else is quarantine + not_malicious = scan_status == 'NO_THREATS_FOUND' + dst_bucket = GUARDDUTY_MOVE_BUCKET_CLEAN if not_malicious else GUARDDUTY_MOVE_BUCKET_QUARANTINE +# dst_key = f"{malware_name}/{key}" if is_malicious else key + dst_key = file_key + + logger.info(f'Copying file from_bucket={bucket_name} to_bucket={dst_bucket} key={file_key} status={scan_status}') + try: + response = s3.copy_object( + Bucket=dst_bucket, + CopySource={"Bucket": bucket_name, "Key": file_key}, + Key=dst_key, + TaggingDirective='COPY' + ) + except: + logger.error(f'Unable to copy from_bucket={bucket_name} to_bucket={dst_bucket} key={file_key} status={scan_status}') + raise +# logger.error(str(sys.exc_info()[0])) +# return scan_exit(time_start,False) + +# if copied, then we remove the original + logger.info(f'Deleting file from_bucket={bucket_name} key={file_key} status={scan_status}') + try: + response = s3.delete_object(Bucket=bucket_name, Key=file_key) + except: + logger.error(f'Unable to delete source bucket={bucket_name} key={file_key} status={scan_status}') + raise +# logger.error(str(sys.exc_info()[0])) +# return scan_exit(time_start,False) + + return scan_exit(time_start,True) diff --git a/code/darhts-guardduty-move.zip b/code/darhts-guardduty-move.zip new file mode 100644 index 0000000..e1cdc95 Binary files /dev/null and b/code/darhts-guardduty-move.zip differ diff --git a/code/darhts-guardduty-notify.py b/code/darhts-guardduty-notify.py new file mode 100755 index 0000000..fbfa70c --- /dev/null +++ b/code/darhts-guardduty-notify.py @@ -0,0 +1,317 @@ +""" +DARHTS GuardDuty Notify + +This takes an EventBridge notification 
event based on the results of a GuardDuty S3 Malware scan. +It then reports this to a Salesforce API, because we cannot do an API connect in Govcloud. +""" + +import json +#import sys +from datetime import datetime,timezone +#import logging +import time +#import inspect +import boto3 +import os +#import zlib +#import io +#from pathlib import Path +from botocore.exceptions import ClientError +#from pprint import pformat,pprint +##from collections import OrderedDict, defaultdict +##from dateutil.parser import parse as date_parse +import requests + +# powertools +from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +# jam into another file later +def boolean_env(value): + try: + return value.lower() in [ 'yes', 'y', 'true', '1' ] + except: + return False + +# Setting Global Variables +APPNAME = 'darhts-guardduty-notify' +account_id = None +region = None +VERSION = '1.0.16' +#TF_MODULE_VERSION=os.environ.get('TF_MODULE_VERSION','') + +logger_level=os.environ.get('POWERTOOLS_LOG_LEVEL', 'INFO') +logger_service=os.environ.get('POWERTOOLS_SERVICE_NAME',APPNAME) +logger = Logger(service=logger_service,level=logger_level) + +session = None +# disabling custom metrics for now +# metrics = Metrics() + +GUARDDUTY_NOTIFY_VERBOSE=boolean_env(os.environ.get('GUARDDUTY_NOTIFY_VERBOSE', 'False')) +GUARDDUTY_NOTIFY_DEBUG=os.environ.get('POWERTOOLS_LOG_LEVEL','INFO') == 'DEBUG' +GUARDDUTY_NOTIFY_SECRET_NAME=os.environ.get('GUARDDUTY_NOTIFY_SECRET_NAME',None) +GUARDDUTY_NOTIFY_ENVIRONMENT=os.environ.get('GUARDDUTY_NOTIFY_ENVIRONMENT','dev') +GUARDDUTY_NOTIFY_AUTH_URL=os.environ.get('GUARDDUTY_NOTIFY_AUTH_URL',None) +GUARDDUTY_NOTIFY_SALESFORCE_API_VERSION = os.environ.get('GUARDDUTY_NOTIFY_SALESFORCE_API_VERSION', '63.0') 
+GUARDDUTY_NOTIFY_PLATFORM_EVENT_NAME = os.environ.get('GUARDDUTY_NOTIFY_PLATFORM_EVENT_NAME', None) + +#GUARDDUTY_NOTIFY_BUCKET_IN=os.environ.get('GUARDDUTY_NOTIFY_BUCKET_IN', 'None') +#GUARDDUTY_NOTIFY_BUCKET_CLEAN=os.environ.get('GUARDDUTY_NOTIFY_BUCKET_CLEAN', 'None') +#GUARDDUTY_NOTIFY_BUCKET_QUARANTINE=os.environ.get('GUARDDUTY_NOTIFY_BUCKET_QUARANTINE', 'None') + +print(f'Loading function v{VERSION}: {datetime.now().time().isoformat()}') + +def output_settings(): + settings={ + 'POWERTOOLS_LOG_LEVEL':POWERTOOLS_LOG_LEVEL, + 'POWERTOOLS_SERVICE_NAME':POWERTOOLS_SERVICE_NAME, + 'GUARDDUTY_NOTIFY_VERBOSE':GUARDDUTY_NOTIFY_VERBOSE, + 'GUARDDUTY_NOTIFY_DEBUG':GUARDDUTY_NOTIFY_DEBUG, + 'GUARDDUTY_NOTIFY_SECRET_NAME':GUARDDUTY_NOTIFY_SECRET_NAME, + 'GUARDDUTY_NOTIFY_ENVIRONMENT':GUARDDUTY_NOTIFY_ENVIRONMENT, + 'GUARDDUTY_NOTIFY_AUTH_URL':GUARDDUTY_NOTIFY_AUTH_URL, + 'GUARDDUTY_NOTIFY_SALESFORCE_API_VERSION':GUARDDUTY_NOTIFY_SALESFORCE_API_VERSION, + 'GUARDDUTY_NOTIFY_PLATFORM_EVENT_NAME':GUARDDUTY_NOTIFY_PLATFORM_EVENT_NAME, + } + for k,v in settings.items(): + logger.info(f' {k} = {v}') + return + +def initialize_clients(): + """ + Set up all of the API clients from the main session, done before invocation of the handlers + :return: + """ + + global session +# global s3 + global secretsmanager + + logger.info(f"initializing boto3 session and clients") + if session is None: + session = boto3.session.Session() + +# logger.debug(f" boto3 client: s3") +# s3 = session.client('s3') + + logger.debug(f" boto3 client: secretsmanager") + secretsmanager = session.client('secretsmanager') + + return + +initialize_clients() + +# https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_classes/#s3-eventbridge-notification +# https://docs.powertools.aws.dev/lambda/python/latest/core/logger/#standard-structured-keys + +def do_exit(time_start,status): + global APPNAME + time_end=datetime.now() + tx=time_end-time_start + logger.info(f'End processing Scan Notification at 
start {time_start} end {time_end} elapsed {tx} status {status}') + if not status: + raise RuntimeError(f'Error processing {APPNAME}') + return status + +# @metrics.log_metrics +@logger.inject_lambda_context +@event_source(data_class=EventBridgeEvent) +def lambda_handler(event, context): + """ + :param event: + :param context: + :return: + """ + + global session + global secretsmanager + + logger.debug(f'event {event} context {context}') + if GUARDDUTY_NOTIFY_VERBOSE: + try: + output_settings() + except: + logger.error(f'Cannot execute output_settings()') + pass + + time_start=datetime.now() +# timestamp=time_start.astimezone(timezone.utc).isoformat(timespec='seconds')+'Z' + timestamp=event.time + logger.info(f'Start processing Scan Notification at start {time_start}') + + try: + event_detail = event.get('detail-type') + event_source = event.get('source') + + if event_source == 'aws.guardduty' and event_detail == 'GuardDuty Malware Protection Object Scan Result': + logger.info("This is a GuardDuty S3 Object Malware Scan Finding.") + + logger.info(f'Transform the event payload to match Salesforce Platform Event schema') + transformed_event = transform_event_for_salesforce(event) + logger.debug(f'transformed_event {transformed_event}') + + logger.info(f'Call the method to send the event to Salesforce') + send_to_salesforce(transformed_event) + + else: + logger.debug(f"Ignoring Event: Source: {event_source}. 
Detail: {event_detail}") + + except Exception as e: + logger.error(f"Error processing event: {str(e)}") + raise e +# return do_exit(time_start,False) + + return do_exit(time_start,True) + +# logger.info(f'Getting Salesforce token') +# salesforce_token=None +# salesforce_url = None +# if not salesforce_token: +# token_data = get_salesforce_oauth_token() +# logger.debug(f'token_data {token_data}') +# if token_data: +# salesforce_token = token_data.get('access_token') +# salesforce_url = token_data.get('instance_url') + f'/services/data/v{GUARDDUTY_NOTIFY_SALESFORCE_API_VERSION}/sobjects/{PLATFORM_EVENT_NAME}' +# +# if not salesforce_url or not salesforce_token: +# logger.error(f'Salesforce URL or Token not set in environment variables.') +# return +# logger.debug(f'token: {salesforce_token}') +# logger.debug(f'url: {salesforce_url}') + +def get_salesforce_oauth_token(): + """ + Retrieves an OAuth token from Salesforce using client_id and client_secret. + """ + +# region_name = os.environ.get('AWS_REGION') + salesforce_auth_url=GUARDDUTY_NOTIFY_AUTH_URL + try: + logger.info(f'Getting secret from {GUARDDUTY_NOTIFY_SECRET_NAME}') + get_secret_value_response = secretsmanager.get_secret_value( + SecretId=GUARDDUTY_NOTIFY_SECRET_NAME + ) + secret = get_secret_value_response['SecretString'] + secret_dict = json.loads(secret) + # Extract Salesforce credentials from the secret + if isinstance(secret_dict, str): + secret_dict = json.loads(secret_dict) + if 'darhts_client_id' not in secret_dict or 'darhts_client_secret' not in secret_dict: + logger.error("Salesforce credentials not found in the secret.") + return None + # Extract the necessary fields from the secret + client_id = secret_dict['darhts_client_id'] # Salesforce client ID + client_secret = secret_dict['darhts_client_secret'] # Salesforce client secret + if not salesforce_auth_url: + print("Salesforce Credentials not set.") + return None + logger.info(f'Got secret from {GUARDDUTY_NOTIFY_SECRET_NAME} and URL for API') 
+ + payload = { + 'grant_type': 'client_credentials', + 'client_id': client_id, + 'client_secret': client_secret + } + + response = requests.post(salesforce_auth_url, data=payload, headers={'Content-Type': 'application/x-www-form-urlencoded'}) +# if response.status_code == 200: + if response: + token_data = response.json() + access_token = token_data.get("access_token") + instance_url = token_data.get("instance_url") + + logger.debug(f'Access Token: {access_token}') + logger.debug(f'Instance URL: {instance_url}') + + return token_data + else: + logger.error(f'Failed to get token: {response.status_code} - {response.text}') + response.raise_for_status() + + except ClientError as e: + logger.error(f'Failed to retrieve secret from Secrets Manager: {e}') + raise e +# except urllib.error.URLError as e: + except requests.exceptions.HTTPError as e: + logger.error(f'HTTP error: {e}') + raise e + except requests.exceptions.RequestException as e: + logger.error(f'Failed to retrieve Salesforce OAuth token: {e}') + raise e + +def transform_event_for_salesforce(event): + """ + Transforms the GuardDuty event into a format suitable for Salesforce Platform Event GuardDutyObjectScan__e. 
+ """ + transformed_event = { + 'AccountId__c': event['account'], + 'BucketName__c': event['detail']['s3ObjectDetails']['bucketName'], + 'ObjectKey__c': event['detail']['s3ObjectDetails']['objectKey'], + 'EventRegion__c': event['region'], + 'ScanStatus__c': event['detail']['scanStatus'], + 'ScanResultStatus__c': event['detail']['scanResultDetails']['scanResultStatus'], + 'EventTime__c': event['time'], + 'EventId__c': event['id'], + 'ResourceType__c': event['detail']['resourceType'], + 'Payload__c': str(event) # Store the entire event payload as a JSON string + } + + # Add threats if they exist + threats = event['detail']['scanResultDetails'].get('threats', []) + if threats: + transformed_event['Threats__c'] = json.dumps(threats) # Store threats as a JSON string + + return transformed_event + +def send_to_salesforce(event_payload): + """ + Sends the transformed event payload to Salesforce as a Platform Event. + """ + salesforce_url = None + salesforce_token = None +# GUARDDUTY_NOTIFY_SALESFORCE_API_VERSION = os.environ.get('GUARDDUTY_NOTIFY_SALESFORCE_API_VERSION', '63.0') +# GUARDDUTY_NOTIFY_PLATFORM_EVENT_NAME = os.environ.get('GUARDDUTY_NOTIFY_PLATFORM_EVENT_NAME', '63.0') + + if not salesforce_token: + token_data = get_salesforce_oauth_token() + if token_data: + salesforce_token = token_data.get('access_token') + salesforce_url = token_data.get('instance_url') + f"/services/data/v{GUARDDUTY_NOTIFY_SALESFORCE_API_VERSION}/sobjects/{GUARDDUTY_NOTIFY_PLATFORM_EVENT_NAME}" + + if not GUARDDUTY_NOTIFY_PLATFORM_EVENT_NAME: + logger.error("Platform event name not specified.") + raise ValueError('Missing Platform event name') + return + + if not salesforce_url or not salesforce_token: + logger.error("Salesforce URL or Token not set in environment variables.") + raise ValueError('Salesforce URL or Token not set in environment variables.') + return + + logger.info(f"Sending event to Salesforce URL: {salesforce_url}") + logger.debug(f"Event payload: 
{json.dumps(event_payload)}") + headers = { + 'Authorization': f'Bearer {salesforce_token}', + 'Content-Type': 'application/json' + } + + try: + encoded_payload = json.dumps(event_payload).encode('utf-8') + response = requests.post( salesforce_url, data=encoded_payload, headers=headers) +# if response.status_code == 200: + if response: + logger.debug(f"Successfully sent event to Salesforce. Response: {response.status_code}") + else: + logger.debug(f"Salesforce send-event Response: {response.status_code}") + response.raise_for_status() + except requests.exceptions.HTTPError as e: + logger.error(f'HTTP error: {e}') + raise e + except requests.exceptions.RequestException as e: + logger.error(f'Failed to retrieve Salesforce OAuth token: {e}') + raise e + diff --git a/code/darhts-guardduty-notify.zip b/code/darhts-guardduty-notify.zip new file mode 100644 index 0000000..62f1726 Binary files /dev/null and b/code/darhts-guardduty-notify.zip differ diff --git a/code/darhts-s3-notify.py b/code/darhts-s3-notify.py new file mode 100755 index 0000000..135eebc --- /dev/null +++ b/code/darhts-s3-notify.py @@ -0,0 +1,319 @@ +""" +DARHTS S3 Notify + +This takes an EventBridge notification event from S3 when an object is created in the clean bucket. +This file replication does not happen until the malware scan is clean AND a tag is updated. +It then reports this to a Salesforce API. 
+""" + +import json +#import sys +from datetime import datetime,timezone +#import logging +import time +#import inspect +import boto3 +import os +#import zlib +#import io +#from pathlib import Path +from botocore.exceptions import ClientError +#from pprint import pformat,pprint +##from collections import OrderedDict, defaultdict +##from dateutil.parser import parse as date_parse +import requests + +# powertools +from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +# jam into another file later +def boolean_env(value): + try: + return value.lower() in [ 'yes', 'y', 'true', '1' ] + except: + return False + +# Setting Global Variables +APPNAME = 'darhts-s3-notify' +account_id = None +region = None +VERSION = '1.0.5' +#TF_MODULE_VERSION=os.environ.get('TF_MODULE_VERSION','') + +logger_level=os.environ.get('POWERTOOLS_LOG_LEVEL', 'INFO') +logger_service=os.environ.get('POWERTOOLS_SERVICE_NAME',APPNAME) +logger = Logger(service=logger_service,level=logger_level) + +session = None +# disabling custom metrics for now +# metrics = Metrics() + +S3_NOTIFY_VERBOSE=boolean_env(os.environ.get('S3_NOTIFY_VERBOSE', 'False')) +S3_NOTIFY_DEBUG=os.environ.get('POWERTOOLS_LOG_LEVEL','INFO') == 'DEBUG' +S3_NOTIFY_SECRET_NAME=os.environ.get('S3_NOTIFY_SECRET_NAME',None) +S3_NOTIFY_ENVIRONMENT=os.environ.get('S3_NOTIFY_ENVIRONMENT','dev') +S3_NOTIFY_AUTH_URL=os.environ.get('S3_NOTIFY_AUTH_URL',None) +S3_NOTIFY_SALESFORCE_API_VERSION = os.environ.get('S3_NOTIFY_SALESFORCE_API_VERSION', '63.0') +S3_NOTIFY_PLATFORM_EVENT_NAME = os.environ.get('S3_NOTIFY_PLATFORM_EVENT_NAME', None) + +#S3_NOTIFY_BUCKET_IN=os.environ.get('S3_NOTIFY_BUCKET_IN', 'None') +#S3_NOTIFY_BUCKET_CLEAN=os.environ.get('S3_NOTIFY_BUCKET_CLEAN', 'None') 
+#S3_NOTIFY_BUCKET_QUARANTINE=os.environ.get('S3_NOTIFY_BUCKET_QUARANTINE', 'None') + +print(f'Loading function v{VERSION}: {datetime.now().time().isoformat()}') + +def output_settings(): + settings={ + 'POWERTOOLS_LOG_LEVEL':POWERTOOLS_LOG_LEVEL, + 'POWERTOOLS_SERVICE_NAME':POWERTOOLS_SERVICE_NAME, + 'S3_NOTIFY_VERBOSE':S3_NOTIFY_VERBOSE, + 'S3_NOTIFY_DEBUG':S3_NOTIFY_DEBUG, + 'S3_NOTIFY_SECRET_NAME':S3_NOTIFY_SECRET_NAME, + 'S3_NOTIFY_ENVIRONMENT':S3_NOTIFY_ENVIRONMENT, + 'S3_NOTIFY_AUTH_URL':S3_NOTIFY_AUTH_URL, + 'S3_NOTIFY_SALESFORCE_API_VERSION':S3_NOTIFY_SALESFORCE_API_VERSION, + 'S3_NOTIFY_PLATFORM_EVENT_NAME':S3_NOTIFY_PLATFORM_EVENT_NAME, + } + for k,v in settings.items(): + logger.info(f' {k} = {v}') + return + +def initialize_clients(): + """ + Set up all of the API clients from the main session, done before invocation of the handlers + :return: + """ + + global session +# global s3 + global secretsmanager + + logger.info(f"initializing boto3 session and clients") + if session is None: + session = boto3.session.Session() + +# logger.debug(f" boto3 client: s3") +# s3 = session.client('s3') + + logger.debug(f" boto3 client: secretsmanager") + secretsmanager = session.client('secretsmanager') + + return + +initialize_clients() + +# https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_classes/#s3-eventbridge-notification +# https://docs.powertools.aws.dev/lambda/python/latest/core/logger/#standard-structured-keys + +def do_exit(time_start,status): + global APPNAME + time_end=datetime.now() + tx=time_end-time_start + logger.info(f'End processing Scan Notification at start {time_start} end {time_end} elapsed {tx} status {status}') + if not status: + raise RuntimeError(f'Error processing {APPNAME}') + return status + +# @metrics.log_metrics +@logger.inject_lambda_context +@event_source(data_class=EventBridgeEvent) +def lambda_handler(event, context): + """ + Project: DARHTS + This function is triggered by an EventBridge event. 
+ It checks for + 1. Darhts Clean Bucket Object Created Event + 2. Transforms the event + 3. Calls Salesforce Platform Event DARHTSCleanCreateObjectEvent__e. + + :param event: + :param context: + :return: + """ + + global session + global secretsmanager + + logger.debug(f'event {event} context {context}') + if S3_NOTIFY_VERBOSE: + try: + output_settings() + except: + logger.error(f'Cannot execute output_settings()') + pass + + time_start=datetime.now() +# timestamp=time_start.astimezone(timezone.utc).isoformat(timespec='seconds')+'Z' + timestamp=event.time + logger.info(f'Start processing Scan Notification at start {time_start}') + + try: + event_detail = event.get('detail-type') + event_source = event.get('source') + + if event_source == 'aws.s3' and event_detail == 'Object Created': + logger.info("This is an S3 Object Created Event.") + bucket_name = event['detail']['bucket']['name'] +# if bucket_name == "darhts-clean": + + logger.info(f'Transform the event payload to match Salesforce Platform Event schema') + transformed_event = transform_event_for_salesforce(event) + logger.debug(f'transformed_event {transformed_event}') + + logger.info(f'Call the method to send the event to Salesforce') + send_to_salesforce(transformed_event) + + else: + logger.debug(f"Ignoring Event: Source: {event_source}. 
Detail: {event_detail}") + + except Exception as e: + logger.error(f"Error processing event: {str(e)}") + raise e + + return do_exit(time_start,True) + +# logger.info(f'Getting Salesforce token') +# salesforce_token=None +# salesforce_url = None +# if not salesforce_token: +# token_data = get_salesforce_oauth_token() +# logger.debug(f'token_data {token_data}') +# if token_data: +# salesforce_token = token_data.get('access_token') +# salesforce_url = token_data.get('instance_url') + f'/services/data/v{S3_NOTIFY_SALESFORCE_API_VERSION}/sobjects/{PLATFORM_EVENT_NAME}' +# +# if not salesforce_url or not salesforce_token: +# logger.error(f'Salesforce URL or Token not set in environment variables.') +# return +# logger.debug(f'token: {salesforce_token}') +# logger.debug(f'url: {salesforce_url}') + +def get_salesforce_oauth_token(): + """ + Retrieves an OAuth token from Salesforce using client_id and client_secret. + """ + +# region_name = os.environ.get('AWS_REGION') + salesforce_auth_url=S3_NOTIFY_AUTH_URL + try: + logger.info(f'Getting secret from {S3_NOTIFY_SECRET_NAME}') + get_secret_value_response = secretsmanager.get_secret_value( + SecretId=S3_NOTIFY_SECRET_NAME + ) + secret = get_secret_value_response['SecretString'] + secret_dict = json.loads(secret) + # Extract Salesforce credentials from the secret + if isinstance(secret_dict, str): + secret_dict = json.loads(secret_dict) + if 'darhts_client_id' not in secret_dict or 'darhts_client_secret' not in secret_dict: + logger.error("Salesforce credentials not found in the secret.") + return None + # Extract the necessary fields from the secret + client_id = secret_dict['darhts_client_id'] # Salesforce client ID + client_secret = secret_dict['darhts_client_secret'] # Salesforce client secret + if not salesforce_auth_url: + print("Salesforce Credentials not set.") + return None + logger.info(f'Got secret from {S3_NOTIFY_SECRET_NAME} and URL for API') + + payload = { + 'grant_type': 'client_credentials', + 'client_id': 
client_id, + 'client_secret': client_secret + } + + response = requests.post(salesforce_auth_url, data=payload, headers={'Content-Type': 'application/x-www-form-urlencoded'}) +# if response.status_code == 200: + if response: + token_data = response.json() + access_token = token_data.get("access_token") + instance_url = token_data.get("instance_url") + + logger.debug(f'Access Token: {access_token}') + logger.debug(f'Instance URL: {instance_url}') + + return token_data + else: + logger.error(f'Failed to get token: {response.status_code} - {response.text}') + response.raise_for_status() + + except ClientError as e: + logger.error(f'Failed to retrieve secret from Secrets Manager: {e}') + raise e +# except urllib.error.URLError as e: + except requests.exceptions.HTTPError as e: + logger.error(f'HTTP error: {e}') + raise e + except requests.exceptions.RequestException as e: + logger.error(f'Failed to retrieve Salesforce OAuth token: {e}') + raise e + +def transform_event_for_salesforce(event): + """ + Transforms the GuardDuty event into a format suitable for Salesforce Platform Event DARHTSCleanCreateObjectEvent__e. + """ + transformed_event = { + 'AccountId__c': event['account'], + 'BucketName__c': event['detail']['bucket']['name'], + 'ObjectKey__c': event['detail']['object']['key'], + 'EventRegion__c': event['region'], + 'Reason__c': event['detail']['reason'], + 'Source__c': event['source'], + 'EventTime__c': event['time'], + 'EventId__c': event['id'], + 'Payload__c': str(event) # Store the entire event payload as a JSON string + } + + return transformed_event + +def send_to_salesforce(event_payload): + """ + Sends the transformed event payload to Salesforce as a Platform Event. 
+ """ + salesforce_url = None + salesforce_token = None +# S3_NOTIFY_SALESFORCE_API_VERSION = os.environ.get('S3_NOTIFY_SALESFORCE_API_VERSION', '63.0') +# S3_NOTIFY_PLATFORM_EVENT_NAME = os.environ.get('S3_NOTIFY_PLATFORM_EVENT_NAME', '63.0') + + if not salesforce_token: + token_data = get_salesforce_oauth_token() + if token_data: + salesforce_token = token_data.get('access_token') + salesforce_url = token_data.get('instance_url') + f"/services/data/v{S3_NOTIFY_SALESFORCE_API_VERSION}/sobjects/{S3_NOTIFY_PLATFORM_EVENT_NAME}" + + if not S3_NOTIFY_PLATFORM_EVENT_NAME: + logger.error("Platform event name not specified.") + raise ValueError('Missing Platform event name') + return + + if not salesforce_url or not salesforce_token: + logger.error("Salesforce URL or Token not set in environment variables.") + raise ValueError('Salesforce URL or Token not set in environment variables.') + return + logger.info(f"Sending event to Salesforce URL: {salesforce_url}") + logger.debug(f"Event payload: {json.dumps(event_payload)}") + headers = { + 'Authorization': f'Bearer {salesforce_token}', + 'Content-Type': 'application/json' + } + + try: + encoded_payload = json.dumps(event_payload).encode('utf-8') + response = requests.post( salesforce_url, data=encoded_payload, headers=headers) +# if response.status_code == 200: + if response: + logger.debug(f"Successfully sent event to Salesforce. 
Response: {response.status_code}") + else: + logger.debug(f"Salesforce send-event Response: {response.status_code}") + response.raise_for_status() + except requests.exceptions.HTTPError as e: + logger.error(f'HTTP error: {e}') + raise e + except requests.exceptions.RequestException as e: + logger.error(f'Failed to retrieve Salesforce OAuth token: {e}') + raise e + diff --git a/code/darhts-s3-notify.zip b/code/darhts-s3-notify.zip new file mode 100644 index 0000000..cbe2f8d Binary files /dev/null and b/code/darhts-s3-notify.zip differ diff --git a/code/darhts-s3-tag.py b/code/darhts-s3-tag.py new file mode 100755 index 0000000..87bcab8 --- /dev/null +++ b/code/darhts-s3-tag.py @@ -0,0 +1,230 @@ +""" +DARHTS S3 Tag and Copy + +This takes an EventBridge notification event from S3 when object tags are added to the clean bucket. +If the tags on the object match the required set of tags for GD and sync, copy it to the dapps clean bucket. +""" + +import json +import sys +from datetime import datetime,timezone +#import logging +import time +#import inspect +import boto3 +import os +#import zlib +#import io +#from pathlib import Path +from botocore.exceptions import ClientError +#from pprint import pformat,pprint +##from collections import OrderedDict, defaultdict +##from dateutil.parser import parse as date_parse +import requests + +# powertools +from aws_lambda_powertools.utilities.data_classes import event_source, EventBridgeEvent +from aws_lambda_powertools import Logger +from aws_lambda_powertools.utilities.typing import LambdaContext +from aws_lambda_powertools import Metrics +from aws_lambda_powertools.metrics import MetricUnit + +# jam into another file later +def boolean_env(value): + try: + return value.lower() in [ 'yes', 'y', 'true', '1' ] + except: + return False + +# Setting Global Variables +APPNAME = 'darhts-s3-tag' +account_id = None +region = None +VERSION = '1.0.25' +#TF_MODULE_VERSION=os.environ.get('TF_MODULE_VERSION','') + 
+logger_level=os.environ.get('POWERTOOLS_LOG_LEVEL', 'INFO') +logger_service=os.environ.get('POWERTOOLS_SERVICE_NAME',APPNAME) +logger = Logger(service=logger_service,level=logger_level) + +session = None +# disabling custom metrics for now +# metrics = Metrics() + +S3_TAG_VERBOSE=boolean_env(os.environ.get('S3_TAG_VERBOSE', 'False')) +S3_TAG_DEBUG=os.environ.get('POWERTOOLS_LOG_LEVEL','INFO') == 'DEBUG' +S3_TAG_ENVIRONMENT=os.environ.get('S3_TAG_ENVIRONMENT','dev') +S3_TAG_BUCKET_CLEAN_IN=os.environ.get('S3_TAG_BUCKET_CLEAN_IN', 'None') +S3_TAG_BUCKET_CLEAN_OUT=os.environ.get('S3_TAG_BUCKET_CLEAN_OUT', 'None') +S3_TAG_TRIGGER_TAGS=os.environ.get('S3_TAG_TRIGGER_TAGS','') +if S3_TAG_TRIGGER_TAGS=='': + S3_TAG_TRIGGER_TAGS='{}' + +#S3_TAG_BUCKET_IN=os.environ.get('S3_TAG_BUCKET_IN', 'None') +#S3_TAG_BUCKET_CLEAN=os.environ.get('S3_TAG_BUCKET_CLEAN', 'None') +#S3_TAG_BUCKET_QUARANTINE=os.environ.get('S3_TAG_BUCKET_QUARANTINE', 'None') + +print(f'Loading function v{VERSION}: {datetime.now().time().isoformat()}') + +def output_settings(): + settings={ + 'POWERTOOLS_LOG_LEVEL':POWERTOOLS_LOG_LEVEL, + 'POWERTOOLS_SERVICE_NAME':POWERTOOLS_SERVICE_NAME, + 'S3_TAG_VERBOSE':S3_TAG_VERBOSE, + 'S3_TAG_DEBUG':S3_TAG_DEBUG, + 'S3_TAG_ENVIRONMENT':S3_TAG_ENVIRONMENT, + 'S3_TAG_BUCKET_CLEAN_IN':S3_TAG_BUCKET_CLEAN_IN, + 'S3_TAG_BUCKET_CLEAN_OUT':S3_TAG_BUCKET_CLEAN_OUT, + 'S3_TAG_TRIGGER_TAGS':S3_TAG_TRIGGER_TAGS, + } + for k,v in settings.items(): + logger.info(f' {k} = {v}') + return + +def initialize_clients(): + """ + Set up all of the API clients from the main session, done before invocation of the handlers + :return: + """ + + global session + global s3 + + logger.info(f"initializing boto3 session and clients") + if session is None: + session = boto3.session.Session() + +# logger.debug(f" boto3 client: s3") +# s3 = session.client('s3') + + logger.debug(f" boto3 client: secretsmanager") + s3 = session.client('s3') + + return + +initialize_clients() + +# 
https://docs.powertools.aws.dev/lambda/python/latest/utilities/data_classes/#s3-eventbridge-notification +# https://docs.powertools.aws.dev/lambda/python/latest/core/logger/#standard-structured-keys + +def do_exit(time_start,status): + global APPNAME + time_end=datetime.now() + tx=time_end-time_start + logger.info(f'End processing Scan Notification at start {time_start} end {time_end} elapsed {tx} status {status}') + if not status: + raise RuntimeError(f'Error processing {APPNAME}') + return status + +# @metrics.log_metrics +@logger.inject_lambda_context +@event_source(data_class=EventBridgeEvent) +def lambda_handler(event, context): + """ + Project: DARHTS + This function is triggered by an EventBridge event. + It checks for + 1. Darhts Clean Bucket Object Tag Added Event + 2. Checks that required tags are present + 3. If so, copies to the Dapps clean bucket + + :param event: + :param context: + :return: + """ + + global session + global s3 + + logger.debug(f'event {event} context {context}') + if S3_TAG_VERBOSE: + try: + output_settings() + except: + logger.error(f'Cannot execute output_settings()') + pass + + try: + expected_tags=json.loads(S3_TAG_TRIGGER_TAGS) + except Exception as e: + logger.error(f"Error converting trigger tags: {str(e)}") + raise e +# return do_exit(time_start,False) + +# expected_tags={ +# 'GuardDutyMalwareScanStatus':'NO_THREATS_FOUND', +# 'darhts_certified':'true', +# } + + time_start=datetime.now() +# timestamp=time_start.astimezone(timezone.utc).isoformat(timespec='seconds')+'Z' + timestamp=event.time + logger.info(f'Start processing Scan Notification at start {time_start}') + + try: + event_detail = event.get('detail-type') + event_source = event.get('source') + + if event_source == 'aws.s3' and event_detail == 'Object Tags Added': + logger.info("This is an S3 Object Tag Added Event.") + bucket_name = event['detail']['bucket']['name'] + object_key = event['detail']['object']['key'] + object_etag = event['detail']['object']['etag'] + 
logger.debug(f'bucket {bucket_name} object {object_key} {object_etag} object-version {event["detail"]["version"]}') + + else: + logger.debug(f"Ignoring Event: Source: {event_source}. Detail: {event_detail}") + logger.debug(str(event)) + return do_exit(time_start,True) + + except Exception as e: + logger.error(f"Error processing event: {str(e)}") + raise e + + if len(expected_tags)==0: + logger.info(f'No tags provided in variable S3_TAG_TRIGGER_TAGS') + return do_exit(time_start,True) + + logger.info(f'checking tags on from_bucket={bucket_name} key={object_key}') + try: + response = s3.get_object_tagging( + Bucket=bucket_name, + Key=object_key + ) + logger.debug(f'response {response}') + + object_tags=response.get('TagSet',[]) + found_tags={} +# copy_object=[] + for t in object_tags: + (k,v)=(t.get('Key',None),t.get('Value',None)) + if k and v: + found_tags[k]=v + logger.debug(f'found_tags: {found_tags}') + matches=[found_tags.get(k,None)==v for k,v in expected_tags.items()] + if all(matches): +# if len(copy_object)==len(expected_tags) and all(copy_object): + logger.info(f'Tags matched {expected_tags}, object to be copied') + else: + logger.info(f'Tags not-matched ({matches}) {expected_tags}, object not copied') + return do_exit(time_start,True) + except Exception as e: + logger.error(f'Unable get tags from_bucket={bucket_name} key={object_key}') + raise e +# return do_exit(time_start,False) + + dst_bucket=S3_TAG_BUCKET_CLEAN_OUT + try: + logger.info(f'Copying file from_bucket={bucket_name} to_bucket={dst_bucket} key={object_key}') + response = s3.copy_object( + Bucket=dst_bucket, + CopySource={"Bucket": bucket_name, "Key": object_key}, + Key=object_key, + TaggingDirective='COPY' + ) + except: + logger.error(f'Unable to copy from_bucket={bucket_name} to_bucket={dst_bucket} key={object_key}') + raise +# logger.error(str(sys.exc_info()[0])) +# return do_exit(time_start,False) + + return do_exit(time_start,True) diff --git a/code/darhts-s3-tag.zip b/code/darhts-s3-tag.zip new file mode 100644 index 
0000000..ac0d1cb Binary files /dev/null and b/code/darhts-s3-tag.zip differ diff --git a/code/locals.tf b/code/locals.tf new file mode 100644 index 0000000..8a74a49 --- /dev/null +++ b/code/locals.tf @@ -0,0 +1,7 @@ +locals { + base_tags = { + "boc:created_by" = "terraform" + # "boc:tf_module_version" = local._module_version + # "boc:tf_module_name " = local._module_name + } +} diff --git a/code/package.layer.tf b/code/package.layer.tf new file mode 100644 index 0000000..b5d0f23 --- /dev/null +++ b/code/package.layer.tf @@ -0,0 +1,36 @@ +locals { + app_layer_zip_file = "darhts-guardduty-move.package.zip" + app_layer_file = "python" + app_layer_file_list = fileset(format("%v/%v", path.root, local.app_layer_file), "**") + app_layer_file_list_hash = [for f in local.app_layer_file_list : filesha256(format("%v/%v/%v", path.root, local.app_layer_file, f))] + app_layer_hash = sha256(join(",", local.app_layer_file_list_hash)) +} + +# resource "null_resource" "app_layer_zip" { +# triggers = { +# hash = local.app_layer_hash +# } +# } +# +# data "archive_file" "app_layer_zip" { +# source_file = format("%v/%v", path.root, local.app_layer_file) +# output_path = format("%v/%v", path.root, local.app_layer_zip_file) +# type = "zip" +# depends_on = [null_resource.app_layer_zip] +# } + + +resource "null_resource" "app_layer_zip" { + triggers = { + file_name = local.app_layer_zip_file + source_dir = local.app_layer_file + hash = local.app_layer_hash + } + + provisioner "local-exec" { + working_dir = "${path.root}/packages" + command = "zip -r ../${self.triggers.file_name} ${self.triggers.source_dir}" + } +} + + diff --git a/code/package.move.tf b/code/package.move.tf new file mode 100644 index 0000000..f7caa49 --- /dev/null +++ b/code/package.move.tf @@ -0,0 +1,21 @@ +locals { + app_move_zip_file = "darhts-guardduty-move.zip" + app_move_file = "darhts-guardduty-move.py" + app_move_file_list = fileset(format("%v/%v", path.root, local.app_move_file), "**") + app_move_hash = 
filesha256(format("%v/%v", path.root, local.app_move_file)) +} + +resource "null_resource" "app_move_zip" { + triggers = { + hash = local.app_move_hash + } +} + +data "archive_file" "app_move_zip" { + source_file = format("%v/%v", path.root, local.app_move_file) + output_path = format("%v/%v", path.root, local.app_move_zip_file) + type = "zip" + depends_on = [null_resource.app_move_zip] +} + +# darhts-guardduty-move.py darhts-guardduty-notify.py darhts-s3-notify.py darhts-s3-tag.py diff --git a/code/package.notify.tf b/code/package.notify.tf new file mode 100644 index 0000000..a446654 --- /dev/null +++ b/code/package.notify.tf @@ -0,0 +1,20 @@ +locals { + app_notify_zip_file = "darhts-guardduty-notify.zip" + app_notify_file = "darhts-guardduty-notify.py" + app_notify_hash = filesha256(format("%v/%v", path.root, local.app_notify_file)) +} + +resource "null_resource" "app_notify_zip" { + triggers = { + hash = local.app_notify_hash + } +} + +data "archive_file" "app_notify_zip" { + source_file = format("%v/%v", path.root, local.app_notify_file) + output_path = format("%v/%v", path.root, local.app_notify_zip_file) + type = "zip" + depends_on = [null_resource.app_notify_zip] +} + +# darhts-guardduty-move.py darhts-guardduty-notify.py darhts-s3-notify.py darhts-s3-tag.py diff --git a/code/package.s3-tag.tf b/code/package.s3-tag.tf new file mode 100644 index 0000000..4cc8a3d --- /dev/null +++ b/code/package.s3-tag.tf @@ -0,0 +1,20 @@ +locals { + app_s3_tag_zip_file = "darhts-s3-tag.zip" + app_s3_tag_file = "darhts-s3-tag.py" + app_s3_tag_hash = filesha256(format("%v/%v", path.root, local.app_s3_tag_file)) +} + +resource "null_resource" "app_s3_tag_zip" { + triggers = { + hash = local.app_s3_tag_hash + } +} + +data "archive_file" "app_s3_tag_zip" { + source_file = format("%v/%v", path.root, local.app_s3_tag_file) + output_path = format("%v/%v", path.root, local.app_s3_tag_zip_file) + type = "zip" + depends_on = [null_resource.app_s3_tag_zip] +} + +# darhts-guardduty-move.py 
darhts-guardduty-notify.py darhts-s3-notify.py darhts-s3-tag.py diff --git a/code/package.s3.tf b/code/package.s3.tf new file mode 100644 index 0000000..d7f7113 --- /dev/null +++ b/code/package.s3.tf @@ -0,0 +1,20 @@ +locals { + app_s3_notify_zip_file = "darhts-s3-notify.zip" + app_s3_notify_file = "darhts-s3-notify.py" + app_s3_notify_hash = filesha256(format("%v/%v", path.root, local.app_s3_notify_file)) +} + +resource "null_resource" "app_s3_notify_zip" { + triggers = { + hash = local.app_s3_notify_hash + } +} + +data "archive_file" "app_s3_notify_zip" { + source_file = format("%v/%v", path.root, local.app_s3_notify_file) + output_path = format("%v/%v", path.root, local.app_s3_notify_zip_file) + type = "zip" + depends_on = [null_resource.app_s3_notify_zip] +} + +# darhts-guardduty-move.py darhts-guardduty-notify.py darhts-s3-notify.py darhts-s3-tag.py diff --git a/code/packages/.gitignore b/code/packages/.gitignore new file mode 100644 index 0000000..ebe14c8 --- /dev/null +++ b/code/packages/.gitignore @@ -0,0 +1,2 @@ +python/ +.venv/ diff --git a/code/packages/README.md b/code/packages/README.md new file mode 100644 index 0000000..b01266d --- /dev/null +++ b/code/packages/README.md @@ -0,0 +1,4 @@ +# Setup + +uv venv --managed-python --python 3.12 +uv pip install -r requirements.txt --target python --link-mode=copy diff --git a/code/packages/pyproject.toml b/code/packages/pyproject.toml new file mode 100644 index 0000000..5c28732 --- /dev/null +++ b/code/packages/pyproject.toml @@ -0,0 +1,7 @@ +[project] +name = "main" +version = "1.0.0" +description = "DARHTS Lambda Layer" +readme = "README.md" +requires-python = ">=3.12" +dependencies = [] diff --git a/code/packages/requirements.txt b/code/packages/requirements.txt new file mode 100644 index 0000000..e08d193 --- /dev/null +++ b/code/packages/requirements.txt @@ -0,0 +1,2 @@ +aws-lambda-powertools +requests diff --git a/code/packages/setup.env b/code/packages/setup.env new file mode 100644 index 0000000..7261a1a 
--- /dev/null +++ b/code/packages/setup.env @@ -0,0 +1,3 @@ +export SSL_CERT_FILE=/etc/pki/tls/certs/ca-bundle.crt +export UV_PYTHON_INSTALL_DIR=/apps/uv/share +export UV_LINK_MODE=copy diff --git a/code/tf-run.data b/code/tf-run.data new file mode 100644 index 0000000..35501ea --- /dev/null +++ b/code/tf-run.data @@ -0,0 +1,29 @@ +VERSION 2.1.2 +TAG setup +REMOTE-STATE +COMMAND tf-directory-setup.py -l none -f +COMMAND setup-new-directory.sh + +TAG links +LINKTOP includes.d/variables.account_tags.tf +LINKTOP includes.d/variables.account_tags.auto.tfvars +LINKTOP includes.d/variables.infrastructure_tags.tf +LINKTOP includes.d/variables.infrastructure_tags.auto.tfvars +LINKTOP includes.d/variables.application_tags.tf +LINKTOP includes.d/variables.application_tags.auto.tfvars +# LINKTOP provider_configs.d/provider.ldap_new.auto.tfvars +# LINKTOP provider_configs.d/provider.ldap_new.tf +# LINKTOP provider_configs.d/provider.ldap_new.variables.tf +COMMAND rm -f provider.ldap.* +LINK variables.tf +LINK variables.auto.tfvars + +TAG init +COMMAND tf-init + +TAG start +#POLICY +ALL + +TAG state-link +COMMAND tf-directory-setup.py -l s3 diff --git a/code/versions.tf b/code/versions.tf new file mode 100644 index 0000000..dd0ebb9 --- /dev/null +++ b/code/versions.tf @@ -0,0 +1,9 @@ +terraform { + required_version = ">= 1.0" + required_providers { + aws = { + source = "hashicorp/aws" + version = ">= 5.0" + } + } +}