Skip to content

Commit

Permalink
Update remote state for CodeBuild where the Git repo doesn't exist
Browse files Browse the repository at this point in the history
  • Loading branch information
morga471 committed May 8, 2025
1 parent a302dec commit bb39deb
Show file tree
Hide file tree
Showing 7 changed files with 487 additions and 32 deletions.
11 changes: 0 additions & 11 deletions buildspecs/deploy.terragrunt.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,12 @@ env:
TF_VERSION: "1.5.5"
TG_VERSION: "0.72.0"
TOOLS_DIR: "/tmp/build-tools"
CERT_DIR: "/tmp/certs"
exported-variables:
- TERRAGRUNT_PATH

cache:
paths:
- '/tmp/build-tools/**/*'
- '/tmp/certs/**/*'

phases:
install:
Expand All @@ -24,15 +22,6 @@ phases:
- export http_proxy=$PROXY_CONFIG
- export https_proxy=$PROXY_CONFIG
- export NO_PROXY=.census.gov,169.254.169.254,148.129.0.0/16,10.0.0.0/8,172.16.0/12,.eks.amazonaws.com,.s3.amazonaws.com,.amazonaws.com,.gcr.io,.pkg.dev

# Set up certificate for proxy access
- mkdir -p $CERT_DIR
- cp buildspecs/census-pki.bundle.crt $CERT_DIR/
- export SSL_CERT_FILE=$CERT_DIR/census-pki.bundle.crt
- export REQUESTS_CA_BUNDLE=$CERT_DIR/census-pki.bundle.crt
- export NODE_EXTRA_CA_CERTS=$CERT_DIR/census-pki.bundle.crt
- export CURL_CA_BUNDLE=$CERT_DIR/census-pki.bundle.crt
- export AWS_CA_BUNDLE=$CERT_DIR/census-pki.bundle.crt

# Create tools directory if it doesn't exist
- mkdir -p $TOOLS_DIR/bin
Expand Down
323 changes: 323 additions & 0 deletions buildspecs/pip-cert.pem

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions buildspecs/pip.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
[global]
cert = ~/.pip/pip-cert.pem
# proxy = http://proxy.tco.census.gov:3128
index = https://nexus.it.census.gov:8443/repository/DataScience-Group/pypi
index-url = https://nexus.it.census.gov:8443/repository/DataScience-Group/simple
trusted-host = nexus.it.census.gov
pypi.python.org
pypi.org
files.pythonhosted.org
proxy.tco.census.gov
101 changes: 101 additions & 0 deletions buildspecs/sechub_parser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
"""Parse a Trivy JSON vulnerability report and import each finding into
AWS Security Hub via BatchImportFindings.

Expects a Trivy ``results.json`` in the working directory and these
environment variables (provided by the CodeBuild project):
AWS_REGION, CODEBUILD_BUILD_ARN, docker_img_name, docker_tag.
"""
import json
import boto3
import datetime
import os

# Trivy severity label -> ASFF product severity.
# Normalized severity is product severity * 10 (ASFF 0-100 scale).
SEVERITY_MAP = {
    'LOW': 1,
    'MEDIUM': 4,
    'HIGH': 7,
    'CRITICAL': 9,
}

# boto3 clients: Security Hub to import findings, STS for the account id
securityhub = boto3.client('securityhub')
sts = boto3.client('sts')

# retrieve account id from STS GetCallerIdentity
getAccount = sts.get_caller_identity()
awsAccount = str(getAccount['Account'])
# retrieve env vars set by CodeBuild
awsRegion = os.environ['AWS_REGION']
codebuildBuildArn = os.environ['CODEBUILD_BUILD_ARN']
containerName = os.environ['docker_img_name']
containerTag = os.environ['docker_tag']

# open Trivy vuln report & parse out vuln info
with open('results.json') as json_file:
    data = json.load(json_file)
    if data[0].get('Vulnerabilities') is None:
        print('No vulnerabilities')
    else:
        for p in data[0]['Vulnerabilities']:
            cveId = str(p['VulnerabilityID'])
            # Title/Description/FixedVersion/References are optional in
            # Trivy output — default them instead of raising KeyError.
            cveTitle = str(p.get('Title', cveId))
            cveDescription = str(p.get('Description', 'No description provided'))
            # ASFF Description is limited to 1024 characters; truncate with a marker
            cveDescription = (cveDescription[:1021] + '..') if len(cveDescription) > 1021 else cveDescription
            packageName = str(p['PkgName'])
            installedVersion = str(p['InstalledVersion'])
            fixedVersion = str(p.get('FixedVersion', ''))
            trivySeverity = str(p.get('Severity', 'UNKNOWN'))
            references = p.get('References') or ['']
            cveReference = str(references[0])
            # create a timezone-aware ISO 8601 timestamp
            # (datetime.utcnow() is deprecated and returns a naive datetime)
            iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()
            # map Trivy severity to ASFF severity; skip findings we cannot map.
            # Previously an unmapped severity left trivyProductSev unset (NameError)
            # or stale from the prior iteration (silently wrong severity).
            if trivySeverity not in SEVERITY_MAP:
                print('No vulnerability information found for ' + cveId)
                continue
            trivyProductSev = SEVERITY_MAP[trivySeverity]
            trivyNormalizedSev = trivyProductSev * 10
            try:
                response = securityhub.batch_import_findings(
                    Findings=[
                        {
                            'SchemaVersion': '2018-10-08',
                            'Id': containerName + ':' + containerTag + '/' + cveId,
                            # account field intentionally empty: public product ARN
                            'ProductArn': 'arn:aws:securityhub:' + awsRegion + ':' + ':product/aquasecurity/aquasecurity',
                            'GeneratorId': codebuildBuildArn,
                            'AwsAccountId': awsAccount,
                            'Types': [ 'Software and Configuration Checks/Vulnerabilities/CVE' ],
                            'CreatedAt': iso8601Time,
                            'UpdatedAt': iso8601Time,
                            'Severity': {
                                'Product': trivyProductSev,
                                'Normalized': trivyNormalizedSev
                            },
                            'Title': 'Trivy found a vulnerability to ' + cveId + ' in container ' + containerName,
                            'Description': cveDescription,
                            'Remediation': {
                                'Recommendation': {
                                    'Text': 'More information on this vulnerability is provided in the hyperlink',
                                    'Url': cveReference
                                }
                            },
                            'ProductFields': { 'Product Name': 'Trivy' },
                            'Resources': [
                                {
                                    'Type': 'Container',
                                    'Id': containerName + ':' + containerTag,
                                    'Partition': 'aws',
                                    'Region': awsRegion,
                                    'Details': {
                                        'Container': { 'ImageName': containerName + ':' + containerTag },
                                        'Other': {
                                            'CVE ID': cveId,
                                            'CVE Title': cveTitle,
                                            'Installed Package': packageName + ' ' + installedVersion,
                                            'Patched Package': packageName + ' ' + fixedVersion
                                        }
                                    }
                                },
                            ],
                            'RecordState': 'ACTIVE'
                        }
                    ]
                )
                print(response)
            except Exception as e:
                # surface the failure in the build log, then fail the build
                print(e)
                raise
64 changes: 50 additions & 14 deletions buildspecs/security.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,10 @@ version: 0.2
env:
variables:
TOOLS_DIR: "/tmp/build-tools"
CERT_DIR: "/tmp/certs"

cache:
paths:
- '/tmp/build-tools/**/*'
- '/tmp/certs/**/*'

phases:
install:
Expand All @@ -18,16 +16,21 @@ phases:
- echo "Setting up security scanning tools"
- export http_proxy=$PROXY_CONFIG
- export https_proxy=$PROXY_CONFIG
- export NO_PROXY=.census.gov,169.254.169.254,148.129.0.0/16,10.0.0.0/8,172.16.0/12,.eks.amazonaws.com,.s3.amazonaws.com,.amazonaws.com,.gcr.io,.pkg.dev
- export NO_PROXY=.census.gov,169.254.169.254,148.129.0.0/16,10.0.0.0/8,172.16.0/12,.amazonaws.com,pypi.org,files.pythonhosted.org

# Set up certificate for proxy access
- mkdir -p $CERT_DIR
- cp buildspecs/census-pki.bundle.crt $CERT_DIR/
- export SSL_CERT_FILE=$CERT_DIR/census-pki.bundle.crt
- export REQUESTS_CA_BUNDLE=$CERT_DIR/census-pki.bundle.crt
- export NODE_EXTRA_CA_CERTS=$CERT_DIR/census-pki.bundle.crt
- export CURL_CA_BUNDLE=$CERT_DIR/census-pki.bundle.crt
- export AWS_CA_BUNDLE=$CERT_DIR/census-pki.bundle.crt
# Download pip configuration files from S3
- |
echo "Downloading pip configuration files from S3"
mkdir -p /tmp/pip-tools
aws s3 cp s3://${ARTIFACTS_BUCKET}/tools/pip.conf /tmp/pip-tools/pip.conf
aws s3 cp s3://${ARTIFACTS_BUCKET}/tools/pip-cert.pem /tmp/pip-tools/pip-cert.pem
if [ ! -f "/tmp/pip-tools/pip.conf" ] || [ ! -f "/tmp/pip-tools/pip-cert.pem" ]; then
echo "Error: Failed to download pip configuration files from S3"
exit 1
fi
echo "Successfully downloaded pip configuration files"
# Install security scanning tools
- mkdir -p $TOOLS_DIR/bin
Expand All @@ -37,13 +40,46 @@ phases:
build:
commands:
- echo "Running security scans"
- checkov --directory . --framework terraform --quiet --compact
- checkov --directory . --framework terragrunt --quiet --compact
- checkov --directory . --quiet --compact -o cli -o junitxml -o gitlab_sast -o sarif -o spdx -o json -o cyclonedx --output-file-path console,reports/ --soft-fail --skip-results-upload --skip-download

# - checkov --directory . --framework terragrunt --quiet --compact
- tfsec . --no-color
# - trivy -f json -o results.json --exit-code 0 --severity HIGH,MEDIUM,LOW --quiet --auto-refresh $docker_img_name:$docker_tag
# -o {cli,csv,cyclonedx,cyclonedx_json,json,junitxml,github_failed_only,gitlab_sast,sarif,spdx}, --output {cli,csv,cyclonedx,cyclonedx_json,json,junitxml,github_failed_only,gitlab_sast,sarif,spdx} Report output format. Add multiple outputs by using the flag multiple times (-o sarif -o cli)
# - trivy -f json -o results.json --exit-code 1 --severity HIGH,CRITICAL --quiet --auto-refresh $docker_img_name:$docker_tag
- echo Build started on `date`
# Scan the Dockerfile with Checkov

# skip CKV2_DOCKER_1 because sudo is intended in this container
# - checkov --file app/Dockerfile --output cli --output junitxml --output-file-path console,results.xml --skip-check CKV2_DOCKER_1 --skip-download

# # Register App with Nexus IQ
# - |
# BODY=$(jq --null-input --arg name "${APPLICATION_ID}-${IMAGE_REPOSITORY_NAME}" \
# --arg org "${NEXUS_IQ_ORG}" '{"name": $name, "publicId": $name, "organizationId": $org}')
# echo "${BODY}"
# echo "Registering application: ${APPLICATION_ID}-${IMAGE_REPOSITORY_NAME} in org ${NEXUS_IQ_ORG} with IQ..."
# curl --location -X POST ${NEXUS_IQ_URL}/api/v2/applications --header 'Content-Type: application/json' -u "${NEXUS_USER}:${NEXUS_PASS}" --data "${BODY}"

# # Create the image, no push
# - cd app && docker build --no-cache --pull -f Dockerfile -t $IMAGE_REPOSITORY_NAME:$IMAGE_TAG . && cd -
# - docker tag $IMAGE_REPOSITORY_NAME:$IMAGE_TAG $REPOSITORY_URI:$IMAGE_TAG

# # Save the image as a tar file to scan
# - docker save ${REPOSITORY_URI}:${IMAGE_TAG} > img.tar

# # Run Trivy against Docker Tar File
# - trivy image --input img.tar --output trivy.output --offline-scan --dependency-tree

# # Scan the tar file with the IQ CLI
# - |
# docker run -v ${PWD}:/target ${DOCKER_PROXY}/sonatype/nexus-iq-cli /sonatype/evaluate -s https://iq.udapp-appsec.us.amz.3mhis.net --authentication ${NEXUS_USER}:${NEXUS_PASS} --application-id ${APPLICATION_ID}-${IMAGE_REPOSITORY_NAME} --organization-id ${NEXUS_IQ_ORG} --result-file iq-output.json --stage build target/img.tar

post_build:
commands:
- echo "Security scan completed on `date`"
- echo trivy scan completed on `date`
# - python3 sechub_parser.py
# - echo Report Sent to Security Hub on `date`

artifacts:
files:
Expand Down
4 changes: 2 additions & 2 deletions buildspecs/terragrunt.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,15 +29,15 @@ phases:
# Terraform
if [ ! -f "$TOOLS_DIR/bin/terraform" ]; then
echo "Copying Terraform from S3 artifacts bucket"
aws s3 cp s3://${ARTIFACTS_BUCKET}/tools/terraform/terraform.zip $TOOLS_DIR/
aws s3 cp s3://${ARTIFACTS_BUCKET}/tools/terraform.zip $TOOLS_DIR/
unzip -o $TOOLS_DIR/terraform.zip -d $TOOLS_DIR/bin/
chmod +x $TOOLS_DIR/bin/terraform
fi
# Terragrunt
if [ ! -f "$TOOLS_DIR/bin/terragrunt" ]; then
echo "Copying Terragrunt from S3 artifacts bucket"
aws s3 cp s3://${ARTIFACTS_BUCKET}/tools/terragrunt/terragrunt $TOOLS_DIR/bin/
aws s3 cp s3://${ARTIFACTS_BUCKET}/tools/terragrunt $TOOLS_DIR/bin/
chmod +x $TOOLS_DIR/bin/terragrunt
fi
Expand Down
6 changes: 1 addition & 5 deletions lab/root.hcl
Original file line number Diff line number Diff line change
Expand Up @@ -24,17 +24,13 @@ locals {
# Automatically load vpc-level variables
vpc_vars = read_terragrunt_config(find_in_parent_folders("vpc.hcl"))

# Check if copy_images.tf exists in the module directory
has_copy_images = fileexists("${get_original_terragrunt_dir()}/copy_images.tf")

# Add any other locals you want to expose
# only expose things not already included via local.xxx_vars.locals.*
root_locals_for_inputs = {
is_module_enabled = local.is_module_enabled
module_name = local.module_name
has_copy_images = local.has_copy_images
}

root = "${get_parent_terragrunt_dir()}../../"
# Extract the variables we need for easy access
account_id = local.account_vars.locals.aws_account_id
account_name = local.account_vars.locals.account_name
Expand Down

0 comments on commit bb39deb

Please sign in to comment.