From 79e60a2dc277e01eb4d826a65414cc0f2d81dd67 Mon Sep 17 00:00:00 2001 From: Your Name Date: Wed, 4 Feb 2026 17:45:34 -0500 Subject: [PATCH] Initial commit: CloudFormation Custom Resource Lambda for repository creation - Migrated from Service Catalog EventBridge to CloudFormation Custom Resources - Lambda creates GitHub/GitLab repositories from templates - Supports dynamic parameter handling for any CloudFormation properties - Includes Packer pipeline for container image builds - Terraform deployment configuration in deploy/ directory - Complete CloudFormation template for Service Catalog integration - Documentation for deployment and usage --- .gitignore | 4 + CLOUDFORMATION_CUSTOM_RESOURCE_GUIDE.md | 447 +++++++++++ CLOUDFORMATION_CUSTOM_RESOURCE_MIGRATION.md | 263 +++++++ CODEOWNERS | 2 + DEPLOYMENT.md | 288 +++++++ Dockerfile | 33 + MIGRATION.md | 204 +++++ Makefile | 44 ++ PACKER_PIPELINE_EXPLANATION.md | 125 ++++ PACKER_UPDATES.md | 128 ++++ README.md | 116 +++ __init__.py | 0 app.py | 46 ++ backend.tf | 6 + buildspec.yml.j2 | 93 +++ cloudformation-template.yaml | 108 +++ config_packer.hcl | 83 +++ deploy/main.tf | 101 +++ deploy/terraform.tfvars.example | 22 + deploy/variables.tf | 65 ++ design-docs/CUSTOM_TEMPLATES.MD | 138 ++++ design-docs/README.md | 142 ++++ design-docs/REPO_VARS_AND_SECRETS.md | 253 +++++++ docs/build_docs.sh | 26 + docs/callnotes.md | 24 + docs/callnotes.txt | 267 +++++++ docs/gitlab-migration.md | 71 ++ docs/requirements.txt | 6 + docs/source/conf.py | 53 ++ docs/source/index.rst | 94 +++ docs/source/modules/github_client.rst | 8 + docs/source/modules/lambda_handler.rst | 7 + docs/source/modules/models.rst | 30 + docs/source/modules/template_manager.rst | 16 + docs/tf-native-v2.md | 83 +++ docs/tf-native-v3.md | 75 ++ docs/tf-native-v4.md | 79 ++ docs/tf-native-v5.md | 72 ++ docs/tf-native.md | 72 ++ events/cloudformation-create-event.json | 17 + events/service-catalog-event.json | 57 ++ events/test-event.json | 47 ++ 
github/workflows/integration-tests.yml | 35 + lambda-template-repo-generator.code-workspace | 17 + main.tf | 57 ++ packer.pkr.hcl | 114 +++ pip-cert.pem | 323 ++++++++ pip.conf | 10 + playbook.yml | 22 + requirements.txt | 12 + scripts/cleanup_test_repos.py | 119 +++ scripts/lambda_setup.py | 144 ++++ scripts/validate_github_token.py | 321 ++++++++ service-catalog/product-template.yaml | 147 ++++ setup.py | 17 + .../.devcontainer/Dockerfile | 65 ++ .../.devcontainer/devcontainer.json | 45 ++ .../.github/runner-config.yml | 7 + .../.github/workflows/build.yml | 107 +++ .../.github/workflows/gh-token.yml | 24 + .../github-client-integration-test.yml | 40 + .../.github/workflows/integration-tests.yml | 40 + template-automation-lambda/.gitignore | 202 +++++ .../.terraform.lock.hcl | 24 + template-automation-lambda/.tflog | 246 ++++++ template_automation/ROADMAP.md | 1 + template_automation/__init__.py | 1 + .../__pycache__/__init__.cpython-311.pyc | Bin 0 -> 211 bytes .../__pycache__/app.cpython-311.pyc | Bin 0 -> 41766 bytes .../github_provider.cpython-311.pyc | Bin 0 -> 33390 bytes .../gitlab_provider.cpython-311.pyc | Bin 0 -> 16066 bytes .../repository_provider.cpython-311.pyc | Bin 0 -> 6684 bytes template_automation/app.py | 705 ++++++++++++++++++ template_automation/github_client.py | 669 +++++++++++++++++ template_automation/github_provider.py | 643 ++++++++++++++++ template_automation/gitlab_client.py | 634 ++++++++++++++++ template_automation/gitlab_provider.py | 330 ++++++++ template_automation/models.py | 217 ++++++ template_automation/old.py | 473 ++++++++++++ template_automation/pytest.ini | 11 + template_automation/repository_provider.py | 114 +++ template_automation/requirements.txt | 12 + template_automation/template_manager.py | 122 +++ .../templates/workflows/ansible.yml.j2 | 42 ++ .../templates/workflows/terraform.yml.j2 | 52 ++ template_automation/test_payload.json | 26 + template_automation/tests/__init__.py | 1 + 
template_automation/tests/conftest.py | 122 +++ .../integration/test_github_operations.py | 16 + template_automation/tests/pytest.ini | 5 + .../tests/test_github_client.py | 245 ++++++ .../tests/test_github_client_integration.py | 178 +++++ test_service_catalog.py | 71 ++ tests/conftest.py | 15 + .../test_github_client_integration.py | 143 ++++ tests/test_app.py | 110 +++ tests/test_github_client.py | 71 ++ tests/test_integration.py | 62 ++ varfiles/default.json | 1 + varfiles/default.tfvars | 16 + varfiles/packer.pkrvars.hcl | 0 variables.tf | 51 ++ 102 files changed, 11112 insertions(+) create mode 100644 .gitignore create mode 100644 CLOUDFORMATION_CUSTOM_RESOURCE_GUIDE.md create mode 100644 CLOUDFORMATION_CUSTOM_RESOURCE_MIGRATION.md create mode 100644 CODEOWNERS create mode 100644 DEPLOYMENT.md create mode 100644 Dockerfile create mode 100644 MIGRATION.md create mode 100644 Makefile create mode 100644 PACKER_PIPELINE_EXPLANATION.md create mode 100644 PACKER_UPDATES.md create mode 100644 README.md create mode 100644 __init__.py create mode 100644 app.py create mode 100644 backend.tf create mode 100644 buildspec.yml.j2 create mode 100644 cloudformation-template.yaml create mode 100644 config_packer.hcl create mode 100644 deploy/main.tf create mode 100644 deploy/terraform.tfvars.example create mode 100644 deploy/variables.tf create mode 100644 design-docs/CUSTOM_TEMPLATES.MD create mode 100644 design-docs/README.md create mode 100644 design-docs/REPO_VARS_AND_SECRETS.md create mode 100755 docs/build_docs.sh create mode 100644 docs/callnotes.md create mode 100644 docs/callnotes.txt create mode 100644 docs/gitlab-migration.md create mode 100644 docs/requirements.txt create mode 100644 docs/source/conf.py create mode 100644 docs/source/index.rst create mode 100644 docs/source/modules/github_client.rst create mode 100644 docs/source/modules/lambda_handler.rst create mode 100644 docs/source/modules/models.rst create mode 100644 docs/source/modules/template_manager.rst 
create mode 100644 docs/tf-native-v2.md create mode 100644 docs/tf-native-v3.md create mode 100644 docs/tf-native-v4.md create mode 100644 docs/tf-native-v5.md create mode 100644 docs/tf-native.md create mode 100644 events/cloudformation-create-event.json create mode 100644 events/service-catalog-event.json create mode 100644 events/test-event.json create mode 100644 github/workflows/integration-tests.yml create mode 100644 lambda-template-repo-generator.code-workspace create mode 100644 main.tf create mode 100644 packer.pkr.hcl create mode 100644 pip-cert.pem create mode 100644 pip.conf create mode 100644 playbook.yml create mode 100644 requirements.txt create mode 100644 scripts/cleanup_test_repos.py create mode 100644 scripts/lambda_setup.py create mode 100755 scripts/validate_github_token.py create mode 100644 service-catalog/product-template.yaml create mode 100644 setup.py create mode 100644 template-automation-lambda/.devcontainer/Dockerfile create mode 100644 template-automation-lambda/.devcontainer/devcontainer.json create mode 100644 template-automation-lambda/.github/runner-config.yml create mode 100644 template-automation-lambda/.github/workflows/build.yml create mode 100644 template-automation-lambda/.github/workflows/gh-token.yml create mode 100644 template-automation-lambda/.github/workflows/github-client-integration-test.yml create mode 100644 template-automation-lambda/.github/workflows/integration-tests.yml create mode 100644 template-automation-lambda/.gitignore create mode 100644 template-automation-lambda/.terraform.lock.hcl create mode 100644 template-automation-lambda/.tflog create mode 100644 template_automation/ROADMAP.md create mode 100644 template_automation/__init__.py create mode 100644 template_automation/__pycache__/__init__.cpython-311.pyc create mode 100644 template_automation/__pycache__/app.cpython-311.pyc create mode 100644 template_automation/__pycache__/github_provider.cpython-311.pyc create mode 100644 
template_automation/__pycache__/gitlab_provider.cpython-311.pyc create mode 100644 template_automation/__pycache__/repository_provider.cpython-311.pyc create mode 100644 template_automation/app.py create mode 100644 template_automation/github_client.py create mode 100644 template_automation/github_provider.py create mode 100644 template_automation/gitlab_client.py create mode 100644 template_automation/gitlab_provider.py create mode 100644 template_automation/models.py create mode 100644 template_automation/old.py create mode 100644 template_automation/pytest.ini create mode 100644 template_automation/repository_provider.py create mode 100644 template_automation/requirements.txt create mode 100644 template_automation/template_manager.py create mode 100644 template_automation/templates/workflows/ansible.yml.j2 create mode 100644 template_automation/templates/workflows/terraform.yml.j2 create mode 100644 template_automation/test_payload.json create mode 100644 template_automation/tests/__init__.py create mode 100644 template_automation/tests/conftest.py create mode 100644 template_automation/tests/integration/test_github_operations.py create mode 100644 template_automation/tests/pytest.ini create mode 100644 template_automation/tests/test_github_client.py create mode 100644 template_automation/tests/test_github_client_integration.py create mode 100755 test_service_catalog.py create mode 100644 tests/conftest.py create mode 100644 tests/integration/test_github_client_integration.py create mode 100644 tests/test_app.py create mode 100644 tests/test_github_client.py create mode 100644 tests/test_integration.py create mode 100644 varfiles/default.json create mode 100644 varfiles/default.tfvars create mode 100644 varfiles/packer.pkrvars.hcl create mode 100644 variables.tf diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..88c64d1c --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +# Packer pipeline zip files +template-automation-lambda-builder.zip +# 
Packer pipeline zip files +service-catalog-repo-generator-builder.zip diff --git a/CLOUDFORMATION_CUSTOM_RESOURCE_GUIDE.md b/CLOUDFORMATION_CUSTOM_RESOURCE_GUIDE.md new file mode 100644 index 00000000..709e416b --- /dev/null +++ b/CLOUDFORMATION_CUSTOM_RESOURCE_GUIDE.md @@ -0,0 +1,447 @@ +# CloudFormation Custom Resource Integration Guide + +This guide explains how to use the Lambda function as a CloudFormation Custom Resource for Service Catalog. + +## Overview + +The Lambda function is invoked as a **CloudFormation Custom Resource** when someone provisions a Service Catalog product. This approach is: +- ✅ **Native to CloudFormation** - No EventBridge rules needed +- ✅ **Synchronous** - CloudFormation waits for repository creation +- ✅ **Output Support** - Repository URLs available as stack outputs +- ✅ **Error Handling** - Failures properly propagate to CloudFormation + +## How It Works + +``` +User provisions Service Catalog Product + ↓ +CloudFormation creates stack + ↓ +Custom Resource invokes Lambda + ↓ +Lambda creates GitHub repository + ↓ +Lambda returns success to CloudFormation + ↓ +Stack completes with repository URLs in outputs +``` + +## Step 1: Deploy the Lambda Function + +First, deploy the Lambda function using the Terraform configuration in the `deploy/` directory: + +```bash +cd deploy +terraform init +terraform apply +``` + +**Important**: Copy the `lambda_function_arn` output - you'll need this for your CloudFormation template! + +Example output: +``` +lambda_function_arn = "arn:aws-us-gov:lambda:us-gov-west-1:229685449397:function:service-catalog-repo-gen-template-automation" +``` + +## Step 2: Create CloudFormation Template + +Create a CloudFormation template that uses the Lambda as a Custom Resource. 
See `cloudformation-template.yaml` for a complete example: + +```yaml +AWSTemplateFormatVersion: '2010-09-09' +Description: 'Create GitHub Repository from Template' + +Parameters: + ProjectName: + Type: String + Description: Name of the GitHub repository to create + + OwningTeam: + Type: String + Description: GitHub team that should have admin access + Default: tf-module-admins + + Environment: + Type: String + Description: Environment for the project + AllowedValues: + - development + - staging + - production + +Resources: + # Custom Resource that invokes the Lambda + GitHubRepository: + Type: Custom::RepositoryCreator + Properties: + ServiceToken: arn:aws-us-gov:lambda:us-gov-west-1:229685449397:function:service-catalog-repo-gen-template-automation + ProjectName: !Ref ProjectName + OwningTeam: !Ref OwningTeam + Environment: !Ref Environment + +Outputs: + RepositoryUrl: + Description: URL of the created GitHub repository + Value: !GetAtt GitHubRepository.RepositoryUrl + + PullRequestUrl: + Description: URL of the configuration pull request + Value: !GetAtt GitHubRepository.PullRequestUrl + + RepositoryName: + Description: Name of the created repository + Value: !GetAtt GitHubRepository.RepositoryName +``` + +### Key Points + +1. **ServiceToken**: Must be the ARN of your Lambda function +2. **Parameters**: All parameters (except `ServiceToken`) are passed to the Lambda +3. **Outputs**: Use `!GetAtt ResourceName.AttributeName` to get values from Lambda response +4. 
**Parameter Names**: Use PascalCase - Lambda automatically converts to snake_case + +### Available Output Attributes + +The Lambda returns these attributes that you can reference in CloudFormation outputs: + +- `RepositoryUrl` - URL of the created repository +- `RepositoryName` - Name of the repository +- `PullRequestUrl` - URL of the configuration pull request (GitHub) +- `MergeRequestUrl` - URL of the merge request (GitLab) + +## Step 3: Create Service Catalog Product + +### Option A: Using AWS Console + +1. Go to **AWS Service Catalog** → **Products** +2. Click **Create product** +3. Fill in product details: + - **Product name**: "Create GitHub Repository" + - **Description**: "Creates a new GitHub repository from template" + - **Owner**: Your team name + - **Distributor**: Your organization +4. Upload your CloudFormation template +5. Click **Create product** +6. Add the product to a portfolio +7. Grant access to users/groups + +### Option B: Using Terraform + +```hcl +resource "aws_servicecatalog_product" "github_repository" { + name = "GitHub Repository Creator" + owner = "Platform Team" + type = "CLOUD_FORMATION_TEMPLATE" + + provisioning_artifact_parameters { + name = "v1.0.0" + description = "Initial version" + type = "CLOUD_FORMATION_TEMPLATE" + template_url = "s3://my-bucket/cloudformation-template.yaml" + } + + tags = { + ManagedBy = "Terraform" + } +} + +resource "aws_servicecatalog_portfolio" "main" { + name = "Developer Self-Service" + description = "Self-service portal for developer resources" + provider_name = "Platform Team" +} + +resource "aws_servicecatalog_product_portfolio_association" "github_repo" { + portfolio_id = aws_servicecatalog_portfolio.main.id + product_id = aws_servicecatalog_product.github_repository.id +} +``` + +## Step 4: Test the Integration + +### Test 1: Direct Lambda Invocation + +Test the Lambda directly with a CloudFormation Custom Resource event: + +```bash +aws lambda invoke \ + --function-name 
service-catalog-repo-gen-template-automation \ + --payload file://events/cloudformation-create-event.json \ + --region us-gov-west-1 \ + response.json + +cat response.json +``` + +### Test 2: CloudFormation Stack + +Deploy a test stack directly: + +```bash +aws cloudformation create-stack \ + --stack-name test-repo-creation \ + --template-body file://cloudformation-template.yaml \ + --parameters \ + ParameterKey=ProjectName,ParameterValue=test-repo-$(date +%s) \ + ParameterKey=OwningTeam,ParameterValue=platform-team \ + ParameterKey=Environment,ParameterValue=development \ + --region us-gov-west-1 +``` + +Monitor the stack: + +```bash +aws cloudformation describe-stack-events \ + --stack-name test-repo-creation \ + --region us-gov-west-1 +``` + +### Test 3: Service Catalog + +1. Go to **AWS Service Catalog** → **Products** +2. Find your "Create GitHub Repository" product +3. Click **Launch product** +4. Fill in the parameters +5. Click **Launch** +6. Monitor the provisioned product status +7. Check the **Outputs** tab for repository URLs + +## Request Types + +The Lambda handles three CloudFormation request types: + +### Create + +Creates a new repository with configuration. + +```json +{ + "RequestType": "Create", + "ResourceProperties": { + "ProjectName": "new-repo", + "OwningTeam": "platform-team" + } +} +``` + +### Update + +Currently treated the same as Create. Future enhancement: update repository configuration. + +```json +{ + "RequestType": "Update", + "ResourceProperties": { + "ProjectName": "existing-repo", + "Environment": "production" + } +} +``` + +### Delete + +Acknowledged but does not delete the repository (manual cleanup required). + +```json +{ + "RequestType": "Delete", + "PhysicalResourceId": "new-repo-repository" +} +``` + +**Why?** Repositories contain code and history that shouldn't be automatically deleted. 
+ +## Parameter Handling + +### Parameter Name Conversion + +CloudFormation uses PascalCase, but your config files need snake_case: + +| CloudFormation | Lambda Converts To | +|----------------|-------------------| +| `ProjectName` | `project_name` | +| `OwningTeam` | `owning_team` | +| `AwsRegion` | `aws_region` | +| `MyCustomParameter` | `my_custom_parameter` | + +### Dynamic Parameters + +ALL parameters (except `ServiceToken`) are stored in the repository's `config.json`: + +```yaml +# In CloudFormation +Properties: + ServiceToken: !Ref LambdaArn + ProjectName: my-app + Environment: production + AwsRegion: us-gov-west-1 + CustomField: some-value +``` + +Results in `config.json`: + +```json +{ + "attrs": { + "environment": "production", + "aws_region": "us-gov-west-1", + "custom_field": "some-value" + }, + "tags": {} +} +``` + +## Monitoring and Debugging + +### CloudWatch Logs + +Lambda logs are in CloudWatch Logs: + +```bash +aws logs tail /aws/lambda/service-catalog-repo-gen-template-automation \ + --follow \ + --region us-gov-west-1 +``` + +### CloudFormation Events + +View stack events to see Custom Resource status: + +```bash +aws cloudformation describe-stack-events \ + --stack-name your-stack-name \ + --region us-gov-west-1 +``` + +### Common Issues + +#### 1. Lambda not invoked + +**Symptom**: CloudFormation stuck at "CREATE_IN_PROGRESS" + +**Solution**: Check Lambda permissions - CloudFormation needs `lambda:InvokeFunction` permission + +```bash +aws lambda get-policy \ + --function-name service-catalog-repo-gen-template-automation \ + --region us-gov-west-1 +``` + +#### 2. Repository not created + +**Symptom**: CloudFormation fails with custom resource error + +**Solution**: Check Lambda CloudWatch logs for the actual error + +```bash +aws logs filter-log-events \ + --log-group-name /aws/lambda/service-catalog-repo-gen-template-automation \ + --filter-pattern "ERROR" \ + --region us-gov-west-1 +``` + +#### 3. 
Missing outputs + +**Symptom**: Stack outputs show "N/A" or empty values + +**Solution**: Ensure you're using the correct attribute names with `!GetAtt` + +Valid attributes: `RepositoryUrl`, `RepositoryName`, `PullRequestUrl`, `MergeRequestUrl` + +## Best Practices + +### 1. Use Parameters for Validation + +Add constraints to CloudFormation parameters: + +```yaml +Parameters: + ProjectName: + Type: String + AllowedPattern: ^[a-zA-Z0-9-]+$ + MinLength: 3 + MaxLength: 100 + ConstraintDescription: Must contain only alphanumeric characters and hyphens +``` + +### 2. Store Important Values + +Use SSM Parameter Store to track created repositories: + +```yaml +Resources: + RepositoryParameter: + Type: AWS::SSM::Parameter + Properties: + Name: !Sub '/repositories/${ProjectName}' + Type: String + Value: !GetAtt GitHubRepository.RepositoryUrl +``` + +### 3. Add Metadata for Better UI + +Use CloudFormation Metadata to improve Service Catalog UI: + +```yaml +Metadata: + AWS::CloudFormation::Interface: + ParameterGroups: + - Label: + default: "Repository Configuration" + Parameters: + - ProjectName + - OwningTeam + ParameterLabels: + ProjectName: + default: "Repository Name" +``` + +### 4. 
Use Conditions for Optional Features + +```yaml +Conditions: + CreateSSMParameter: !Not [!Equals [!Ref Environment, "development"]] + +Resources: + RepositoryParameter: + Type: AWS::SSM::Parameter + Condition: CreateSSMParameter + Properties: + Name: !Sub '/repositories/${ProjectName}' + Value: !GetAtt GitHubRepository.RepositoryUrl +``` + +## Advanced: Multiple Template Repositories + +To support multiple template types, add a parameter: + +```yaml +Parameters: + RepositoryType: + Type: String + Description: Type of repository template to use + AllowedValues: + - eks-cluster + - lambda-function + - terraform-module + +Resources: + GitHubRepository: + Type: Custom::RepositoryCreator + Properties: + ServiceToken: !Ref LambdaFunctionArn + ProjectName: !Ref ProjectName + TemplateType: !Ref RepositoryType +``` + +Then update Lambda environment variables or use different Lambda functions per template type. + +## Summary + +1. ✅ Deploy Lambda using Terraform in `deploy/` +2. ✅ Create CloudFormation template with Custom Resource +3. ✅ Create Service Catalog product from template +4. ✅ Grant users access to the product +5. ✅ Users provision products through Service Catalog UI +6. ✅ Lambda creates repositories automatically +7. ✅ Stack outputs show repository URLs diff --git a/CLOUDFORMATION_CUSTOM_RESOURCE_MIGRATION.md b/CLOUDFORMATION_CUSTOM_RESOURCE_MIGRATION.md new file mode 100644 index 00000000..61a0878e --- /dev/null +++ b/CLOUDFORMATION_CUSTOM_RESOURCE_MIGRATION.md @@ -0,0 +1,263 @@ +# CloudFormation Custom Resource Migration Summary + +## What Changed + +Successfully migrated the Lambda function from Service Catalog EventBridge events to **CloudFormation Custom Resources**. + +## Why This Approach? 
+ +**CloudFormation Custom Resources** provide a better integration pattern for Service Catalog: + +### Before (EventBridge) +- ❌ Asynchronous - CloudFormation doesn't wait for repository creation +- ❌ No direct feedback to CloudFormation stack +- ❌ Can't use repository URLs in stack outputs +- ❌ Requires EventBridge rule configuration +- ❌ Harder to debug (events might get lost) + +### After (Custom Resource) +- ✅ **Synchronous** - CloudFormation waits for completion +- ✅ **Direct integration** - No middleware needed +- ✅ **Stack outputs** - Repository URLs available via `!GetAtt` +- ✅ **Native CloudFormation** - Uses built-in Custom Resource protocol +- ✅ **Better error handling** - Failures properly propagate to stack + +## Code Changes + +### 1. Lambda Handler (`template_automation/app.py`) + +**Changed Event Structure:** +```python +# Before: Service Catalog EventBridge event +event['detail']['provisioningParameters'] + +# After: CloudFormation Custom Resource event +event['ResourceProperties'] +``` + +**Added CloudFormation Response:** +```python +def send_cfn_response(event, context, status, response_data, physical_resource_id, reason): + """Send response back to CloudFormation via pre-signed URL""" + # Posts JSON response to ResponseURL +``` + +**Updated Input Model:** +```python +# Before: ServiceCatalogInput +class ServiceCatalogInput(BaseModel): + project_name: str + owning_team: Optional[str] + +# After: CloudFormationResourceInput +class CloudFormationResourceInput(BaseModel): + project_name: str + owning_team: Optional[str] +``` + +**Added Request Type Handling:** +- `Create` - Creates new repository +- `Update` - Currently same as Create (future: update config) +- `Delete` - Acknowledges but doesn't delete repository + +**Added Parameter Name Normalization:** +```python +# CloudFormation uses PascalCase, convert to snake_case +# ProjectName → project_name +# OwningTeam → owning_team +``` + +### 2. 
Deployment Configuration (`deploy/main.tf`) + +**Removed EventBridge Resources:** +```terraform +# Deleted: +# - aws_cloudwatch_event_rule.service_catalog +# - aws_cloudwatch_event_target.lambda +# - aws_lambda_permission.eventbridge +``` + +**Added CloudFormation Permission:** +```terraform +resource "aws_lambda_permission" "cloudformation" { + statement_id = "AllowCloudFormationInvoke" + action = "lambda:InvokeFunction" + function_name = module.service_catalog_repo_generator.lambda_function_name + principal = "cloudformation.amazonaws.com" +} +``` + +### 3. CloudFormation Template Created + +New file: `cloudformation-template.yaml` + +```yaml +Resources: + GitHubRepository: + Type: Custom::RepositoryCreator + Properties: + ServiceToken: !Sub 'arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:repo-generator' + ProjectName: !Ref ProjectName + OwningTeam: !Ref OwningTeam + Environment: !Ref Environment + +Outputs: + RepositoryUrl: + Value: !GetAtt GitHubRepository.RepositoryUrl + PullRequestUrl: + Value: !GetAtt GitHubRepository.PullRequestUrl +``` + +## Testing + +### Test Event Created + +New file: `events/cloudformation-create-event.json` + +```json +{ + "RequestType": "Create", + "ResponseURL": "https://cloudformation-custom-resource-response...", + "ResourceProperties": { + "ProjectName": "example-test-repository", + "OwningTeam": "platform-team" + } +} +``` + +### Test Command + +```bash +aws lambda invoke \ + --function-name service-catalog-repo-gen-template-automation \ + --payload file://events/cloudformation-create-event.json \ + response.json +``` + +## Documentation Created + +1. **`CLOUDFORMATION_CUSTOM_RESOURCE_GUIDE.md`** - Complete integration guide + - How CloudFormation Custom Resources work + - Step-by-step deployment instructions + - CloudFormation template examples + - Service Catalog integration + - Troubleshooting guide + +2. 
**`cloudformation-template.yaml`** - Production-ready template + - Complete Service Catalog product definition + - Parameter validation + - Metadata for better UI + - Output definitions + +3. **Updated `README.md`** + - Changed from EventBridge to Custom Resource focus + - Updated architecture diagram + - New workflow explanation + +## How Users Interact + +### 1. Service Catalog UI +``` +User logs into AWS Console + ↓ +Opens Service Catalog + ↓ +Finds "Create GitHub Repository" product + ↓ +Fills form with: + - Project Name + - Owning Team + - Environment + - Any custom parameters + ↓ +Clicks "Launch Product" +``` + +### 2. Behind the Scenes +``` +CloudFormation creates stack + ↓ +Encounters Custom::RepositoryCreator resource + ↓ +Invokes Lambda with ResourceProperties + ↓ +Lambda creates GitHub repository + ↓ +Lambda creates config.json with parameters + ↓ +Lambda opens pull request + ↓ +Lambda responds SUCCESS to CloudFormation + ↓ +CloudFormation completes stack + ↓ +Stack outputs show repository URLs +``` + +### 3. User Gets Results +``` +Service Catalog shows "Available" + ↓ +User clicks on provisioned product + ↓ +Sees outputs: + - RepositoryUrl: https://github.com/org/new-repo + - PullRequestUrl: https://github.com/org/new-repo/pull/1 +``` + +## Migration Benefits + +1. **Simpler Architecture** - No EventBridge rules needed +2. **Better User Experience** - Synchronous feedback in Service Catalog +3. **Stack Outputs** - Repository URLs visible in CloudFormation +4. **Error Handling** - Failures properly shown in CloudFormation +5. **Standard Pattern** - Uses well-known Custom Resource protocol +6. 
**Reusable** - Can be used outside Service Catalog too + +## Files Changed + +- ✅ `template_automation/app.py` - Lambda handler updated +- ✅ `deploy/main.tf` - Deployment config updated +- ✅ `README.md` - Documentation updated +- ✅ `cloudformation-template.yaml` - Created +- ✅ `events/cloudformation-create-event.json` - Created +- ✅ `CLOUDFORMATION_CUSTOM_RESOURCE_GUIDE.md` - Created +- ✅ `CLOUDFORMATION_CUSTOM_RESOURCE_MIGRATION.md` - This file + +## Next Steps + +1. **Deploy Lambda** with updated code: + ```bash + cd /home/a/arnol377/git/lambda-template-repo-generator + packer-pipeline build --config config_packer.hcl + ``` + +2. **Update Infrastructure**: + ```bash + cd deploy + terraform apply + ``` + +3. **Create Service Catalog Product** using `cloudformation-template.yaml` + +4. **Test** by provisioning a product through Service Catalog + +## Deployment Checklist + +- [ ] Build new Lambda container with updated code +- [ ] Push container to ECR +- [ ] Apply Terraform changes in `deploy/` +- [ ] Verify Lambda permission for CloudFormation +- [ ] Upload CloudFormation template to S3 +- [ ] Create/update Service Catalog product +- [ ] Test with a sample repository +- [ ] Verify outputs in CloudFormation stack +- [ ] Check created repository has config.json +- [ ] Verify pull request was created + +## Support + +For issues or questions: +1. Check Lambda CloudWatch logs: `/aws/lambda/service-catalog-repo-gen-template-automation` +2. Review CloudFormation stack events +3. See `CLOUDFORMATION_CUSTOM_RESOURCE_GUIDE.md` for troubleshooting diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000..e6a93723 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,2 @@ +# These owners will be the default owners for everything in the repo. 
Unless a later match takes precedence +* @HappyPathway/terraform-reviewers diff --git a/DEPLOYMENT.md b/DEPLOYMENT.md new file mode 100644 index 00000000..91d7c3c6 --- /dev/null +++ b/DEPLOYMENT.md @@ -0,0 +1,288 @@ +# Deployment Guide: Service Catalog Repository Generator + +This guide walks through deploying the Lambda function that Service Catalog will invoke to create GitHub repositories. + +## Overview + +The deployment has three main steps: +1. **Create ECR Repository** - Where the Lambda container image will be stored +2. **Build & Push Container** - Build the Lambda code into a container and push to ECR +3. **Deploy Lambda Function** - Deploy the Lambda and EventBridge integration using Terraform + +## Prerequisites + +- AWS CLI configured with appropriate credentials +- Terraform installed (>= 1.0) +- `packer-pipeline` tool installed +- GitHub/GitLab personal access token stored in AWS Secrets Manager +- S3 buckets created: + - `csvd-template-automation-builds` (for build artifacts) + - `image-pipeline-assets-dev` (for Packer binaries) + +## Step 1: Create ECR Repository + +```bash +cd /home/a/arnol377/git/lambda-template-repo-generator + +# Review the ECR repository configuration +terraform init +terraform plan + +# Create the ECR repository +terraform apply +``` + +**Output**: ECR repository at `229685449397.dkr.ecr.us-gov-west-1.amazonaws.com/service-catalog-repo-generator/lambda` + +## Step 2: Build and Push Lambda Container + +```bash +cd /home/a/arnol377/git/lambda-template-repo-generator + +# Build using packer-pipeline (this creates CodeBuild project and runs the build) +packer-pipeline build --config config_packer.hcl +``` + +This will: +- Create a CodeBuild project named `service-catalog-repo-generator-builder` +- Package your code and upload to S3 +- Run Packer to build the Docker image +- Push the image to ECR with tag `latest` + +**Verify the build**: +```bash +# Check the ECR repository for the image +aws ecr describe-images \ + 
--repository-name service-catalog-repo-generator/lambda \ + --region us-gov-west-1 +``` + +## Step 3: Deploy Lambda Function + +### 3a. Configure Deployment Variables + +```bash +cd /home/a/arnol377/git/lambda-template-repo-generator/deploy + +# Copy the example tfvars file +cp terraform.tfvars.example terraform.tfvars + +# Edit with your values +nano terraform.tfvars +``` + +Update the following values: +- `github_org_name` - Your GitHub organization +- `template_repo_name` - The template repository to clone from +- `github_token_secret_name` - Name of your Secrets Manager secret +- VPC settings if using GitHub Enterprise + +### 3b. Deploy Infrastructure + +```bash +# Initialize Terraform +terraform init + +# Review the plan +terraform plan + +# Deploy the Lambda function and EventBridge rule +terraform apply +``` + +This creates: +- ✅ Lambda function using your container image +- ✅ IAM roles and policies +- ✅ SSM parameters for configuration +- ✅ EventBridge rule to trigger from Service Catalog +- ✅ Lambda permissions for EventBridge +- ✅ API Gateway (if you want direct invocation as well) + +## Step 4: Create Service Catalog Product + +Now you need to create a Service Catalog product that triggers your Lambda. 
You have two options: + +### Option A: CloudFormation with Lambda-backed Custom Resource + +Create a CloudFormation template: + +```yaml +AWSTemplateFormatVersion: '2010-09-09' +Description: 'Service Catalog Product: Create GitHub Repository' + +Parameters: + ProjectName: + Type: String + Description: Name of the repository to create + OwningTeam: + Type: String + Description: GitHub team that should own the repository + Default: tf-module-admins + Environment: + Type: String + Description: Environment (dev, staging, prod) + AllowedValues: + - development + - staging + - production + AwsRegion: + Type: String + Description: AWS region for the project + Default: us-gov-west-1 + +Resources: + TriggerLambda: + Type: Custom::RepositoryCreator + Properties: + ServiceToken: !Sub 'arn:aws-us-gov:lambda:us-gov-west-1:229685449397:function:service-catalog-repo-gen-template-automation' + ProjectName: !Ref ProjectName + OwningTeam: !Ref OwningTeam + Environment: !Ref Environment + AwsRegion: !Ref AwsRegion + +Outputs: + RepositoryUrl: + Description: URL of the created repository + Value: !GetAtt TriggerLambda.repository_url + PullRequestUrl: + Description: URL of the configuration pull request + Value: !GetAtt TriggerLambda.pull_request_url +``` + +### Option B: EventBridge Pattern (Already configured!) + +The EventBridge rule created in Step 3 already listens for Service Catalog events. Simply: + +1. Create a CloudFormation template that provisions **any** resource +2. Add your parameters (project_name, owning_team, etc.) +3. When Service Catalog provisions this product, it emits an event +4. 
Your EventBridge rule catches it and triggers the Lambda + +**Simple CloudFormation for this:** + +```yaml +AWSTemplateFormatVersion: '2010-09-09' +Description: 'Trigger repository creation via Service Catalog' + +Parameters: + ProjectName: + Type: String + Description: Name of the repository to create + OwningTeam: + Type: String + Description: GitHub team that should own the repository + Default: tf-module-admins + +Resources: + # This is just a placeholder - the real work is done by the Lambda + DummyParameter: + Type: AWS::SSM::Parameter + Properties: + Name: !Sub '/service-catalog/repositories/${ProjectName}' + Type: String + Value: !Ref ProjectName + Description: !Sub 'Service Catalog provisioned repository: ${ProjectName}' + +Outputs: + Message: + Value: !Sub 'Repository creation triggered for ${ProjectName}. Check GitHub/Lambda logs for details.' +``` + +## Step 5: Test the Integration + +### Manual Lambda Test + +```bash +# Invoke Lambda directly with a test event +aws lambda invoke \ + --function-name service-catalog-repo-gen-template-automation \ + --payload file://events/service-catalog-event.json \ + --region us-gov-west-1 \ + response.json + +cat response.json +``` + +### Service Catalog Test + +1. Go to AWS Service Catalog console +2. Create a portfolio and add your product +3. Provision the product with test parameters +4. Watch CloudWatch Logs for the Lambda execution +5. 
Check GitHub for the new repository + +## Monitoring and Troubleshooting + +### Check Lambda Logs + +```bash +# Get recent logs +aws logs tail /aws/lambda/service-catalog-repo-gen-template-automation \ + --follow \ + --region us-gov-west-1 +``` + +### Verify EventBridge Rule + +```bash +# Check if the rule is enabled +aws events describe-rule \ + --name service-catalog-repo-provisioning \ + --region us-gov-west-1 +``` + +### Test Event Pattern + +```bash +# Send a test event to EventBridge +aws events put-events \ + --entries file://test-event.json \ + --region us-gov-west-1 +``` + +## Updating the Lambda + +When you make code changes: + +```bash +# 1. Rebuild the container +cd /home/a/arnol377/git/lambda-template-repo-generator +packer-pipeline build --config config_packer.hcl + +# 2. Update Lambda to use the new image +cd deploy +terraform apply -target=module.service_catalog_repo_generator.aws_lambda_function.this + +# Or force Lambda to pull the latest image +aws lambda update-function-code \ + --function-name service-catalog-repo-gen-template-automation \ + --image-uri 229685449397.dkr.ecr.us-gov-west-1.amazonaws.com/service-catalog-repo-generator/lambda:latest \ + --region us-gov-west-1 +``` + +## Architecture Diagram + +``` +User → Service Catalog UI + ↓ + Provisions Product + ↓ + CloudFormation runs + ↓ + EventBridge emits event + ↓ + Lambda Function triggered + ↓ + Creates GitHub Repository + ↓ + Writes config.json + ↓ + Opens Pull Request +``` + +## Next Steps + +1. Create Service Catalog portfolio and products +2. Set up proper IAM permissions for users to provision products +3. Configure SNS notifications for repository creation +4. Add additional template repositories for different project types diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..142192f8 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,33 @@ +# DEPRECATED: This Dockerfile is not used for deployment. 
+# Lambda container image is built using Packer (see packer.pkr.hcl) +# ---------------------------------------------------------------- +# Service Catalog Repository Generator Lambda +# +# This Lambda function processes AWS Service Catalog provisioning events +# to automatically create and configure GitHub/GitLab repositories. +# +# Build method: Packer (packer.pkr.hcl) +# Event source: AWS EventBridge (Service Catalog events) +# ---------------------------------------------------------------- +# Keeping this file for reference only + +FROM public.ecr.aws/lambda/python:3.11 + +# Copy requirements first to leverage Docker cache +COPY requirements.txt /tmp/ +RUN pip install --no-cache-dir -r /tmp/requirements.txt -t /var/task + +# Copy all code files for package installation +COPY . /tmp/app/ + +# Install the package in development mode to make it available to Python +RUN pip install -e /tmp/app -t /var/task + +# Explicitly copy the template_automation package to the Lambda task root +COPY template_automation /var/task/template_automation/ + +# Copy the root app.py file (this is essential for AWS Lambda to find the handler) +COPY app.py /var/task/ + +# Lambda handler for Service Catalog events +CMD [ "template_automation.app.lambda_handler" ] diff --git a/MIGRATION.md b/MIGRATION.md new file mode 100644 index 00000000..4a890113 --- /dev/null +++ b/MIGRATION.md @@ -0,0 +1,204 @@ +# Service Catalog Migration Summary + +## Overview + +Successfully migrated the template-automation-lambda codebase to lambda-template-repo-generator with exclusive support for AWS Service Catalog events. + +## Changes Made + +### 1. 
Code Migration +- **Copied** all code from `/home/a/arnol377/git/template-automation-lambda` to `/home/a/arnol377/git/lambda-template-repo-generator` +- Preserved directory structure including: + - `template_automation/` - Main Python package + - `tests/` - Test suites + - `scripts/` - Utility scripts + - `events/` - Test events + - `docs/` - Documentation + - Infrastructure files (Dockerfile, Makefile, Terraform, etc.) + +### 2. Core Lambda Handler Changes (`template_automation/app.py`) + +#### Event Structure Parsing +**Before:** +```python +event_body = event.get('body', {}) +template_input = TemplateInput(**event_body) +``` + +**After:** +```python +if 'detail' not in event: + raise ValueError("Event missing 'detail' field - not a valid Service Catalog event") + +detail = event['detail'] +provisioning_params = detail['provisioningParameters'] +service_catalog_input = ServiceCatalogInput(**provisioning_params) +template_settings = service_catalog_input.to_template_settings() +``` + +#### Input Model +**Before:** `TemplateInput` with explicit `template_settings` field + +**After:** `ServiceCatalogInput` with dynamic field extraction +```python +class ServiceCatalogInput(BaseModel): + project_name: str + owning_team: Optional[str] = "tf-module-admins" + + model_config = {"extra": "allow"} # Accept any Service Catalog parameters + + def to_template_settings(self) -> Dict[str, Any]: + # Converts all extra fields to attrs/tags structure +``` + +#### Pull Request Messages +**Before:** Generic "Initialize repository from template" + +**After:** Service Catalog-specific messages with provisioning details +```python +title=f"Initialize {service_catalog_input.project_name} from Service Catalog" +description=f"...from Service Catalog provisioning.\n\nProvisioned Product: {detail.get('provisionedProductName')}" +``` + +#### Team Permissions +**Before:** Attempted for all providers + +**After:** Only for GitHub provider +```python +if service_catalog_input.owning_team and 
provider_type == "GitHubProvider": + provider.set_team_permission(...) +``` + +### 3. New Test Event Format + +Created `events/service-catalog-event.json` with EventBridge structure: +```json +{ + "version": "0", + "detail-type": "Service Catalog Product Provisioning", + "source": "aws.servicecatalog", + "detail": { + "eventName": "ProvisionProduct", + "provisioningParameters": { + "project_name": "...", + "owning_team": "...", + ... + } + } +} +``` + +### 4. Documentation + +- **README.md**: Completely rewritten for Service Catalog focus + - Architecture diagram showing Service Catalog → EventBridge → Lambda flow + - Service Catalog-specific event structure documentation + - Provisioning parameters specification + - EventBridge rule configuration examples + +- **Test Script**: Created `test_service_catalog.py` to validate event parsing + +### 5. Configuration File Output + +The Lambda now creates `config.json` in repositories with this structure: +```json +{ + "attrs": { + "aws_region": "...", + "environment": "...", + ... all other Service Catalog parameters + }, + "tags": { + ... if tags parameter provided + } +} +``` + +## Backwards Compatibility + +**NONE** - This is a clean break from the original implementation: + +- ❌ No support for direct Lambda invocation +- ❌ No support for API Gateway events +- ❌ No support for `template_settings` input format +- ❌ No support for `trigger_init_workflow` flag +- ✅ **ONLY** supports EventBridge events from AWS Service Catalog + +## Testing Results + +```bash +$ python3 test_service_catalog.py +Testing Service Catalog event parsing... +============================================================ +✓ Found 19 provisioning parameters +✓ ServiceCatalogInput validation successful +✓ Converted to template settings format +✓ All tests passed! +``` + +## Files Modified + +1. `template_automation/app.py` - Complete lambda_handler rewrite +2. `events/test-event.json` - Converted to Service Catalog format +3. 
`events/service-catalog-event.json` - New Service Catalog example +4. `README.md` - Complete rewrite +5. `test_service_catalog.py` - New test script + +## Files Unchanged + +- All provider implementations (`github_provider.py`, `gitlab_provider.py`) +- Repository provider interface (`repository_provider.py`) +- Models (`models.py`) +- Infrastructure files (Terraform, Dockerfile, etc.) +- Tests (existing tests may need updates) + +## Next Steps + +1. **Update Tests**: Modify existing tests to use Service Catalog event format +2. **EventBridge Rule**: Configure EventBridge to trigger Lambda on Service Catalog events +3. **Service Catalog Product**: Define product with appropriate parameters +4. **IAM Permissions**: Ensure Lambda has permissions to process EventBridge events +5. **Monitoring**: Set up CloudWatch alarms for Lambda failures + +## Deployment Considerations + +- **Container Image**: Existing Dockerfile and Packer configuration can be reused +- **Environment Variables**: No changes required (same as original) +- **IAM Role**: May need additional permissions for EventBridge event processing +- **Trigger**: Change from API Gateway/direct invoke to EventBridge rule + +## Sample EventBridge Rule + +```json +{ + "source": ["aws.servicecatalog"], + "detail-type": ["Service Catalog Product Provisioning"], + "detail": { + "eventName": ["ProvisionProduct"], + "status": ["SUCCEEDED"] + } +} +``` + +## Validation + +The code has been validated for: +- ✅ Python syntax (no compilation errors) +- ✅ Event structure parsing +- ✅ Parameter extraction and conversion +- ✅ Pydantic model validation (v2 compatibility) + +## Known Limitations + +1. Team permissions only work with GitHub provider (not GitLab) +2. Requires all Service Catalog parameters to be flat (nested objects become strings) +3. Special handling only for `tags` parameter (must be a dict) +4. No validation of Service Catalog event authenticity (trusts EventBridge) + +## Support + +For issues: +1. 
Check CloudWatch Logs with request ID
+2. Verify event structure matches expected format
+3. Confirm provisioning parameters include `project_name`
+4. Check GitHub/GitLab provider configuration
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..bca92c5f
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,44 @@
+.PHONY: install test test-unit test-integration clean clean-test-repos
+
+# Variables
+PYTHON = python3
+PIP = $(PYTHON) -m pip
+PYTEST = $(PYTHON) -m pytest
+REQUIREMENTS = template_automation/requirements.txt
+TEST_DIR = template_automation/tests
+UNIT_TEST_FILE = $(TEST_DIR)/test_github_client.py
+INTEGRATION_TEST_FILE = $(TEST_DIR)/test_github_client_integration.py
+
+# Default target
+all: test
+
+# Install dependencies
+install:
+	$(PIP) install -r $(REQUIREMENTS)
+
+# Run all tests
+test: test-unit test-integration
+	@echo "Running all tests..."
+	$(PYTEST) $(TEST_DIR)
+
+# Run unit tests
+test-unit:
+	@echo "Running unit tests..."
+	$(PYTEST) $(UNIT_TEST_FILE)
+
+# Run integration tests
+test-integration:
+	@echo "Running integration tests..."
+	$(PYTEST) $(INTEGRATION_TEST_FILE)
+
+# Clean up Python cache files
+clean:
+	find . -type f -name '*.pyc' -delete
+	find . -type d -name '__pycache__' -exec rm -rf {} +
+	rm -rf .pytest_cache
+	rm -rf .coverage
+
+# Clean up temporary test repositories on GitHub
+clean-test-repos:
+	echo "Cleaning up temporary test repositories..." 
+ $(PYTHON) scripts/cleanup_test_repos.py diff --git a/PACKER_PIPELINE_EXPLANATION.md b/PACKER_PIPELINE_EXPLANATION.md new file mode 100644 index 00000000..d4d95187 --- /dev/null +++ b/PACKER_PIPELINE_EXPLANATION.md @@ -0,0 +1,125 @@ +# Packer Pipeline Build Issue - Resolution + +## Problem + +The CodeBuild build was failing with this error: +``` +Error while executing command: packer build \ + -var "repository_uri=$REPOSITORY_URI" \ + -var "tag=$TAG" \ + -var "terraform_version=$TERRAFORM_VERSION" \ + ${PACKER_TEMPLATE_FILE} +exit status 1 +``` + +The error indicated that Packer was being passed a `terraform_version` variable that didn't exist in our `packer.pkr.hcl` file. + +## Root Cause + +After inspecting the `packer-pipeline` codebase, the issue was identified in the buildspec template: + +**File**: `/home/a/arnol377/git/packer-pipeline/packer_pipeline/templates/buildspec.yml.j2` + +```yaml +# Set terraform version (default if not specified) +TERRAFORM_VERSION="${TERRAFORM_VERSION:-1.5.7}" + +echo "Building with repository_uri=$REPOSITORY_URI tag=$TAG terraform_version=$TERRAFORM_VERSION" + +# Run Packer build with required variables +packer build \ + -var "repository_uri=$REPOSITORY_URI" \ + -var "tag=$TAG" \ + -var "terraform_version=$TERRAFORM_VERSION" \ + ${PACKER_TEMPLATE_FILE} +``` + +The `packer-pipeline` tool's buildspec template is **designed to handle both**: +1. **Terraform module builds** (which need `terraform_version`) +2. **Lambda container builds** (which don't need `terraform_version`) + +However, it **always** passes `terraform_version` to Packer, regardless of the build type. + +## Solution + +The fix is simple: **Add the `terraform_version` variable to our `packer.pkr.hcl` file**, even though we don't use it for Lambda builds. 
+ +### Updated packer.pkr.hcl + +```hcl +variable "terraform_version" { + type = string + default = "1.5.7" + description = "Terraform version (not used for Lambda builds, but required by packer-pipeline)" +} +``` + +This variable: +- ✅ Satisfies the packer-pipeline's buildspec requirements +- ✅ Has a default value so it doesn't need to be explicitly set +- ✅ Is never actually used in our Lambda container build process +- ✅ Makes our packer template compatible with the standard packer-pipeline workflow + +## Why This Approach? + +### Alternative Approaches Considered: + +1. **Modify packer-pipeline** ❌ + - Would require maintaining a fork + - Would break compatibility with other projects + - Not sustainable + +2. **Create custom buildspec template** ❌ + - More complex configuration + - Loses benefits of standardized pipeline + - More maintenance overhead + +3. **Add unused variable** ✅ **CHOSEN** + - Simple, clean solution + - Maintains compatibility with packer-pipeline + - No custom configuration needed + - Standard approach for multi-purpose templates + +## Verification + +```bash +$ cd /home/a/arnol377/git/lambda-template-repo-generator +$ packer validate packer.pkr.hcl +The configuration is valid. +``` + +## Key Takeaway + +The `packer-pipeline` tool is designed to be a **general-purpose** Packer automation tool that handles multiple project types. When using it, your Packer templates must accept all variables that the pipeline passes, even if your specific build doesn't use them. + +This is a common pattern in infrastructure-as-code tools where templates need to support multiple use cases. 
+ +## Related Files + +- `packer.pkr.hcl` - Updated to include `terraform_version` variable +- `config_packer.hcl` - Configuration for packer-pipeline +- `/home/a/arnol377/git/packer-pipeline/packer_pipeline/templates/buildspec.yml.j2` - Source of the buildspec template + +## Build Process Flow + +``` +config_packer.hcl + ↓ +packer-pipeline tool + ↓ +generates buildspec.yml (from template) + ↓ +buildspec passes: repository_uri, tag, terraform_version + ↓ +packer.pkr.hcl must accept all three variables + ↓ +Build succeeds ✓ +``` + +## Next Steps + +The build should now succeed. If you still encounter issues, check: +1. CodeBuild IAM permissions +2. ECR repository exists and is accessible +3. S3 buckets are accessible +4. VPC configuration (if using private subnets) diff --git a/PACKER_UPDATES.md b/PACKER_UPDATES.md new file mode 100644 index 00000000..20efde7f --- /dev/null +++ b/PACKER_UPDATES.md @@ -0,0 +1,128 @@ +# Packer Configuration Updates for Service Catalog Lambda + +## Summary of Changes + +The Packer configuration has been updated to build a **Service Catalog-specific** Lambda container image that processes AWS Service Catalog provisioning events. + +## Files Modified + +### 1. `packer.pkr.hcl` + +**Changes:** +- Added comprehensive header comment explaining Service Catalog integration +- Updated build name from `template-automation-lambda` to `service-catalog-repo-generator` +- Updated CMD to use `template_automation.app.lambda_handler` instead of `app.lambda_handler` +- Enhanced variable descriptions to reference Service Catalog purpose +- Added installation messages in shell provisioner for clarity + +**Key Updates:** +```hcl +# Build name +build { + name = "service-catalog-repo-generator" + ... +} + +# Lambda handler +source "docker" "lambda" { + ... + changes = [ + "WORKDIR /var/task", + "CMD [ \"template_automation.app.lambda_handler\" ]" + ] +} +``` + +### 2. 
`config_packer.hcl` + +**Changes:** +- Updated header comment to reference Service Catalog Repository Generator +- Changed CodeBuild project name from `template-automation-lambda-builder` to `service-catalog-repo-generator-builder` +- Updated S3 key prefix from `packer-builds/template-automation-lambda` to `packer-builds/service-catalog-repo-generator` + +**Key Updates:** +```hcl +codebuild_project_name = "service-catalog-repo-generator-builder" +s3_key_prefix = "packer-builds/service-catalog-repo-generator" +``` + +### 3. `Dockerfile` + +**Changes:** +- Added Service Catalog context to header comments +- Updated CMD to use `template_automation.app.lambda_handler` +- Added explanation that this is for Service Catalog event processing + +## Build Process + +The Packer template builds a Lambda container image with these characteristics: + +1. **Base Image**: AWS Lambda Python 3.11 (`public.ecr.aws/lambda/python:3.11`) +2. **Handler**: `template_automation.app.lambda_handler` +3. **Dependencies**: Installed from `requirements.txt` including: + - pydantic (v2) for Service Catalog event validation + - boto3 for AWS service integration + - requests for GitHub/GitLab API calls + +## Build Command + +```bash +packer build \ + -var "repository_uri=" \ + -var "tag=" \ + -var "ecr_login_username=AWS" \ + -var "ecr_login_password=" \ + -var "ecr_login_server=" \ + packer.pkr.hcl +``` + +## Deployment Integration + +The built container image is designed to be: +1. **Triggered by**: AWS EventBridge rules filtering Service Catalog events +2. **Event format**: Service Catalog provisioning events with `provisioningParameters` +3. **Output**: Creates GitHub/GitLab repositories with configuration from Service Catalog + +## Validation + +Packer template validation confirmed: +```bash +$ packer validate packer.pkr.hcl +The configuration is valid. +``` + +## Next Steps + +To deploy the Lambda function: + +1. Build the container image using Packer +2. 
Push to ECR (automated by Packer post-processor) +3. Create Lambda function using the container image +4. Configure EventBridge rule to trigger on Service Catalog events +5. Set environment variables for GitHub/GitLab integration + +## Environment Variables Required + +The Lambda function requires these environment variables: + +- `TEMPLATE_REPO_NAME`: Source template repository +- `TEMPLATE_CONFIG_FILE`: Config file path (default: config.json) +- `GITHUB_API` or `GITLAB_API`: API base URL +- `GITHUB_ORG_NAME` or `GITLAB_GROUP_NAME`: Organization/group name +- `GITHUB_TOKEN_SECRET_NAME` or `GITLAB_TOKEN_SECRET_NAME`: Secrets Manager secret name +- `VERIFY_SSL`: SSL verification (default: true) + +## EventBridge Rule Example + +```json +{ + "source": ["aws.servicecatalog"], + "detail-type": ["Service Catalog Product Provisioning"], + "detail": { + "eventName": ["ProvisionProduct"], + "status": ["SUCCEEDED"] + } +} +``` + +This ensures the Lambda only processes successful Service Catalog provisioning events. diff --git a/README.md b/README.md new file mode 100644 index 00000000..cc5462a4 --- /dev/null +++ b/README.md @@ -0,0 +1,116 @@ +# Lambda Template Repository Generator (CloudFormation Custom Resource) + +## Overview + +This Lambda function automates the creation and configuration of new repositories from templates using **CloudFormation Custom Resources**. When invoked as a Custom Resource in a CloudFormation stack (typically through AWS Service Catalog), this Lambda function automatically creates a GitHub/GitLab repository with the appropriate configuration. + +This implementation uses CloudFormation Custom Resources to enable Service Catalog integration - the Lambda is invoked directly by CloudFormation during stack provisioning. 
+ +## Key Features + +- **CloudFormation Native**: Works as a CloudFormation Custom Resource +- **Service Catalog Compatible**: Perfect for Service Catalog product definitions +- **Input Validation**: Uses Pydantic for parameter validation +- **Dynamic Parameters**: Accepts ANY parameters and stores them in config.json +- **Multi-Provider**: Supports both GitHub and GitLab +- **Automatic PR Creation**: Creates pull request with configuration + +## Architecture + +``` +Service Catalog Product (CloudFormation Template) + ↓ + CloudFormation Stack Provisioning + ↓ + Custom Resource Invokes Lambda + ↓ + Lambda Creates Repository + ↓ + GitHub/GitLab Repository + Configuration PR +``` + +## CloudFormation Event Structure + +The Lambda expects CloudFormation Custom Resource events in this format: + +```json +{ + "RequestType": "Create", + "ResponseURL": "pre-signed-url", + "StackId": "arn:aws:cloudformation:...", + "RequestId": "unique-id", + "LogicalResourceId": "MyRepository", + "ResourceType": "Custom::RepositoryCreator", + "ResourceProperties": { + "ServiceToken": "arn:aws:lambda:...:function:repo-generator", + "ProjectName": "my-new-repository", + "OwningTeam": "platform-team", + "Environment": "development", + "AwsRegion": "us-east-1" + } +} +``` + +## CloudFormation Template Example + +See `cloudformation-template.yaml` for a complete Service Catalog product template: + +```yaml +Resources: + GitHubRepository: + Type: Custom::RepositoryCreator + Properties: + ServiceToken: !Sub 'arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:repo-generator' + ProjectName: !Ref ProjectName + OwningTeam: !Ref OwningTeam + Environment: !Ref Environment + +Outputs: + RepositoryUrl: + Value: !GetAtt GitHubRepository.RepositoryUrl + PullRequestUrl: + Value: !GetAtt GitHubRepository.PullRequestUrl +``` + +## Required Parameters + +### Core Parameters (Required) + +- `ProjectName` (string): Name of the repository to create +- `OwningTeam` (string): GitHub team that should have 
admin access (default: "tf-module-admins") + +### Additional Parameters (Optional) + +All other parameters in `ResourceProperties` are collected and stored in the repository's `config.json` file. + +**Note**: CloudFormation uses PascalCase for parameters. The Lambda automatically converts them to snake_case: +- `ProjectName` → `project_name` +- `OwningTeam` → `owning_team` +- `AwsRegion` → `aws_region` + +## Workflow + +1. **User Provisions**: User provisions Service Catalog product +2. **CloudFormation Runs**: CloudFormation stack is created +3. **Lambda Invoked**: Custom Resource invokes Lambda with parameters +4. **Create Repository**: Lambda creates new repository in GitHub/GitLab +5. **Clone Template**: Copies contents from template repository +6. **Write Config**: Creates `config.json` with all parameters +7. **Create PR**: Opens pull request with the configuration +8. **Set Permissions**: Assigns team permissions (GitHub only) +9. **Return Success**: Lambda sends success response to CloudFormation +10. **Stack Complete**: CloudFormation stack completes with repository URLs in outputs + +## Configuration File Structure + +The Lambda creates a `config.json` file in the repository with this structure: + +```json +{ + "attrs": { + "aws_region": "us-east-1", + "environment": "development" + }, + "tags": {} +} +``` diff --git a/__init__.py b/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/app.py b/app.py new file mode 100644 index 00000000..cdfd70bf --- /dev/null +++ b/app.py @@ -0,0 +1,46 @@ +""" +Lambda function entrypoint that imports the handler from the template_automation package. + +This file resolves the 'attempted relative import with no known parent package' +error by placing an entrypoint at the root level of the Lambda package. 
+""" + +import sys +import os +import importlib.util + +# Add Lambda task root directory to Python path +sys.path.insert(0, '/var/task') + +# Try to read environment variables if they exist +if os.path.exists('/var/task/.env'): + with open('/var/task/.env', 'r') as env_file: + for line in env_file: + if '=' in line: + key, value = line.strip().split('=', 1) + os.environ[key] = value + if key == 'PYTHONPATH': + for path in value.split(':'): + if path and path not in sys.path: + sys.path.insert(0, path) + +# Fallback check for critical dependencies +for module in ['pydantic', 'jinja2', 'github']: + try: + importlib.import_module(module) + except ImportError: + print(f"Warning: {module} not found in standard paths. Looking in /var/task...") + # Look for the module in /var/task + module_paths = [ + f'/var/task/{module}', + f'/var/task/lib/python3.11/site-packages/{module}' + ] + for path in module_paths: + if os.path.exists(path): + sys.path.insert(0, os.path.dirname(path)) + break + +from template_automation.app import lambda_handler + +# Re-export the lambda_handler function for Lambda runtime to find it +__all__ = ['lambda_handler'] diff --git a/backend.tf b/backend.tf new file mode 100644 index 00000000..bf72db18 --- /dev/null +++ b/backend.tf @@ -0,0 +1,6 @@ +terraform { + backend "gcs" { + bucket = "hpw-terraform-state" + prefix = "template-automation-lambda" + } +} diff --git a/buildspec.yml.j2 b/buildspec.yml.j2 new file mode 100644 index 00000000..fd4ea2c7 --- /dev/null +++ b/buildspec.yml.j2 @@ -0,0 +1,93 @@ +version: 0.2 + +env: + variables: + PACKER_TEMPLATE_FILE: "{{ packer_template_file }}" + AWS_REGION: "{{ aws_region }}" + ECR_REPOSITORY: "{{ ecr_repository }}" + AWS_ACCOUNT_ID: "{{ aws_account_id }}" + {% if environment_variables %} + {% for key, value in environment_variables.items() %} + {{ key }}: "{{ value }}" + {% endfor %} + {% endif %} + +phases: + install: + commands: + - echo "Installing Packer and dependencies for Service Catalog Lambda 
build..." + {% if tools %} + {% for tool in tools %} + - echo "Installing {{ tool.name }} version {{ tool.version }}..." + - aws s3 cp s3://{{ assets_bucket }}/{{ tool.zip_path }} /tmp/{{ tool.zip_path }} + - unzip -o /tmp/{{ tool.zip_path }} -d /tmp/{{ tool.name }} + - chmod +x /tmp/{{ tool.name }}/{{ tool.binary_name }} + - mv /tmp/{{ tool.name }}/{{ tool.binary_name }} {{ tool.install_path }}/ + - {{ tool.binary_name }} version + {% endfor %} + {% endif %} + - echo "Packer installation complete" + + pre_build: + commands: + - echo "Initializing Packer plugins for Lambda container build..." + - packer init ${PACKER_TEMPLATE_FILE} + - echo "Packer plugins initialized successfully" + + build: + commands: + - echo "Building Service Catalog Lambda container image..." + + # Get ECR login credentials + - echo "Logging into ECR..." + - aws ecr get-login-password --region ${AWS_REGION} | docker login --username AWS --password-stdin ${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com + + # Build repository URI for ECR + - | + if [ -n "$ECR_REPOSITORY" ]; then + REPOSITORY_URI="${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com/${ECR_REPOSITORY}" + else + echo "ERROR: ECR_REPOSITORY is required for container builds" + exit 1 + fi + + # Set image tag + - | + if [ -n "$IMAGE_VERSION_TAG" ]; then + TAG="$IMAGE_VERSION_TAG" + elif [ -n "$IMAGE_TAG" ]; then + TAG="$IMAGE_TAG" + else + TAG="latest" + fi + + # Get ECR credentials for Packer + - ECR_USERNAME="AWS" + - ECR_PASSWORD=$(aws ecr get-login-password --region ${AWS_REGION}) + - ECR_LOGIN_SERVER="${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com" + + # Use the cloned base image from ECR instead of public ECR + - BASE_IMAGE="{{ base_image }}" + + - echo "Building with repository_uri=$REPOSITORY_URI tag=$TAG base_image=$BASE_IMAGE" + + # Run Packer build with required variables for Lambda container + - | + packer build \ + -var "repository_uri=$REPOSITORY_URI" \ + -var "tag=$TAG" \ + -var "base_image=$BASE_IMAGE" \ 
+ -var "ecr_login_username=$ECR_USERNAME" \ + -var "ecr_login_password=$ECR_PASSWORD" \ + -var "ecr_login_server=$ECR_LOGIN_SERVER" \ + ${PACKER_TEMPLATE_FILE} + + post_build: + commands: + - echo "Service Catalog Lambda container image build completed successfully" + - echo "Image pushed to $REPOSITORY_URI:$TAG" + - echo "Lambda function is ready for deployment with EventBridge and Service Catalog" + +artifacts: + files: + - '**/*' diff --git a/cloudformation-template.yaml b/cloudformation-template.yaml new file mode 100644 index 00000000..41ffe949 --- /dev/null +++ b/cloudformation-template.yaml @@ -0,0 +1,108 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: 'Service Catalog Product: Create GitHub Repository from Template' + +Metadata: + AWS::CloudFormation::Interface: + ParameterGroups: + - Label: + default: "Repository Configuration" + Parameters: + - ProjectName + - OwningTeam + - Label: + default: "Environment Configuration" + Parameters: + - Environment + - AwsRegion + ParameterLabels: + ProjectName: + default: "Repository Name" + OwningTeam: + default: "Owning Team" + Environment: + default: "Environment" + AwsRegion: + default: "AWS Region" + +Parameters: + ProjectName: + Type: String + Description: Name of the GitHub repository to create + MinLength: 1 + MaxLength: 100 + AllowedPattern: ^[a-zA-Z0-9-]+$ + ConstraintDescription: Must contain only alphanumeric characters and hyphens + + OwningTeam: + Type: String + Description: GitHub team that should have admin access to the repository + Default: tf-module-admins + + Environment: + Type: String + Description: Environment for the project + AllowedValues: + - development + - staging + - production + Default: development + + AwsRegion: + Type: String + Description: AWS region for the project + Default: us-gov-west-1 + AllowedValues: + - us-gov-west-1 + - us-gov-east-1 + + LambdaFunctionArn: + Type: String + Description: ARN of the Lambda function that creates repositories + Default: 
arn:aws-us-gov:lambda:us-gov-west-1:229685449397:function:service-catalog-repo-gen-template-automation + +Resources: + # Custom Resource that invokes the Lambda function + GitHubRepository: + Type: Custom::RepositoryCreator + Properties: + ServiceToken: !Ref LambdaFunctionArn + ProjectName: !Ref ProjectName + OwningTeam: !Ref OwningTeam + Environment: !Ref Environment + AwsRegion: !Ref AwsRegion + + # Optional: Store repository information in SSM Parameter Store + RepositoryParameter: + Type: AWS::SSM::Parameter + Properties: + Name: !Sub '/service-catalog/repositories/${ProjectName}' + Type: String + Value: !GetAtt GitHubRepository.RepositoryUrl + Description: !Sub 'GitHub repository created via Service Catalog for ${ProjectName}' + Tags: + Name: !Ref ProjectName + Environment: !Ref Environment + ManagedBy: ServiceCatalog + +Outputs: + RepositoryUrl: + Description: URL of the created GitHub repository + Value: !GetAtt GitHubRepository.RepositoryUrl + Export: + Name: !Sub '${AWS::StackName}-RepositoryUrl' + + RepositoryName: + Description: Name of the created repository + Value: !GetAtt GitHubRepository.RepositoryName + Export: + Name: !Sub '${AWS::StackName}-RepositoryName' + + PullRequestUrl: + Description: URL of the configuration pull request + Value: !GetAtt GitHubRepository.PullRequestUrl + Export: + Name: !Sub '${AWS::StackName}-PullRequestUrl' + + ParameterStorePath: + Description: SSM Parameter Store path containing repository information + Value: !Ref RepositoryParameter diff --git a/config_packer.hcl b/config_packer.hcl new file mode 100644 index 00000000..39d84354 --- /dev/null +++ b/config_packer.hcl @@ -0,0 +1,83 @@ +// config_packer.hcl - Packer Pipeline Configuration for Service Catalog Repository Generator Lambda + +packer_pipeline { + // Required parameters + packer_template_file = "packer.pkr.hcl" // Relative path within the repo to the Packer template + s3_bucket = "csvd-template-automation-builds" // S3 bucket for artifacts + assets_bucket = 
"image-pipeline-assets-dev" // S3 bucket containing tool assets + codebuild_project_name = "service-catalog-repo-generator-builder" // Name for the CodeBuild project + + // Tools configuration + tools = [ + { + name = "packer" + version = "1.13.0" + zip_path = "packer_1.13.0_linux_amd64.zip" + binary_name = "packer" + install_path = "/usr/local/bin" + } + ] + + // AWS Account Configuration + account_number = "229685449397" // AWS account number + partition = "aws-us-gov" // AWS partition (aws or aws-us-gov) + + // Role management + create_role = true // Enable automatic role creation + + // Region and partition configuration + aws_region = "us-gov-west-1" // AWS region + gov_cloud = true // Explicitly set GovCloud partition + + // Optional parameters with defaults + s3_key_prefix = "packer-builds/service-catalog-repo-generator" // Prefix for S3 keys + compute_type = "BUILD_GENERAL1_MEDIUM" // CodeBuild compute type + image = "aws/codebuild/amazonlinux2-x86_64-standard:3.0" // CodeBuild image + buildspec_template = "buildspec.yml.j2" // Buildspec template file + + // Post-build commands to push Docker image to ECR + additional_post_build_commands = "docker push ${AWS_ACCOUNT_ID}.dkr.ecr.${AWS_REGION}.amazonaws.com/${ECR_REPOSITORY}:${IMAGE_TAG}" + + // Exclude patterns for zip creation + exclude_dirs = [ + "design-docs", + "docs", + "dist", + "events", + "scripts", + "tests" + ] + + // VPC Configuration with the specified details + vpc_config { + vpc_id = "vpc-00576a396ec570b94" // Specified VPC + subnet_ids = ["subnet-0b1992a84536c581b"] // Subnet ID + security_group_ids = ["sg-0641c697588b9aa6b"] // Security group ID + } + + // Environment variables for the CodeBuild environment + environment_variables = { + REPOSITORY_NAME = "service-catalog-repo-generator-lambda" + ECR_REPOSITORY = "service-catalog-repo-generator/lambda" + AWS_ACCOUNT_ID = "229685449397" + IMAGE_TAG = "latest" + HTTP_PROXY = "http://proxy.tco.census.gov:3128" + HTTPS_PROXY = 
"http://proxy.tco.census.gov:3128" + NO_PROXY = "public.ecr.aws,pypi.org,github.e.it.census.gov,files.pythonhosted.org,nexus.it.census.gov,public.ecr.aws" + ECR_REGISTRY = "229685449397.dkr.ecr.us-gov-west-1.amazonaws.com" // ECR registry URL + } + + // ECR Image Cloning Configuration + ecr_registry_name = "service-catalog-repo-generator" // ECR registry prefix for cloned images + + ecr_clone_images = [ + { + name = "lambda-python" + tag = "3.11" + source_registry = "public.ecr.aws" + source_image = "lambda/python" + source_tag = "3.11" + enabled = true + } + ] +} \ No newline at end of file diff --git a/deploy/main.tf b/deploy/main.tf new file mode 100644 index 00000000..81132c5f --- /dev/null +++ b/deploy/main.tf @@ -0,0 +1,101 @@ +# Deployment configuration for Service Catalog Repository Generator Lambda +# This uses the terraform-aws-template-automation module to deploy the Lambda function + +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 5.0" + } + } +} + +provider "aws" { + region = var.aws_region +} + +# Data source to get the ECR image URI +data "aws_caller_identity" "current" {} +data "aws_region" "current" {} + +# Deploy the Lambda function and supporting infrastructure +module "service_catalog_repo_generator" { + source = "../../terraform-aws-template-automation" + + name_prefix = "service-catalog-repo-gen" + + # GitHub configuration + github_api_url = var.github_api_url + github_org_name = var.github_org_name + template_repo_name = var.template_repo_name + + # GitHub token from Secrets Manager + github_token = { + secret_name = var.github_token_secret_name + } + + # Lambda configuration + lambda_config = { + image_uri = "${data.aws_caller_identity.current.account_id}.dkr.ecr.${data.aws_region.current.name}.amazonaws.com/service-catalog-repo-generator/lambda:${var.image_tag}" + memory_size = 512 + timeout = 300 + + # VPC configuration (if needed for GitHub Enterprise access) + 
vpc_config = var.enable_vpc ? { + subnet_ids = var.subnet_ids + security_group_ids = var.security_group_ids + } : null + + # Environment variables for the Lambda + environment_variables = merge( + var.additional_env_vars, + { + VERIFY_SSL = "true" + } + ) + } + + tags = var.tags +} + +# Grant CloudFormation permission to invoke Lambda +# This allows any CloudFormation stack to invoke the Lambda as a Custom Resource +resource "aws_lambda_permission" "cloudformation" { + statement_id = "AllowCloudFormationInvoke" + action = "lambda:InvokeFunction" + function_name = module.service_catalog_repo_generator.lambda_function_name + principal = "cloudformation.amazonaws.com" +} + +# Outputs +output "lambda_function_arn" { + description = "ARN of the deployed Lambda function - use this as ServiceToken in CloudFormation" + value = module.service_catalog_repo_generator.lambda_function_arn +} + +output "lambda_function_name" { + description = "Name of the deployed Lambda function" + value = module.service_catalog_repo_generator.lambda_function_name +} + +output "cloudformation_template_example" { + description = "Example CloudFormation Custom Resource definition" + value = <<-EOT + Resources: + MyRepository: + Type: Custom::RepositoryCreator + Properties: + ServiceToken: ${module.service_catalog_repo_generator.lambda_function_arn} + ProjectName: my-new-repo + OwningTeam: platform-team + Environment: development + + Outputs: + RepositoryUrl: + Value: !GetAtt MyRepository.RepositoryUrl + PullRequestUrl: + Value: !GetAtt MyRepository.PullRequestUrl + EOT +} diff --git a/deploy/terraform.tfvars.example b/deploy/terraform.tfvars.example new file mode 100644 index 00000000..e2791f15 --- /dev/null +++ b/deploy/terraform.tfvars.example @@ -0,0 +1,22 @@ +# Example terraform.tfvars file for deploying the Service Catalog Repository Generator + +aws_region = "us-gov-west-1" + +github_api_url = "https://github.e.it.census.gov/api/v3" # Your GitHub Enterprise URL +github_org_name = 
"your-org-name" +template_repo_name = "template-repository-name" + +github_token_secret_name = "github/service-catalog-repo-generator-token" + +image_tag = "latest" # Or specific version like "1.0.0" + +# If you need VPC access for GitHub Enterprise +enable_vpc = true +subnet_ids = ["subnet-0b1992a84536c581b"] +security_group_ids = ["sg-0641c697588b9aa6b"] + +tags = { + ManagedBy = "Terraform" + Purpose = "ServiceCatalogRepoGenerator" + Environment = "production" +} diff --git a/deploy/variables.tf b/deploy/variables.tf new file mode 100644 index 00000000..86debb22 --- /dev/null +++ b/deploy/variables.tf @@ -0,0 +1,65 @@ +variable "aws_region" { + description = "AWS region where resources will be created" + type = string + default = "us-gov-west-1" +} + +variable "github_api_url" { + description = "GitHub API URL (Enterprise or public)" + type = string + default = "https://api.github.com" +} + +variable "github_org_name" { + description = "GitHub organization name" + type = string +} + +variable "template_repo_name" { + description = "Name of the template repository" + type = string +} + +variable "github_token_secret_name" { + description = "Name of the AWS Secrets Manager secret containing the GitHub token" + type = string +} + +variable "image_tag" { + description = "Tag of the Docker image in ECR" + type = string + default = "latest" +} + +variable "enable_vpc" { + description = "Whether to deploy Lambda in VPC" + type = bool + default = false +} + +variable "subnet_ids" { + description = "Subnet IDs for Lambda VPC configuration" + type = list(string) + default = [] +} + +variable "security_group_ids" { + description = "Security group IDs for Lambda VPC configuration" + type = list(string) + default = [] +} + +variable "additional_env_vars" { + description = "Additional environment variables for Lambda" + type = map(string) + default = {} +} + +variable "tags" { + description = "Tags to apply to all resources" + type = map(string) + default = { + ManagedBy = 
"Terraform" + Purpose = "ServiceCatalogRepoGenerator" + } +} diff --git a/design-docs/CUSTOM_TEMPLATES.MD b/design-docs/CUSTOM_TEMPLATES.MD new file mode 100644 index 00000000..52b42d4f --- /dev/null +++ b/design-docs/CUSTOM_TEMPLATES.MD @@ -0,0 +1,138 @@ +# Custom Template Management + +This document outlines approaches for managing custom GitHub repository templates, including support for subdirectory-based templates. + +## Template Sources + +### Full Repository Templates +The standard approach where an entire repository is used as a template. + +### Subdirectory Templates +Allows using a specific subdirectory from a template repository, enabling: +- Modular template organization +- Sharing common components +- Granular template selection + +## Usage + +### Creating from Full Repository + +```json +{ + "action": "create", + "project_name": "my-service", + "template_settings": { + "type": "service", + "environment": "prod", + "variables": { + "region": "us-west-2" + } + } +} +``` + +### Creating from Subdirectory + +```json +{ + "action": "create", + "project_name": "my-service", + "template_settings": { + "type": "service", + "environment": "prod", + "source_path": "templates/microservice", + "variables": { + "region": "us-west-2" + } + } +} +``` + +## Template Organization + +Example structure for a template repository using subdirectories: + +``` +template-repository/ +├── README.md +├── templates/ +│ ├── microservice/ # Template for microservices +│ │ ├── .github/ +│ │ │ └── workflows/ +│ │ ├── src/ +│ │ └── config/ +│ ├── terraform-module/ # Template for Terraform modules +│ │ ├── .github/ +│ │ └── examples/ +│ └── python-package/ # Template for Python packages +│ ├── .github/ +│ └── src/ +└── common/ # Shared components + ├── workflows/ + ├── scripts/ + └── config/ +``` + +## Implementation Details + +### Template Copying Logic + +1. Validate source path exists in template repository +2. 
If source path specified: + - Get contents of specified directory + - Strip source path prefix from target paths +3. If no source path: + - Get contents of entire repository +4. Copy files maintaining directory structure +5. Generate and store destroy token + +### Error Handling + +- Source path validation +- File copy failures +- Permission issues +- Missing files/directories + +## Security Considerations + +1. **Access Control**: + - Template repository access restrictions + - Source path validation +2. **Content Validation**: + - File type restrictions + - Size limits + - Path traversal prevention + +## Best Practices + +1. **Template Organization**: + - Use clear directory structure + - Include README in each template + - Document variables and requirements + +2. **Subdirectory Usage**: + - Group related templates + - Share common components + - Use consistent naming + +3. **Maintenance**: + - Regular template updates + - Version tagging + - Change documentation + +## Future Enhancements + +1. **Template Composition**: + - Combine multiple subdirectories + - Template inheritance + - Component overrides + +2. **Validation**: + - Template schema validation + - Required files checking + - Variable validation + +3. **Advanced Features**: + - Template versioning + - Hot-reload templates + - Template discovery API \ No newline at end of file diff --git a/design-docs/README.md b/design-docs/README.md new file mode 100644 index 00000000..f8c2601d --- /dev/null +++ b/design-docs/README.md @@ -0,0 +1,142 @@ +# Template Automation System Implementation Plan + +## System Architecture + +The Template Automation System is designed to be a generic, template-agnostic infrastructure that can automate the creation and configuration of any type of repository from a template. The system consists of two core components and can work with any number of template repositories. 
+ +### Core Components + +#### terraform-aws-template-automation +This is the foundational Terraform module that deploys the automation infrastructure: +- Deploys the Lambda function and required AWS resources (API Gateway, IAM roles, etc.) +- Manages any required SSM parameters or Secrets +- Provides a reusable module that can be included in any AWS environment +- Template-agnostic - works with any type of repository template + +#### template-automation-lambda +This is the engine of the automation system: +- Implements the core repository templating logic in template_automation/app.py +- Packaged as a Docker image for Lambda deployment +- Handles repository creation, branch management, and PR automation +- Template-agnostic - can work with any properly structured template repository + +### Template Repositories + +#### template-eks-cluster (Example) +This is an example template repository that demonstrates how to structure a template for use with the automation system: +- Shows the pattern for creating EKS clusters +- Serves as a reference implementation +- Demonstrates best practices for template structure +- One of many possible templates that could be used with the system + +### Build Infrastructure Requirements +The Terraform configuration in this repository is specifically for building the Lambda container image in ECR. Due to tooling restrictions and access requirements, the build process must be executed in GitHub.com rather than in the target organization's environment. 
This means: + +- The container image build pipeline runs in GitHub.com +- Terraform in this repo manages only build-related resources (ECR repository, build IAM roles) +- The build process cannot access internal tools or resources of the target organization +- The resulting container image is then referenced by the terraform-aws-template-automation module for actual deployment + +## Overview +This document outlines the implementation plan for the Template Automation System, using an EKS cluster template as our first case study. While we'll be working with the `template-eks-cluster` repository to validate and demonstrate the system's capabilities, the core automation components (`template-automation-lambda` and `terraform-aws-template-automation`) are designed to work with any properly structured template repository. + +The EKS cluster template serves as an excellent first example because it: +- Demonstrates complex configuration processing requirements +- Shows how templates can define their own workflow automation +- Provides a real-world validation of the system's flexibility +- Establishes patterns that other templates can follow + +Most of the core automation work will take place in `template_automation/app.py`, while the EKS-specific template logic resides in the `template-eks-cluster` repository. This separation ensures that our automation system remains template-agnostic while allowing templates to define their own specialized behavior. 
+ +## Implementation Phases + +### Phase 1: Lambda Function Core Updates +Updates to the Lambda function to establish template-agnostic repository management: + +- **Branch Management** + - Create new initialization branch instead of pushing directly to main + - Implement flexible branch creation in GitHub client (template_automation/app.py) + - Add robust error handling for branch operations + - Support template-specific branch naming (e.g., "init-cluster" for EKS templates) + +- **Pull Request Automation** + - Add automatic PR creation after pushing changes + - Implement configurable PR creation logic in GitHub client + - Support template-specific PR templates and descriptions + - Allow templates to define their PR strategies + +### Phase 2: Template Processing Framework +Enhance the framework for processing template repositories, using EKS template as reference: + +- **Configuration Processing** + - Create flexible configuration processing system + - Support multiple configuration formats (JSON, HCL, YAML) + - Allow templates to define custom processing logic + - Example: Implement config.js to HCL conversion via Ansible for EKS template + +- **GitHub Actions Framework** + - Create template-agnostic workflow framework + - Allow templates to define custom GitHub Actions + - Support environment-specific configurations + - Example: Implement EKS template's generate_hcl_files.yml playbook + +- **Runner Configuration** + - Implement account-specific runner selection + - Support lab environment runners + - Configure runners based on AWS account IDs + - Enable template-specific validation steps + +### Phase 3: Testing Implementation +Establish comprehensive testing framework for both core system and templates: + +- **Lab Environment Setup** + - Configure workflow for lab AWS account + - Set up isolated testing environment + - Create test configurations for various template types + - Example: Set up EKS cluster test configurations + +- **Core System Testing** + - Test 
template-agnostic functionality + - Validate GitHub integration components + - Test configuration processing framework + - Verify error handling and recovery + +- **End-to-End Testing** + - Implement full workflow testing + - Create demonstration environment + - Add integration tests for GitHub operations + - Test template-specific validations + - Example: Validate EKS cluster creation workflow + +### Phase 4: Documentation and Interface +Establish documentation and support infrastructure: + +- **Core System Documentation** + - Document Lambda invocation process + - Template structure requirements + - Configuration schema documentation + - Template processing hooks + +- **Template Development Guide** + - Template structure guidelines + - Best practices for template design + - Example implementations (using EKS template) + - Template testing guidelines + +- **Future Considerations** + - Additional template types beyond EKS + - Enhanced template processing capabilities + - Integration with other systems (e.g., CRF) + - Template marketplace concept + +## Success Criteria +- Core automation system successfully processes any valid template +- Templates can define their own processing logic and validation +- Comprehensive testing framework validates both system and templates +- Clear documentation helps users create new templates +- System demonstrates flexibility with multiple template types + +## Dependencies +- GitHub API access and permissions +- AWS account access for testing +- Runner configurations for different environments diff --git a/design-docs/REPO_VARS_AND_SECRETS.md b/design-docs/REPO_VARS_AND_SECRETS.md new file mode 100644 index 00000000..728942bb --- /dev/null +++ b/design-docs/REPO_VARS_AND_SECRETS.md @@ -0,0 +1,253 @@ +# Repository Variables and Secrets Management + +This document outlines the approach for managing GitHub Actions secrets and variables for newly created repositories using AWS Parameter Store and Secrets Manager. 
+ +## Overview + +The template automation system will configure GitHub Actions secrets and variables by: +1. Reading secrets from AWS Secrets Manager +2. Reading variables from AWS Parameter Store +3. Setting them in the newly created repository using GitHub's API + +## Implementation + +### Parameter Structure + +#### AWS Parameter Store +``` +/template-automation/ + ├── variables/ + │ ├── global/ # Variables for all repos + │ │ ├── AWS_REGION + │ │ └── TERRAFORM_VERSION + │ └── by-type/ # Variables by repository type + │ ├── eks-cluster/ + │ │ ├── CLUSTER_VERSION + │ │ └── NODE_TYPE + │ └── terraform-module/ + │ ├── GO_VERSION + │ └── TFLINT_VERSION +``` + +#### AWS Secrets Manager +``` +template-automation/ + ├── secrets/global/ # Secrets for all repos + │ ├── AWS_ACCESS_KEY_ID + │ └── AWS_SECRET_ACCESS_KEY + └── secrets/by-type/ # Secrets by repository type + ├── eks-cluster/ + │ └── KUBECONFIG + └── terraform-module/ + └── SNYK_TOKEN +``` + +### Infrastructure Changes + +#### Lambda Configuration + +Add environment variables to the Lambda function: + +```hcl +# In terraform-aws-template-automation/main.tf +resource "aws_lambda_function" "template_automation" { + # ...existing configuration... 
+ + environment { + variables = { + PARAM_STORE_PREFIX = "/template-automation" + SECRETS_PREFIX = "template-automation" + } + } +} +``` + +#### IAM Permissions + +Add required permissions to the Lambda role: + +```hcl +# In terraform-aws-template-automation/iam.tf +data "aws_iam_policy_document" "secrets_access" { + statement { + effect = "Allow" + actions = [ + "secretsmanager:GetSecretValue", + "secretsmanager:ListSecrets" + ] + resources = [ + "arn:aws:secretsmanager:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:secret:${var.secrets_prefix}/*" + ] + } +} + +data "aws_iam_policy_document" "ssm_access" { + statement { + effect = "Allow" + actions = [ + "ssm:GetParameter", + "ssm:GetParameters", + "ssm:GetParametersByPath" + ] + resources = [ + "arn:aws:ssm:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:parameter${var.param_store_prefix}/*" + ] + } +} +``` + +### Implementation Details + +#### In GitHubClient + +The `GitHubClient` class will be extended with methods to handle secrets and variables: + +```python +class GitHubClient: + def set_repository_secrets(self, repo_name: str, repo_type: str = None) -> None: + """Set GitHub Actions secrets for a repository.""" + # Get global secrets + secrets = self._get_aws_secrets("secrets/global") + + # Get type-specific secrets + if repo_type: + type_secrets = self._get_aws_secrets(f"secrets/by-type/{repo_type}") + secrets.update(type_secrets) + + # Set secrets in repository + repo = self.org.get_repo(repo_name) + for name, value in secrets.items(): + repo.create_secret(name, value) + + def set_repository_variables(self, repo_name: str, repo_type: str = None) -> None: + """Set GitHub Actions variables for a repository.""" + # Get global variables + variables = self._get_ssm_parameters("variables/global") + + # Get type-specific variables + if repo_type: + type_vars = self._get_ssm_parameters(f"variables/by-type/{repo_type}") + variables.update(type_vars) + + 
# Set variables in repository + repo = self.org.get_repo(repo_name) + for name, value in variables.items(): + repo.create_variable(name, value) +``` + +#### In Lambda Handler + +The handler will be updated to set secrets and variables during repository creation: + +```python +def lambda_handler(event: dict, context) -> dict: + # ...existing initialization code... + + # Create repository + repo = github.get_repository(repo_name, create=True) + + # Set secrets and variables + repo_type = template_input.template_settings.get("type") + github.set_repository_secrets(repo_name, repo_type) + github.set_repository_variables(repo_name, repo_type) + + # ...rest of handler code... +``` + +## Security Considerations + +1. **Secret Encryption**: All secrets are encrypted at rest in AWS +2. **IAM Access Control**: Fine-grained control over who can access secrets +3. **Audit Trail**: AWS CloudTrail tracks all secret access +4. **Repository Isolation**: Each repository gets its own copy of secrets +5. **Least Privilege**: Lambda has minimal required permissions + +## Usage Examples + +### Setting Up Repository Type Secrets + +1. Store secrets in AWS: +```bash +aws secretsmanager create-secret \ + --name "template-automation/secrets/by-type/eks-cluster/KUBECONFIG" \ + --secret-string "..." +``` + +2. Store variables in Parameter Store: +```bash +aws ssm put-parameter \ + --name "/template-automation/variables/by-type/eks-cluster/CLUSTER_VERSION" \ + --value "1.27" \ + --type "String" +``` + +### Creating a Repository with Secrets + +Create a new EKS cluster repository with the Lambda function: + +```json +{ + "action": "create", + "project_name": "production-eks", + "template_settings": { + "type": "eks-cluster", + "environment": "production" + } +} +``` + +The Lambda function will: +1. Create the repository +2. Generate a secure destroy token +3. Store the token in a `.destroy-token` file in the repository root +4. Set up global secrets and variables +5. 
Set up EKS-specific secrets and variables +6. Configure necessary GitHub Actions environment + +The response will include the repository URL: + +```json +{ + "status": "success", + "repository_url": "https://github.com/org/production-eks", + "message": "Repository created successfully. The destroy token is stored in .destroy-token file." +} +``` + +**Important**: The destroy token is stored in the `.destroy-token` file in your repository. You'll need this token to delete the repository later. The file looks like: + +```plaintext +# This file contains the token required to delete this repository. +# Store this token securely as it will be required for repository deletion. +# DO NOT delete or modify this file unless you want to prevent repository deletion. + +ESxK2ld9J4mCpA-ghi8932jk... +``` + +### Destroying a Repository + +To clean up a repository and its associated secrets/variables: + +```json +{ + "action": "destroy", + "project_name": "production-eks", + "destroy_token": "ESxK2ld9J4mCpA-ghi8932jk..." +} +``` + +The Lambda function will: +1. Validate the provided destroy token +2. Delete all repository secrets +3. Delete all repository variables +4. Delete the repository itself + +If an invalid destroy token is provided, the operation will fail with an error. + +## Future Enhancements + +1. **Secret Rotation**: Implement automatic secret rotation +2. **Environment Support**: Add environment-specific secrets (dev/staging/prod) +3. **Organization Variables**: Support for organization-level variables +4. **Validation Rules**: Add validation for secret/variable names and values +5. **Backup/Restore**: Implement backup and restore for secrets/variables diff --git a/docs/build_docs.sh b/docs/build_docs.sh new file mode 100755 index 00000000..b855b421 --- /dev/null +++ b/docs/build_docs.sh @@ -0,0 +1,26 @@ +#!/bin/bash +set -e + +# Ensure we're in the project root +cd "$(dirname "$0")/.." + +# Create and activate virtual environment if it doesn't exist +if [ ! 
-d "docs/venv" ]; then + python3 -m venv docs/venv +fi +source docs/venv/bin/activate + +# Install dependencies and package in development mode +pip install -r docs/requirements.txt +pip install -e . + +# Create documentation directories +mkdir -p docs/source/_static +mkdir -p docs/build + +# Generate documentation +export SPHINX_BUILD=1 +cd docs +sphinx-build -b html source build + +echo "Documentation built successfully in docs/build/index.html" diff --git a/docs/callnotes.md b/docs/callnotes.md new file mode 100644 index 00000000..dd5a747c --- /dev/null +++ b/docs/callnotes.md @@ -0,0 +1,24 @@ +# Meeting Notes + +## Participants +- Srinivasa R Nangunuri (CENSUS/CSVD FED) +- Matthew Creal Morgan (CENSUS/CSVD CTR) +- David John Arnold Jr (CENSUS/CSVD CTR) + +## Key Issues +1. Environment details not appearing in README file + - Needs to be fixed + - Estimated work time: couple of hours + +## Timeline +- Implementation needed within next 24 hours +- Current progress: 90% complete +- Work estimate: 2-3 hours +- Follow-up meeting scheduled for tomorrow, same time + +## Next Steps +- David to implement environment details fix either tonight or tomorrow morning +- Team to reconvene tomorrow at the same time to review changes + +## Status +Current completion status reported to leadership: 90% complete \ No newline at end of file diff --git a/docs/callnotes.txt b/docs/callnotes.txt new file mode 100644 index 00000000..d2240f3f --- /dev/null +++ b/docs/callnotes.txt @@ -0,0 +1,267 @@ +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +0 minutes 4 seconds0:04 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 0 minutes 4 seconds +Yeah. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +0 minutes 4 seconds0:04 +Matthew Creal Morgan (CENSUS/CSVD CTR) 0 minutes 4 seconds +Yeah. +D +David John Arnold Jr (CENSUS/CSVD CTR) +0 minutes 5 seconds0:05 +David John Arnold Jr (CENSUS/CSVD CTR) 0 minutes 5 seconds +I had no idea. 
+David John Arnold Jr (CENSUS/CSVD CTR) 0 minutes 6 seconds +OK, right on sweet. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +0 minutes 8 seconds0:08 +Matthew Creal Morgan (CENSUS/CSVD CTR) 0 minutes 8 seconds +It's alright, it's fine. +Matthew Creal Morgan (CENSUS/CSVD CTR) 0 minutes 9 seconds +Don't worry about it. +Matthew Creal Morgan (CENSUS/CSVD CTR) 0 minutes 10 seconds +OK. +Matthew Creal Morgan (CENSUS/CSVD CTR) 0 minutes 11 seconds +So environment is not coming through in environment details in the README. +D +David John Arnold Jr (CENSUS/CSVD CTR) +0 minutes 16 seconds0:16 +David John Arnold Jr (CENSUS/CSVD CTR) 0 minutes 16 seconds +Yep. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +0 minutes 16 seconds0:16 +Matthew Creal Morgan (CENSUS/CSVD CTR) 0 minutes 16 seconds +That's one. +Matthew Creal Morgan (CENSUS/CSVD CTR) 0 minutes 19 seconds +Umm. +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 10 seconds +You give me admin.org and I'll clean up all those reports. I can give you anything if I don't have admin on those repos, which I probably don't, I can't do anything about it. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +7 minutes 11 seconds7:11 +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 11 seconds +Yeah. +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 13 seconds +No, definitely not that. +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 20 seconds +Yeah, I don't. +D +David John Arnold Jr (CENSUS/CSVD CTR) +7 minutes 21 seconds7:21 +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 21 seconds +So. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +7 minutes 23 seconds7:23 +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 23 seconds +I don't think I have admin to do anything about it. +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 26 seconds +Yeah. I don't even have admin. +D +David John Arnold Jr (CENSUS/CSVD CTR) +7 minutes 27 seconds7:27 +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 27 seconds +33. 
+David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 29 seconds +Exactly. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +7 minutes 29 seconds7:29 +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 29 seconds +So what we'll need to do is just put a list together of the repos that we need deleted, and then we'll pass that over to Youssef and he can take care of it. +D +David John Arnold Jr (CENSUS/CSVD CTR) +7 minutes 31 seconds7:31 +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 31 seconds +Play. +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 41 seconds +Yeah, alright. +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 41 seconds +I'm good with that. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +7 minutes 43 seconds7:43 +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 43 seconds +I know that that's a pain in the ***. +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 44 seconds +It'd be so much easier just doing our damn selves, but you know. +D +David John Arnold Jr (CENSUS/CSVD CTR) +7 minutes 47 seconds7:47 +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 47 seconds +Help. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +7 minutes 47 seconds7:47 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 7 minutes 47 seconds +Yeah. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +7 minutes 49 seconds7:49 +Matthew Creal Morgan (CENSUS/CSVD CTR) 7 minutes 49 seconds +OK, so so Srini, what's your report for leadership from this call? +D +David John Arnold Jr (CENSUS/CSVD CTR) +7 minutes 49 seconds7:49 +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 49 seconds +All right. +David John Arnold Jr (CENSUS/CSVD CTR) 7 minutes 52 seconds +Oh. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +7 minutes 59 seconds7:59 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 7 minutes 59 seconds +90% done. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 1 second8:01 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 1 second +There we go. 
+SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +8 minutes 2 seconds8:02 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 2 seconds +Yep, yes, I I I I'll always. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 4 seconds8:04 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 4 seconds +OK. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +8 minutes 6 seconds8:06 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 6 seconds +Do the like favorable report? +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 9 seconds +No, no worries. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 9 seconds8:09 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 9 seconds +OK. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +8 minutes 10 seconds8:10 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 10 seconds +Yeah, what? +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 11 seconds +What? See, there's no, nothing less, nothing more. +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 14 seconds +Whatever we did, what we I I report. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 18 seconds8:18 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 18 seconds +So timeline, how much time do you need David to fix this up? +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +8 minutes 18 seconds8:18 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 18 seconds +But. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 26 seconds8:26 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 26 seconds +Is it a today today, tonight? +D +David John Arnold Jr (CENSUS/CSVD CTR) +8 minutes 26 seconds8:26 +David John Arnold Jr (CENSUS/CSVD CTR) 8 minutes 26 seconds +Let me go. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 27 seconds8:27 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 27 seconds +Let's reconvene tomorrow. +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 29 seconds +Or is it a couple hours? 
+D +David John Arnold Jr (CENSUS/CSVD CTR) +8 minutes 31 seconds8:31 +David John Arnold Jr (CENSUS/CSVD CTR) 8 minutes 31 seconds +It's probably a couple hours. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +8 minutes 33 seconds8:33 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 33 seconds +No, but we'll reconvene tomorrow. +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 35 seconds +I I need to be on the call so. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 37 seconds8:37 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 37 seconds +OK. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +8 minutes 38 seconds8:38 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 38 seconds +Yeah, what? +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 38 seconds +What? What are the fixes you want to do, David? +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 39 seconds8:39 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 39 seconds +So. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +8 minutes 41 seconds8:41 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 41 seconds +Do it tonight if you can, or do it tomorrow. Either way, it's fine. +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 45 seconds +Like we can. +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 45 seconds +We can win tomorrow afternoon. +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 48 seconds +At your login time, so that will be afternoon for me, so that's fine. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +8 minutes 51 seconds8:51 +Matthew Creal Morgan (CENSUS/CSVD CTR) 8 minutes 51 seconds +Same bat time, same bat channel. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +8 minutes 54 seconds8:54 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 8 minutes 54 seconds +Yeah. +D +David John Arnold Jr (CENSUS/CSVD CTR) +8 minutes 54 seconds8:54 +David John Arnold Jr (CENSUS/CSVD CTR) 8 minutes 54 seconds +Yeah. So where does the transcription for this recording show up? 
+MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +9 minutes9:00 +Matthew Creal Morgan (CENSUS/CSVD CTR) 9 minutes +Once we close this call, it'll post in the channel and then you can pull it from there and I'll I'll shoot it to you also. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +9 minutes9:00 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 9 minutes +I'll yeah, I'll send. +D +David John Arnold Jr (CENSUS/CSVD CTR) +9 minutes 7 seconds9:07 +David John Arnold Jr (CENSUS/CSVD CTR) 9 minutes 7 seconds +All right, sweetie. +David John Arnold Jr (CENSUS/CSVD CTR) 9 minutes 10 seconds +Good deal. +MM +Matthew Creal Morgan (CENSUS/CSVD CTR) +9 minutes 10 seconds9:10 +Matthew Creal Morgan (CENSUS/CSVD CTR) 9 minutes 10 seconds +Cool. +SN +Srinivasa R Nangunuri (CENSUS/CSVD FED) +9 minutes 11 seconds9:11 +Srinivasa R Nangunuri (CENSUS/CSVD FED) 9 minutes 11 seconds +I can. +Srinivasa R Nangunuri (CENSUS/CSVD FED) 9 minutes 11 seconds +So I can stop the recording right now. +Srinivasa R Nangunuri (CENSUS/CSVD FED) 9 minutes 13 seconds +It'll it'll. I stopped. \ No newline at end of file diff --git a/docs/gitlab-migration.md b/docs/gitlab-migration.md new file mode 100644 index 00000000..00f46795 --- /dev/null +++ b/docs/gitlab-migration.md @@ -0,0 +1,71 @@ +# GitLab Migration: What Actually Needs to Get Done + +## 1. Update Lambda Function to Use GitLab +- Replace all GitHub API usage (PyGithub) with GitLab API usage (`python-gitlab`). + - Uninstall PyGithub from requirements.txt, add python-gitlab. + - Update all import statements and API calls in your Lambda code (e.g., `app.py`, `template_automation/`). +- Update authentication to use a GitLab token (Personal Access Token with `api` scope). + - Store the token in AWS SSM or Secrets Manager, update Lambda environment/config to use it. +- Change all repo creation, file commit, and merge/pull request logic to use GitLab’s API and terminology. 
+ - GitHub: `repo.create_pull` → GitLab: `project.mergerequests.create` + - GitHub: `repo.create_file` → GitLab: `project.files.create` + - GitHub: `org.create_repo_from_template` → GitLab: fork or create project, then push files +- Update config/env vars: + - Use `GITLAB_API_URL`, `GITLAB_GROUP_ID` (or `GITLAB_NAMESPACE`), and `GITLAB_TOKEN` instead of GitHub equivalents. +- Test Lambda end-to-end with a real GitLab group/project. + +## 2. Migrate CI/CD to AWS CodeBuild +- Convert your GitHub Actions workflow (e.g., `.github/workflows/initialize.yml`) to a `buildspec.yml` for CodeBuild. + - Each step in the workflow should become a phase in `buildspec.yml` (install, pre_build, build, post_build). + - Example: + ```yaml + version: 0.2 + phases: + install: + commands: + - pip install ansible + - pip install -r requirements.txt || true + build: + commands: + - ansible-playbook ansible/generate_hcl_files.yml -e "config_file=config.json" + - git add -A + - git diff --staged --quiet || git commit -m "Initialize repository structure from template" + - git push origin HEAD:repo-init || true + ``` +- Set up AWS CodeBuild projects for each repo that needs CI/CD. + - Use the AWS Console or Terraform to create the projects. + - Make sure CodeBuild has permissions to pull from GitLab and push to your repos. +- Set up triggers so CodeBuild runs on changes: + - Use GitLab webhooks to trigger a Lambda that starts CodeBuild, or use AWS CodeStar Connections if available for GitLab. + - Make sure the trigger covers the same events as your old GitHub Actions (e.g., PRs to `main`/`master`, manual triggers). +- Test CodeBuild by pushing a change to a test branch and verifying the pipeline runs and updates the repo as expected. + +## 3. Update Documentation +- Change all references from GitHub to GitLab in your README files and internal docs. 
+- Document the new workflow: + - How to trigger the pipeline (CodeBuild) + - How to configure the Lambda for GitLab + - Any new environment variables or secrets +- Remove or update any GitHub Actions badges, links, or instructions. + +## 4. Test Everything +- Test the Lambda function end-to-end with GitLab: + - Trigger a repo creation and make sure it works as expected. + - Check that files, branches, and merge requests are created correctly. +- Test CodeBuild pipelines: + - Make sure they run on the right events and update the repo as expected. + - Check logs for errors and fix any issues. +- Validate that new repos are created, initialized, and built as expected. + +## 5. Coordinate Cutover +- Wait for the repo migration team to finish moving code to GitLab. +- Switch all automation, scripts, and users to use the new GitLab URLs and CodeBuild pipelines. +- Monitor for issues and fix anything that breaks. +- Announce the cutover to your team and update any onboarding or support docs. + +--- + +**Summary:** +- Focus on Lambda code changes, CI/CD migration, documentation, and testing. +- Don’t worry about the actual repo migration (another team is handling it). +- Make sure everything works with GitLab and CodeBuild before switching over. \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..ae1149ec --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,6 @@ +sphinx +sphinx_rtd_theme +sphinx-autodoc-typehints +pydantic~=2.6 +boto3>=1.38.6 +requests diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 00000000..95510f0b --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,53 @@ +# Configuration file for the Sphinx documentation builder. 
+ +# -- Project information ----------------------------------------------------- +project = 'Template Automation Lambda' +copyright = '2025, Template Automation Team' +author = 'Template Automation Team' +release = '1.0.0' + +# -- General configuration --------------------------------------------------- +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', # Support for Google-style docstrings + 'sphinx.ext.viewcode', # Add links to source code + 'sphinx.ext.intersphinx', # Link to other project's documentation + 'sphinx_autodoc_typehints', # Support for type hints +] + +# -- Options for autodoc ---------------------------------------------------- +autodoc_default_options = { + 'members': True, + 'undoc-members': True, + 'show-inheritance': True, + 'special-members': '__init__', + 'imported-members': False, # Don't document imported members +} + +# Don't document imported members in app module +autodoc_mock_imports = ['github'] +autodoc_member_order = 'bysource' + +# Napoleon settings for Google-style docstrings +napoleon_google_docstring = True +napoleon_numpy_docstring = False +napoleon_include_init_with_doc = True +napoleon_include_private_with_doc = True +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = True +napoleon_use_admonition_for_notes = True +napoleon_use_admonition_for_references = True +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True +napoleon_type_aliases = None + +# -- Options for HTML output ------------------------------------------------- +html_theme = 'sphinx_rtd_theme' +html_static_path = ['_static'] + +# Intersphinx mapping +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None), + 'boto3': ('https://boto3.amazonaws.com/v1/documentation/api/latest', None), +} diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 00000000..60273e14 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,94 @@ +Template Automation Lambda 
Documentation +===================================== + +Welcome to the Template Automation Lambda documentation. This system provides a flexible +template automation framework for creating and configuring repositories from templates. + +Quick Start +---------- + +The Template Automation Lambda is an AWS Lambda function that automates the process of creating +repositories from templates. It handles: + +- Repository creation from templates +- Template rendering with variable substitution +- Pull request creation with customizable settings +- Workflow automation triggers + +Installation +----------- + +To install the package and its dependencies: + +.. code-block:: bash + + pip install -r requirements.txt + pip install -e . + +Usage +----- + +Basic usage example: + +.. code-block:: python + + from template_automation.app import lambda_handler + + event = { + "project_name": "my-new-repo", + "owning_team": "devops", + "template_settings": { + "variables": { + "environment": "prod", + "region": "us-west-2" + } + } + } + + lambda_handler(event, {}) + +API Documentation +--------------- + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents: + + modules/github_client + modules/template_manager + modules/models + modules/lambda_handler + +Core Components +------------- + +- :doc:`modules/github_client` - GitHub API interaction for repository and PR management +- :doc:`modules/template_manager` - Template rendering and configuration handling +- :doc:`modules/models` - Pydantic data models for input validation +- :doc:`modules/lambda_handler` - AWS Lambda function entry point + +Configuration +------------ + +The system uses several configuration models: + +- **GitHubConfig**: GitHub API and authentication settings +- **WorkflowConfig**: Template workflow configuration +- **PRConfig**: Pull request settings +- **TemplateInput**: Input parameters for template processing + +Environment Variables +------------------- + +Required environment variables: + +- ``GITHUB_TOKEN``: GitHub Personal Access Token +- ``GITHUB_ORG``: GitHub Organization name +- ``TEMPLATE_REPO``: Template repository name + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/source/modules/github_client.rst b/docs/source/modules/github_client.rst new file mode 100644 index 00000000..b7359243 --- /dev/null +++ b/docs/source/modules/github_client.rst @@ -0,0 +1,8 @@ +GitHub Client +============= + +.. automodule:: template_automation.github_client + :members: + :undoc-members: + :show-inheritance: + :special-members: __init__ diff --git a/docs/source/modules/lambda_handler.rst b/docs/source/modules/lambda_handler.rst new file mode 100644 index 00000000..9e1f993e --- /dev/null +++ b/docs/source/modules/lambda_handler.rst @@ -0,0 +1,7 @@ +Lambda Handler +============= + +.. 
automodule:: template_automation.app + :members: lambda_handler, get_github_token + :undoc-members: + :show-inheritance: diff --git a/docs/source/modules/models.rst b/docs/source/modules/models.rst new file mode 100644 index 00000000..610854a4 --- /dev/null +++ b/docs/source/modules/models.rst @@ -0,0 +1,30 @@ +Data Models +=========== + +.. automodule:: template_automation.models + :members: + :undoc-members: + :show-inheritance: + +Models +------ + +.. autoclass:: template_automation.models.GitHubConfig + :members: + :undoc-members: + :show-inheritance: + +.. autoclass:: template_automation.models.WorkflowConfig + :members: + :undoc-members: + :show-inheritance: + +.. autoclass:: template_automation.models.PRConfig + :members: + :undoc-members: + :show-inheritance: + +.. autoclass:: template_automation.models.TemplateInput + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/modules/template_manager.rst b/docs/source/modules/template_manager.rst new file mode 100644 index 00000000..ad859547 --- /dev/null +++ b/docs/source/modules/template_manager.rst @@ -0,0 +1,16 @@ +Template Manager +=============== + +.. automodule:: template_automation.template_manager + :members: + :undoc-members: + :show-inheritance: + +TemplateManager Class +------------------ + +.. autoclass:: template_automation.template_manager.TemplateManager + :members: + :undoc-members: + :show-inheritance: + :special-members: __init__ diff --git a/docs/tf-native-v2.md b/docs/tf-native-v2.md new file mode 100644 index 00000000..499762ad --- /dev/null +++ b/docs/tf-native-v2.md @@ -0,0 +1,83 @@ +# Plan for Migrating to a Terraform-Native GitHub Repository Management Workflow (v2) + +This document provides a corrected and more accurate plan to replace the current Python Lambda-based workflow with a Terraform-native approach for creating and managing GitHub repositories. + +## 1. 
Corrected Analysis of the Current State + +The current system uses a Python-based AWS Lambda function (`template-automation-lambda`) to automate repository creation. It does **not** use Ansible for repository configuration as previously assumed. + +The workflow is as follows: +1. The Lambda function is invoked with details for a new repository (e.g., `project_name`, `template_settings`). +2. The Python script within the Lambda performs a series of GitHub API calls to: + a. Create a new repository. + b. Clone the contents of a base template repository into it. + c. Add a custom `config.json` file. + d. Assign team permissions. + e. Create a pull request to merge the initial setup into the `main` branch. + +This process, while effective, involves a custom-built Python application, a Lambda deployment, an ECR container registry, and associated IAM roles, making it complex to maintain. + +## 2. Proposed Solution: A Purely Terraform-Native Workflow + +We will replace the entire Lambda-based system with a declarative Terraform configuration that uses the `terraform-github-repo` module. This module natively supports the actions currently performed by the Python script. + +The new process will be: +1. A developer defines a new repository by adding a `module` block to a central Terraform configuration file. +2. Running `terraform apply` will instruct Terraform to perform all the necessary setup steps. + +### Mapping Lambda Actions to Terraform Resources + +| Action (Current Python Lambda) | Terraform Equivalent (`terraform-github-repo` module) | +| :--- | :--- | +| 1. Create a new repository. | The `github_repository` resource. The module uses this internally. | +| 2. Clone a template repository. | The `template` block within the `github_repository` resource. This is a native feature for creating a repo from a template. | +| 3. Write a `config.json` file. | The `github_repository_file` resource. The module accepts a `files` variable to manage this. | +| 4. 
Assign team permissions. | The `github_team_repository` resource. The module has a `teams` input for this. | +| 5. Create a pull request. | The `github_pull_request` resource. This can be defined outside the module to initialize the repository. | + +## 3. Detailed Migration Plan + +### Phase 1: Scoping and Proof of Concept + +1. **Identify All Template Variables:** Document all the key-value pairs that are currently passed into the `template_settings` of the Lambda. These will become variables in our new Terraform module. + +2. **Create a Wrapper Module:** Create a new, internal Terraform module that wraps the `terraform-github-repo` module. This wrapper will provide a simplified interface for our developers and contain the logic for creating the initial pull request. + +3. **Develop the PoC:** In a test environment, write a Terraform configuration that uses this new wrapper module to create a single, non-critical repository. The configuration should: + * Use the `template` feature to create the repository from the existing template repo. + * Use the `files` feature to add the `config.json`. + * Use the `teams` feature to grant permissions. + * Define a `github_pull_request` resource to create the initial PR. + +### Phase 2: Implementation and Import + +1. **Build Out the Full Configuration:** Create a new Git repository to house the Terraform configuration for all repositories that will be managed this way. + +2. **Import Existing Repositories:** For repositories previously created by the Lambda, use `terraform import` to bring them under Terraform's state management. This is a critical step to prevent any disruption. + * `terraform import module.my_repo.github_repository.this[0] my-repo-name` + * `terraform import module.my_repo.github_team_repository.teams["tf-module-admins"] my-repo-name:tf-module-admins` + +3. **Parallel Run:** For a transition period, both systems can exist. 
New repositories should be created using the Terraform method, while the Lambda is left in place to manage older ones if needed. + +### Phase 3: Testing and Validation + +1. **Dry Run with `terraform plan`:** Before applying any changes to a production repository, run `terraform plan` and carefully review the output to ensure it matches expectations and doesn't plan any destructive changes. + +2. **Full Application:** Once validated, apply the configuration to manage all target repositories. + +### Phase 4: Decommissioning + +Once all repositories are successfully managed by Terraform and the new workflow is stable, the old infrastructure can be safely removed. + +1. **Disable the Lambda Trigger:** The first step is to disable the mechanism that invokes the Lambda function. +2. **Delete the Lambda Function:** Remove the `template-automation-lambda` function from AWS. +3. **Delete the ECR Repository:** Delete the ECR repository holding the Lambda's container images. +4. **Delete the Deployment Pipeline:** Remove the `template-repos-lambda-deployment` Terraform configuration and state. +5. **Archive Old Repositories:** Archive the `template-automation-lambda` and `template-repos-lambda-deployment` Git repositories to mark them as deprecated. + +## 4. Rollback Plan + +If issues arise, we can revert by: +1. Removing the problematic repository from Terraform's state using `terraform state rm`. +2. Re-enabling or re-deploying the Lambda function to take over management again. +3. Manually correcting any unintended changes made by Terraform. diff --git a/docs/tf-native-v3.md b/docs/tf-native-v3.md new file mode 100644 index 00000000..b723b86e --- /dev/null +++ b/docs/tf-native-v3.md @@ -0,0 +1,75 @@ +# Plan for Migrating to a Terraform-Native EKS Deployment Workflow (v3) + +This document outlines the plan to replace the current Lambda/Ansible-based system with a streamlined, Terraform-native workflow for creating and configuring repositories for EKS deployments. + +## 1. 
Analysis of the Current State + +The current process for provisioning a new EKS cluster repository involves multiple, loosely-coupled components: + +1. **`template-automation-lambda`**: A Python Lambda function that creates a new GitHub repository from the `template-eks-cluster` template. It clones the template, adds a `config.json` file with user-provided settings, and opens a pull request. +2. **`generate_hcl_files.yml`**: An Ansible playbook inside the newly created repository that is run manually after the initial PR is merged. It reads the `config.json` and generates a set of Terragrunt HCL files (`root.hcl`, `account.hcl`, `region.hcl`, etc.). +3. **`terraform-eks-deployment`**: A Terraform module that is referenced by the generated Terragrunt configuration to deploy the actual EKS cluster. + +This workflow is complex, involves manual steps, and relies on a mix of technologies (Python, Lambda, Ansible, Terraform). + +## 2. Proposed Solution: A Unified, Terraform-Native Workflow + +We will create a single, unified Terraform workflow that handles the entire process of repository creation and configuration declaratively. This eliminates the need for the Lambda function and the Ansible playbook. + +The new process will be: +1. A developer defines a new EKS cluster by adding a single `module` block to a central Terraform configuration. +2. Running `terraform apply` will automatically: + a. Create a new GitHub repository. + b. Generate and commit all the necessary Terragrunt HCL files and `README.md`. + c. Configure team permissions for the repository. + +### Core Component: The New `terragrunt-eks-repo` Wrapper Module + +The centerpiece of this new workflow is a new Terraform module, `terragrunt-eks-repo`. This module will be responsible for all the setup logic. + +| Action (Old Workflow) | Terraform Equivalent (New `terragrunt-eks-repo` Module) | +| :--- | :--- | +| 1. Create a new repository from a template. 
| The module will call the `terraform-github-repo` module internally, using its `template` feature to clone from `template-eks-cluster`. | +| 2. Generate HCL files from `config.json`. | The module will contain HCL templates (`.tf.tpl` files). It will use Terraform's `templatefile()` function to render the final HCL content directly from its input variables. | +| 3. Write files to the repository. | The rendered file content will be passed to the `files` input of the underlying `terraform-github-repo` module, which uses the `github_repository_file` resource to commit them. | +| 4. Assign team permissions. | The module will accept a `teams` variable and pass it to the `terraform-github-repo` module to configure permissions using the `github_team_repository` resource. | + +## 3. Detailed Migration Plan + +### Phase 1: Develop the `terragrunt-eks-repo` Module + +1. **Create Module Scaffolding:** Create a new directory for the `terragrunt-eks-repo` module. + +2. **Define Input Variables:** Create a `variables.tf` file. The variables will be derived directly from the `generate_hcl_files.yml` playbook's `config` object (e.g., `environment`, `region`, `cluster_name`, `account`, `vpc`, etc.). + +3. **Create HCL Templates:** Create a `templates` directory within the module. For each file generated by the Ansible playbook (`root.hcl`, `account.hcl`, `region.hcl`, `vpc.hcl`, `cluster.hcl`, and `README.md`), create a corresponding `.tf.tpl` template file. Convert the Jinja2 syntax to Terraform's `${...}` interpolation syntax. + +4. **Implement Module Logic (`main.tf`):** + * Use `locals` to render the file content for each template using the `templatefile()` function. + * Call the `terraform-github-repo` module. + * Pass the repository name, template configuration, and team permissions to the module. + * Map the rendered local variables to the `files` input of the `terraform-github-repo` module. This will instruct it to create the files in the new repository. 
+ +### Phase 2: Implementation and Onboarding + +1. **Create a Central Management Repository:** Set up a new Git repository (e.g., `terragrunt-environments`) that will contain the Terraform configuration for creating all new EKS cluster repositories. + +2. **Onboard a Pilot Project:** In the new management repository, add a `main.tf` file. Add a module block that calls the newly created `terragrunt-eks-repo` module to provision a repository for a new test cluster. + +3. **Execute and Validate:** Run `terraform apply` to create the repository. Verify that: + * The repository is created on GitHub. + * It is correctly initialized from the `template-eks-cluster` template. + * All the Terragrunt HCL files and the `README.md` are present and correctly populated with the variable values. + * Team permissions are correctly assigned. + +### Phase 3: Decommissioning the Old Workflow + +Since we are not concerned with migrating existing repositories, the decommissioning process is straightforward. Once the new workflow is validated and adopted for all new cluster provisioning: + +1. **Disable the Lambda Function:** The Lambda trigger can be disabled in AWS. +2. **Archive Old Repositories:** The `template-automation-lambda` and `template-repos-lambda-deployment` Git repositories should be archived to prevent further use. +3. **Delete AWS Resources:** The old AWS resources (Lambda function, ECR repository, IAM roles) can be deleted via Terraform from the `template-repos-lambda-deployment` project. + +## 4. Rollback Plan + +As we are not migrating existing resources, a rollback is not applicable in the traditional sense. If the new workflow fails for a new repository, the state can be destroyed (`terraform destroy`), the module can be fixed, and the process can be re-run. The old Lambda-based system can be temporarily kept available for emergency use until the new workflow is fully proven. 
diff --git a/docs/tf-native-v4.md b/docs/tf-native-v4.md new file mode 100644 index 00000000..3d9245fa --- /dev/null +++ b/docs/tf-native-v4.md @@ -0,0 +1,79 @@ +# Plan for Migrating to a Terraform-Native EKS Deployment Workflow (v4) + +This document outlines the plan to replace the current Lambda/Ansible-based system with a streamlined, Terraform-native workflow by enhancing the `terraform-eks-deployment` module itself. + +## 1. Analysis of the Current State + +The current process for provisioning a new EKS cluster repository involves multiple components: + +1. **`template-automation-lambda`**: A Python Lambda function that creates a new GitHub repository from the `template-eks-cluster` template. +2. **`generate_hcl_files.yml`**: An Ansible playbook inside the new repository that is run manually to generate a set of Terragrunt HCL files (`root.hcl`, `account.hcl`, etc.). +3. **`terraform-eks-deployment`**: The Terraform module that is referenced by the generated Terragrunt configuration to deploy the actual EKS cluster. + +This workflow is complex, involves manual steps, and relies on a mix of technologies. + +## 2. Proposed Solution: A Unified, All-in-One EKS Deployment Module + +We will consolidate the entire workflow into the `terraform-eks-deployment` module. This module will be enhanced to handle not only the EKS deployment but also the initial GitHub repository creation and configuration. This eliminates the need for the Lambda function and the Ansible playbook. + +The new, unified process will be: +1. A developer defines a new EKS cluster by adding a single `module "eks_deployment"` block to a central Terraform configuration. +2. By setting `create_repository = true`, the developer instructs the module to perform the initial setup. +3. Running `terraform apply` will automatically: + a. Create a new GitHub repository using the `terraform-github-repo` module as a submodule. + b. Generate and commit all the necessary Terragrunt HCL files and a `README.md`. 
+ c. Configure team permissions for the repository. + +The same module, when referenced from within the newly created repository's Terragrunt files, will have `create_repository = false` and will proceed with deploying the EKS cluster as it does today. + +### Core Component: The Enhanced `terraform-eks-deployment` Module + +| Action (Old Workflow) | Terraform Equivalent (Inside `terraform-eks-deployment`) | +| :--- | :--- | +| 1. Create a new repository from a template. | A new submodule block calling `terraform-github-repo` will be added, controlled by a `create_repository` flag. | +| 2. Generate HCL files from `config.json`. | The module will contain a new `templates` directory with HCL templates (`.tf.tpl`). It will use `templatefile()` to render the final HCL content from its input variables. | +| 3. Write files to the repository. | The rendered file content will be passed to the `files` input of the `terraform-github-repo` submodule. | +| 4. Assign team permissions. | The module will accept a `teams` variable and pass it to the `terraform-github-repo` submodule. | + +## 3. Detailed Migration Plan + +### Phase 1: Enhance the `terraform-eks-deployment` Module + +1. **Add Input Variables:** In `variables.tf`, add new variables: + * `create_repository`: A boolean to control whether to execute the repository creation logic. Default to `false`. + * `repository_name`: The name of the GitHub repository to create. + * `repository_teams`: A map of teams and their permissions for the new repository. + * Variables derived from the `generate_hcl_files.yml` playbook's `config` object (e.g., `environment`, `region`, `cluster_name`, `account`, `vpc`, etc.). + +2. **Create HCL Templates:** Create a `templates` directory within the module. For each file generated by the Ansible playbook (`root.hcl`, `account.hcl`, `region.hcl`, `vpc.hcl`, `cluster.hcl`, and `README.md`), create a corresponding `.tf.tpl` template file. 
Convert the Jinja2 syntax to Terraform's `${...}` interpolation syntax. + +3. **Implement Module Logic (`main.tf`):** + * Use `locals` to render the file content for each template using the `templatefile()` function. + * Add a `module "github_repo"` block that calls the `terraform-github-repo` module. + * Set the `count` of this submodule to `var.create_repository ? 1 : 0`. + * Pass the repository name, template configuration, and team permissions to the submodule. + * Map the rendered local variables to the `files` input of the `github_repo` submodule. + +### Phase 2: Implementation and Onboarding + +1. **Create a Central Management Repository:** Set up a new Git repository (e.g., `terragrunt-environments`) that will contain the Terraform configuration for creating all new EKS cluster repositories. + +2. **Onboard a Pilot Project:** In the new management repository, add a `main.tf` file. Add a module block that calls the enhanced `terraform-eks-deployment` module with `create_repository = true` to provision a repository for a new test cluster. + +3. **Execute and Validate:** Run `terraform apply` to create the repository. Verify that: + * The repository is created on GitHub. + * It is correctly initialized from the `template-eks-cluster` template. + * All the Terragrunt HCL files and the `README.md` are present and correctly populated. + * Team permissions are correctly assigned. + +### Phase 3: Decommissioning the Old Workflow + +Since we are not migrating existing repositories, the decommissioning process is straightforward. + +1. **Disable the Lambda Function:** The Lambda trigger can be disabled in AWS. +2. **Archive Old Repositories:** The `template-automation-lambda` and `template-repos-lambda-deployment` Git repositories should be archived. +3. **Delete AWS Resources:** The old AWS resources (Lambda, ECR, IAM roles) can be deleted. + +## 4. Rollback Plan + +A rollback is not applicable in the traditional sense. 
If the new workflow fails, the state can be destroyed (`terraform destroy`), the module can be fixed, and the process can be re-run. The old Lambda-based system can be kept available for emergency use. diff --git a/docs/tf-native-v5.md b/docs/tf-native-v5.md new file mode 100644 index 00000000..541d50f8 --- /dev/null +++ b/docs/tf-native-v5.md @@ -0,0 +1,72 @@ +# Plan for Migrating to a Terraform-Native EKS Deployment Workflow (v5) + +This document outlines the plan to replace the current Lambda/Ansible-based system with a streamlined, Terraform-native workflow by enhancing the `terraform-eks-deployment` module. + +## 1. Analysis of the Current State + +The current process for provisioning a new EKS cluster repository involves multiple components: + +1. **`template-automation-lambda`**: A Python Lambda function that creates a new GitHub repository from a template. +2. **`generate_hcl_files.yml`**: An Ansible playbook inside the new repository that is run manually to generate a set of Terragrunt HCL files. +3. **`terraform-eks-deployment`**: The Terraform module that is referenced by the generated Terragrunt configuration to deploy the actual EKS cluster. + +This workflow is complex and involves manual steps. + +## 2. Proposed Solution: A Unified Repository Bootstrap Module + +We will consolidate the repository creation and configuration logic into the `terraform-eks-deployment` module. Its new, single purpose will be to bootstrap a fully configured GitHub repository for an EKS cluster. This eliminates the need for the Lambda function and the Ansible playbook. + +The new, unified process will be: +1. A developer defines a new EKS cluster repository by adding a single `module "eks_deployment"` block to a central Terraform configuration. +2. Running `terraform apply` will automatically: + a. Create a new GitHub repository using the `terraform-github-repo` module as a submodule. + b. Generate and commit all the necessary Terragrunt HCL files and a `README.md`. + c. 
Configure team permissions for the repository. + +The module will no longer be dual-purpose; it will *always* create a repository. The actual EKS deployment will be handled by the Terragrunt configuration within that new repository, which may in turn reference other modules. + +### Core Component: The Enhanced `terraform-eks-deployment` Module + +| Action (Old Workflow) | Terraform Equivalent (Inside `terraform-eks-deployment`) | +| :--- | :--- | +| 1. Create a new repository from a template. | A submodule block calling `terraform-github-repo` will create the repository. | +| 2. Generate HCL files from `config.json`. | The module will contain a `templates` directory with HCL templates (`.tf.tpl`). It will use `templatefile()` to render the final HCL content from its input variables. | +| 3. Write files to the repository. | The rendered file content will be passed to the `files` input of the `terraform-github-repo` submodule. | +| 4. Assign team permissions. | The module will accept a `teams` variable and pass it to the `terraform-github-repo` submodule. | + +## 3. Detailed Migration Plan + +### Phase 1: Enhance the `terraform-eks-deployment` Module + +1. **Define Input Variables:** In `variables.tf`, ensure all necessary variables are present. These are derived from the `generate_hcl_files.yml` playbook's `config` object (e.g., `repository_name`, `repository_teams`, `environment`, `region`, `cluster_name`, `account_config`, `vpc_config`, etc.). + +2. **Create HCL Templates:** Create a `templates` directory within the module. For each file generated by the Ansible playbook (`root.hcl`, `account.hcl`, `region.hcl`, `vpc.hcl`, `cluster.hcl`, and `README.md`), create a corresponding `.tf.tpl` template file. + +3. **Implement Module Logic (`main.tf`):** + * Use `locals` to render the file content for each template using the `templatefile()` function. + * Call the `terraform-github-repo` module unconditionally. 
+ * Pass the repository name, template configuration, team permissions, and the rendered file content to the submodule. + +### Phase 2: Implementation and Onboarding + +1. **Integrate into a Management Repository:** The enhanced `terraform-eks-deployment` module will be consumed from a designated infrastructure management repository (e.g., `terragrunt-environments`). + +2. **Onboard a Pilot Project:** In the management repository, add a module block that calls the enhanced `terraform-eks-deployment` module to provision a repository for a new test cluster. + +3. **Execute and Validate:** Run `terraform apply` to create the repository. Verify that: + * The repository is created on GitHub. + * It is correctly initialized from the `template-eks-cluster` template. + * All the Terragrunt HCL files and the `README.md` are present and correctly populated. + * Team permissions are correctly assigned. + +### Phase 3: Decommissioning the Old Workflow + +Since we are not migrating existing repositories, the decommissioning process is straightforward. + +1. **Disable the Lambda Function:** The Lambda trigger can be disabled in AWS. +2. **Archive Old Repositories:** The `template-automation-lambda` and `template-repos-lambda-deployment` Git repositories should be archived. +3. **Delete AWS Resources:** The old AWS resources (Lambda, ECR, IAM roles) can be deleted. + +## 4. Rollback Plan + +A rollback is not applicable in the traditional sense. If the new workflow fails, the state can be destroyed (`terraform destroy`), the module can be fixed, and the process can be re-run. The old Lambda-based system can be kept available for emergency use. 
diff --git a/docs/tf-native.md b/docs/tf-native.md new file mode 100644 index 00000000..a5e5a9cf --- /dev/null +++ b/docs/tf-native.md @@ -0,0 +1,72 @@ +# Plan for Migrating to a Terraform-Native GitHub Repository Management Workflow + +This document outlines the plan to transition from the current Lambda/Ansible-based repository management system to a purely Terraform-native approach, leveraging the `terraform-github-repo` module. + +## 1. Current State Analysis + +Our current workflow for managing GitHub repositories relies on a Lambda function that executes Ansible playbooks. This setup has the following key characteristics: + +* **Technology Stack:** AWS Lambda, Python, Ansible. +* **Process:** A Lambda function is triggered, which in turn runs an Ansible playbook to configure GitHub repositories. +* **Drawbacks:** + * **Complexity:** Involves multiple technologies (Lambda, Ansible, Python) which increases the maintenance overhead. + * **State Management:** Managing state across these different systems can be challenging. + * **Less Declarative:** While Ansible is declarative, the overall workflow is more imperative compared to a pure Terraform solution. + +## 2. Proposed Solution: Terraform-Native Workflow + +We will replace the existing Lambda/Ansible setup with a new workflow centered around the `terraform-github-repo` Terraform module. This module provides a comprehensive set of resources for managing GitHub repositories declaratively. + +* **Technology Stack:** Terraform. +* **Process:** A new Terraform configuration will be created that uses the `terraform-github-repo` module to define the desired state of our GitHub repositories. +* **Benefits:** + * **Simplicity:** A single technology (Terraform) will be used for infrastructure and repository management. + * **Declarative:** The entire configuration will be declarative, making it easier to understand and manage. 
+ * **State Management:** Terraform's state management will provide a single source of truth for the state of our repositories. + * **Reusability:** The `terraform-github-repo` module is a reusable component that can be used across multiple projects. + +## 3. Migration Plan + +The migration will be performed in the following phases: + +### Phase 1: Scoping and Setup + +1. **Identify Ansible Playbook Functionality:** Analyze the existing Ansible playbooks to identify all the repository configuration tasks they perform. This includes: + * Creating repositories. + * Managing collaborators and permissions. + * Configuring branch protection rules. + * Managing repository files (e.g., `CODEOWNERS`, license files). + * Setting up webhooks and deploy keys. + +2. **Map to Terraform Resources:** For each Ansible task, identify the corresponding resource in the `terraform-github-repo` module. + +3. **Setup New Terraform Project:** Create a new Git repository for the Terraform configuration that will manage the GitHub repositories. This repository will contain the new Terraform code. + +### Phase 2: Implementation + +1. **Develop Terraform Configuration:** Write the Terraform code that uses the `terraform-github-repo` module to replicate the functionality of the Ansible playbooks. The configuration should be modular and easily extensible. + +2. **Import Existing Resources:** Use `terraform import` to bring the existing GitHub repositories and their configurations under the management of the new Terraform configuration. This is a critical step to ensure a seamless transition without disrupting existing repositories. + +### Phase 3: Testing and Validation + +1. **Dry Run:** Perform a `terraform plan` to verify that the new configuration matches the existing state of the repositories. + +2. **Targeted Application:** Apply the new configuration to a non-critical repository first to validate the process. + +3. 
**Full Application:** Once the process is validated, apply the configuration to all repositories. + +### Phase 4: Decommissioning + +1. **Disable Lambda:** Disable the existing Lambda function to prevent it from making any further changes to the repositories. + +2. **Monitor:** Monitor the repositories for any unexpected changes or issues. + +3. **Remove Old Infrastructure:** Once the new system is stable, decommission the Lambda function and the associated Ansible playbooks. + +## 4. Rollback Plan + +In case of any issues, we can roll back to the previous system by: + +1. **Re-enabling the Lambda function.** +2. **Removing the new Terraform configuration from the state file using `terraform state rm`.** diff --git a/events/cloudformation-create-event.json b/events/cloudformation-create-event.json new file mode 100644 index 00000000..6c4299b4 --- /dev/null +++ b/events/cloudformation-create-event.json @@ -0,0 +1,17 @@ +{ + "RequestType": "Create", + "ServiceToken": "arn:aws-us-gov:lambda:us-gov-west-1:229685449397:function:service-catalog-repo-gen-template-automation", + "ResponseURL": "https://cloudformation-custom-resource-response-usgov-west-1.s3-us-gov-west-1.amazonaws.com/...", + "StackId": "arn:aws-us-gov:cloudformation:us-gov-west-1:229685449397:stack/test-repo-stack/12345678-1234-1234-1234-123456789012", + "RequestId": "unique-request-id-12345", + "LogicalResourceId": "MyTestRepository", + "ResourceType": "Custom::RepositoryCreator", + "ResourceProperties": { + "ServiceToken": "arn:aws-us-gov:lambda:us-gov-west-1:229685449397:function:service-catalog-repo-gen-template-automation", + "ProjectName": "example-test-repository", + "OwningTeam": "platform-team", + "Environment": "development", + "AwsRegion": "us-gov-west-1", + "AdditionalMetadata": "custom-value" + } +} diff --git a/events/service-catalog-event.json b/events/service-catalog-event.json new file mode 100644 index 00000000..31c84566 --- /dev/null +++ b/events/service-catalog-event.json @@ -0,0 +1,57 
@@ +{ + "version": "0", + "id": "12345678-1234-1234-1234-123456789012", + "detail-type": "Service Catalog Product Provisioning", + "source": "aws.servicecatalog", + "account": "123456789012", + "time": "2024-01-01T12:00:00Z", + "region": "us-east-1", + "resources": [ + "arn:aws:catalog:us-east-1:123456789012:portfolio/port-abcdefghijk" + ], + "detail": { + "eventName": "ProvisionProduct", + "requestId": "12345678-1234-1234-1234-123456789012", + "provisionedProductId": "pp-abcdefghijklm", + "provisionedProductName": "example-template-repos-cluster", + "productId": "prod-abcdefghijk", + "provisioningArtifactId": "pa-abcdefghijk", + "recordId": "rec-abcdefghijk", + "status": "SUCCEEDED", + "outputs": [ + { + "OutputKey": "RepositoryName", + "OutputValue": "example-template-repos-cluster" + }, + { + "OutputKey": "ClusterName", + "OutputValue": "example-cluster-dev" + } + ], + "provisioningParameters": { + "project_name": "example-template-repos-cluster", + "owning_team": "platform-team", + "account_name": "dev-account", + "aws_region": "us-gov-west-1", + "cluster_mailing_list": "eks-admins@example.com", + "cluster_name": "example-cluster-dev", + "eks_instance_disk_size": "100", + "eks_ng_desired_size": "2", + "eks_ng_max_size": "10", + "eks_ng_min_size": "2", + "environment": "development", + "environment_abbr": "dev", + "finops_project_name": "example_project", + "finops_project_number": "fp00000001", + "finops_project_role": "example_project_app", + "organization": "example:dept:team", + "vpc_domain_name": "dev.example.com", + "vpc_name": "vpc-dev", + "tags": { + "managed_by": "terraform", + "owner": "platform-team", + "slim:schedule": "8:00-17:00" + } + } + } +} diff --git a/events/test-event.json b/events/test-event.json new file mode 100644 index 00000000..99a078d4 --- /dev/null +++ b/events/test-event.json @@ -0,0 +1,47 @@ +{ + "version": "0", + "id": "12345678-1234-1234-1234-123456789012", + "detail-type": "Service Catalog Product Provisioning", + "source": 
"aws.servicecatalog", + "account": "123456789012", + "time": "2024-01-01T12:00:00Z", + "region": "us-east-1", + "resources": [ + "arn:aws:catalog:us-east-1:123456789012:portfolio/port-abcdefghijk" + ], + "detail": { + "eventName": "ProvisionProduct", + "requestId": "12345678-1234-1234-1234-123456789012", + "provisionedProductId": "pp-abcdefghijklm", + "provisionedProductName": "example-template-repos-cluster", + "productId": "prod-abcdefghijk", + "provisioningArtifactId": "pa-abcdefghijk", + "recordId": "rec-abcdefghijk", + "status": "SUCCEEDED", + "provisioningParameters": { + "project_name": "example-template-repos-cluster", + "owning_team": "platform-team", + "account_name": "dev-account", + "aws_region": "us-gov-west-1", + "cluster_mailing_list": "eks-admins@example.com", + "cluster_name": "example-cluster-dev", + "eks_instance_disk_size": "100", + "eks_ng_desired_size": "2", + "eks_ng_max_size": "10", + "eks_ng_min_size": "2", + "environment": "development", + "environment_abbr": "dev", + "finops_project_name": "example_project", + "finops_project_number": "fp00000001", + "finops_project_role": "example_project_app", + "organization": "example:dept:team", + "vpc_domain_name": "dev.example.com", + "vpc_name": "vpc-dev", + "tags": { + "managed_by": "terraform", + "owner": "platform-team", + "slim:schedule": "8:00-17:00" + } + } + } +} \ No newline at end of file diff --git a/github/workflows/integration-tests.yml b/github/workflows/integration-tests.yml new file mode 100644 index 00000000..3d05b0d9 --- /dev/null +++ b/github/workflows/integration-tests.yml @@ -0,0 +1,35 @@ +name: Integration Tests + +on: + pull_request: + branches: [ main ] + workflow_dispatch: + +env: + GITHUB_API_URL: https://api.github.com + GITHUB_ORG: ${{ vars.TEST_GITHUB_ORG }} + GITHUB_TEST_TEAM: ${{ vars.TEST_GITHUB_TEAM }} + +jobs: + integration-tests: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + 
python-version: '3.10' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements.txt + pip install pytest pytest-cov + + - name: Run integration tests + env: + GITHUB_TOKEN: ${{ secrets.TEST_GITHUB_TOKEN }} + run: | + pytest tests/integration -v -m integration --cov=template_automation diff --git a/lambda-template-repo-generator.code-workspace b/lambda-template-repo-generator.code-workspace new file mode 100644 index 00000000..e76798ce --- /dev/null +++ b/lambda-template-repo-generator.code-workspace @@ -0,0 +1,17 @@ +{ + "folders": [ + { + "path": "../lambda-template-repo-generator" + }, + { + "path": "../packer-pipeline" + }, + { + "path": "../template-automation-lambda" + }, + { + "path": "../terraform-aws-template-automation" + } + ], + "settings": {} +} \ No newline at end of file diff --git a/main.tf b/main.tf new file mode 100644 index 00000000..3512139d --- /dev/null +++ b/main.tf @@ -0,0 +1,57 @@ +# +# This Terraform configuration creates only the ECR repository for the container image. +# The Lambda function, API Gateway, IAM roles, and other infrastructure are managed by +# the terraform-aws-template-automation module. 
+ +provider "aws" { + region = var.aws_region +} + +data "aws_caller_identity" "current" {} + +resource "aws_ecrpublic_repository" "ecr_repo" { + repository_name = var.repository_name + + catalog_data { + about_text = var.catalog_data.about_text + architectures = var.catalog_data.architectures + description = var.catalog_data.description + operating_systems = var.catalog_data.operating_systems + usage_text = var.catalog_data.usage_text + } + + tags = var.tags +} + +locals { + repository_uri = aws_ecrpublic_repository.ecr_repo.repository_uri + repository_id = aws_ecrpublic_repository.ecr_repo.id + aws_account_id = data.aws_caller_identity.current.account_id + region = var.aws_region + arn = aws_ecrpublic_repository.ecr_repo.arn +} + +output "repository_uri" { + description = "The URI of the ECR repository" + value = local.repository_uri +} + +output "repository_id" { + description = "The ID of the ECR repository" + value = local.repository_id +} + +output "aws_account_id" { + description = "The ID of the AWS account" + value = local.aws_account_id +} + +output "region" { + description = "The AWS region where resources are created" + value = local.region +} + +output "arn" { + description = "The ARN of the ECR repository" + value = local.arn +} \ No newline at end of file diff --git a/packer.pkr.hcl b/packer.pkr.hcl new file mode 100644 index 00000000..96dbf3c9 --- /dev/null +++ b/packer.pkr.hcl @@ -0,0 +1,114 @@ +# Packer template for AWS Service Catalog Repository Generator Lambda +# +# This builds a Lambda container image that processes AWS Service Catalog provisioning +# events to automatically create and configure GitHub/GitLab repositories from templates. +# +# The Lambda function is triggered by EventBridge when Service Catalog provisions a product, +# and it creates a new repository with configuration based on the provisioning parameters. 
+ +packer { + required_plugins { + docker = { + source = "github.com/hashicorp/docker" + version = "~> 1" + } + amazon = { + source = "github.com/hashicorp/amazon" + version = "~> 1" + } + } +} + +variable "repository_uri" { + type = string + default = "" + description = "ECR repository URI for the Service Catalog Lambda Docker image" +} + +variable "tag" { + type = string + default = "latest" + description = "Docker image tag (typically a version number)" +} + +variable "terraform_version" { + type = string + default = "1.5.7" + description = "Terraform version (not used for Lambda builds, but required by packer-pipeline)" +} + +variable "base_image" { + type = string + default = "229685449397.dkr.ecr.us-gov-west-1.amazonaws.com/service-catalog-repo-generator/lambda-python:3.11" + description = "Base AWS Lambda Python image for building the container (using cloned private ECR image)" +} + +variable "ecr_login_username" { + type = string + description = "ECR login username for post-processor" + default = "" +} + +variable "ecr_login_password" { + type = string + description = "ECR login password for post-processor" + default = "" + sensitive = true +} + +variable "ecr_login_server" { + type = string + description = "ECR login server URL for post-processor" + default = "" +} + +source "docker" "lambda" { + image = var.base_image + commit = true + changes = [ + "WORKDIR /var/task", + "CMD [ \"template_automation.app.lambda_handler\" ]" + ] +} + +build { + name = "service-catalog-repo-generator" + + sources = [ + "source.docker.lambda" + ] + + # Copy pip config files first + provisioner "file" { + source = "pip.conf" + destination = "/etc/pip.conf" + } + + provisioner "file" { + source = "pip-cert.pem" + destination = "/etc/pip-cert.pem" + } + + # Copy application files + provisioner "file" { + source = "." 
+ destination = "/var/task" + } + + # Install Python dependencies for Service Catalog event handling + provisioner "shell" { + inline = [ + "cd /var/task", + "echo 'Installing dependencies for Service Catalog Lambda function...'", + "pip install -r requirements.txt", + "echo 'Lambda function built for AWS Service Catalog integration'" + ] + } + + post-processors { + post-processor "docker-tag" { + repository = var.repository_uri + tags = [var.tag] + } + } +} diff --git a/pip-cert.pem b/pip-cert.pem new file mode 100644 index 00000000..8a05f0b4 --- /dev/null +++ b/pip-cert.pem @@ -0,0 +1,323 @@ +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQGVCQdFyalIVHZ1OchWiMYDANBgkqhkiG9w0BAQwFADBs +MRMwEQYKCZImiZPyLGQBGRYDR292MRYwFAYKCZImiZPyLGQBGRYGQ2Vuc3VzMQww +CgYDVQQLEwNUQ08xDDAKBgNVBAsTA1BLSTEhMB8GA1UEAxMYVVMgQ2Vuc3VzIEJ1 +cmVhdSBSb290IENBMB4XDTE5MDcyNTE4MTAyOVoXDTI5MDcyNTE4MjAyN1owbDET +MBEGCgmSJomT8ixkARkWA0dvdjEWMBQGCgmSJomT8ixkARkWBkNlbnN1czEMMAoG +A1UECxMDVENPMQwwCgYDVQQLEwNQS0kxITAfBgNVBAMTGFVTIENlbnN1cyBCdXJl +YXUgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMWX8I9p +slFaUueuPpEFExgqKcGgoyTOBxFUCXNBnucL3cKRx9MC47kWOwQ94WYvI3LMcehC +6pOwIf5AuhrIdVrJaHSz317ENuDaiur9/qN3fBRidijHphynR/rwJSxiI3VQtj8G +SO4JmCA8dMsKayIl1RiKlQHPoNnSWyDEspAfenr0qq7PzbjKOEPXoO4eXO0plfB3 +aYd+qMRwHKQre4gRGpMfWu1w5JZqFItbXE/RSC38SoZWjkcMcjyTCDTSGY+j/aJw +SHx98riQ8SLQszL5Be0AmF0KHwMZNOsoaa5u/bF++g207W9guLVgO2Ak5D4Unyo3 +D7kcFSuBOVYdeT0XRi3iD0AwEkoCsVzeEOIqjAasj6hYD43O8GjfHpwGpAeASqTT +nbDajtuTsJrrBlLwpz49J5dihJ3Ah7jTirzQciEUZTXv3L7XpdBlt3/sv73Gn0F6 +jZPDANmHIfNHz0xWa9iES9sLPKln9cjnkJs/QlpooTJSrVuovGyzsbu1mb7PfBji +IMF8lVptjQYaWvvMXqXNx2+L6+uBVkEfmuZIs7Xen4ZNz4NP5MixTs3Tq2h81Hym +TbIlJUtSdwZ98jsX6YLerBYYMPawtSIH4Yfdq/Wpt7IHED47dTWdFfC0peqYfHIN +PoRG+eFYq5nHxadkGaifElPnNdvGblRLDj27AgMBAAGjUTBPMAsGA1UdDwQEAwIB +hjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTHXiB3QZv2GiBSkErqGoOT8cOr +HjAQBgkrBgEEAYI3FQEEAwIBADANBgkqhkiG9w0BAQwFAAOCAgEAdXsv6igAKGnq 
+VS79nePbjGj2Z+SFdM2jRVibv06mWR3uVqFNCz2zqlIXzX7PJmK7HycWDK82UWMh +8J0cn1O+PYWFalzhPWk7t1c6EK8wV63/iKj+voqNwZWL7L1/EQiQ8B4OPIyf7v5Y +j3/jqrvufLgGCyz+0JhBY8CBEGZ1knijrHxTv0DOV0ykKI0OpUIes+8SOTdszTDb +XujzE4ekSRTDqWJOCbsQb3KbBUr/k8APVq/Ir/xmS1WmauyP3zBIxMlPMmu9XTw/ +5nRUKKQe8FrVHELLO32iS+6bqdTNmkD7z/VyzWmBA0FVt8upD6Bs8U/bHjoiL/Jk +W3BQ6owq7u+B5w/Cl+WsgQcgVlDLlBZWMKnEng1n2MhqUnzf0dDGA99vrzLPVcPT +yoexQe1E1Y2EoORgaGbsnjkRTwppUnpnxkWrzObBieYB1ir0rRTbKS5hgwXu55Uc +6ypmCLUnQaDVWIZyKKwtmr4n/rX5KJPxj/zT0F+jH1WDyMDVg6jYyu1HIPcABkAU +OlsSr7Tfct75/JGf18oPSFMkV1kzeLUK21vflcMp+ZK0m2TRZyCLvMB/lEsRjsSM +wrgYk7cR14RqJ+RTA7IJqFQfNAXqV1ra+stZYYoLI83oK4shOhHLiO9lR6hSi43f +0w7ALm+8qd1Ih+E5BjmKBJAEFB5Zyzs= +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIF1jCCA76gAwIBAgITLgAAAAmcP+bslIv04AAAAAAACTANBgkqhkiG9w0BAQwF +ADBsMRMwEQYKCZImiZPyLGQBGRYDR292MRYwFAYKCZImiZPyLGQBGRYGQ2Vuc3Vz +MQwwCgYDVQQLEwNUQ08xDDAKBgNVBAsTA1BLSTEhMB8GA1UEAxMYVVMgQ2Vuc3Vz +IEJ1cmVhdSBSb290IENBMB4XDTE5MDgwNjE1MDc0NVoXDTI0MDgwNjE1MTc0NVow +YTETMBEGCgmSJomT8ixkARkWA2dvdjEWMBQGCgmSJomT8ixkARkWBmNlbnN1czES +MBAGCgmSJomT8ixkARkWAmFkMR4wHAYDVQQDExVVUyBDZW5zdXMgQnVyZWF1IENB +IDEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCiUqJa4e90dNdAFC0W +ju9arRst3FchtNxT0ZPdg/2UpDFN35PFBQ4G1RJxGVGuhpkRmqLdtI9t9BQHZ/tk +QZ6ELJRJVxQMPONBuoXlUbnS3CHwDT5+YIvVZr3jHjv96tq6C2SYJ1BNeqDYjhdK +gF3WXUJpb6lbAwZtv7aHZUSVXcnW/hCkfI2aRZoGXCcgi6hbcJRC74HCGW0eLtCZ +M0Y5+lEGdKLAOiIsl4kea+34Uh5eHjIp9LHCicIfx+5RT5xor4hOJldu2pOmjzrg +FBCz59/5wZHIyQCHOu92p/VGO9eeCxCDlT8DWa78c2HjCnf0FvymlxoHPdH89Rhv +idPFAgMBAAGjggF6MIIBdjAQBgkrBgEEAYI3FQEEAwIBATAjBgkrBgEEAYI3FQIE +FgQUNDptGIuzWncMER7QFKnL+JZPMwswHQYDVR0OBBYEFMSLwaPcjo2CqYcxhzj8 +U1q1Px/KMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1UdDwQEAwIBhjAS +BgNVHRMBAf8ECDAGAQH/AgEBMB8GA1UdIwQYMBaAFMdeIHdBm/YaIFKQSuoag5Px +w6seMFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6Ly9wa2kudGNvLmNlbnN1cy5nb3Yv +Q2VydEVucm9sbC9VUyUyMENlbnN1cyUyMEJ1cmVhdSUyMFJvb3QlMjBDQS5jcmww 
+ZQYIKwYBBQUHAQEEWTBXMFUGCCsGAQUFBzAChklodHRwOi8vcGtpLnRjby5jZW5z +dXMuZ292L0NlcnRFbnJvbGwvVVMlMjBDZW5zdXMlMjBCdXJlYXUlMjBSb290JTIw +Q0EuY3J0MA0GCSqGSIb3DQEBDAUAA4ICAQAvLJiXBncvqEq2WjU4CtvB+g9GKgna +MIeu8D41/BdkhTpLR/Cus6Oq+N18cCyyBHNCPS4pz/cDzyzQvNMIDTP7tpcTwEfc +QW/WgPvfJtEmzOaRtNeSBBci1bySX4OMKnzB9ZQbGphaqYaVAG6n+NLCkg1MSvqK +cexAf8wkAJyjx2YOUh+xqwhXRE6UKlc9TVK0b2anVtg4FLNiUznZ6KerEKXx/wxv +XvOZRAY902P2FIRY9qbkEdAshNSA5HlY27pbdH4eZCTyk5uSTlIZQRtngL6w1Gy8 +Xh70AIv+kj38iKp8N4VgksHWS0Viw3Cg4h+3/hY08E/uLCzUKjdZt9I46bM1YKMv +K2LUA8xrWp0IN+wcdp2UUrAlVSHEp6LW+NR+VHtl0QiMYjXA+AvkoRvcoEotgeZP +mqfK9auR+3WiDUrkVLzPoPMQHWE9QXt+eErzBh+YXqqvPgPBGqA25CGwzyrs8iBT +jlhbJArFNO6KzQUwyf/Vw3dwX5oOebGuoh+KX9yRaN+q1ZqqWL1Jn40NXF8KQyLk +Ro4c9m+fpkTWhuxW6zW8YIbnmtNDk2X3YfAY1dIKAUIW24Si0SMka8pC2d9qaL2m +fyD0JoF+49cPDtTNHsUP5QR3a+JjqAT8haladoSyiNmO24ysueI7sg9A+zY8oJrM +Gi2tB39Jg7J6/w== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIF1jCCA76gAwIBAgITLgAAAApfi2u0+zjcuQAAAAAACjANBgkqhkiG9w0BAQwF +ADBsMRMwEQYKCZImiZPyLGQBGRYDR292MRYwFAYKCZImiZPyLGQBGRYGQ2Vuc3Vz +MQwwCgYDVQQLEwNUQ08xDDAKBgNVBAsTA1BLSTEhMB8GA1UEAxMYVVMgQ2Vuc3Vz +IEJ1cmVhdSBSb290IENBMB4XDTE5MDgwNjE1MDc0M1oXDTI0MDgwNjE1MTc0M1ow +YTETMBEGCgmSJomT8ixkARkWA2dvdjEWMBQGCgmSJomT8ixkARkWBmNlbnN1czES +MBAGCgmSJomT8ixkARkWAmFkMR4wHAYDVQQDExVVUyBDZW5zdXMgQnVyZWF1IENB +IDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDFLt4b/8hnKu0yk7IC +C0qY8gAF20DZrbE6rILe2quYeSQcztIw3H6K2+uAsvpCRjRc4+ra+bKQWLpTv5gP +6l6iDMlun3po1+Qqlga4S4/kJMoYP52AbcdHog33vdvpmtRhL2WLBdHfXLfahVx3 +OB1WkrZMFP4T3L4mTo8SW4abdIf5Q7SmClrHzy+znv4jhKEU9tiY7NXJBCINETx3 +5B8PE8F0r1s0Mv+yhoDHWk2Poa/rC+CrXZ+NdzWfI2ajUc1Nb2b+6f4Wrpc9qC+a +kxYywDcrUoGnwqJYDoIFZY2ErqTQUw7JGQkG/i+7gYs+VaHPcD3DNQq3iFzab26I +0vG5AgMBAAGjggF6MIIBdjAQBgkrBgEEAYI3FQEEAwIBATAjBgkrBgEEAYI3FQIE +FgQU6ZLQoy5LJaVqTI5Em9TBptKdLmAwHQYDVR0OBBYEFOpnUT2Oc868n6qxmUrj +FdfUn3tOMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1UdDwQEAwIBhjAS +BgNVHRMBAf8ECDAGAQH/AgEBMB8GA1UdIwQYMBaAFMdeIHdBm/YaIFKQSuoag5Px 
+w6seMFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6Ly9wa2kudGNvLmNlbnN1cy5nb3Yv +Q2VydEVucm9sbC9VUyUyMENlbnN1cyUyMEJ1cmVhdSUyMFJvb3QlMjBDQS5jcmww +ZQYIKwYBBQUHAQEEWTBXMFUGCCsGAQUFBzAChklodHRwOi8vcGtpLnRjby5jZW5z +dXMuZ292L0NlcnRFbnJvbGwvVVMlMjBDZW5zdXMlMjBCdXJlYXUlMjBSb290JTIw +Q0EuY3J0MA0GCSqGSIb3DQEBDAUAA4ICAQCYQm6VusLYzHy9PM0P4dSkHSUVGug+ +8Q/Gn1qQ+pejTpx0fR+pxq8DP8Ua3qgWsIz3scrONairxWVUW5AA4E0VXU0fO6n+ +4DLdJnwwIEIkV410p5w79l9Dl2NiI31Ijv0Y8PwEzXmcSvcz1Qc05TyRV+1yv6Uh +nHfnu4kHXj26NOOsPjrEJ60l0tcOT4p3edkwYRf3XzQ19k4ITEBeYF76y1FX8H+W +RTIjQNr8BXUVt+afJZXgUgSB0xHfSRBhTUXiFvKbs1BpICNQmhbFIaz7GJZkvx9r +b+7Um2EQNIQKxoe4rG4mar62Ux3k0i9o8O9nccQSl9VCuSvTyCmtpKpsKRRitMf2 +vBQ9D14p5pzDdFZQC75B8lkibXpuk8fQ3/CIMqK4547wIO8tgz4wqN8ID4tEBgqZ +Fot9XSJpDAZHYKx5GWVwKmhqwefACqqASjHR8NVakAd3EkcQ06SEzGYTTq2duWhi +fOxpJKtMtw9JTfbOG9Az28rRWGCk1vVHmtkVHApD3XdAV3RG6w/AqjNu/IY70fmd +wULhegJxbVdQucgwR4WyNbx7hCJYvoEyL5L7ZQwBpFXHnOI7wJFGw2eo5xIUehUS +4jPpb2OolWHEOjMkEkRfgfrJsnt/blpKXRmYRFUd1+c5VBOtsaYv3iYArxZziQxf +pR508zEDCd9cRQ== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIFsjCCA5qgAwIBAgITLgAAAAvaREPe3QGJiAAAAAAACzANBgkqhkiG9w0BAQwF +ADBsMRMwEQYKCZImiZPyLGQBGRYDR292MRYwFAYKCZImiZPyLGQBGRYGQ2Vuc3Vz +MQwwCgYDVQQLEwNUQ08xDDAKBgNVBAsTA1BLSTEhMB8GA1UEAxMYVVMgQ2Vuc3Vz +IEJ1cmVhdSBSb290IENBMB4XDTE5MDgwNjE1MDc0MVoXDTI0MDgwNjE1MTc0MVow +YjETMBEGCgmSJomT8ixkARkWA2dvdjEWMBQGCgmSJomT8ixkARkWBmNlbnN1czET +MBEGCgmSJomT8ixkARkWA2VhZDEeMBwGA1UEAxMVVVMgQ2Vuc3VzIEJ1cmVhdSBD +QSAzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxA+7bWM9ZExFO/ZN +uFodd+ktg0TWojeV8QJTYAdtwzMquqDl/zMLgkHPD8xC730qMdKB6Df74i3moN5c +6h9S087T0tdf02U0J95AfO06oZiaGNzq/zacINhfbxWf2ZAyZCiwpcQL3w3uAjS1 +MK++iC8ZWDBnd5z64ewCDFS8d9FD5RrJ0GxGCcC4IJ8DyhOq7i3a/Td29wLTP1wz +QuFLVD/5JFWirqnJwgqVVEUdzf8ZK3MSk9DAZcIjY/mIZgnnZ+ukcD0TtYkOnPU7 +j7EGeqo6Jby3T75p4x3uRlNaEKAqXBqiu7bVx+T0cTtuJEjtw4l/8WEGEFGI6Jfs +0Du9+QIDAQABo4IBVTCCAVEwEAYJKwYBBAGCNxUBBAMCAQAwHQYDVR0OBBYEFCB4 +OetP7QLwgNqbXIDospFC1inEMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsG 
+A1UdDwQEAwIBhjASBgNVHRMBAf8ECDAGAQH/AgEBMB8GA1UdIwQYMBaAFMdeIHdB +m/YaIFKQSuoag5Pxw6seMFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6Ly9wa2kudGNv +LmNlbnN1cy5nb3YvQ2VydEVucm9sbC9VUyUyMENlbnN1cyUyMEJ1cmVhdSUyMFJv +b3QlMjBDQS5jcmwwZQYIKwYBBQUHAQEEWTBXMFUGCCsGAQUFBzAChklodHRwOi8v +cGtpLnRjby5jZW5zdXMuZ292L0NlcnRFbnJvbGwvVVMlMjBDZW5zdXMlMjBCdXJl +YXUlMjBSb290JTIwQ0EuY3J0MA0GCSqGSIb3DQEBDAUAA4ICAQCGmm3uxuTvZcWm +ihlWtSa/0H88MM3ubcOAqYmNHWCzynemR9CxUZfuR/qi8HvRKHm5HwDVT1LtL3Wf +K+9Lc7mcBHStZUdNgINVsqZzNi1L54v/UD3lAu79M/yh16DREvEnWLlc1CUhti+Q +P6aooRfF1VIAzoNZz3iUBj43uRJLewYhlFYRy8GFzRhoKJ/HNZI9nqlV7notKtvV +P2Ae++stlTGzrUEYi91tgJdoSOKweDg4EDjEr4y51yY2l8eJJTXtRRIMDdtv1wbF +XVpxcbWDvAFmYKFjpspaEiD3gAEdSDGcCv23KGFxZCMw5Chblg2drWCSCbJQ2VE/ +XiHcHGxrTQVru+ocZgEqH600BDAC+/nrVP1lJyfKsY2KUh9X/vzbAbx7r45l7LJh +Q173miuG1Hjm60OEtUsNobtVOG/TCxqHflRuMgVK5mGb00Hu5SxMel/ma5bhvWCS +ZQIYEIwo2b6GBicTuhHhBo0e4BdA3vvz8WroUTiezmMo8BveyYViqyWFCB26Wvhy +NB4pfg+GFfTl0wiHSpc1RfBFuoohkGgUMt0ci0jJp1ofb6MeK+p3DqBfKyhQiz+7 +EsgudLUeALpj38b5mWjvN17YBby5suRJnH8lv7+Z1nooo+MqapZZyrRu56PtEBJM +3m7NDAL9JACMk8yF5WDToKtcPuTgpg== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIF0zCCA7ugAwIBAgITLgAAAA+ydH8TcbjZAgAAAAAADzANBgkqhkiG9w0BAQwF +ADBsMRMwEQYKCZImiZPyLGQBGRYDR292MRYwFAYKCZImiZPyLGQBGRYGQ2Vuc3Vz +MQwwCgYDVQQLEwNUQ08xDDAKBgNVBAsTA1BLSTEhMB8GA1UEAxMYVVMgQ2Vuc3Vz +IEJ1cmVhdSBSb290IENBMB4XDTIyMDkyMjE0NDQwOFoXDTI3MDkyMjE0NTQwOFow +YTETMBEGCgmSJomT8ixkARkWA2dvdjEWMBQGCgmSJomT8ixkARkWBmNlbnN1czES +MBAGCgmSJomT8ixkARkWAmFkMR4wHAYDVQQDExVVUyBDZW5zdXMgQnVyZWF1IENB +IDEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCiUqJa4e90dNdAFC0W +ju9arRst3FchtNxT0ZPdg/2UpDFN35PFBQ4G1RJxGVGuhpkRmqLdtI9t9BQHZ/tk +QZ6ELJRJVxQMPONBuoXlUbnS3CHwDT5+YIvVZr3jHjv96tq6C2SYJ1BNeqDYjhdK +gF3WXUJpb6lbAwZtv7aHZUSVXcnW/hCkfI2aRZoGXCcgi6hbcJRC74HCGW0eLtCZ +M0Y5+lEGdKLAOiIsl4kea+34Uh5eHjIp9LHCicIfx+5RT5xor4hOJldu2pOmjzrg +FBCz59/5wZHIyQCHOu92p/VGO9eeCxCDlT8DWa78c2HjCnf0FvymlxoHPdH89Rhv 
+idPFAgMBAAGjggF3MIIBczAQBgkrBgEEAYI3FQEEAwIBAjAjBgkrBgEEAYI3FQIE +FgQUFE9/OhOsohsjHyLcCd1NqTNkdQYwHQYDVR0OBBYEFMSLwaPcjo2CqYcxhzj8 +U1q1Px/KMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1UdDwQEAwIBhjAP +BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFMdeIHdBm/YaIFKQSuoag5Pxw6se +MFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6Ly9wa2kudGNvLmNlbnN1cy5nb3YvQ2Vy +dEVucm9sbC9VUyUyMENlbnN1cyUyMEJ1cmVhdSUyMFJvb3QlMjBDQS5jcmwwZQYI +KwYBBQUHAQEEWTBXMFUGCCsGAQUFBzAChklodHRwOi8vcGtpLnRjby5jZW5zdXMu +Z292L0NlcnRFbnJvbGwvVVMlMjBDZW5zdXMlMjBCdXJlYXUlMjBSb290JTIwQ0Eu +Y3J0MA0GCSqGSIb3DQEBDAUAA4ICAQCdYsU2TVWTAzVjqPqlO+PtxTcoDxBjlvo+ +L519/iTxzlcz0Kiao83fGhsSitzNf0LsSTOWrAuCprX0sn5If4pasZKqVp+ZJnjF +H9Wpi/4gsaCtvY3V4Hm5ZS1BffUHrre/kR//pn9f2Axu3tTVfHNAEVr0kRvq9wPD +yMe5BzLtm9amOwFvAYP/69zXk4ig88mbOmXjK+EC5AUzwBhg9oI/Kv2AeLbKx+nr +DuguMe6RCp4NXBS1X3/cjRN37+ayJEHynFdWKiVNcvxABVFLGVHBA4fMD9kTjT2a +cf413mhywUcVTfpoj/94Kcqvl3oxgHWGIig9RWExMkvmrkYT5hGqfws+NIGrCGaZ +GA0cUYAY5cbkAg8If3Htt4aSCdTu6g/RbatMFND2GURO2fHPajBILBiDxCJM6OmT +SUQPghQC3QvE48CM5J6KAjPosGh8Ay454FhKv0ShvhKTaHzN6anBih8AbwU5G8iP +XeoNY+jZbkv1gBJ4J+8nffm1n5aFbssbxazppqTLpFDXimduWUxSXZbjwGGwHc7G +FmLj14c8og+ItE+meToVXt6oFSF9hkri5Lmanen9SqU9IPgxiTv91olwmXW6d/3Y +D202odbWVpAIIjiVJngfyOulCeEQsz5WjmPyIjFkXNz8NiwAJSJu1XtBtAMdaCDe +6z6OUG7UaQ== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIF0zCCA7ugAwIBAgITLgAAABDGRuhzKgVoqQAAAAAAEDANBgkqhkiG9w0BAQwF +ADBsMRMwEQYKCZImiZPyLGQBGRYDR292MRYwFAYKCZImiZPyLGQBGRYGQ2Vuc3Vz +MQwwCgYDVQQLEwNUQ08xDDAKBgNVBAsTA1BLSTEhMB8GA1UEAxMYVVMgQ2Vuc3Vz +IEJ1cmVhdSBSb290IENBMB4XDTIyMDkyMjE0NDUxN1oXDTI3MDkyMjE0NTUxN1ow +YTETMBEGCgmSJomT8ixkARkWA2dvdjEWMBQGCgmSJomT8ixkARkWBmNlbnN1czES +MBAGCgmSJomT8ixkARkWAmFkMR4wHAYDVQQDExVVUyBDZW5zdXMgQnVyZWF1IENB +IDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDFLt4b/8hnKu0yk7IC +C0qY8gAF20DZrbE6rILe2quYeSQcztIw3H6K2+uAsvpCRjRc4+ra+bKQWLpTv5gP +6l6iDMlun3po1+Qqlga4S4/kJMoYP52AbcdHog33vdvpmtRhL2WLBdHfXLfahVx3 +OB1WkrZMFP4T3L4mTo8SW4abdIf5Q7SmClrHzy+znv4jhKEU9tiY7NXJBCINETx3 
+5B8PE8F0r1s0Mv+yhoDHWk2Poa/rC+CrXZ+NdzWfI2ajUc1Nb2b+6f4Wrpc9qC+a +kxYywDcrUoGnwqJYDoIFZY2ErqTQUw7JGQkG/i+7gYs+VaHPcD3DNQq3iFzab26I +0vG5AgMBAAGjggF3MIIBczAQBgkrBgEEAYI3FQEEAwIBAjAjBgkrBgEEAYI3FQIE +FgQUxgMHEbdrxtDC64yaqubXVeW060owHQYDVR0OBBYEFOpnUT2Oc868n6qxmUrj +FdfUn3tOMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1UdDwQEAwIBhjAP +BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFMdeIHdBm/YaIFKQSuoag5Pxw6se +MFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6Ly9wa2kudGNvLmNlbnN1cy5nb3YvQ2Vy +dEVucm9sbC9VUyUyMENlbnN1cyUyMEJ1cmVhdSUyMFJvb3QlMjBDQS5jcmwwZQYI +KwYBBQUHAQEEWTBXMFUGCCsGAQUFBzAChklodHRwOi8vcGtpLnRjby5jZW5zdXMu +Z292L0NlcnRFbnJvbGwvVVMlMjBDZW5zdXMlMjBCdXJlYXUlMjBSb290JTIwQ0Eu +Y3J0MA0GCSqGSIb3DQEBDAUAA4ICAQB/Kn2/ohaTr4XDgu5msLiKzjA3Rqb4Wf4r +FmzpJXcaB9N4Tyg19qgZ9l57AVDO6DWlXBENY+FXERe/qrvhFawZqActT7dPqJJv +Z30hwBcXc8ELjNxVp54MDJfd2oHUkXwJ46i1GphHfie0Q/csoraRpf/DjXuaruxM +Vgt4Roo6zBGf2nSCfqVLR2NZ93orfSybg5g2eutYuftkd5tzbcxdhHlTlhhbNpIV +quVaT46hN1h/q1bMmS4bGBdLUQggY5BtY9RM4gDhcyh1K8k5auM+uPyWqnnd10wI +vuRSu2zNueWlqVstSTbnZdf138nssj+MzN8xcmn+mXH7z8COXwhJLBKRr7Xg7l7G +UMmc86eYbmpphs3LhzZNMooAGUedm15Ln1u9wgywtP6CbpvBVIcSxmjJeiN6bXy6 +dtbZCCziijO1UehOqc81jZy/jdG158D0WfOumNkx1biGwZ/YR+oGslaSkMr58e/7 +abPBMlQmDwvlTWeiUqMZJAzNHk13c8jSeMtaGXtE9D9Sv2oPVGwjeB2krn1Lb8uU +YeEl0YmQ2W1GpoYC4zU7gnnNjSbLr13L8Gjsmk9FYy4HWDRgJvAvF2O3DldldxP2 +MurPmXriFtEUNo4e1UKJciPJlYChWz1/0Hwncab8AWaw3MPkyYpELKis+vTELriO +iHAYOPwOJg== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIF1DCCA7ygAwIBAgITLgAAAA4zbBR3VlxWyAAAAAAADjANBgkqhkiG9w0BAQwF +ADBsMRMwEQYKCZImiZPyLGQBGRYDR292MRYwFAYKCZImiZPyLGQBGRYGQ2Vuc3Vz +MQwwCgYDVQQLEwNUQ08xDDAKBgNVBAsTA1BLSTEhMB8GA1UEAxMYVVMgQ2Vuc3Vz +IEJ1cmVhdSBSb290IENBMB4XDTIyMDIyODE3NTUxOFoXDTI3MDIyODE4MDUxOFow +YjETMBEGCgmSJomT8ixkARkWA2dvdjEWMBQGCgmSJomT8ixkARkWBmNlbnN1czET +MBEGCgmSJomT8ixkARkWA2VhZDEeMBwGA1UEAxMVVVMgQ2Vuc3VzIEJ1cmVhdSBD +QSAzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxA+7bWM9ZExFO/ZN +uFodd+ktg0TWojeV8QJTYAdtwzMquqDl/zMLgkHPD8xC730qMdKB6Df74i3moN5c 
+6h9S087T0tdf02U0J95AfO06oZiaGNzq/zacINhfbxWf2ZAyZCiwpcQL3w3uAjS1 +MK++iC8ZWDBnd5z64ewCDFS8d9FD5RrJ0GxGCcC4IJ8DyhOq7i3a/Td29wLTP1wz +QuFLVD/5JFWirqnJwgqVVEUdzf8ZK3MSk9DAZcIjY/mIZgnnZ+ukcD0TtYkOnPU7 +j7EGeqo6Jby3T75p4x3uRlNaEKAqXBqiu7bVx+T0cTtuJEjtw4l/8WEGEFGI6Jfs +0Du9+QIDAQABo4IBdzCCAXMwEAYJKwYBBAGCNxUBBAMCAQEwIwYJKwYBBAGCNxUC +BBYEFE2wPwIWNvlAbZy05X4kklJu09q8MB0GA1UdDgQWBBQgeDnrT+0C8IDam1yA +6LKRQtYpxDAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTALBgNVHQ8EBAMCAYYw +DwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBTHXiB3QZv2GiBSkErqGoOT8cOr +HjBaBgNVHR8EUzBRME+gTaBLhklodHRwOi8vcGtpLnRjby5jZW5zdXMuZ292L0Nl +cnRFbnJvbGwvVVMlMjBDZW5zdXMlMjBCdXJlYXUlMjBSb290JTIwQ0EuY3JsMGUG +CCsGAQUFBwEBBFkwVzBVBggrBgEFBQcwAoZJaHR0cDovL3BraS50Y28uY2Vuc3Vz +Lmdvdi9DZXJ0RW5yb2xsL1VTJTIwQ2Vuc3VzJTIwQnVyZWF1JTIwUm9vdCUyMENB +LmNydDANBgkqhkiG9w0BAQwFAAOCAgEAjDWz6k+6ModUkHRJgTjv8nHfPJv1qI9d +WUejF3YSwU6ExE44C5C2oEXPtEAWR+LiEsW+U4ZZ8Zgi/F5qI3AblQbNXDplAbo/ +6UoKeieBftV5cf7WgbdFoVFuX2HppSVrDQPf4t6DpCM6qVs8/EIrBQOeKhVckhB1 +XgiuFTb3sRoOmWvRramBf3xp7WJ1P4T76gBUg2I6GMFV3EO/mv8XWM9QzFZ1nFOQ +z8/zRa1x53WuAc36d8ESGqL0ZxjNjSNU/HtpJnwtYj3hzJIsYgm938nU5p1diF00 +C89+a0CKkVnL7JW6tC8MQqnyE7TBBWjSmssxa4FHT753W/NaU6JVIJqOwuGTTenv +bQlHi+NxfqL0alNXX3ukUNDPB5XfGWCEBMGZ9xUNDXdxTS7lJzZGAddjqu94e5gd +KgDiEq52RQgkbZ8d+DYwpo/4XY7rj/bC4jvVXUhVd8E/NAbzTSo3VppK0pi/wDri +lm4p8WlzrCoGTVPeiZdCApa/bOoaq+X7/vN4HDUakJZFEPfxIwznfJbDEu7hrVE3 +fck3YuSBrQx6yYtmpLEnybaB5so0w+djeswxBVQSlBODYhrMFW+l3VIRa9PqHQWw +8TvAglbHxFUWWtlHBbwXgVdOqAVlh1LHU8mfbtkY8D4h+iXk+4nvBY1aKdDaZFTB +kDgqyXZwIww= +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIFSDCCBDCgAwIBAgIJAMn9gqHMdnl3MA0GCSqGSIb3DQEBBQUAMIGfMQswCQYD +VQQGEwJVUzERMA8GA1UECBMITWFyeWxhbmQxGzAZBgNVBAoTElUuUy4gQ2Vuc3Vz +IEJ1cmVhdTEiMCAGA1UECxMZVGVsZWNvbW11bmljYXRpb25zIE9mZmljZTEaMBgG +A1UEAxMRY2EudGNvLmNlbnN1cy5nb3YxIDAeBgkqhkiG9w0BCQEWEWNhQHRjby5j +ZW5zdXMuZ292MB4XDTEyMDgxNTE2MTM0OFoXDTMyMDgxMDE2MTM0OFowgZ8xCzAJ +BgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEbMBkGA1UEChMSVS5TLiBDZW5z 
+dXMgQnVyZWF1MSIwIAYDVQQLExlUZWxlY29tbXVuaWNhdGlvbnMgT2ZmaWNlMRow +GAYDVQQDExFjYS50Y28uY2Vuc3VzLmdvdjEgMB4GCSqGSIb3DQEJARYRY2FAdGNv +LmNlbnN1cy5nb3YwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDSqB5S +s674S6Hnpnl+/cT3OLrUCmuM1KZs+Uo5EsFcZzm4Me/XiF8izGSydFtAKFRbyyk5 +j/K5WLGxo7Ix6eCA1PZXWu6aJOfMmPRb1LaeIst1IlSCpjUoZ8pl60fjYLtbEK79 +STM/nrdV0E2EqcJu7dfzMB1oK96NG6tu8C7m7UgIbSv15NDapgDhyril6J4wVQJU +DOUGRbWjv0Qo6Re0NPBkRFf3owToopNQlQSGZU2UnUehheqXPzk4VQisPrhcVsbg +iu4c98gjtGHK1k2DyJOwsFq2hWmAByLZLJXR7pTqv7Ue8gogFl/ggbvuWrKlVmCh +wKln1pPSLYZ/txTZAgMBAAGjggGDMIIBfzA4BgNVHR8EMTAvMC2gK6AphidodHRw +Oi8vY2EuYXBwcy50Y28uY2Vuc3VzLmdvdi9jZXJ0cy9jcmwwHQYDVR0OBBYEFA8x +pgy5aVvXWgTVO8E7yyO3kp9yMIHUBgNVHSMEgcwwgcmAFA8xpgy5aVvXWgTVO8E7 +yyO3kp9yoYGlpIGiMIGfMQswCQYDVQQGEwJVUzERMA8GA1UECBMITWFyeWxhbmQx +GzAZBgNVBAoTElUuUy4gQ2Vuc3VzIEJ1cmVhdTEiMCAGA1UECxMZVGVsZWNvbW11 +bmljYXRpb25zIE9mZmljZTEaMBgGA1UEAxMRY2EudGNvLmNlbnN1cy5nb3YxIDAe +BgkqhkiG9w0BCQEWEWNhQHRjby5jZW5zdXMuZ292ggkAyf2Cocx2eXcwDwYDVR0T +AQH/BAUwAwEB/zALBgNVHQ8EBAMCAQYwLwYDVR0RBCgwJoERY2FAdGNvLmNlbnN1 +cy5nb3aCEWNhLnRjby5jZW5zdXMuZ292MA0GCSqGSIb3DQEBBQUAA4IBAQCLNU9/ +OxA2adbFXwiAh8XztL3MN7OUeXasSKtSDo00Ays/Sph1DXkUozSwx3B2JHtfrMj+ +A64qzjRm/Y7sDaM4SFa+Y3rdt7U9UY2UxQLo92zHQMqIbQhrdKBTiCVMrBvBzwWg +SI7KPi2lel499yb0vH/I6czuyQNTuYzHAsufYKeMMq4CeiBbboAegClpYJi5jJLl +dFQZpDUwSs+Pfb95CjPlfc0V3AH6GazbS3BNMMghECpL4rF0m7F7L3nDCklx1PsC +z2chyETY1X74Cg3D1mFV3iUjIvr6+eIZDQ3BStGwFjzxmdH2U2yh1nJnJzNXka9g +lUpluNENkgVZmOys +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIE1zCCA7+gAwIBAgITZQAANNYDIG4D4LElTwABAAA01jANBgkqhkiG9w0BAQsF +ADBiMRMwEQYKCZImiZPyLGQBGRYDZ292MRYwFAYKCZImiZPyLGQBGRYGY2Vuc3Vz +MRMwEQYKCZImiZPyLGQBGRYDZWFkMR4wHAYDVQQDExVVUyBDZW5zdXMgQnVyZWF1 +IENBIDMwHhcNMjIxMjI3MjExNTIxWhcNMjYxMjI3MjEyNTIxWjBoMRMwEQYKCZIm +iZPyLGQBGRYDZ292MRYwFAYKCZImiZPyLGQBGRYGY2Vuc3VzMRMwEQYKCZImiZPy +LGQBGRYDZWFkMSQwIgYDVQQDExtVUyBDZW5zdXMgQnVyZWF1IENBIDMgU3ViIDEw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPzIqL5D96G48OMzx7WZdi 
+01e6K5Tllvz5REVKMOlFIS22y/iAnr3hbA1FXH1ML+t0n7e7jKic+E4pXc90n5DP +0bBS5+srnkw3OvjTY//uBU6rMl5vTtbGY3BhL0jsoeT+/JdTTrif6gyNCSkpNvw0 +Hao3Yc5kfcU5Vo90nm1+gonOqa6bQFN/i4hwI2quu4M3IkLJZaWQQ0z1pIbbJyk0 +qANrUKy4yTABo4KkNdqKmRvvvRWuDpFmNJwDDpdT010HDX5Pdc48fFVPO0Faoox9 +A7BtBZL273u7O9dpE0ajTHk1De5ZxbgO8yFmGWVj6BYgI86HJCq74RP4K6IJuOGZ +AgMBAAGjggF+MIIBejAQBgkrBgEEAYI3FQEEAwIBADAdBgNVHQ4EFgQUGFK9+ZBI +M/dcDY4ObcigYRSrASQwGQYJKwYBBAGCNxQCBAweCgBTAHUAYgBDAEEwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUIHg560/tAvCA +2ptcgOiykULWKcQwVwYDVR0fBFAwTjBMoEqgSIZGaHR0cDovL3BraS5lYWQuY2Vu +c3VzLmdvdi9DZXJ0RW5yb2xsL1VTJTIwQ2Vuc3VzJTIwQnVyZWF1JTIwQ0ElMjAz +LmNybDCBkAYIKwYBBQUHAQEEgYMwgYAwUgYIKwYBBQUHMAKGRmh0dHA6Ly9wa2ku +ZWFkLmNlbnN1cy5nb3YvQ2VydEVucm9sbC9VUyUyMENlbnN1cyUyMEJ1cmVhdSUy +MENBJTIwMy5jcnQwKgYIKwYBBQUHMAGGHmh0dHA6Ly9wa2kuZWFkLmNlbnN1cy5n +b3Yvb2NzcDANBgkqhkiG9w0BAQsFAAOCAQEAm1wFAR44iAl7dNHMjzIaaQe7dBbQ +gyS1t2mygO843JtcS2J/m3yGmEfo8wEwK5IxwX2UTmnc7Dh/iWlMO6cl8JKN12Fp +FM/yfpb+jaKECrsGW3uY5yKhrqmVGO9YnbiiGN07w0t+dbWAYGCtULoocYhFaLVQ +68Iv9KpOKVB3XKbP4bI2uhtx9H+uPHanhWVTJRHjg5pqI+xV7BoPfmods74oQfgm +PrsZqbwEvItVBMTGFQvhi60iEklk42s7ln/X7EqpKjtXwR4WAGuWPjTJ3OWkvVa4 +cNFBQRSALyDpqJFCqFoZBym9coyibi39QkWD2eizR4wm69jC66GOEmEb/A== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIE1zCCA7+gAwIBAgITZQAANNSyNhQfwZNfDwABAAA01DANBgkqhkiG9w0BAQsF +ADBiMRMwEQYKCZImiZPyLGQBGRYDZ292MRYwFAYKCZImiZPyLGQBGRYGY2Vuc3Vz +MRMwEQYKCZImiZPyLGQBGRYDZWFkMR4wHAYDVQQDExVVUyBDZW5zdXMgQnVyZWF1 +IENBIDMwHhcNMjIxMjI3MTcyOTQ3WhcNMjYxMjI3MTczOTQ3WjBoMRMwEQYKCZIm +iZPyLGQBGRYDZ292MRYwFAYKCZImiZPyLGQBGRYGY2Vuc3VzMRMwEQYKCZImiZPy +LGQBGRYDZWFkMSQwIgYDVQQDExtVUyBDZW5zdXMgQnVyZWF1IENBIDMgU3ViIDIw +ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+k0X7b2zULKIK7n3QEo6I +tY03iLD1+h4SLS+TcD1boOS5SR5A7nmtcSkn03xieHzQvb2YdQ8+ltlBBXFeQR4g +vTieZ77DN1pqDLkwThHscavRr8HHyuW20Bf9YYH11DzpuXe4WsMhkLeJWzZJ5GPI +TwWZFeCluJ9fb9/8wPhVERSDYtqS3DwdJ/6qkueJZ75AOMcmObx5pQWszypYQupm 
+L+oiofej7mu0gb7ioXwwM7XL8f28a2BEDFqM5M0sitBrC1yxN7a3cRnegT+PlCe/ +yiiihAZVYQt/HDEs4R4A85Wx/YUhiB3BKkyTUIV+abjeWMIrRi17SrxNDT9ZQkld +AgMBAAGjggF+MIIBejAQBgkrBgEEAYI3FQEEAwIBADAdBgNVHQ4EFgQU4wpH6ieo +Hr13KKDb4stKDQFKE/MwGQYJKwYBBAGCNxQCBAweCgBTAHUAYgBDAEEwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUIHg560/tAvCA +2ptcgOiykULWKcQwVwYDVR0fBFAwTjBMoEqgSIZGaHR0cDovL3BraS5lYWQuY2Vu +c3VzLmdvdi9DZXJ0RW5yb2xsL1VTJTIwQ2Vuc3VzJTIwQnVyZWF1JTIwQ0ElMjAz +LmNybDCBkAYIKwYBBQUHAQEEgYMwgYAwUgYIKwYBBQUHMAKGRmh0dHA6Ly9wa2ku +ZWFkLmNlbnN1cy5nb3YvQ2VydEVucm9sbC9VUyUyMENlbnN1cyUyMEJ1cmVhdSUy +MENBJTIwMy5jcnQwKgYIKwYBBQUHMAGGHmh0dHA6Ly9wa2kuZWFkLmNlbnN1cy5n +b3Yvb2NzcDANBgkqhkiG9w0BAQsFAAOCAQEAs3Kf6bImA8lfZweCuCtcaSDRCr0X +pyr8A1TI95PgzpAEptGay/Ve2Bbs9JAzMIPqznEy7hC9kNY6Wn8jRxwSFhHJ1MVV +bMITRguhJ5asApmDInCx1/iha3WnsnmeonmPFOgpF/lgiyY7kMwXPzBNYPrs3qdf +AFTaF0rMRxJ3nz0R6C2K16hCDNOFW1E8X3eBFRK9poGsdOzpkrugrFDXGBWGIxIr +IUIE1xbQQzVv/qZ9Q1s7g6nt3zci//CgBXXRHn30G9SWbHASJhbN/XZOYMKtS15T +COzOm7B5Ujjw2h8YspiZKgINsWLbhU9E5OQkJuHeDpBpp/EFMbwsRQH//A== +-----END CERTIFICATE----- diff --git a/pip.conf b/pip.conf new file mode 100644 index 00000000..81d61437 --- /dev/null +++ b/pip.conf @@ -0,0 +1,10 @@ +[global] +cert = ~/.pip/pip-cert.pem +# proxy = http://proxy.tco.census.gov:3128 +index = https://nexus.it.census.gov:8443/repository/DataScience-Group/pypi +index-url = https://nexus.it.census.gov:8443/repository/DataScience-Group/simple +trusted-host = nexus.it.census.gov + pypi.python.org + pypi.org + files.pythonhosted.org + proxy.tco.census.gov \ No newline at end of file diff --git a/playbook.yml b/playbook.yml new file mode 100644 index 00000000..1fd5e745 --- /dev/null +++ b/playbook.yml @@ -0,0 +1,22 @@ +--- +- hosts: localhost + connection: local + gather_facts: false + tasks: + - name: Create /var/task directory + file: + path: /var/task + state: directory + mode: '0755' + + - name: Copy application files to /var/task + copy: + src: "{{ playbook_dir }}/" + dest: /var/task/ 
# scripts/cleanup_test_repos.py
import os
import requests
import logging

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')

def get_env_var(name):
    """Return the value of environment variable *name*; raise ValueError if unset or empty."""
    value = os.environ.get(name)
    if not value:
        raise ValueError(f"Environment variable {name} is not set.")
    return value

def delete_repository(api_base_url, headers, org_name, repo_name):
    """Delete one repository via the GitHub REST API, logging (never raising) on failure.

    Args:
        api_base_url: GitHub API base URL (e.g. https://api.github.com or a GHES endpoint).
        headers: request headers including the Authorization token.
        org_name: organization that owns the repository.
        repo_name: repository to delete.
    """
    delete_url = f"{api_base_url}/repos/{org_name}/{repo_name}"
    try:
        response = requests.delete(
            delete_url,
            headers=headers,
            verify=False  # NOTE(review): TLS verification disabled; point `verify` at the internal CA bundle instead
        )
        response.raise_for_status()  # Raise an exception for bad status codes
        logging.info(f"Successfully deleted repository: {repo_name}")
    except requests.exceptions.RequestException as e:
        logging.error(f"Failed to delete repository {repo_name}: {e}")
        # RequestException carries the response (may be None for connection errors).
        if e.response is not None:
            logging.error(f"Response status: {e.response.status_code}")
            logging.error(f"Response text: {e.response.text}")
        else:
            logging.error("No response received from the API.")

def list_and_archive_test_repos(api_base_url, headers, org_name):
    """List all repositories in the org and delete those named ``temp-test-repo-*``.

    Fixes over the previous revision:
    - Pagination now follows only the Link-header URL. The old code sent an
      explicit ``page`` query parameter *and* followed the Link URL (which
      already embeds its own ``page``), producing conflicting duplicates.
    - Each deletion is logged once instead of three times.
    - Removed the unused ``archived`` lookup.

    Note: deleting while paginating can cause the server-side page windows to
    shift, so a single pass may miss some repos; rerunning the script picks
    up any stragglers (the summary says "during this run" for that reason).
    """
    repos_url = f"{api_base_url}/orgs/{org_name}/repos"
    params = {'per_page': 100}  # Adjust per_page as needed
    page = 1
    deleted_count = 0

    logging.info(f"Fetching repositories from organization: {org_name}")

    while True:
        logging.info(f"Fetching page {page} of repositories...")
        try:
            response = requests.get(repos_url, headers=headers, params=params, verify=False)
            response.raise_for_status()
            repos = response.json()
            logging.info(f"Found {len(repos)} repositories on page {page}.")

            if not repos:
                logging.info("No more repositories found.")
                break

            for repo in repos:
                repo_name = repo.get("name")
                if repo_name and repo_name.startswith("temp-test-repo-"):
                    logging.info(f"Deleting repository: {repo_name}")
                    delete_repository(api_base_url, headers, org_name, repo_name)
                    deleted_count += 1

            # GitHub signals further pages via the RFC 5988 Link header.
            next_link = response.links.get('next')
            if not next_link:
                break
            repos_url = next_link['url']
            params = None  # the Link URL already carries page/per_page
            page += 1

        except requests.exceptions.RequestException as e:
            logging.error(f"Failed to fetch repositories: {e}")
            if e.response is not None:
                logging.error(f"Response status: {e.response.status_code}")
                logging.error(f"Response text: {e.response.text}")
            break
        except Exception as e:
            logging.error(f"An unexpected error occurred: {e}")
            break

    logging.info(f"Finished cleanup. Deleted {deleted_count} test repositories during this run.")

if __name__ == "__main__":
    try:
        token = get_env_var("GITHUB_TOKEN")
        api_url = get_env_var("GITHUB_API")
        org = get_env_var("GITHUB_ORG")

        req_headers = {
            "Authorization": f"token {token}",
            "Accept": "application/vnd.github.v3+json",
            "Content-Type": "application/json"
        }

        # Suppress InsecureRequestWarning for verify=False
        # Ensure urllib3 is available or handle the import error
        try:
            import urllib3
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        except ImportError:
            logging.warning("urllib3 not found, cannot disable InsecureRequestWarning.")

        list_and_archive_test_repos(api_url, req_headers, org)

    except ValueError as e:
        logging.error(e)
        exit(1)
    except Exception as e:
        logging.error(f"An unexpected error occurred during script execution: {e}")
        exit(1)
+""" +import os +import shutil +import subprocess +import sys +import shutil +from pathlib import Path + +# Constants +LAMBDA_TASK_ROOT = '/var/task' +TMP_DIR = '/tmp' +PYTHON_VERSION = "3.11" # Match the Lambda container's Python version + +def run_command(cmd, check=True): + """Run a shell command and print its output""" + print(f"Running: {cmd}") + result = subprocess.run(cmd, shell=True, text=True, check=check, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + print(result.stdout) + return result + +def copy_directory(src, dest): + """Copy a directory to destination""" + print(f"Copying '{src}' to '{dest}'") + shutil.copytree(src, dest, dirs_exist_ok=True) + +def copy_file(src, dest): + """Copy a file to destination""" + print(f"Copying '{src}' to '{dest}'") + shutil.copy2(src, dest) + +def install_dependencies(): + """Install Python dependencies""" + print("=== Installing dependencies from requirements.txt ===") + + # Create a site-packages directory if it doesn't exist + site_packages = f"{LAMBDA_TASK_ROOT}/lib/python{PYTHON_VERSION}/site-packages" + os.makedirs(site_packages, exist_ok=True) + + # Install dependencies directly to the site-packages directory + run_command(f"pip3 install --no-cache-dir -r {TMP_DIR}/requirements.txt -t {LAMBDA_TASK_ROOT}") + + # Explicitly install critical dependencies + print("=== Explicitly installing critical dependencies ===") + run_command(f"pip3 install --no-cache-dir pydantic jinja2 PyGithub -t {LAMBDA_TASK_ROOT}") + + # Create a .pth file to ensure the Lambda runtime can find the packages + with open(f"{LAMBDA_TASK_ROOT}/lambda_path.pth", "w") as f: + f.write(f"{LAMBDA_TASK_ROOT}\n") + + print("=== Installing package in development mode ===") + run_command(f"pip3 install --no-cache-dir -e {TMP_DIR} -t {LAMBDA_TASK_ROOT} -v") + +def verify_dependencies(): + """Verify that key dependencies are installed correctly""" + print("=== Verifying dependencies installation ===") + + # Print Python path for debugging + 
print("Python sys.path:") + run_command("python3 -c 'import sys; print(sys.path)'") + + # Check key dependencies + dependencies = ['pydantic', 'jinja2', 'github'] # Add critical dependencies here + with open(f"{TMP_DIR}/requirements.txt") as f: + for line in f: + line = line.strip() + if line and not line.startswith("#"): + pkg = line.split("=")[0].split("<")[0].split(">")[0].split("~")[0].strip() + if pkg and pkg not in dependencies: + dependencies.append(pkg) + + # Use the Lambda container's Python to verify imports + for dep in dependencies: + cmd = f"cd {LAMBDA_TASK_ROOT} && python3 -c 'import {dep}; print(f\"{dep} installed successfully\")' || echo '{dep} not installed correctly'" + run_command(cmd, check=False) + +def setup_lambda_environment(): + """Main function to set up the Lambda environment""" + print("=== Setting up Lambda environment ===") + + # Debug info + print("=== Python and pip versions ===") + run_command("python3 --version") + run_command("pip3 --version") + + # Install dependencies + install_dependencies() + + # Copy app.py to Lambda task root + print("=== Copying app.py to Lambda task root ===") + copy_file(f"{TMP_DIR}/app.py", f"{LAMBDA_TASK_ROOT}/app.py") + + # Copy template_automation directory + print("=== Copying template_automation package ===") + copy_directory(f"{TMP_DIR}/template_automation", f"{LAMBDA_TASK_ROOT}/template_automation") + + # Create a wrapper script that ensures the Python path is set correctly + with open(f"{LAMBDA_TASK_ROOT}/.env", "w") as f: + f.write(f"PYTHONPATH={LAMBDA_TASK_ROOT}:{LAMBDA_TASK_ROOT}/lib/python{PYTHON_VERSION}/site-packages\n") + + # Verify dependencies + verify_dependencies() + + # List installed packages + print("=== Listing installed Python packages ===") + run_command("pip3 list") + + # Verify task directory structure + print("=== Verifying Lambda task root contents ===") + run_command(f"ls -la {LAMBDA_TASK_ROOT}") + run_command(f"ls -la {LAMBDA_TASK_ROOT}/template_automation") + + # Final 
check - try to import critical modules from the Lambda environment + print("=== Testing key imports from Lambda environment ===") + test_import = """ +import sys +print("Python Path:", sys.path) +try: + import pydantic + print("pydantic successfully imported:", pydantic.__file__) +except ImportError as e: + print("Error importing pydantic:", str(e)) +try: + import jinja2 + print("jinja2 successfully imported:", jinja2.__file__) +except ImportError as e: + print("Error importing jinja2:", str(e)) +try: + import github + print("github successfully imported:", github.__file__) +except ImportError as e: + print("Error importing github:", str(e)) +""" + with open(f"{LAMBDA_TASK_ROOT}/test_imports.py", "w") as f: + f.write(test_import) + run_command(f"cd {LAMBDA_TASK_ROOT} && python3 test_imports.py") + +if __name__ == "__main__": + setup_lambda_environment() + print("Lambda setup completed successfully") + sys.exit(0) \ No newline at end of file diff --git a/scripts/validate_github_token.py b/scripts/validate_github_token.py new file mode 100755 index 00000000..2c3763bf --- /dev/null +++ b/scripts/validate_github_token.py @@ -0,0 +1,321 @@ +#!/usr/bin/env python3 +""" +GitHub Token Validation Script + +This script retrieves a GitHub token from AWS Secrets Manager and validates it +by making test API calls to GitHub. It checks both authentication and permissions. 
#!/usr/bin/env python3
"""
GitHub Token Validation Script

This script retrieves a GitHub token from AWS Secrets Manager and validates it
by making test API calls to GitHub. It checks both authentication and permissions.

Usage:
    python validate_github_token.py --secret-name [SECRET_NAME] --region [AWS_REGION] --api-url [GITHUB_API_URL]

Example:
    python validate_github_token.py --secret-name github/token --region us-east-1 --api-url https://api.github.com
"""

import argparse
import json
import logging
import sys
from typing import Dict, Optional, Tuple, List

import boto3
import requests
from botocore.exceptions import ClientError

# Set up logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger("github-token-validator")

def _gh_headers(token: str) -> Dict[str, str]:
    """Standard GitHub REST v3 headers for token auth (shared by every check below)."""
    return {
        'Accept': 'application/vnd.github.v3+json',
        'Authorization': f'token {token}'
    }

def parse_args():
    """Parse command-line arguments."""
    parser = argparse.ArgumentParser(description='Validate GitHub token from AWS Secrets Manager')
    parser.add_argument('--secret-name', required=True, help='Name of the secret in AWS Secrets Manager')
    parser.add_argument('--region', default=None, help='AWS region (default: use AWS config)')
    parser.add_argument('--api-url', default='https://api.github.com', help='GitHub API URL (default: https://api.github.com)')
    parser.add_argument('--org', default=None, help='GitHub organization to test repo access')
    parser.add_argument('--repo', default=None, help='GitHub repository to test access (format: org/repo)')
    parser.add_argument('--verify-ssl', action='store_true', default=True, help='Verify SSL certificates')
    parser.add_argument('--no-verify-ssl', dest='verify_ssl', action='store_false', help='Do not verify SSL certificates')

    return parser.parse_args()

def get_secret(secret_name: str, region: str = None) -> Optional[str]:
    """Get a secret from AWS Secrets Manager.

    Handles both plain-text secrets and JSON secrets; for JSON objects, looks
    for common token keys and falls back to the first value.

    Args:
        secret_name: Name of the secret
        region: AWS region

    Returns:
        The secret value as a string, or None if retrieval fails
    """
    logger.info(f"Retrieving secret: {secret_name}")
    session = boto3.session.Session(region_name=region)
    client = session.client('secretsmanager')

    try:
        response = client.get_secret_value(SecretId=secret_name)
        if 'SecretString' not in response:
            # Binary-only secret: nothing usable as a token string
            logger.error("Secret doesn't contain a SecretString")
            return None

        secret_string = response['SecretString']
        logger.info(f"Secret retrieved successfully (length: {len(secret_string)})")

        # Try to parse as JSON
        try:
            secret_data = json.loads(secret_string)
        except json.JSONDecodeError:
            # Not JSON, assume plain text token
            logger.info("Secret is plain text, using as token")
            return secret_string

        if isinstance(secret_data, dict):
            # Handle common token formats
            for key in ['token', 'github_token', 'access_token', 'pat']:
                if key in secret_data:
                    logger.info(f"Found key '{key}' in JSON secret")
                    return secret_data[key]

            # If no specific token key, return first value
            if secret_data:
                first_key = next(iter(secret_data))
                logger.info(f"Using first key '{first_key}' from JSON secret")
                return secret_data[first_key]

            logger.error("Secret is a JSON object but has no recognized token keys")
            return None

        if isinstance(secret_data, str):
            logger.info("Secret is a JSON string")
            return secret_data

        # JSON but neither dict nor string (e.g. a list/number): unusable
        return None

    except ClientError as e:
        logger.error(f"Error retrieving secret: {e}")
        return None

def test_github_token(token: str, api_url: str, verify_ssl: bool) -> Tuple[bool, Dict]:
    """Test a GitHub token by calling the /user endpoint.

    Args:
        token: GitHub token
        api_url: GitHub API URL
        verify_ssl: Whether to verify SSL certificates

    Returns:
        Tuple of (success, user_info). user_info is the authenticated user's
        JSON from GET /user. (The previous docstring called this
        "installation_info", but /user returns user data, not GitHub App
        installation data.)
    """
    headers = _gh_headers(token)

    logger.info(f"Testing GitHub token against {api_url}/user")
    try:
        response = requests.get(
            f"{api_url}/user",
            headers=headers,
            verify=verify_ssl,
            timeout=10
        )

        if response.status_code == 200:
            user_info = response.json()
            logger.info(f"Authentication successful - user: {user_info.get('login')}")
            return True, user_info
        else:
            logger.error(f"Authentication failed: {response.status_code} - {response.text}")
            return False, {}

    except requests.RequestException as e:
        logger.error(f"Request error: {e}")
        return False, {}

def check_rate_limit(token: str, api_url: str, verify_ssl: bool) -> Dict:
    """Check GitHub API rate limits for the token; return the rate-limit JSON or {}."""
    headers = _gh_headers(token)

    logger.info(f"Checking rate limits at {api_url}/rate_limit")
    try:
        response = requests.get(
            f"{api_url}/rate_limit",
            headers=headers,
            verify=verify_ssl,
            timeout=10
        )

        if response.status_code == 200:
            rate_info = response.json()
            logger.info(f"Rate limits retrieved successfully")
            return rate_info
        else:
            logger.error(f"Rate limit check failed: {response.status_code} - {response.text}")
            return {}

    except requests.RequestException as e:
        logger.error(f"Request error: {e}")
        return {}

def check_organization_access(token: str, api_url: str, org: str, verify_ssl: bool) -> bool:
    """Check if the token has access to an organization. Vacuously True when org is empty."""
    if not org:
        logger.info("No organization specified, skipping organization check")
        return True

    headers = _gh_headers(token)

    logger.info(f"Checking organization access for: {org}")
    try:
        response = requests.get(
            f"{api_url}/orgs/{org}",
            headers=headers,
            verify=verify_ssl,
            timeout=10
        )

        if response.status_code == 200:
            org_info = response.json()
            logger.info(f"Organization access confirmed - org: {org_info.get('login')}")
            return True
        else:
            logger.error(f"Organization access check failed: {response.status_code} - {response.text}")
            return False

    except requests.RequestException as e:
        logger.error(f"Request error: {e}")
        return False

def check_repository_access(token: str, api_url: str, repo: str, verify_ssl: bool) -> bool:
    """Check if the token has access to a repository ("org/repo"). Vacuously True when repo is empty."""
    if not repo:
        logger.info("No repository specified, skipping repository check")
        return True

    headers = _gh_headers(token)

    logger.info(f"Checking repository access for: {repo}")
    try:
        response = requests.get(
            f"{api_url}/repos/{repo}",
            headers=headers,
            verify=verify_ssl,
            timeout=10
        )

        if response.status_code == 200:
            repo_info = response.json()
            logger.info(f"Repository access confirmed - repo: {repo_info.get('full_name')}")
            return True
        else:
            logger.error(f"Repository access check failed: {response.status_code} - {response.text}")
            return False

    except requests.RequestException as e:
        logger.error(f"Request error: {e}")
        return False

def check_token_scopes(token: str, api_url: str, verify_ssl: bool) -> List[str]:
    """Return the OAuth scopes attached to the token (from the X-OAuth-Scopes header).

    Fine-grained tokens and GitHub App tokens return no X-OAuth-Scopes header,
    so an empty list does not necessarily mean the token lacks permissions.
    """
    headers = _gh_headers(token)

    logger.info("Checking token scopes")
    try:
        response = requests.get(
            f"{api_url}/user",  # Any endpoint will return scopes in headers
            headers=headers,
            verify=verify_ssl,
            timeout=10
        )

        # GitHub returns token scopes in the X-OAuth-Scopes header
        scopes = response.headers.get('X-OAuth-Scopes', '')
        if scopes:
            scope_list = [s.strip() for s in scopes.split(',')]
            logger.info(f"Token scopes: {scopes}")
            return scope_list
        else:
            logger.warning("No scopes found in response headers")
            return []

    except requests.RequestException as e:
        logger.error(f"Request error: {e}")
        return []

def main():
    """Main execution: retrieve the token, then run each validation check in turn."""
    args = parse_args()

    # Get the GitHub token from Secrets Manager
    token = get_secret(args.secret_name, args.region)
    if not token:
        logger.error("Failed to retrieve GitHub token")
        sys.exit(1)

    logger.info(f"GitHub API URL: {args.api_url}")
    logger.info(f"SSL verification: {'enabled' if args.verify_ssl else 'disabled'}")

    # Test authentication
    auth_success, user_info = test_github_token(token, args.api_url, args.verify_ssl)
    if not auth_success:
        logger.error("GitHub token validation failed")
        sys.exit(1)

    # GET /user returns the authenticated *user*, so report user fields.
    # The previous labels (Installation ID / App ID / Target Type) described
    # GitHub App installation data this endpoint never returns, so every one
    # of them always printed "Unknown".
    print("=====================================")
    print("GitHub Token Validation Results")
    print("=====================================")
    print("✅ Authentication: Success")
    print(f"👤 User: {user_info.get('login', 'Unknown')}")
    print(f"🆔 User ID: {user_info.get('id', 'Unknown')}")
    print(f"🏢 Account Type: {user_info.get('type', 'Unknown')}")

    # Check token scopes (classic OAuth/PAT tokens only)
    scopes = check_token_scopes(token, args.api_url, args.verify_ssl)
    print("\n🔑 Token Scopes:")
    if scopes:
        for perm in sorted(scopes):
            print(f"  - {perm}")
    else:
        print("  No scopes found")

    # Check rate limits
    rate_info = check_rate_limit(token, args.api_url, args.verify_ssl)
    if rate_info and 'resources' in rate_info:
        core_rate = rate_info['resources'].get('core', {})
        print("\n📊 Rate Limits:")
        print(f"  Core: {core_rate.get('remaining', 'Unknown')}/{core_rate.get('limit', 'Unknown')} requests remaining")
        reset_time = core_rate.get('reset', 0)
        if reset_time:
            from datetime import datetime
            reset_datetime = datetime.fromtimestamp(reset_time)
            print(f"  Reset: {reset_datetime.strftime('%Y-%m-%d %H:%M:%S')}")

    # Check organization access if specified
    if args.org:
        org_access = check_organization_access(token, args.api_url, args.org, args.verify_ssl)
        print(f"\n🏢 Organization Access ({args.org}): {'✅ Yes' if org_access else '❌ No'}")

    # Check repository access if specified
    if args.repo:
        repo_access = check_repository_access(token, args.api_url, args.repo, args.verify_ssl)
        print(f"\n📁 Repository Access ({args.repo}): {'✅ Yes' if repo_access else '❌ No'}")

    print("\n✅ Token validation completed successfully!")
    return 0

if __name__ == "__main__":
    sys.exit(main())
'^[a-z0-9][a-z0-9-]*[a-z0-9]$' + ConstraintDescription: Must be lowercase letters, numbers, and hyphens only + MinLength: 3 + MaxLength: 100 + + OwningTeam: + Type: String + Description: GitHub team that should have admin access to the repository + Default: tf-module-admins + MinLength: 1 + + Environment: + Type: String + Description: Environment for this repository/project + Default: development + AllowedValues: + - development + - staging + - production + - sandbox + + AwsRegion: + Type: String + Description: Primary AWS region for this project + Default: us-gov-west-1 + AllowedValues: + - us-gov-west-1 + - us-gov-east-1 + - us-east-1 + - us-west-2 + + OrganizationPath: + Type: String + Description: Organization path (e.g., dept:team:subteam) + Default: "" + + FinOpsProjectName: + Type: String + Description: FinOps project name for cost allocation + Default: "" + + FinOpsProjectNumber: + Type: String + Description: FinOps project number + Default: "" + + AdditionalTags: + Type: String + Description: Additional tags as JSON object (e.g., {"key1":"value1","key2":"value2"}) + Default: "{}" + + # Hidden parameter - the Lambda ARN is passed in from the Service Catalog product definition + LambdaFunctionArn: + Type: String + Description: ARN of the Lambda function that creates repositories + Default: "arn:aws-us-gov:lambda:us-gov-west-1:229685449397:function:service-catalog-repo-gen-template-automation" + +Resources: + # Custom Resource that invokes the Lambda function + RepositoryCreator: + Type: Custom::GitHubRepository + Properties: + ServiceToken: !Ref LambdaFunctionArn + # These properties are passed to the Lambda function + project_name: !Ref ProjectName + owning_team: !Ref OwningTeam + environment: !Ref Environment + aws_region: !Ref AwsRegion + organization_path: !Ref OrganizationPath + finops_project_name: !Ref FinOpsProjectName + finops_project_number: !Ref FinOpsProjectNumber + tags: !Ref AdditionalTags + +Outputs: + RepositoryName: + Description: Name of 
the created repository + Value: !Ref ProjectName + Export: + Name: !Sub '${AWS::StackName}-RepositoryName' + + RepositoryUrl: + Description: URL of the created GitHub repository + Value: !GetAtt RepositoryCreator.repository_url + Export: + Name: !Sub '${AWS::StackName}-RepositoryUrl' + + PullRequestUrl: + Description: URL of the configuration pull request + Value: !GetAtt RepositoryCreator.pull_request_url + Export: + Name: !Sub '${AWS::StackName}-PullRequestUrl' + + ConfigBranch: + Description: Branch name where configuration was pushed + Value: !GetAtt RepositoryCreator.branch_name + Export: + Name: !Sub '${AWS::StackName}-ConfigBranch' + + Message: + Description: Status message + Value: !Sub 'Successfully created repository ${ProjectName} and opened configuration pull request. Review and merge the PR to complete setup.' diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..d4492c1d --- /dev/null +++ b/setup.py @@ -0,0 +1,17 @@ +from setuptools import setup, find_packages + +setup( + name="template-automation", + version="0.1.0", + packages=find_packages(), + install_requires=[ + "boto3", + "requests" + ], + extras_require={ + "test": [ + "pytest", + "pytest-mock" + ] + } +) diff --git a/template-automation-lambda/.devcontainer/Dockerfile b/template-automation-lambda/.devcontainer/Dockerfile new file mode 100644 index 00000000..30353a3b --- /dev/null +++ b/template-automation-lambda/.devcontainer/Dockerfile @@ -0,0 +1,65 @@ +FROM mcr.microsoft.com/devcontainers/python:3.11 + +# Set environment variables +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 +ENV DEBIAN_FRONTEND=noninteractive + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + apt-transport-https \ + ca-certificates \ + curl \ + gnupg \ + lsb-release \ + software-properties-common \ + make \ + jq \ + zip \ + unzip \ + git \ + vim \ + bash-completion \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Install AWS CLI v2 
+RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \ + && unzip awscliv2.zip \ + && ./aws/install \ + && rm -rf aws awscliv2.zip + +# Install Terraform +ARG TERRAFORM_VERSION=1.9.1 +RUN curl -fsSL https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip -o /tmp/terraform.zip \ + && unzip /tmp/terraform.zip -d /usr/local/bin \ + && rm /tmp/terraform.zip \ + && chmod +x /usr/local/bin/terraform + +# Install Packer +ARG PACKER_VERSION=1.10.1 +RUN curl -fsSL https://releases.hashicorp.com/packer/${PACKER_VERSION}/packer_${PACKER_VERSION}_linux_amd64.zip -o /tmp/packer.zip \ + && unzip /tmp/packer.zip -d /usr/local/bin \ + && rm /tmp/packer.zip \ + && chmod +x /usr/local/bin/packer + +# We'll install Python dependencies at runtime instead of build time +# to avoid issues with file paths and to ensure the latest dependencies are installed + +# Setup aliases for Terraform +RUN echo 'alias tf="terraform"' >> /etc/bash.bashrc + +# Create a vscode user with the same UID/GID as the host user +ARG USERNAME=vscode +ARG USER_UID=1000 +ARG USER_GID=$USER_UID +RUN groupadd --gid $USER_GID $USERNAME \ + && useradd --uid $USER_UID --gid $USER_GID -m $USERNAME \ + && echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME \ + && chmod 0440 /etc/sudoers.d/$USERNAME + +# Set the default user +USER $USERNAME + +# Setup workspace directory +WORKDIR /workspace \ No newline at end of file diff --git a/template-automation-lambda/.devcontainer/devcontainer.json b/template-automation-lambda/.devcontainer/devcontainer.json new file mode 100644 index 00000000..f23dfffa --- /dev/null +++ b/template-automation-lambda/.devcontainer/devcontainer.json @@ -0,0 +1,45 @@ +{ + "name": "Multi-Project DevContainer", + "dockerFile": "Dockerfile", + "workspaceMount": "source=${localWorkspaceFolder}/..,target=/workspace,type=bind,consistency=cached", + "workspaceFolder": "/workspace", + "mounts": [ + 
"source=${env:PIP_CONFIG_FILE},target=/home/vscode/.pip/pip.conf,type=bind,consistency=cached", + "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" + ], + "features": { + "ghcr.io/devcontainers/features/docker-in-docker:2": {}, + "ghcr.io/devcontainers/features/git:1": {} + }, + "remoteUser": "vscode", + "runArgs": [ + "--init", + "--privileged" + ], + "postCreateCommand": "sudo mkdir -p /home/vscode/.pip && sudo chown -R vscode:vscode /home/vscode/.pip && pip install --upgrade pip && pip install -r /workspace/template-automation-lambda/requirements.txt -r /workspace/template-automation-lambda/docs/requirements.txt -r /workspace/template-automation-lambda/template_automation/requirements.txt && pip install -r /workspace/template-eks-cluster/ansible/requirements.txt && pip install pytest pytest-mock coverage black pylint pre-commit ansible", + "settings": { + "terminal.integrated.defaultProfile.linux": "bash", + "python.defaultInterpreterPath": "/usr/local/bin/python", + "python.linting.enabled": true, + "python.linting.pylintEnabled": true, + "python.formatting.provider": "black" + }, + "customizations": { + "vscode": { + "settings": {}, + "extensions": [ + "ms-python.python", + "ms-azuretools.vscode-docker", + "hashicorp.terraform", + "ms-python.black-formatter", + "ms-python.pylint", + "redhat.ansible" + ] + } + }, + "hostRequirements": { + "cpus": 2, + "memory": "4gb", + "storage": "32gb" + } +} diff --git a/template-automation-lambda/.github/runner-config.yml b/template-automation-lambda/.github/runner-config.yml new file mode 100644 index 00000000..365cb021 --- /dev/null +++ b/template-automation-lambda/.github/runner-config.yml @@ -0,0 +1,7 @@ +# Runner configuration for different AWS accounts +# Format: environment_name: aws_account_id + +dev: dev-account-runner +staging: staging-account-runner +prod: prod-account-runner +lab: lab-account-runner diff --git a/template-automation-lambda/.github/workflows/build.yml 
b/template-automation-lambda/.github/workflows/build.yml new file mode 100644 index 00000000..2404497d --- /dev/null +++ b/template-automation-lambda/.github/workflows/build.yml @@ -0,0 +1,107 @@ +name: Build and Push Lambda Container + +on: + workflow_dispatch: + push: + branches: [ "main" ] + +permissions: + contents: write + id-token: write + +jobs: + build: + runs-on: ubuntu-latest + if: ${{ github.server_url == 'https://github.com' }} + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} + AWS_ACCESS_KEY_ID: ${{ vars.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_DEFAULT_REGION: us-east-1 + GOOGLE_CREDENTIALS: ${{ secrets.GOOGLE_CREDENTIALS }} + + steps: + - uses: actions/setup-node@v3 + with: + node-version: 16 + + - uses: actions/checkout@v4 + + - name: Setup HashiCorp Packer + uses: hashicorp/setup-packer@v3.1.0 + + - name: Download GTag + run: | + curl -sSL https://raw.githubusercontent.com/HappyPathway/centralized-actions/main/gtag.py -o gtag.py + curl -sSL https://raw.githubusercontent.com/HappyPathway/centralized-actions/main/gtag_requirements.txt -o requirements.txt + + - name: Setup minimal Python for gtag + uses: actions/setup-python@v4 + with: + python-version: '3.11' + cache: 'pip' + cache-dependency-path: requirements.txt + + - name: Install gtag dependencies + run: | + python -m pip install -r requirements.txt + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v3.1.2 + with: + terraform_version: 1.9.1 + terraform_wrapper: false + + - name: terraform init + run: terraform init + + - name: terraform apply + run: terraform apply -auto-approve + + - name: terraform output + id: terraform_output + run: echo "repository_uri=$(terraform output -raw repository_uri)" >> $GITHUB_ENV + + - name: git fetch --unshallow --tags + run: git fetch --unshallow --tags + + - name: get tags + run: git tag --list + + - name: GTag + run: | + echo "next_tag=$(python gtag.py --${{ vars.increment_version }})" >> $GITHUB_ENV + + 
- name: Docker Login to ECR + uses: aws-actions/amazon-ecr-login@v2 + with: + mask-password: true + registry-type: public + env: + AWS_REGION: ${{ env.AWS_DEFAULT_REGION }} + + - name: packer init + run: packer init packer.pkr.hcl + + - name: packer validate + run: | + packer validate \ + -var "repository_uri=${{ env.repository_uri }}" \ + -var "tag=${{ env.next_tag }}" \ + packer.pkr.hcl + + - name: set tag + run: | + remote_repo="https://${{ vars.gh_username }}:${{ secrets.GH_TOKEN }}@${{ vars.gh_server }}/${{ github.repository }}.git" + git remote add repo ${remote_repo} + git config --global user.email "${{ vars.gh_email }}" + git config --global user.name "${{ vars.gh_username }}" + git tag -a ${{ env.next_tag }} -m "Release ${{ env.next_tag }}" + git push --tags -u repo + + - name: packer build + run: | + packer build \ + -var "repository_uri=${{ env.repository_uri }}" \ + -var "tag=${{ env.next_tag }}" \ + packer.pkr.hcl diff --git a/template-automation-lambda/.github/workflows/gh-token.yml b/template-automation-lambda/.github/workflows/gh-token.yml new file mode 100644 index 00000000..23564003 --- /dev/null +++ b/template-automation-lambda/.github/workflows/gh-token.yml @@ -0,0 +1,24 @@ +name: GitHub Token Refresh + +on: + schedule: + - cron: '*/5 * * * *' # Runs every 5 minutes + workflow_dispatch: # Allows manual triggering + +permissions: + contents: write + id-token: write + +jobs: + refresh-token: + name: Refresh GitHub Token + if: github.server_url != 'https://github.com' + uses: CSVD/centralized-actions/.github/workflows/upload-github-token.yml@main + with: + aws_region: 'us-gov-west-1' + secret_name: '/eks-cluster-deployment/github_token' # This matches the SECRET_NAME in app.py + github_app_id: ${{ vars.GH_APP_ID }} + github_app_installation_id: ${{ vars.GH_APP_INSTALLATION_ID }} + use_ecs_credentials: true + secrets: + github_app_pem_file: ${{ secrets.GH_APP_PEM_FILE }} diff --git 
a/template-automation-lambda/.github/workflows/github-client-integration-test.yml b/template-automation-lambda/.github/workflows/github-client-integration-test.yml new file mode 100644 index 00000000..da520fa1 --- /dev/null +++ b/template-automation-lambda/.github/workflows/github-client-integration-test.yml @@ -0,0 +1,40 @@ +name: GitHub Client Integration Tests + +on: + pull_request: + branches: [ main ] + workflow_dispatch: + +jobs: + integration-tests: + runs-on: ubuntu-latest + if: ${{ github.server_url == 'https://github.com' }} + permissions: + contents: read + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + cache: 'pip' + cache-dependency-path: template_automation/requirements.txt + + - name: Install dependencies + run: | + cd template_automation + python -m pip install --upgrade pip + pip install -r requirements.txt + + - name: Run integration tests + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} + GITHUB_TOKEN_SECRET_NAME: /dev/secret/ssh/dont/tell + GITHUB_API: "https://api.github.com" # Can be overridden with vars if needed + GITHUB_ORG_NAME: ${{ github.repository_owner }} + run: | + cd template_automation + python -m pytest tests/ -v -m integration diff --git a/template-automation-lambda/.github/workflows/integration-tests.yml b/template-automation-lambda/.github/workflows/integration-tests.yml new file mode 100644 index 00000000..03b10fc7 --- /dev/null +++ b/template-automation-lambda/.github/workflows/integration-tests.yml @@ -0,0 +1,40 @@ +name: Integration Tests + +on: + pull_request: + branches: [ main ] + workflow_dispatch: + +jobs: + integration-tests: + runs-on: ubuntu-latest + if: ${{ github.server_url == 'https://github.com' }} + permissions: + contents: read + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.9' + cache: 'pip' + 
cache-dependency-path: template_automation/requirements.txt + + - name: Install dependencies + run: | + cd template_automation + python -m pip install --upgrade pip + pip install -r requirements.txt + + - name: Run integration tests + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} + GITHUB_TOKEN_SECRET_NAME: /dev/secret/ssh/dont/tell + GITHUB_API: "https://api.github.com" # Can be overridden with vars if needed + GITHUB_ORG_NAME: ${{ github.repository_owner }} + run: | + cd template_automation + python -m pytest tests/ -v -m integration \ No newline at end of file diff --git a/template-automation-lambda/.gitignore b/template-automation-lambda/.gitignore new file mode 100644 index 00000000..0b5ddea8 --- /dev/null +++ b/template-automation-lambda/.gitignore @@ -0,0 +1,202 @@ +# Python +__pycache__/ +*.py[cod] +*$py.class + +# Distribution / packaging +dist/ +build/ +*.egg-info/ + +# Virtual Environment +venv/ +env/ +.env/ + +# IDE +.idea/ +.vscode/ + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +coverage.xml +*.cover + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. 
+# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc +terraform_data_dirs \ No newline at end of file diff --git a/template-automation-lambda/.terraform.lock.hcl b/template-automation-lambda/.terraform.lock.hcl new file mode 100644 index 00000000..9111efd6 --- /dev/null +++ b/template-automation-lambda/.terraform.lock.hcl @@ -0,0 +1,24 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/hashicorp/aws" { + version = "5.96.0" + hashes = [ + "h1:pZojaCQ2mzdq9Jh8cuQg6al7rhxdf8NQTOK7mUrywwg=", + "zh:3f7e734abb9d647c851f5cb987837d7c073c9cbf1f520a031027d827f93d3b68", + "zh:5ca9400360a803a11cf432ca203be9f09da8fff9c96110a83c9029102b18c9d5", + "zh:5d421f475d467af182a527b7a61d50105dc63394316edf1c775ef736f84b941c", + "zh:68f2328e7f3e7666835d6815b39b46b08954a91204f82a6f648c928a0b09a744", + "zh:6a4170e7e2764df2968d1df65efebda55273dfc36dc6741207afb5e4b7e85448", + "zh:73f2a15bee21f7c92a071e2520216d0a40041aca52c0f6682e540da8ffcfada4", + "zh:9843d6973aedfd4cbaafd7110420d0c4c1d7ef4a2eeff508294c3adcc3613145", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:9d1abd6be717c42f2a6257ee227d3e9548c31f01c976ed7b32b2745a63659a67", + "zh:a70d642e323021d54a92f0daa81d096cb5067cb99ce116047a42eb1cb1d579a0", + "zh:b9a2b293208d5a0449275fae463319e0998c841e0bcd4014594a49ba54bb70d6", + "zh:ce0b0eb7ac24ff58c20efcb526c3f792a95be3617c795b45bbeea9f302903ae7", + "zh:dbbf98b3cd8003833c472bdb89321c17a9bbdc1b785e7e3d75f8af924ee5a0e4", + "zh:df86cf9311a4be8bb4a251196650653f97e01fbf5fe72deecc8f28a35a5352ae", + "zh:f92992881afd9339f3e539fcd90cfc1e9ed1356b5e760bbcc804314c3cd6837f", + ] +} diff --git a/template-automation-lambda/.tflog b/template-automation-lambda/.tflog new file mode 100644 index 00000000..3c658a5d --- /dev/null +++ b/template-automation-lambda/.tflog @@ -0,0 +1,246 @@ +2025-04-17T01:21:26.640-0700 [INFO] Terraform version: 1.10.5 +2025-04-17T01:21:26.640-0700 [DEBUG] using github.com/hashicorp/go-tfe v1.70.0 +2025-04-17T01:21:26.640-0700 [DEBUG] using github.com/hashicorp/hcl/v2 v2.23.0 +2025-04-17T01:21:26.640-0700 [DEBUG] using github.com/hashicorp/terraform-svchost v0.1.1 +2025-04-17T01:21:26.640-0700 [DEBUG] using github.com/zclconf/go-cty v1.16.2 +2025-04-17T01:21:26.640-0700 [INFO] Go runtime version: go1.23.3 +2025-04-17T01:21:26.640-0700 [INFO] CLI args: 
[]string{"/usr/local/Cellar/tfenv/1.0.2/versions/1.10.5/terraform", "version", "-json"} +2025-04-17T01:21:26.640-0700 [TRACE] Stdout is not a terminal +2025-04-17T01:21:26.640-0700 [TRACE] Stderr is not a terminal +2025-04-17T01:21:26.640-0700 [TRACE] Stdin is not a terminal +2025-04-17T01:21:26.640-0700 [DEBUG] Attempting to open CLI config file: /Users/darnold/.terraformrc +2025-04-17T01:21:26.640-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraformrc +2025-04-17T01:21:26.640-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraform.d/credentials.tfrc.json +2025-04-17T01:21:26.641-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins" +2025-04-17T01:21:26.641-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins/darwin_amd64" +2025-04-17T01:21:26.641-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-04-17T01:21:26.641-0700 [DEBUG] ignoring non-existing provider search directory terraform.d/plugins +2025-04-17T01:21:26.641-0700 [DEBUG] will search for provider plugins in /Users/darnold/.terraform.d/plugins +2025-04-17T01:21:26.641-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.641-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.641-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64/terraform-provider-openai" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.641-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.641-0700 [WARN] local provider path 
"/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.641-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64/terraform-provider-gigrack" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.641-0700 [WARN] Provider plugin search ignored symlink /Users/darnold/.terraform.d/plugins/darwin_amd64/terraform-provider-configstash: only the base directory /Users/darnold/.terraform.d/plugins may be a symlink +2025-04-17T01:21:26.642-0700 [TRACE] getproviders.SearchLocalDirectory: found hashicorp.com/edu/hashicups v0.3.1 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/hashicorp.com/edu/hashicups/0.3.1/darwin_amd64 +2025-04-17T01:21:26.643-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/darnold/gigrack v0.1.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/gigrack/0.1.0/darwin_amd64 +2025-04-17T01:21:26.643-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/3b6fef8/darwin_amd64" with invalid version "3b6fef8": invalid characters "b6fef8" +2025-04-17T01:21:26.643-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/70f25a4/darwin_amd64" with invalid version "70f25a4": invalid characters "f25a4" +2025-04-17T01:21:26.643-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/happypathway/openai v5.0.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/happypathway/openai/5.0.0/darwin_amd64 +2025-04-17T01:21:26.643-0700 [DEBUG] ignoring non-existing provider search directory /Users/darnold/Library/Application Support/io.terraform/plugins +2025-04-17T01:21:26.643-0700 [DEBUG] ignoring non-existing provider search directory /Library/Application Support/io.terraform/plugins 
+2025-04-17T01:21:26.646-0700 [INFO] Checkpoint disabled. Not running. +2025-04-17T01:21:26.647-0700 [INFO] CLI command args: []string{"version", "-json"} +2025-04-17T01:21:26.786-0700 [INFO] Terraform version: 1.10.5 +2025-04-17T01:21:26.786-0700 [DEBUG] using github.com/hashicorp/go-tfe v1.70.0 +2025-04-17T01:21:26.786-0700 [DEBUG] using github.com/hashicorp/hcl/v2 v2.23.0 +2025-04-17T01:21:26.786-0700 [DEBUG] using github.com/hashicorp/terraform-svchost v0.1.1 +2025-04-17T01:21:26.786-0700 [DEBUG] using github.com/zclconf/go-cty v1.16.2 +2025-04-17T01:21:26.786-0700 [INFO] Go runtime version: go1.23.3 +2025-04-17T01:21:26.786-0700 [INFO] CLI args: []string{"/usr/local/Cellar/tfenv/1.0.2/versions/1.10.5/terraform", "providers", "schema", "-json", "-no-color"} +2025-04-17T01:21:26.786-0700 [TRACE] Stdout is not a terminal +2025-04-17T01:21:26.786-0700 [TRACE] Stderr is not a terminal +2025-04-17T01:21:26.786-0700 [TRACE] Stdin is not a terminal +2025-04-17T01:21:26.786-0700 [DEBUG] Attempting to open CLI config file: /Users/darnold/.terraformrc +2025-04-17T01:21:26.786-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraformrc +2025-04-17T01:21:26.786-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraform.d/credentials.tfrc.json +2025-04-17T01:21:26.786-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins" +2025-04-17T01:21:26.787-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins/darwin_amd64" +2025-04-17T01:21:26.787-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-04-17T01:21:26.787-0700 [DEBUG] ignoring non-existing provider search directory terraform.d/plugins +2025-04-17T01:21:26.787-0700 [DEBUG] will search for provider plugins in /Users/darnold/.terraform.d/plugins +2025-04-17T01:21:26.787-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0" contains 
invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.787-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.787-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64/terraform-provider-openai" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.787-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.787-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.787-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64/terraform-provider-gigrack" contains invalid type "0.1.0"; ignoring +2025-04-17T01:21:26.787-0700 [WARN] Provider plugin search ignored symlink /Users/darnold/.terraform.d/plugins/darwin_amd64/terraform-provider-configstash: only the base directory /Users/darnold/.terraform.d/plugins may be a symlink +2025-04-17T01:21:26.787-0700 [TRACE] getproviders.SearchLocalDirectory: found hashicorp.com/edu/hashicups v0.3.1 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/hashicorp.com/edu/hashicups/0.3.1/darwin_amd64 +2025-04-17T01:21:26.788-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/darnold/gigrack v0.1.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/gigrack/0.1.0/darwin_amd64 +2025-04-17T01:21:26.788-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/3b6fef8/darwin_amd64" with invalid version "3b6fef8": invalid characters "b6fef8" +2025-04-17T01:21:26.788-0700 [WARN] ignoring local provider path 
"/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/70f25a4/darwin_amd64" with invalid version "70f25a4": invalid characters "f25a4" +2025-04-17T01:21:26.788-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/happypathway/openai v5.0.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/happypathway/openai/5.0.0/darwin_amd64 +2025-04-17T01:21:26.788-0700 [DEBUG] ignoring non-existing provider search directory /Users/darnold/Library/Application Support/io.terraform/plugins +2025-04-17T01:21:26.788-0700 [DEBUG] ignoring non-existing provider search directory /Library/Application Support/io.terraform/plugins +2025-04-17T01:21:26.791-0700 [INFO] Checkpoint disabled. Not running. +2025-04-17T01:21:26.793-0700 [INFO] CLI command args: []string{"providers", "schema", "-json", "-no-color"} +2025-04-17T01:21:26.793-0700 [DEBUG] Using modified User-Agent: Terraform/1.10.5 HashiCorp-terraform-exec/0.21.0 +2025-04-17T01:21:26.793-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-04-17T01:21:26.794-0700 [TRACE] Meta.Backend: no config given or present on disk, so returning nil config +2025-04-17T01:21:26.794-0700 [TRACE] Meta.Backend: backend has not previously been initialized in this working directory +2025-04-17T01:21:26.794-0700 [TRACE] Meta.Backend: using default local state only (no backend configuration, and no existing initialized backend) +2025-04-17T01:21:26.794-0700 [TRACE] Meta.Backend: instantiated backend of type +2025-04-17T01:21:26.794-0700 [DEBUG] checking for provisioner in "." 
+2025-04-17T01:21:26.794-0700 [DEBUG] checking for provisioner in "/usr/local/Cellar/tfenv/1.0.2/versions/1.10.5" +2025-04-17T01:21:26.794-0700 [DEBUG] checking for provisioner in "/Users/darnold/.terraform.d/plugins" +2025-04-17T01:21:26.794-0700 [DEBUG] checking for provisioner in "/Users/darnold/.terraform.d/plugins/darwin_amd64" +2025-04-17T01:21:26.794-0700 [TRACE] Meta.Backend: backend does not support operations, so wrapping it in a local backend +2025-04-17T01:21:26.794-0700 [TRACE] backend/local: requesting state manager for workspace "default" +2025-04-17T01:21:26.794-0700 [TRACE] backend/local: state manager for workspace "default" will: + - read initial snapshot from terraform.tfstate + - write new snapshots to terraform.tfstate + - create any backup at terraform.tfstate.backup +2025-04-17T01:21:26.794-0700 [TRACE] backend/local: requesting state lock for workspace "default" +2025-04-17T01:21:26.795-0700 [TRACE] backend/local: reading remote state for workspace "default" +2025-04-17T01:21:26.795-0700 [TRACE] statemgr.Filesystem: reading initial snapshot from terraform.tfstate +2025-04-17T01:21:26.795-0700 [TRACE] statemgr.Filesystem: snapshot file has nil snapshot, but that's okay +2025-04-17T01:21:26.795-0700 [TRACE] statemgr.Filesystem: read nil snapshot +2025-04-17T01:21:26.795-0700 [TRACE] backend/local: populating backendrun.LocalRun for current working directory +2025-04-17T01:21:26.807-0700 [TRACE] Config.VerifyDependencySelections: provider registry.terraform.io/hashicorp/aws has no lock file entry to satisfy "" +2025-04-29T09:32:02.115-0700 [INFO] Terraform version: 1.10.5 +2025-04-29T09:32:02.116-0700 [DEBUG] using github.com/hashicorp/go-tfe v1.70.0 +2025-04-29T09:32:02.116-0700 [DEBUG] using github.com/hashicorp/hcl/v2 v2.23.0 +2025-04-29T09:32:02.116-0700 [DEBUG] using github.com/hashicorp/terraform-svchost v0.1.1 +2025-04-29T09:32:02.116-0700 [DEBUG] using github.com/zclconf/go-cty v1.16.2 +2025-04-29T09:32:02.116-0700 [INFO] Go runtime 
version: go1.23.3 +2025-04-29T09:32:02.116-0700 [INFO] CLI args: []string{"/usr/local/Cellar/tfenv/1.0.2/versions/1.10.5/terraform", "providers", "schema", "-json", "-no-color"} +2025-04-29T09:32:02.116-0700 [TRACE] Stdout is not a terminal +2025-04-29T09:32:02.116-0700 [TRACE] Stderr is not a terminal +2025-04-29T09:32:02.116-0700 [TRACE] Stdin is not a terminal +2025-04-29T09:32:02.116-0700 [DEBUG] Attempting to open CLI config file: /Users/darnold/.terraformrc +2025-04-29T09:32:02.116-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraformrc +2025-04-29T09:32:02.116-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraform.d/credentials.tfrc.json +2025-04-29T09:32:02.116-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins" +2025-04-29T09:32:02.116-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins/darwin_amd64" +2025-04-29T09:32:02.116-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-04-29T09:32:02.116-0700 [DEBUG] ignoring non-existing provider search directory terraform.d/plugins +2025-04-29T09:32:02.116-0700 [DEBUG] will search for provider plugins in /Users/darnold/.terraform.d/plugins +2025-04-29T09:32:02.117-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0" contains invalid type "0.1.0"; ignoring +2025-04-29T09:32:02.117-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-04-29T09:32:02.117-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64/terraform-provider-openai" contains invalid type "0.1.0"; ignoring +2025-04-29T09:32:02.117-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0" contains invalid type "0.1.0"; ignoring +2025-04-29T09:32:02.117-0700 [WARN] 
local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-04-29T09:32:02.117-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64/terraform-provider-gigrack" contains invalid type "0.1.0"; ignoring +2025-04-29T09:32:02.117-0700 [WARN] Provider plugin search ignored symlink /Users/darnold/.terraform.d/plugins/darwin_amd64/terraform-provider-configstash: only the base directory /Users/darnold/.terraform.d/plugins may be a symlink +2025-04-29T09:32:02.117-0700 [TRACE] getproviders.SearchLocalDirectory: found hashicorp.com/edu/hashicups v0.3.1 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/hashicorp.com/edu/hashicups/0.3.1/darwin_amd64 +2025-04-29T09:32:02.117-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/darnold/gigrack v0.1.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/gigrack/0.1.0/darwin_amd64 +2025-04-29T09:32:02.118-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/3b6fef8/darwin_amd64" with invalid version "3b6fef8": invalid characters "b6fef8" +2025-04-29T09:32:02.118-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/70f25a4/darwin_amd64" with invalid version "70f25a4": invalid characters "f25a4" +2025-04-29T09:32:02.118-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/happypathway/openai v5.0.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/happypathway/openai/5.0.0/darwin_amd64 +2025-04-29T09:32:02.118-0700 [DEBUG] ignoring non-existing provider search directory /Users/darnold/Library/Application Support/io.terraform/plugins +2025-04-29T09:32:02.118-0700 [DEBUG] ignoring non-existing provider search directory /Library/Application Support/io.terraform/plugins 
+2025-04-29T09:32:02.120-0700 [INFO] Checkpoint disabled. Not running. +2025-04-29T09:32:02.122-0700 [INFO] CLI command args: []string{"providers", "schema", "-json", "-no-color"} +2025-04-29T09:32:02.122-0700 [DEBUG] Using modified User-Agent: Terraform/1.10.5 HashiCorp-terraform-exec/0.21.0 +2025-04-29T09:32:02.122-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-04-29T09:32:02.123-0700 [TRACE] Meta.Backend: BackendOpts.Config not set, so using settings loaded from backend.tf:2,3-16 +2025-04-29T09:32:02.123-0700 [TRACE] Meta.Backend: built configuration for "gcs" backend with hash value 3602047810 +2025-04-29T09:32:02.123-0700 [TRACE] Meta.Backend: backend has not previously been initialized in this working directory +2025-04-29T09:32:02.123-0700 [TRACE] Meta.Backend: moving from default local state only to "gcs" backend +2025-04-29T09:36:48.710-0700 [INFO] Terraform version: 1.10.5 +2025-04-29T09:36:48.711-0700 [DEBUG] using github.com/hashicorp/go-tfe v1.70.0 +2025-04-29T09:36:48.711-0700 [DEBUG] using github.com/hashicorp/hcl/v2 v2.23.0 +2025-04-29T09:36:48.711-0700 [DEBUG] using github.com/hashicorp/terraform-svchost v0.1.1 +2025-04-29T09:36:48.711-0700 [DEBUG] using github.com/zclconf/go-cty v1.16.2 +2025-04-29T09:36:48.711-0700 [INFO] Go runtime version: go1.23.3 +2025-04-29T09:36:48.711-0700 [INFO] CLI args: []string{"/usr/local/Cellar/tfenv/1.0.2/versions/1.10.5/terraform", "version", "-json"} +2025-04-29T09:36:48.711-0700 [TRACE] Stdout is not a terminal +2025-04-29T09:36:48.711-0700 [TRACE] Stderr is not a terminal +2025-04-29T09:36:48.711-0700 [TRACE] Stdin is not a terminal +2025-04-29T09:36:48.711-0700 [DEBUG] Attempting to open CLI config file: /Users/darnold/.terraformrc +2025-04-29T09:36:48.711-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraformrc +2025-04-29T09:36:48.711-0700 [INFO] Loading CLI configuration from 
/Users/darnold/.terraform.d/credentials.tfrc.json +2025-04-29T09:36:48.711-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins" +2025-04-29T09:36:48.711-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins/darwin_amd64" +2025-04-29T09:36:48.711-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-04-29T09:36:48.711-0700 [DEBUG] ignoring non-existing provider search directory terraform.d/plugins +2025-04-29T09:36:48.711-0700 [DEBUG] will search for provider plugins in /Users/darnold/.terraform.d/plugins +2025-04-29T09:36:48.711-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0" contains invalid type "0.1.0"; ignoring +2025-04-29T09:36:48.711-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-04-29T09:36:48.711-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64/terraform-provider-openai" contains invalid type "0.1.0"; ignoring +2025-04-29T09:36:48.711-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0" contains invalid type "0.1.0"; ignoring +2025-04-29T09:36:48.711-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-04-29T09:36:48.711-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64/terraform-provider-gigrack" contains invalid type "0.1.0"; ignoring +2025-04-29T09:36:48.711-0700 [WARN] Provider plugin search ignored symlink /Users/darnold/.terraform.d/plugins/darwin_amd64/terraform-provider-configstash: only the base directory /Users/darnold/.terraform.d/plugins may be a symlink +2025-04-29T09:36:48.712-0700 [TRACE] 
getproviders.SearchLocalDirectory: found hashicorp.com/edu/hashicups v0.3.1 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/hashicorp.com/edu/hashicups/0.3.1/darwin_amd64 +2025-04-29T09:36:48.712-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/darnold/gigrack v0.1.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/gigrack/0.1.0/darwin_amd64 +2025-04-29T09:36:48.712-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/3b6fef8/darwin_amd64" with invalid version "3b6fef8": invalid characters "b6fef8" +2025-04-29T09:36:48.712-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/70f25a4/darwin_amd64" with invalid version "70f25a4": invalid characters "f25a4" +2025-04-29T09:36:48.712-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/happypathway/openai v5.0.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/happypathway/openai/5.0.0/darwin_amd64 +2025-04-29T09:36:48.712-0700 [DEBUG] ignoring non-existing provider search directory /Users/darnold/Library/Application Support/io.terraform/plugins +2025-04-29T09:36:48.712-0700 [DEBUG] ignoring non-existing provider search directory /Library/Application Support/io.terraform/plugins +2025-04-29T09:36:48.715-0700 [INFO] Checkpoint disabled. Not running. 
+2025-04-29T09:36:48.716-0700 [INFO] CLI command args: []string{"version", "-json"} +2025-05-01T10:21:27.473-0700 [INFO] Terraform version: 1.10.5 +2025-05-01T10:21:27.474-0700 [DEBUG] using github.com/hashicorp/go-tfe v1.70.0 +2025-05-01T10:21:27.474-0700 [DEBUG] using github.com/hashicorp/hcl/v2 v2.23.0 +2025-05-01T10:21:27.474-0700 [DEBUG] using github.com/hashicorp/terraform-svchost v0.1.1 +2025-05-01T10:21:27.474-0700 [DEBUG] using github.com/zclconf/go-cty v1.16.2 +2025-05-01T10:21:27.474-0700 [INFO] Go runtime version: go1.23.3 +2025-05-01T10:21:27.474-0700 [INFO] CLI args: []string{"/usr/local/Cellar/tfenv/1.0.2/versions/1.10.5/terraform", "version", "-json"} +2025-05-01T10:21:27.474-0700 [TRACE] Stdout is not a terminal +2025-05-01T10:21:27.474-0700 [TRACE] Stderr is not a terminal +2025-05-01T10:21:27.474-0700 [TRACE] Stdin is not a terminal +2025-05-01T10:21:27.474-0700 [DEBUG] Attempting to open CLI config file: /Users/darnold/.terraformrc +2025-05-01T10:21:27.474-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraformrc +2025-05-01T10:21:27.475-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraform.d/credentials.tfrc.json +2025-05-01T10:21:27.477-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins" +2025-05-01T10:21:27.477-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins/darwin_amd64" +2025-05-01T10:21:27.477-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-05-01T10:21:27.477-0700 [DEBUG] ignoring non-existing provider search directory terraform.d/plugins +2025-05-01T10:21:27.477-0700 [DEBUG] will search for provider plugins in /Users/darnold/.terraform.d/plugins +2025-05-01T10:21:27.477-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.478-0700 [WARN] local provider path 
"/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.478-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64/terraform-provider-openai" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.478-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.478-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.478-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64/terraform-provider-gigrack" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.478-0700 [WARN] Provider plugin search ignored symlink /Users/darnold/.terraform.d/plugins/darwin_amd64/terraform-provider-configstash: only the base directory /Users/darnold/.terraform.d/plugins may be a symlink +2025-05-01T10:21:27.479-0700 [TRACE] getproviders.SearchLocalDirectory: found hashicorp.com/edu/hashicups v0.3.1 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/hashicorp.com/edu/hashicups/0.3.1/darwin_amd64 +2025-05-01T10:21:27.479-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/darnold/gigrack v0.1.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/gigrack/0.1.0/darwin_amd64 +2025-05-01T10:21:27.479-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/3b6fef8/darwin_amd64" with invalid version "3b6fef8": invalid characters "b6fef8" +2025-05-01T10:21:27.480-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/70f25a4/darwin_amd64" with invalid version "70f25a4": invalid characters "f25a4" 
+2025-05-01T10:21:27.481-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/happypathway/openai v5.0.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/happypathway/openai/5.0.0/darwin_amd64 +2025-05-01T10:21:27.481-0700 [DEBUG] ignoring non-existing provider search directory /Users/darnold/Library/Application Support/io.terraform/plugins +2025-05-01T10:21:27.481-0700 [DEBUG] ignoring non-existing provider search directory /Library/Application Support/io.terraform/plugins +2025-05-01T10:21:27.483-0700 [INFO] Checkpoint disabled. Not running. +2025-05-01T10:21:27.484-0700 [INFO] CLI command args: []string{"version", "-json"} +2025-05-01T10:21:27.604-0700 [INFO] Terraform version: 1.10.5 +2025-05-01T10:21:27.604-0700 [DEBUG] using github.com/hashicorp/go-tfe v1.70.0 +2025-05-01T10:21:27.604-0700 [DEBUG] using github.com/hashicorp/hcl/v2 v2.23.0 +2025-05-01T10:21:27.604-0700 [DEBUG] using github.com/hashicorp/terraform-svchost v0.1.1 +2025-05-01T10:21:27.604-0700 [DEBUG] using github.com/zclconf/go-cty v1.16.2 +2025-05-01T10:21:27.604-0700 [INFO] Go runtime version: go1.23.3 +2025-05-01T10:21:27.604-0700 [INFO] CLI args: []string{"/usr/local/Cellar/tfenv/1.0.2/versions/1.10.5/terraform", "providers", "schema", "-json", "-no-color"} +2025-05-01T10:21:27.604-0700 [TRACE] Stdout is not a terminal +2025-05-01T10:21:27.604-0700 [TRACE] Stderr is not a terminal +2025-05-01T10:21:27.604-0700 [TRACE] Stdin is not a terminal +2025-05-01T10:21:27.605-0700 [DEBUG] Attempting to open CLI config file: /Users/darnold/.terraformrc +2025-05-01T10:21:27.605-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraformrc +2025-05-01T10:21:27.605-0700 [INFO] Loading CLI configuration from /Users/darnold/.terraform.d/credentials.tfrc.json +2025-05-01T10:21:27.605-0700 [DEBUG] checking for credentials in "/Users/darnold/.terraform.d/plugins" +2025-05-01T10:21:27.605-0700 [DEBUG] checking for credentials in 
"/Users/darnold/.terraform.d/plugins/darwin_amd64" +2025-05-01T10:21:27.605-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-05-01T10:21:27.605-0700 [DEBUG] ignoring non-existing provider search directory terraform.d/plugins +2025-05-01T10:21:27.605-0700 [DEBUG] will search for provider plugins in /Users/darnold/.terraform.d/plugins +2025-05-01T10:21:27.605-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.605-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.605-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/HappyPathway/openai/0.1.0/darwin_amd64/terraform-provider-openai" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.605-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.605-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.605-0700 [WARN] local provider path "/Users/darnold/.terraform.d/plugins/darnold/gigrack/0.1.0/darwin_amd64/terraform-provider-gigrack" contains invalid type "0.1.0"; ignoring +2025-05-01T10:21:27.605-0700 [WARN] Provider plugin search ignored symlink /Users/darnold/.terraform.d/plugins/darwin_amd64/terraform-provider-configstash: only the base directory /Users/darnold/.terraform.d/plugins may be a symlink +2025-05-01T10:21:27.606-0700 [TRACE] getproviders.SearchLocalDirectory: found hashicorp.com/edu/hashicups v0.3.1 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/hashicorp.com/edu/hashicups/0.3.1/darwin_amd64 +2025-05-01T10:21:27.606-0700 [TRACE] 
getproviders.SearchLocalDirectory: found registry.terraform.io/darnold/gigrack v0.1.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/gigrack/0.1.0/darwin_amd64 +2025-05-01T10:21:27.606-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/3b6fef8/darwin_amd64" with invalid version "3b6fef8": invalid characters "b6fef8" +2025-05-01T10:21:27.606-0700 [WARN] ignoring local provider path "/Users/darnold/.terraform.d/plugins/registry.terraform.io/darnold/openai/70f25a4/darwin_amd64" with invalid version "70f25a4": invalid characters "f25a4" +2025-05-01T10:21:27.606-0700 [TRACE] getproviders.SearchLocalDirectory: found registry.terraform.io/happypathway/openai v5.0.0 for darwin_amd64 at /Users/darnold/.terraform.d/plugins/registry.terraform.io/happypathway/openai/5.0.0/darwin_amd64 +2025-05-01T10:21:27.606-0700 [DEBUG] ignoring non-existing provider search directory /Users/darnold/Library/Application Support/io.terraform/plugins +2025-05-01T10:21:27.606-0700 [DEBUG] ignoring non-existing provider search directory /Library/Application Support/io.terraform/plugins +2025-05-01T10:21:27.609-0700 [INFO] Checkpoint disabled. Not running. 
+2025-05-01T10:21:27.610-0700 [INFO] CLI command args: []string{"providers", "schema", "-json", "-no-color"} +2025-05-01T10:21:27.611-0700 [DEBUG] Using modified User-Agent: Terraform/1.10.5 HashiCorp-terraform-exec/0.21.0 +2025-05-01T10:21:27.611-0700 [DEBUG] Using modified User-Agent: HashiCorp Terraform/1.10.5 (+https://www.terraform.io) HashiCorp-terraform-exec/0.21.0 +2025-05-01T10:21:27.613-0700 [TRACE] Meta.Backend: no config given or present on disk, so returning nil config +2025-05-01T10:21:27.613-0700 [TRACE] Meta.Backend: backend has not previously been initialized in this working directory +2025-05-01T10:21:27.613-0700 [TRACE] Meta.Backend: using default local state only (no backend configuration, and no existing initialized backend) +2025-05-01T10:21:27.613-0700 [TRACE] Meta.Backend: instantiated backend of type +2025-05-01T10:21:27.613-0700 [TRACE] providercache.fillMetaCache: scanning directory .terraform/providers +2025-05-01T10:21:27.613-0700 [TRACE] getproviders.SearchLocalDirectory: failed to resolve symlinks for .terraform/providers: lstat .terraform: no such file or directory +2025-05-01T10:21:27.613-0700 [TRACE] providercache.fillMetaCache: error while scanning directory .terraform/providers: cannot search .terraform/providers: lstat .terraform/providers: no such file or directory +2025-05-01T10:21:27.613-0700 [DEBUG] checking for provisioner in "." 
+2025-05-01T10:21:27.613-0700 [DEBUG] checking for provisioner in "/usr/local/Cellar/tfenv/1.0.2/versions/1.10.5" +2025-05-01T10:21:27.613-0700 [DEBUG] checking for provisioner in "/Users/darnold/.terraform.d/plugins" +2025-05-01T10:21:27.613-0700 [DEBUG] checking for provisioner in "/Users/darnold/.terraform.d/plugins/darwin_amd64" diff --git a/template_automation/ROADMAP.md b/template_automation/ROADMAP.md new file mode 100644 index 00000000..f0a9a53c --- /dev/null +++ b/template_automation/ROADMAP.md @@ -0,0 +1 @@ +in terraform-aws-template-automation, can we setup IAM access rules for the lambda function? \ No newline at end of file diff --git a/template_automation/__init__.py b/template_automation/__init__.py new file mode 100644 index 00000000..8074dd03 --- /dev/null +++ b/template_automation/__init__.py @@ -0,0 +1 @@ +# Package initialization diff --git a/template_automation/__pycache__/__init__.cpython-311.pyc b/template_automation/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..753e6f6acec31bb4a60af44da4ea7545c713d338 GIT binary patch literal 211 zcmXv|K?=e!5KLMW5sGJ@;GyQ=p%>{By!e2ynzRO!rfj!}5AY2>L421VDD>8oDcU*A z&dkE>zLI3g0v=I*S%>)<%>@1z{XB>#Q|!n#{P1QN1>cX$p%4lP1ZhMlJUVgncx`Gp z>qY=p-tMwYl!k-_cT$j&cC`i4DeBs%rLHsrc?<==TtZ9U0U2KjDNSWa%58lg7JKJa XYjuvZaCUsGSq181&Uwcs!Z>7KqBJ<2 literal 0 HcmV?d00001 diff --git a/template_automation/__pycache__/app.cpython-311.pyc b/template_automation/__pycache__/app.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f6eff2c6ef71cb56e428804f53c38a419529c0d0 GIT binary patch literal 41766 zcmeIb3ve9Qbs*aF19}ECzyKKhe?tN!KE#iI5+p$4Py9iANPGxFL-YU~a4>*-2BbI| z*j&f^meEtK5v@`}9IA}CWSfqu7kEp0p|_+By%lXJmlWT|w!+Ema+k5AJo}}#3X1Kl zR$I02opXD-r{@D|XxZ7VN(L9VyYKhhbI<*sbMCh-783)%^56a~SJ?{;^Ar4#e`(>D zpRa2f=2eDcrWj7cY2BJB4Si~-wDhT)($S}WN)Jz++pv@}m9msNmAb@Eu^K6kamq-; zn5Ind)VtG`%v0ti%ajHD4P1&leJNuqW63&YrQfOU%%!ZUEc(v6vzKgBHVva?oVvvv 
z88~>)mESqzJ=3=U#_z(v^b3d1lfRw$-_n3T{7b*70-BoQ&!t8CZx+fS)lbN?NdArv zADbU%j`siD=~+H+dQQz^u?+iWiTtL1a_L+~G!{2Y)&BDPx8UWw@(06Nuj%k(s!Ypt zGhF6Z7%t0c{ua1>m%39wJ6FbKJNIxl_|I{saJkM@F3)LN+$$$DZ{qU5VwfuD3ckWj zRd9t6uE@D(Ue6VOML)HVD{=0J@BQ#y3g4BSc9f~MJ8nUBwq6{y_dAxZa1Q(Ys%OsU zTK3rImw7wyTv_(Ie9QbzyJOY2yyTF4_~j-0sFT0$nseIQ9X^M9dBN_y?)3P)^(NEU zRhQSkw9Ku#o%R)e`MQg9dhL$5TzeuH;m=(%OjylY{V52M88c7h+2NrD~%tgO1-0Noc>on9Z5 zYuWB~`s}MKCM;o8sczPreugzTSgi}`J6vX)Vpb4mD}ZUdX}p>8fnSt^*RBXs1?UFF?w*OkNdfJ={)Qr*h(vfE!EW37J0F?S78^wx(% zhD;YaM|!#@XGcf-LwX;->I@m?9d564PHqDD)Q3NQ{t~>r$~YP5X_+bQIP{@YI_RT_bfy%9lV=zRZ^ZDNiA!^7pieeC(;yY| zyfzYMUdvgawOP1yXl)sx>@aDCn@tbWoWGquWKzV&2kpn{rcE%86tNa%I$lxevcBG5 zgtbEB#FH4*c^b6CIo?HyDrAP{um~uh#ftS?ee-ovi>-5TOD>PsUq0q^EJK2@;a}%dg$*#AF1jjsTY%YAg@AOzzvzzDlkUSj z!oL6$Jpiof1Nu#6xRXQSGdh(HX6DQ#2)AZfO9>bPn(G?=Sir#PuhF>t0bgn)ZXm^{ zN-+lN+<&V=!Bu0L?qg!-=~fSNZHW z*86Y4y2b3IFh$jO>qx`g(Fqhx%-AD^&6l|7~ zj0P}IvP(8|x3eorcI6wrTQ9uZA=LJWwLS0|wB^6r_j2E6nqaFHZMB4&Csl{nJ2z?s z+rCc#u-qaDv}4d&O*@Qa8BITj62Q=t9%5%{mCVkDOtZ66ZvgM++1W3wI%xk7U#YXR z-16M)ERR)1sVJoL`uI@B?5xA%S@zMv& z;CnS%Hrc9#7w$N#p`YXr2>AnI{=lvN^>Xk5BY^*&{hK+Y zp_L5z$evYU&#Jg*^;X|{{zmyLB}BgpM$oSUB65m1E|TnhxB7SVc?Q!aclYp??(T`# zJv$71?rKTEBAlTVmhFIR|U0L?Ppo9pJe~2;K+r16N zf0gmc?(j_9k8zp+^ZN{^4QPI!c}*YNI|Q`+5U2YJ6VN8te?|9aYlbB9*QQo5bF4VPhR{$%z(~k4krPR==glmE_vyE>5(}1Qln!*-<}ci4d|jF zSWEF`s(fSmSB||#OT=3ODV&jG|G8?&=1hKw2=OD)cdMhcKuW^0>(@0fP5kWm=74_D zC*snPMdbOnPj4dT)h zU@$caXfpz-i6BwKy{Rfsq-30x%Z#RnO%<)E!evFn%}-(}H&wHONZ6QG!X^QK_AZpm zCf7@k;fhn0bp!H;N zny3k1Ot*wh>^0rL(!O*ECQ>?>!CGKFa&?(^`RUN*x2*84>oBFM^DSRxfaq~_aa@v9EOf*pPEkN^lEN- zX{E()hgI^dW5qR#xT-(xUWTW)r9Kj35Mp{~z=8<{qsgDrsO`UO?R=2=pmj&b6clc1 zzMcAw)ITu3ZX^dsi5<pHW|5Z=W6;86RwK1!TIJ7P#YxG-MWC|%8H&f1W$q?~G{fWom#THXn=8%`D&Fz|_HPW!?UH15k zlqkKULxc7ziOzgZt{S43w$lX)Og~_`?cl%)gCZqgof%32)Gt6~=_!TofpUfPm~P0x ztuC#2`MuymC%JqhzMaJYX;ML8;=;j17ZQf>{4w;=`(SAk(k(cBArl1Recl@`-_?+@ z^Z7Zaw7N>Y0aH|<`)U(^1QR@p7RKd|qjdtTklE{)ch1T)2Nv--i&H6!3_i?FA)Cl2 
z1y}<+AP`%cB%@8~EM!*G7C#Lk&cVO;+fb*sn2+?Pf6@2lK4LwvaRDqan{(1r(ASFk zTB5IoDG{3ib3((QW=9*Lm|)NFk4k=6@?P2dWfCEMOzH9$)V^*0h)w@$+KXw#*8hh7 zo@wj!Lk+3w6{>p0s$OsdBd`M^J3!b0Oy9i2=nV%SA8EPodv{DY(kmY6CE1g|d*qd- z4epJ0lHDj|H;UPf@bcmCC>fg&h9|}0$=mGXwDjB0|910B%^OFy)5=I%Sun3`)39mV zG`#X9Qr#rvHQhe*D2bMkmKhB`?+@WsP!H!m+ zSrDwK{r12&2L52^^`Z5F$F@S8{1f}!JKWog4@=1GbEL%~v^c~T2RMQeY@BH0h-^N2 zoM(srC9@!6!P;QGW4!}vIaHJ zdb($} zYQX8CO@r?_I(#oQ^%UqHR@C&^bnn>=@ID7|l^;&0uoM3IrY+ftcJ!dANoH~$Gp@iM zyUs@^loA|^2m$?FXE^CqFjMAMAj@x|9gO=}Wv@^Bf6OT_6SJ8ubd-L)=~%GJ6{uKLf&b=Nve zD%Z?ia!vh=Tz931q;k#LC0F({a^02IlFBuEmt2jXk?XFlZ(Yw1B|%TN*gn9>>;j1) z0?C>tiIj8b0zR(4h?qW&uFB55AS9I^+_+X{8F!?GJCBZzX}{%2D~KGhj*fK@NEqem!yB&sE`(fM(C;jNA{Nx)k5w3Gl#)k)Q39PZXN zo@BNoYN8?6FrNRat|jh380!y`C`oN>TDw55YI9J&y4SR8#u&;oJ_F^Ml7Or}V2YtU z(`QhgD!f8@MM>n;5Q_s(XH=}jn=Xw2pR)OwXddF(n?x!Hq2`{RN-Jy~RVc*Iz}C@` zwX`HsKIFqq_V6zd!?lqJUf2{WTTA<5nunF1ku9Nso^D`8L-BG96gD|8^^-gNtWZRi zEODAge0ZV|{>7Fo!t0W#jI`IxTQet-%h7;&6OTCJe$l&9iE{aJ7?TkU?!G=)*YU}0 z)E|qb(Ou^*s?Xv1pv>;8SZTK93)Y1@lWFfV5;?>#tA$GilR1~nwI2ut}U*0%3koq{jm5S*ZwQS z>G&1mbp8r)x?IO%Rl_%dZNqoWleRj zzpoqk0+C}2PCfW#IDd-aY6M5*q2wd%jZV*X7r*RTf+Le?HpF^=S#xu-rp*{=ZD?LhWa`OXGc5RM>@x52U`a^L+FE{;(ap0afU{^rJ$MNpxq-w z!Aja6gq4^cONT7(C%xtiti#Eid;)+}1jq2)m93R<`|%;O~8U(%^U zn^GwI+*Rk?wI(~?g#o)UpjBd*r01IBBmHo~gf74?sdC8)hgs$q9G>vuAp}HDOANTc zFR!k^cg|Qmn<_XLj5^)GdE?cmj!xva`s!VphW+G67GSYyRe} z*X#C|g;SCNj3%@ud$pc-LZTr9g^r&`>@6Vns$wGosrg*MWa|g;yeg>ud&ht?R#i@V zjvVF$z3%ee1bXwQHc)=UhQW}fHR_-?waw|^p;7z{kHe~U(#c$kRDe(_a@+vU)`pY5>1~`%7xZAEqAJj@Y z@Z}*RE0h%2?^KC9ymsgFE7ZsJ3QVm0!(A>OpmKV+m1P$+V~ITy;vH!`Qfv3|HxVxm zH#Aib9D`nWMnb~Tr;_r@3giRl)QBlPvbFikT2*lp6y9GULj`g1=o|Ww@JXwxa{MI; zlbNUI{ZP(KC1u5_REY$vwc}|px^y8UHSm(o zIj;aa@b1kA8eVMk!M6B}%_kB?X+E?upt}rbY-6BA9~B@{#-|WH#)w55R;8g~)$8Q_ zmfM9R@XheSw}1tm>L|XSLF+79tzbp*xA2|d;|n6 zL{?HseuN)Ew;{BK(L#=uklyVAj*)TvK9ANIT8-#q0JiC6;CI0;l&?h>qtbx_|25?T zp9N-Z9(n!vJs7}r0eP@HF&>C17y-;#IyDNJX$LC9MrkEOuI(^eZ-|v0LM#rRIJe+$ 
zV$zh=g@s{q7Flm#_TlycUyBq%`jQ7`ehR~{At*-d)RYMBS%d{9Y+f&Ndq!Aa=v$co zMNNYIixA8M|K5kd3!vhEIVk84iTXoCf9SEy5;G*|heiD`(GTyiOkVjnue`oUavOuW zd*!!^mUs95X$`694@TaKYTwenX(B~U!IH+e+TQFXB`v|yLvLMqbCHyu4dym}vlr3; zvy*AeOxa<;y4CYb8)Hb5*l^CjrGL|O|K!8-q;W`S913XBrI>zgC5Pm#uUabE|qb%7DsZjtRKY(+w`(5E zl9|h-X;x^O6`N*Bz5|S4z9O2h5c8Ff^7cJ0DZ4Xu*DRJC4dxcn4^nkfENOaNv=7*9 z^3y+N!1@JRx6c8yjhuFyU~U)9?Zn&;V9AcB1#`1#ZYJjDj|z+L)NEc73lEXJDKPIG zdb^2qT@+7ENZ+^5?P&K}dNo07*?I>41&b;-PTV;iv=s+SYDxW>`v$Pk6rTl1g2kl( zOmP+fQ=A3B6lblo08D1yt7$K%k>aVXD)CSkJi$E7f4@w66Rgvsb(&bGgV{x5cIDl} z+u8LbyI$c(>K1J0MB6!HI~Odg{QBD0)_(6xcfPcK?s49p&8jy#guH`d-a+V#bNV%J z&E2p1-qM>(4;s z{tIIM3nc#qc=4`WEspW7xNmn4sYijBYDS^(ni6JrAhxd3;0Z>^pBD3{2{l-h!IuX~dDqtSZ?3@uj9}{)ZQaDy{mCbfvx|XY3rIk1Kepz+ zn*MS+DLePB?m_DhQvNgrUcm^~9?{xE;!K!l3sOOztrW&qe0%7Vk23OsH_Vt7v9O`_ zzt~R-dfzR7;P}D*KivBE!LhU1ub9%^BK{6hM3O; z&Ba7EO8AbuJ&$V7Zr7d_YTLxxHd4?5Mqx4wnlmYcX#foZn9i7TU(32vNQ~uzv0UkX z)Sbz}|7Cqii)cUd8|X_;i6y5$UtjWA)t8(kd1$cv$k?^}xa%avM}Ez{M$lgMZOb<- ze~|Hd#(Iy^gos`II8iuEe{jWDYDnGnkepo0!DwZFW=I40Z&f*J)nGa5N9~p0C zK5Wpy$A^uk3-!7Wn`$mp>;9s`04{$~Z2*_QsMq8BVbf%W?k}3#ER!bPUz!Z?@t3DN zO>?Qte@SgSeMQgwSf_!HAL~tXTHTM+Ypyiwetg1!F3kpT`H5B!?>|X3Im>lF$!xPY zOLZTW8sG!?!|7fem6_vFpFz}OQei?(f&Hmxk@iZ!tfgA$MVN!4mym0)=!{5ct(5s7 zSNRuV&D;d;`}iNH<6=ailVU$JNQ^|$8`?>+KgCF5K^%1n6?RlEm18;fo-tYp!kP{a zw^fBmyujv6C_9%(CT&xZwo)YmA}9A^m>`m3{xbU%MH(n+mH419x~8J>l?X=863|~~ z*0ucq70`1Q6|-S7tN`i0NCYNe`0U)0!e&NvI#6SZ0uNwVQ>3n9^GiyC&gw%c;qWh3 z3OW&fe7Iay!uxO~9R5Wgr-B6Dxg;nhGpY_#eob|l6-c=btMnxD&yK|{$RlP&DshyF zs?nIX!`Z^@T<3lJWGpaD)NyG_;$y$3QDrnlbPFsMSm`1e?8e@igR*>FZhRjWU_q8F z52Wbp*Ng!p#Z6Tb>E#EEkX}K8^il#UhTu7^h8bmEYWN4nfvW9CP{S2c926yh8E_Co zySOtG{zcO+Uz`MX;#f}uDUgGlAUrn{Ls;_6Ev_z-Xlx6m%wL5O|3yU^5HO?ltR=(D zSL#!zZ;GZqsQ5C?yKKhI>T}9z3(%d2iVYoffki?8_iR0{z z>P>!4BZBW}1T3yR8mcdAfCUgm%fdgv`k%#MJu##eRJwfM({Ufq!kYx%_D9iIOnpYw zCnfCLPs!zg@zlgs#`@7+&-vR14@Dx`%(Unvc;kVa5;P8O58@}G3hetDTfp+b*4_ z!$!0o&tyVb5NWofv#WKye+Z$3TmYy~^Rf;kXH>lqkYG2Xd96bO}oDk_6Nko+7}$EFKQ?*8meB 
zb_sH?48g^-64;i(LzyYx)#E#N}_Hh1M=?#xL@o?MWRvk9hf@@e{^P2M}NREcg zQs_|#>sj#c8BtERC6GYG6g_Gcfn5K-Rv#+zpohtDzzAA#yiOU*a8`ygqj;`X9EJk) zQvnDrj(WgpD?MlQXN7&KgsB7xkGtJ|yL9|pN%z4k?cjx*{4|&Z5RY?Kf0l%3#5QQY z!cdj|tV*etRe?dTirGQW?q_L&AtRp1ORwey5F}l_qWW?>&$D0u85zcB)@RI-jeBLqn{ud3y9r0sK@`=`UB88zpfapx*_llwuh%wd(Yj zjeFz+Y<0=8szIR)klEe{Zcz}I7FhDMSJA@=2vbmvN86~|3lFzJLzWw|f^*{MeS`j^ z;rI$IaLaJSj!A_*S4Q>}$pHnAbHhnV#E41I<4J!`z^!f(UILjhLE68w>Wf?o!!dXMTn$8crYw|qF1Itq*!(AYw0QlpcW zgx4|34Ycs+CX_PgcEJ%-$Qs*Pct3jUk>I3LHarso^;ydfP@ogQ_&CFrRPgAb5?d-l9<4$=BYX#VNmb`Hr|yEV@+rv(`qQHRG|``it%eNSOH04K{L=ER?x4jcTI|F! z{>`@6d%r#SjX`p_`(cq#F)mh&-|A9Mc`7%q6Kf?HL0={6tBAfz@#+!uy`sLC=zAgI z^z4@|k;0m-jt4ek=@KkmqNVFr*N(o$kd7N5RMxp|Yns^Fz$iN?!R&%Z*)`kQHCufo zyGF=9D`uYsv1K{J)VoI>vOlu?(DGi!`x!#}gxC%u%2QyJos=hO1sfA2ZJ&?^d+-&T zm&M{EAWp$8u}=^WaHW!MAoQNyV^ag#7bj{&|vrK3G<89$F45dY%w3POib=^)A?ui! zb&QygeezM7ZHF-#;80Drlx@b2yRaA70Y;H5r(XiwDYBh}?NkCby`jH*{yVAnQpKth zB&P|Cz@8G>Q-nPQ4ceT&esFzJNZTW(?Lo=)76sbVTN*gREr9cf$a&fmHlN5w$>8i= z=oWr3>{vpo*3ZDPm1bZZ~>j-_S{6nS`P8+kA zrd4aIORPr<7*jxCkkiMn!IF+^*hNDQqu!-VpiwGL1Wv6#O05@aQQ0$0yt{`xiP0;G zXNqm)V$|Y3Q|z?QmACnG#kPE|*y*vcxr{rhAm*#0xTn&Lb(ebAahdnBqU!@zP;s>l zJCn)glpRl<9O#Dx#;OeUdYsFNT|0xa0$XGmxu)lGVed6IR$g2z2&al=NE*)kT`*#J z=9Z8wW)ke3`IH?R5Uuz>K;4qzb#X%@y~JL$MZAdrkRLHbKn>q)0n3@KTj zmQo-ku|BZqwL9oIOSar4yVPFlZ>;u86JT1kTOLyqHoz`gWsF1wM?p(ivL!6T{nq%F zm~?zQK#(Ov0Hekp_*O*r;d?K9+amt2#fVwM4qLL2mVXu ztrl;I6_`zwU(zYp_zp&Li%mH~pNaC|4sf;Z0uaSbwiImff9%whG<6({?>*qOf>NSqe=J2No$B%U zR0F16X2mC>=QVKRd@bpGPR8exblj$7AxhL*LAl8R?v%TDsd&w@RI-+?!E=HWs@Yfu z2;3_B|B?0t=LzYH%GeoQ@-=JHu%8ZClRObfFrTHp+Mcz{K<46J8M4iC`Az){#GJUV zWhD(^b0F(ih0t>o+Wxr+959l&0lyiWs7QO?$1y_}PbM=xtM4OjzNe&CkbR4K%Wz!yDwtYs%%3vGdH=wI98``17=*MY|; zuC`$Cv^M1U5C0ac-FNn(pvtjOj5vDKT(AE&b0kLp=Ws^)f=sF&d8_g=W~e5m(`A z0uCo-e~DA64qMAfnl8oyIZ!|2>e`0y3zS;o)xqe#_$RC`Z8R2vZf1#$y<_bx~G zz5yFId+)gzrFi*Cm(3AhyGh5rvP*5{tM>+y*)RW`I|Fn7*xPda&Op+2$kG1Q89(B{ zuJ)`S{MheMUMk(1xdr{4iqdR{{a0# 
z{!8$!?ApM$2fh!f{Fh&gxj_Kd2kfRwwUrzeMk`fo3l!X2iH3ZwFliXS@HyHNze`&x zRJWynkbO4qKeeXVITq)Gwfkyh?e5a76-D;icp*^8PjJ@*g}_NY#@z@Ma@Qka!L65j zKH81D5#`^syD#^AB;;Cg(v)~JP@HJQ3KR#5I6qzl=lLqYe2ja6!h8twe<6~O946`9 z0@3jkmPfczU1>bM%yssDU>~u3PLZ{Nxg#ADCF3y@WvmwmZlYy7O8`% z;0N|$ye1LiDbQiM_=6Y<7VTBQY6&S}Wg(+2rI_fOW8>nh5GzMd;wI0yWB|hsjsv}2 zc>ipCgGpEuh%R7Hl~5eMOgQXjU%rB+lBxyRx#1>IRm~v6n(XbAF?iX7Y`1WuD7|18 zS&`$=AE8-PGa{D~6t&smx?pTnQp3f2faJ@IqsoW(^3I0!s_d2mz1uhH249*)DBW?x ztHgD2{1;$M`&2XSFPMqD-fq2AXv!N4*%Tdj zk`kubuAcr*e;VKpE*P%IYkyQa?i?8Z3Ru;tk_wqQbl5^Gq6kk3F)7vRpgDEcUUD ziY6AyRkw^t&*DEUr7R^F$sloPnFqWGK$m+!pcYj_17`4=8T$p~^_Q5=W1)%aQ~aef z_PC39BWmE9{I(frp>t~8vfjvDvf8@`YvA04u1W3XW0^})*D^YHM-cgK0G`98#`rxi%yu28SW`aUc zU1M-{7&~L9Jz%B31R`@BC?`{rh*YloCKezfAOHcSJms)}eITWF=;K_TRfGs`Q(gf^ zd?{2UKS_4L-!@|(z2;h3QDAp~h`@rRva`ywjJGL+7riMNlnBxm6IRS8S9f*D6z<4o zLGYsy@%l@dC~&3JCjiE1bQlcm=o1zr!6a6FF1XPzq?!8J{|7&R%7Mnlrp88p?NcM- z1j^-IRzydj0gYj@oNIpG33|BZFmD#+GnPFd1F?wI)fH75Ec4dSL~fgd z-UH-}qHqI{7eP- zVpH>{h67SS+{;vdtlYg(86BX&@Z=c8)G$Vx9SBAO&JLPK)4a>8AX)~)=gKk&i8$ci zdY|-R34cgo{dL%4rE8h(bK#uu3Pm?e{-U)h(rlnVRh0?CW1Eztaiq2v?!c|)G-IFyN>gdd zpyx#Lq^qcy;n;tZE~BE7K^y@|dTC!DZqLt#D``~46kXI%xkTUNMXECJAR1$X%IG}a8aQz~aHot_3*V0=%;f~UFyxCKa zmxSV+EWBcwU-iu4vd4~XA1DO-Z)r?#vb%nvf!Pj~sKXua>La04=))X7A0H74jHq;! 
z?h-|YTs~SBP!h*OK3IB#x^!1MZrxGEg(2hY?3^2>3vi1%o&r-f%Mty`p>(<1^wM>^ zB-Te^qQZixK#@xHg|dOX=mcXnGCSe_7`^X<71DvSSh&92Hy1K0gK;QB9@OjU*v5xa z!cPNzhSGYLR+f36Bw@&c_G0PkQ%ls49YhThnR(uI+nbl9Ivztg6n#rFmQFR%d@iT3uVe< zi!!C>e+$!qi-RRK?X&a}+f?|(C2vFkGJ<;k|Aq+sBaB7oVK|8jnZvDpmLkiD_AE|_ z09Um0m2qWJ@|VWZurDGglpg*FV=t73uF!07JsnD0;uVlX+9jykut&-YkFC~Q0s!F_ z#I8R{)n>Zev2=xV%!W6+`2PXX7NA{t|6kzzzQz2qiODM1$Rm3XZS@G5En;TNtzJdg z^5l9iv7VHSpl=fOO+??M_zs}&0GKz%?oHh{yz3=(eL`KISO-_vqO+hM6!n8dKL}EJ zd3zq0+V3oW-SagMscU_3QYak|O9!aJ#&4&8Bb_vKK3Eg%(9SZc01)sTKU~$B<&y=8e~Ms z8WpofiFx!%X$9HWES9zqwn&jeUBZl(z`Wyo+fRCDK3rHNXRZlnu8C)^Nge|05m^sm zJs>v+n!~8D>mbHLU7c!mosaE@iM&9u;?O7k3H8=fvW3ge_2_x-hB>3|0j> zH~Hb*JZWAKnis_81<6BT7e#iFu#2jK_Fyc~dx}P2dquXFu)UA7%YLWjm6qFVFyAib z*NFLtZ(EdnIx$8km@U96J_WW*WV;C46%Mk)FgbnN4U_bg?E5FgQ^RTpGVz=^u@vR9 zy7mhOM3;KBYS)h$#@Md~53s=xA6PV(#VJ-HNDj&qgoF?~#mP?iJ0w#N4a+E~4)u7<$EU-iILS zI=?_#t_m$z#g?m*mtejony(RRK6$*mj>zGSDN-;D2Gp^(zL)ZTiu9$3HY;+P0~l}s z49G5PzJL7fG zBje%sVxM4L6|Jj;8cD5Z3golHL~KmL{IZ#pjBOTz1?GcxGJO8MuJ^m7Pr*7aTE~fX zTnTv|L!Jloj_*P1_iwy?Lp(DiISAGf(K_%0wLT;m&+qkY%s&fE~ zJ^t*kBG>$6+W3;WJ&m;j7N z5s2$(E?s?eiQm4&3zvN2C7ocX_z}yE2w5I-LrXGC~0_9+_GKVA{3t$ zi_fkb6>NnlBJ)ZCD{x4y#dI`*fX6gCi*hMliDU3~tV__eq z0V$D^=)#{A?^SEOKp;`m#r-_KcOHF$I*-7 zf%qaFRWE8E%t)@1O(vGa3BTF}-p^^r!+|9a9D*n45IlkI@V=up8#xK<_1ot9U~zS@ z_@Gz}%HR*|q%xL zwiT3Kz9`}5MLLOM#wB#{3(#!t+q??uqCeFX~yML{E&=S1@yi8FFl zap9`UE08AUWZ$X#GeT~knA>+d?QuZ~sBW$}df!0`&I$!*#e%c9Bg$8Xumuc(c>roJ z@F}pvB0EgjVP#%6fI$Yp+`lY61$Izm2MIf<1nIyaptQbY3)&Gr6}1vO`b?NB74Cgh zShrnR7qspZt%n}x*nZfJD?bKx26hy}<@|u%BcWJj$%w&Xnih)AJ;=PRLvkGZ${R zDXr+~>QR5nQ ziJC9bPX9OCg~ZUz4m6&UjfG8@SlYOw(`K1&r|g&*a{(!?-A=0|X|*sYTA>Y?EQi*w zZth+8tb6X}ZXMdX`sQg;eFn^fu7_91`N{X)WZEGPULhT5f(1nz=k645&Wi5rhAv= zFiMJ-F~!SZ5R&QlJ<^9@ofWOKgc_V0(+S3PoHJYp^Wo%-I5|hS`43mTWYQ;0`ou|} zUgvF{`H3ra%5OIGAte$mYfB4RAfg9I|}$NDwQTxFv;34+V(>~XoXd+WEoAU7Ra0f z*PIx@QMNE2s%U&Oq2s;%@9&qsl;o9wJs7YD%v-+ut>3%x<_+<9x8$I# z$AhwR9?;UG#2dnRLtwUUK04mFeY{UNJ|G?+kld8vAySIX)Hkw0V^HqBTvFC3*ba)e 
zgE0kyq8YJ#aTXoGEKo#v@$#_e_3oZ?rC&VE{5jKlWaudKqazyl_|Z|*NTKdWXWDW{ za&+4{2Kd-6JOLj6A-k0wD`vzZ3?&wuE|%-W{cVRYmg*iA7{KXKsR5iGmAA3*@t3u& z#m|*9|IyfLy;R8jrveRp{HMa!!7FI)Q2aRkHz{eynZP!N*ThPIEfuueBKVuXU#RV%=X`+6?n~y1&W7uz!NhS0i6D}ysZGj{@3O<{fd^^(O|+m zS`%NX+cCH0@_TiEx7Pq4e^*%n9zQp?7hkVt{=N!B{e89RX1eb054YvsH0u7l(EuO+ zJ-riy{m;Xu);6u~e>S%@wYE0ve$aYcgWo@BZ8qSuS~oa2SY5?8;1(*~R;TLnQelI3 z0giY0zrtPB6xf2r4?OJUf$@*wQNcL>duZJR3$81=$vN=iJ3a;7P%U~W8~0Pg+t2b| zuC&{X`_7?^uxdud3w13~S5(ocxT0>8P#WB?=$dP%`?eu{&tTV3$Oyak{Qx5?L>&fJ&(d|XFsQx_MxuyHCJRVEX zJFVzm?WgSNB{<)5J6m`bs1*$3MV5;lokpYC(WYqh(4m24Wi+N+M*IshX}9QKkV(Hq z|5Psf3A3wt!kBN-zaW!wi~a?feI${2!nBb@CdeEh(I(ynTCv7v;_-VEm$mLD!QE~8Vfg02*wK0SaB-_V`gE@Op;Y8FjXQ`^@usR%^cjS ze6vnq&Wg-g!kkrysu7q2B6Hvob7Y%2vUTyz8G&gRnRddo2bs)pN)WSNWa=L=$G4f| z_fy}t2uzpAbP=ZOi6!Husjpss@$yFh-Ti{)plCVx$Z~w!a$K;S6fGz3U);8I+%oOx z%QVo_#9JHHAEWCpB3^Bp3QZR*2GHJsfe4?Qho$Ehs8Pf32kp|6OkEMDu1W6+?0(In uR&xQ;ih4{K`ZSvK4M+l?o8hPQ6=hFpyYw2)lE literal 0 HcmV?d00001 diff --git a/template_automation/__pycache__/github_provider.cpython-311.pyc b/template_automation/__pycache__/github_provider.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b8b26a8a789c734957f7a781a2131dc018881d57 GIT binary patch literal 33390 zcmchAYj9iFdEmwSO@IVIf^YC8z8?}PiXthAZ|Xsb5=n^?^`IbLPy$7Q)CFi;2%tD= zW|sAO8^(3ltZPr;tm80_w_!8Mmd!dFx^)`4bvpO5*Aa>dTkY<6X49FSg~rLO#=rLa z&IK-TLC~htcEQ8*xc8jzyub6E^L^jB-!Yl=6#SlE8oBfP0gC#c7)bn7anC1jgXa~B zrlu%bM$6}AQ!?Vpr{u&{Oeu(~oKg~3HKhhuF|S$BPH7i(Q@REHlzzc5Wmqsy8D#`d?#-&ef`- z$^p-eKdKz@`211r=o04XnvW`az4xQ4i(Ux*8mm^TjOL7qg*_iHYwELcx8Lva-u6We zL!Nne{~~1N^+#=I-OO$GnET$c+viV2nGsq;uubv6C+m>Uzk_acQ_y`BcHNXhN8uk` zPpjNYS`Ggi_}9X}4*vCS)vR%anw8OpubH4rjquC_|5GFqDI$}XZn`Q=bm0>=A$bA;$62l zYFuP)yS$zNNgK_9hzlP7G=R9X$V_`(3+|{@`pmuH^2|r|&$t=S?EPtCMomtr5E)t{J95Gk1UmR8&)5E-XGD+wcvb=~!& zl~Txn7ga&|pHQ#MLmF@CKGLgqrPn0V2dO!+W^=kk)?tF_6QP6~)X-X5N9$?BYr9Gm zRQsxEBW;>DgtS3z*qBI@>ISuQVvEd)wcb@W(>O)Le{ELa&?&FS?{Uq00wRVLar3ht 
z{~bWu_m(}3n|Au*D6gqj&16HKOb%MOGB7r@>}chJRjY_9Z{P7vqrE&GHQWX;%eSVN zT>j}_AA7@M zJw6E(B<5;ka;`RxxE5~FE?6Z@@DOHPVjik{XJ*_>{(v#QAT&&Z)J$j$t#rS>L9Z|(R z%zpL!;_cgzT(;z``dJs_CCioG z{T!AM!26=Pr@TJ*%rcW$uqK5_wfd;i=boSa>`smv5ZO!%Su=G|t*CB#nzZ`#v=7I& zGx2-=(}iXzm#Z0;W5$IY-Tdq#bJw>7OS7*TGF_Z+J#wTOx}tfWG;E_kjvpIwS#N}f zb2Ba=8vuZSHxbdr@g^oCqbt5hH!a;~oRE7UZO?CJB=Z*7z= z>-RM;Y947F+ge8?zbcZGhj!_~h_f}4R~9o-y8I`UMyLM`S~0YKYoQ!YKCha~tL5`* zSClUrV_M3X{m4+bZ75`mx;R5OZ|IIuDqa206kCJu>EG8s9Q#Z2mU+jR^Yt??oM9bP zobf7eyviD{Mhfb~gKR-Pn3XdtXFg8EIKdfvcw-N1?1>cAg%7g@bzoM`Jk}atH1gV_ zN7{;QZAGNGCX(A8DQb!ow8wO+DkvUUPatE%Z_vUb7-AO6QV^r$y7RJ#v0&vK{zZ(| zN5;x+V`X@fuR6k4o#Tw7ym6FGn3xKq3yD5~=AxqN1viXyI;y}WNX@wY%ZztMMC1^| z5dZ%~3yAv_K*eEru=_{Lq^QU@3dm7hP?%%1JT?4`jA`2wk|GUK>7Fsu5GkXg6#{DZ z0?8Xk(vp1t1jBv<{)s zdOx;N{5L121*n=NRV1wwlwsGh;&pzpd|ywiy$mQ!IR*6+m;X6&LBvITR_>vKiZ^9{ zAPXsj%CNXxB}$I2bm5j!LFJsdm@zg;m2vTwrt-=>)Hqc^`EkXF|K`LcExCwQQmZm% zGNh8!T3px?Dbb}3{?cSPsM^(*Y2*QrC1t7cY2#ba*WVNS8}iK!s^)O1BfnMISF%#W zbGZJIUr4==(q#qJIHshfg0@S^A*B^JGCvQC8?i**=t^6bc}XXMx}V+%Y4%YA^ByBu zBdt@WT6*t6Pm5y&h(M$m$gh%O+My@J`2ce%bMIO*w@9Yk=9D z;tKVwY#%x1WbQHAni=|6fustl9;lv`CGx1Xx&8o<+gaCLx6?%kDOliv-bvlg_MSi0 zq))7rqNkU+?dudn_*y`~oz9aJ6X)XrXU*{7M1%AE=s4OJ(dwP(KiS|M7#ta#7_60~ z2gKd!yf`-E1TyRnp{kbW=kb9gR$bj)AQd2QXD>|yU5LA5Cp=ij9Xf2o7a_TixCwGa zl3Waokrg7X3S(~H63`uPClUirm(NMVUL3Pv?u!Yrs$yWwb@^O=Kf`okT!R2(8k-I1X+Psxikh*R2Hg0Fv?37LGs4+St(IxRY+BK3!}Mxl zs0+YkZe|c4nn-rj@y7Sz(uhM6f6PIQpq-A_W;cNue=z{jd>gQgJWs_+D2w&$4_llJ7jjR-t*ESMbV%mmY*YTwV*G*Ak;-*^{zJ zW6Sq%ynSP{pKI*l8++J-OKZ*H{k6>Itn@!F zEPd_7ihAWaPV0DV$X>m~0*PuU6e-cU@GR??1G8BnxWAx3oMW#{vo~k?E4R6GcldL6 zgcqFgE^oZc5|boTG5HBFm>gY^Y>g@NbUBf{`c?nSK|Zg3eQ2Yf&FcfR*)F&%CnHAN zBV*OJv5GTR^Tuk{SRFag^yUI>&f{U?;gbVjyb_D&sF*Apoz zeQk~}XfE)c}p*A>5Zu=oBid^mrukfL-rY2_}-g=H^2O- zx_i62o2x#~S05Li$K-1JRavB__8tBA&2O8znxlNp(KXYKqvV^$SB-4d*pK`_3~V)W zN6zy{&O;Ck=NRW5<18^xcM41S!e$r|043E<0@%uazu^7Sw4{%lrq*Py>3K&zOH53I 
z0G>X5`dd9^FMMU>r4hF5^208$z;L!JyzL5WyYjg1^lIrfqL+7GOa{i>AVcfVG)h5mjW>RTnr1WtA@JS`mSFZp~>W4t$m-&c^mJw=NLX*Sl zzl**NQsLB{NdhQED`;gZJr6Xw3Lydn3Ec;D?;iAgFm3k|gi~7F5d@Wll1-&=M0zGk zX9FdxWgI}siWDWJRM8q~?u!nfE<~!E)-KBy&8bh4G$O6LOK8=QI;dXNFmDFsV85}C zcGQ!0G`ya+g92Jq;mh*pi4B%USB4a@KTsrTblNE7Pn%MoF3I4CD(f|62zE(HDxT4& zhTxFPjP16MvReGO7R7&a=7cY}51JECi1(30wscHnpwRakPdPvdrtA-4JlS4X1U2v2 zrEHLpb|0k>`XpBhZ}-@y^Flhwu0a#3%S$2X^`EX?uMrCsLyEUvjTWS4d}z49*t32PJumd&NX*bZKFB zRt7P*stmcML=I<$a3DQrGHz1NK9V4UGJkV2L|4-_1Yg&_uAuAQsh8pw7@ZBIrRDE5 zz6I;+_rx`JUwzdiq)ji|zXI;~f};~h<`tNsyJl%ivL)c8U@~Np^j7lVEE$cWoBfBA z!JtK2yG-W=zY9JgxWw|nlBwJ3SbdIgd_2g&Eo_EfeI^DlJ3i6VSH+M3N03 zvhAbi`}Uxo(}Z|3wLaY+v;mjnyFnY>pKSb)ogN6Yv6;)^txw?4ebn>h{vmsN(=igXW!^sW zI3;PzynVK1-h({Smf$Aa(vOFB@gMy=;W>h5VT>eU2GxvJ0#_KLr_;yiS7rOC>zNF= zH;r0_UfXqs0lWmVQ%RPp7Gw@!!5?<}@$g|f; z<0+>Axzjm$2ROb&IwO&pXV&TQJLyHY&s*(xx}O6PI!VgJIgbQ>;-M2#6VNF5;dMVN zNht>QU>hW=NEioVWGt;1$iynRzASD0)~`k z4^%k4i+&J#S@zNml7>miCaPItJkP)x+C;6AISXK!O9)$s!Vg4j1jM;KKF=-ByvKh( zYM|Y|88`z+j!K}k9}nNK;3=ghObX<^08imyuOEdeNEBdU#K%)gWT;rGh#BxLl*nlM zRNV}Zea!*2P~Y#$$a#O@KpX^!2`z&RjL$a)Cq2NUOXQvG zCRbW+O#$85VDG@$!KMW|Ag^!+8ua>bTDY-0yXZ6R+k zs)=(6i<8y^fyN=i1|zLY z&W9lasW3=+fxxe%qhXjH-lMNbm0+M}?xjFydhIc4N^2)N7N#C>hdkb891bq;{bybG z1J%1qh(ixcmO7H5Y#pSsX>)ABbNh}TMir5nbGnjDROsCW50UHg1q#OnQM%O8=U-g% z%mBA9ssK(dFeHU=RD}rI7br^>3NUJ>bb)-7Ig3{XXZj+HO94p0Ii5Nv6T<^i z6eZycuT!)5R1UQ@ji)jx10Wt9XXeI*&mf91K5!cSAoUid7^HCmW-%hN<~p6s9mD`? 
zvV8aeZMf!P7|_6!#P!72;Pf;1u}NXDgOm~DaX$lt!XKzfmQQ?4Qb?O03Q^O)xI9mj zX|?2HfY~hNnIhX+C`d}lZY|RHA!!L=83Ko!A>xVBgz#RUL@a^)WPxr=JEXQEg%^tA z8q5gRI0vEu>oC#eWN|>kkMKR2Ui1y1g{)1&iewN~GMCZ1f)=t>nPXr@Re&{byQ2z# z7}X=#1RC{wK<-!Nn|HgHUBt}wkAlt7B=L_RswhcNr`AqYs~_=_r$ zC(qR2^B%M+@ug~h@mXk%Vf5>ht7@%7V5yR&=^Vx$#TYuQ!#Re*6QBqXBw&4vCITKY=-%CGF%u zaheRG^mP#6`Th@dGcbw|%*l1RB3t&-MkVXG1ZJy^@1GF7oNxFv zQKcd4Cm>s`%Z(BHy=;$hOA}5y>k>_IBG&=|Qfvl`m~w zJqa+1%Oa)KeCeSL&vxmFNLf{+ypAtF%$E0U=7WG^De!#3S~bLsl&y%jRwINVS>)hR z{@~#1nXrkoHGQ1Ob@C_lyK@f2rX6x_JE;aLlDrE`4g z+?rB^au2$$@*bKSxA+^2f}e9Ryn|sK4DgeyYu5}rWe37v;>ue2GGJ(*lI6ENA@*8t zOny>U-14}g<@;yfKD$}P_PQUo@n^1qi)MO8@Nf;Ye8X&52ha|6{dw^ZirEvR?1f40 z;AQ^c<#1L^-dT3+@uBuVZ~j5^R^`Ji?$9KEXfk|er?T!lXTEiYZCTtp4i*~Eg5cpQ zm-xyhwsHwTcK660b&YLzjj`jCT-Rm3>vH(ix`(Umh}5^Sm6^@co$6-3x|6Ry6Qd55 z9eaWn@jdSB*{WfWo?$E7Bds0bk;m;PW0bM#D6!V{k@~}%CENAGZ2fRdu5UaVIo|iv z#UC#|q}hoZ-0^At`1FQhr=|VR4L>lj-S^oGSJ4LJdsgspEdjnIz_tV)pS!%FV>?fC zEoUObXC4jT*dD$C?Zpk#{4l+t+pOhU1|lc=H;j=ZJ#5S8o2NUiU3}{h-#Q5spb?6j zu*A}YNNGZBcZhxVOK5k8xRwWe%LBIML8PT)Lkm*0jYpBaZlxMd%3?)SP3?NuX7+ba zY*%-()tx&vP2V@X4MMuq;bA$;_*nlk$1Dp$(Y`Mmkc$C`C-|R$jQvUZZ-@;~z`s+| z@Q!}t;CD^$nAT0-HGK@Qv);~vcOB>CTN8pS+Rw`R5D>gdwGrqj0XmA$!H*>rq3nJg zQ|Hzm!=XC*_GqM~ZKtIxa&#cly}V^(yO#wMIW`c}C=VWbLV@)gv|@Uy<9MWF0(!V( z0!%EMYV3YO=^C4UgH{YJkjids{r;J^&#)aX_Ubg+9WJin7T<7-ZMX#^^ysmlntp8B zawy&FFjWD|)_@n`XPk-A>BNEa1@)KJ2TR8)q)v~l5$ zFK<`0vlZ>)^bfLkJnYa+M*zGQ?Jfb}DP$JiUJJjk9l_4a48Z zfIH58>m1vD9V96q!AFQ%ia4!v=hU6cJ##HcyO?)9p;Hd2flGew7wzFQ% zVymY=0tu*#*SHJU`3u(}2!?aq!+~RSavw6JiVg9b17QLvv+I$(CIK^9#kp(;t{WGz%flHpSbfSliT2UddWuy_3sp z=kwa1Q2F+2vdvDu>m1iLB}(vI+L+=GpX3iuuvM49aE?jdG08e6#R%7bQn?x6PmJ*= zrnt_leCJiR>N*(Caf5f!GN^4$DuvPfwytdD%@5s-l@o|Z)_8cEk5e^k-8UC{@4 zef%OQp4?m<m7ofTBD)e0KZrS?=&Ce|U6V zk8|DlwvjzN{WJgj0e0*fH+-ERz7An9T+L0s<|bQn6B@7K@TQ)v?*kL*=-WEVcbtWJ z)_i31=uc1l_ypHA!gr0Z&1eu&X8jcK@Ag0tU$OML%T@PgqgZ}62j*vcDEpYD{_Kqk=3srF7$ z`8P*i9bs#yeo^tz^Fh;xP2dH?6ZR9!41CP25W=7AyF4j0Q7WIISwGTOu0L`PPXjCx*sgTaJfBvZVzkef%;_U2~u;X 
zHf?P2DKI~?Ze4gU_x)TU#92=BmeZ`|bi`sO;7_TL#00PJ12_)*YHw+8IrUz-{gzqr z-jTkxTYAO&jlJCy2jzcVcjQ8c^5^Bf=95R24-d-W(Zg05M0O!mLgVNsIGcxVp z99BT!Z#rZU_iv73q`&FHNPjD9JJVtK+rx5r`?p7A=Xl6wDWmc}?ez}juMEy>?aE)(%OJw98kEp@ziL5PziQWC zZ`15hb=R9UJ9R3Exznt|Cv6J!A3+d19ql&`YW}fF4*q{^k)i)!9r*vLPzL$@Q<45= zf##nM)ZMge9$QrK}VuvK`6+E zOq@?}CkSdmR2badKh#qP(I*%d!VknOK-dgyB~iptOWA6mx2OudI}F=ymw4R9EG~TU z({Qp;{rn?wVY}Js>?5a@yN}G1htQd%-s7ooKP*OM#-N_eI9hmI9MCrld4M`xt(=?} z5WUJ;g|NZ?1B{4m>%+E$?I+o7GWRfPeugck0IUm|(}$7}u)VZV7TecDFND@EvX#^8 zG~aXrTrgX;g3C@`7d)K#CU3sUYHu>AsmV-(6*bI4>&Ca(5+P1JP9?qjzreG1@1oeC zvvhKSp4lJz)7oL2Qf-PWeWrWB&^MGdYfv2V$T)Or-F|5zVXuz^D?{+erpK>?h%X zML1G1#ZPO5tO7-8!(H-r7)Shf004~mA{_Bwe&Ne&_t)z=b31QtUr|P~a#lND^sZgx zvno~;K+Nb&UpKvATAgJ}&aW#$WE)&Co2LX9?1;5CoXc8k!9=XNF_q%7474oKUNJqP z6sTwk7Cg}zeygQ)=C2!GFtE1sAQo>s4`!oQaJRYym(yP0wHMfgA#*IMBu8_2EIN~_ zH-`;DCRY>~4shLv3=kwd5`HjHaCuWEPl_tLSX$s)_k&!jsCW)ipE_D6o$?^pteew_ zCD9YP=9DK%EkxQ7muuDs^$f@sgPbw81I5T`jaLS8j2aIG<&u4s0v@|QqLsE?GhPSR>>B5VHUx%3E5}+ED@dX1}&eR>Yqo#Q-TpjZQzF4aR@c zdNVD4)Hsg+k}CPBsihq7uqfr6NQEw)BaIc*($;Qx?j8Po_+d*bq=X+zke%>LVM{|_ ztUQ3cGmgEW2CjzCzGIV)J6Ou?t}>XmlXystoNYmc9Z+e4AM|g9R1tosa|pRkalQp% z1cRQPOR`oYG7^xFNC5zIf*h&?mqJ|9FEJ(#B=+x73^F)ex6=lFvH{>P_~7m7$0&f2|`t_}U59TE8-| zGLU-gZou>Qg!Q|gxsz>Lq`YQ*XuJFnTYhNGzGmN4uc{+?`K!uRC9HATxiJluZO0J) zk*!!ZWQ!ZUB3uDs$%^EauzA-uayJkEpmd`&d^vo1>keCf4b6p%{DmpOAIU9(70`|g zpdGFmkgk44%9y)KzdZl#LC#Re8|v`#ff?DyC6#EABt-x}sh zM);DEm4Vg#-ydBWh0SDXRXpCK=DzLbzOA^7A|7Pp6E8fdF+GkPxr9}n)%zG0Y zGJ8|6d+$wNhsy&}t_P4OZ?=KwB8>@taiOGDuPM`)UO<7Gz%PWk2xxB!)pFySg#Vf} zWI_<9(|JB62k}xD5iQ}A5d#@q;Jy@<-{YTm1HA_vS5U+S z`jYu0Xng_Es8cA$b$p6^|_xXjGB6D&RxuoaU$Pst7mxsQ?^65N4A2=!^qUewzx z&rCw5#`z<}1xOty2ua*J!Li-Mx}R_E0T;~HQNaZ}VlRIAMg&+=wi;sB2{y2-t|7UI z_DTru<*?wr92VhT4nq5VT9FW}fID~Q+%tackL4wSj!wvGfTF9=0s_l4UVjWQC=SZJ z1J+Cmg+?JAMkvP|pd?VYZeMy(|G%{B)P5*l;I5l2TcjWa##v z8gnQp7-CgC>%s z-7-?QG}*R-KAt9HduT#%^FdM*!HaqbpP%!81^qQdsPP}romtYhXP$#gr$kElEB8`E zDcXXTIL+;n)FWljkjMdOU7`QeHS=~!N$vwu8r2M~C`xgQ`B)|QhG?bK@9vgNll#nE 
z0~;B^_AdrmMuFU?Ii!7{P3~kQbp*7u`$y-JyzFM@7OmPTDAD~%O z0Q^eG_QelT~)m# z@67F1F43gar}sNuLCRJcKY|1p-xJ}$I4VoFXfQ1;T@@ee-I7vdZh-@z)qDF$Uo9;+ zw48YyF#eu6NFGercQynKbWL*CPuC{>q5bG;)&-4#3+s3Hq4zpqnDS&h0UkS_rVX2U z_B4FXyxF}kPi8`8-ucswyW1&bgcAfZ-{1rxZQk9cLZ;8^<4_h|6*9xWR3l9sAt?o- z=%!#6v{3Wz7V=jmY zLsH0q?{>9r&_Wjkpr=BX7f-{;%7zdCpj!h#btJ~RPnUQe%!-fxMoI3OTc$0M3ff0S zTIukv^6WZ>^)^T#XRg=p1o;bN%XTPj%6x&=OJi1qtn`tf6>c)Cmy|g(F6c|oLx|=@s&T%RJrB7tI zuF@PIHD)O1>@G~uzb{PCBdrIFk}tfbCfh49+o3#vnm*qpy_0#Y_7ctPzMut4(Z5>@ zIB4<}NuXrz;{jFI*Tpot!OO&+rCIckUD#8-Ml0lDvvJ_%b_+dHSy z)^__oN^v@9PaE&Mu*E*0oY_?ul@xB!$S8uw+cK~0HbsxTrU=2!2z$^md)4tQ)G>Eg z9i{q7nOpi?Fz;WXj-x>(jFa;j=C1@geSsc>K6LC^zwtf#a021Kt`0hO)iHk`JzylI zySS^SyXeP#z`8`{#U#Prm&xq7lBAsIsn-qZy^R#gE_~3kFMM$Ii&6%=DeBs5t|sXC z{`p0(c=YQNw%qsxH`Z~ncsS>Eftn7eLG9}#CS{x6=^Q6}_EcyRMjRfqr0nE7ofG6( z<1^z+4no8n;yk#$peK&5BYTpP`pB8w6}16wV}l!VLC@fpYvwL0 z8>Ff42zp!O;tZT1#e@s};O=6)q&{Wz2pSE9seo^U8;IcsGpCCvNMWv^ z*|L2|>}OhWB!SIM2sU1%a;gJ#emVcc$_nG-NLLERlNxY>`Sf}hv2L!4B_2Bnv&uQS|-qv--nxEz9E0H1P)8( zaA_Rsmnd(|FW#z^F-rgi9?F|q;5uYDGxH()DJ~-MslIz2)Jjm@=Xjty{oD@`K&nbc zf~(ThJ-;{uih#$#lSus-D&=bi$j?)(?W$nb=q=^XwSTF7n9KE?=X=hx6I0tgQ*6)F z>I7JViL-dI`2*@9RQGcRZ1JXp0RT;QUqC&;Lx(WAA39KZZZRH3P$SDUgq?xGw4oR8 zaI6KW(*Z^!{e#MldksH$zQDW$l_K1tfJL}V8(B2a*)W&_dGV9TbljA#{vsxUA>6cw z(wb15gl3YM0wy}1(UQ3c9SX$;CLoFuVCfke3Kvk}RgKL7>-b&I(h^35AbAf6<2pvL zz$MS9bBOOSoim_tHaY{s$j4yYH_jJp;%Y(zx_4zfwXKyMKlRzJd+1bkjnDG1JUu|)CW*fT0%1+-35IA;yq zASpBtDTbS_4K#>DfQ%BOj=+$muq!dlrdpNcxT7RzPq9P?#J#Z0>1%$NU&e zi$~iabcD@{4}IkPJ&-qnoh}@i)55io<0pGjF-im<&p0gug}|9Z(4eylm%77Up#dWS zFC_4xc3QH?pdd&f2+%ErA4vv49DO?gjm`q&5V(gpDNx8pfHMJ{N)$ths-WLyK-Y;} zcp9*Xf-7;6m4G$QM~1Oa)HqRTF<_JS0>M^Jn0N3A5$Fn`1F$O+oe-axR$M|{P8?l? 
zSB&)tRKyWA;1Dc%3ffO#kxplAep2v7;Gq)sDIzREK&A|R4QTbEg``%x5?CKT!`(Jf z(_-+rfO!V2sLt($OIhJsbaz}wi+0~yz8zJ=1@>+)&AdV40GCzZt}_1#!nFkgSM~b{ zWCJace;{b(S%l?=;6y=zi>$|j7MJi2S{^56G$&5H2uMeW{8P+RMTT@#kI~@n&1Lv1 zipeLaABsVoYi>FoRC&C#`#EM4%Y%~+mm{}ZP{}e8Z{pff&>$m&NO)(C2gJ86p`P;dpCotk?z<=vk0vWs7aCw=mha!^dTDm#Um3FxFohBw(G_yTNrj<%XAKlG^V z@OIf@wteWKnkyUS%SKnTB35Vk+_v=)YdwVbTpkfsoqU^(Y^4v)EGRsohjaLOho5!$ zQAf@Es(Ib80Rrs(yrZ8bCRLhWVLyV5WWkC{^Fu*uF32~E1y{6t<7}my!(Qa|J8QwO- z+GchND%q;5oAqFU`DpwSJ9(KKzrv4S5uR`b*Z6{KtnJ#z2_1uLTUXh#YhXT_m}DeZuCuo5AWeWZiC9yc51CaAV_3pdtX~VMuT}<1}xaW{uMk zd;TleU%DPX9I;$ntK{KR1D1zd>td2o!cK zxnB>w5Ll}ZFLUNb-rNY%>85)!9Ee#jWwE7;&^V=wg5fMnyk&{CpdJ}$K>(c;p8d|; zx8^{!Acs59%^&E7N1W{{Z@aoOD5^V7f7=?q_(s8-1*~Hl%vPb`vSZf;4`;l|8*j44 zn;)BVRuBJv;Kjh&g%`fO@@26I3tLcx1vEF}9?s(BEne2*730iumfO7LHfy;ZvE;6* zV``15EK*hbo#}5)Z(N1DS*yg5fM< zyk(3f=IM^L2nMnV6i5;lrXd%~_Ws1WdEqZ|-vyEQUND@ckGJ%(mOimiqnzbDZ-H#j zM~a-Qy5t|}XAJl=nu)v7;Y&0ABy-2KQ}w{5LkKf+mCc`GO*Dzb~k z;XJtE-yHg3kS!Yo!`X&-+YoC5k(RRX1V8Su!r^E%_v+>Ix#sg0jvZCv<-%x0;4uB4-qikIX6##>!CfY-joBpk#cY|#8q+mG5 zW!`a_bzF`(s@AhPN8Os~9~@2`@7 zQYx+kq*Uw#q*Ux&ISaTw0siDhExk3KFe zhaL;Nc}GK}u9Yq9T06DYwbr$n14@d8P$sZe&cJK~mEVO|7uJU&mf?7rhFQxnmI+-j z5xW!0RN4S#DlLaHm6n4LQ?g7W8>3r;eA{W(F#?7&p5={aS>xH1GL5YtV;y6H;f&+F zahx@dKh8T4Ugq-}Bl*}!4s0Yx*2+m}q#`F7&v_BcRJ=e_tYr!dgzm#*f(v3OP^zL@ zC{HS_`?m!mxmj&(p*>IP&4E%qU-97-y zMH)Hy-`B~|uUDYoB)e{;-p{fP7c1T`?`t1+D1PoxLEz`b14Z!WZ@YWtBPXa2k7KkC zPw3C(D}JHtRiCpd{?4p|M}KEi!K1&+?=wSyt?gG`RLcLpNe+R(Z|Ti}z(;Zne5BN0 z%2s`p)p5zF`lVV8Pkw1s!INKR_fimeWF0&L&W=%kL#^1!t-m2t{G&_-{uw;SNc`{^ z1=;wY)PaSvY7$X!6b%RBQ~1H5GZE!^9j>{pl?UEKJsCUzfE$TmBLmw7{30Ux5(Msj zT`60aeI0V*PKSJ8PIArDuG7cQ79pF=SQad0aw8=|ASyDj@ymw#32HI&q;zQNX%FLt z7=f}hdL4zul2Gy9jq}j5z5@^!2z47}&Es<_So0v5FwGx0B6v4z1(($h3P(697ro%| z!sf*7S`a?4r%zt#BGHOLQ5e!!6JW!a^dOk{?IG?2`Z3UJa-x4i^qzKWo_v;2KQK;H zzN!b1)C7_y?J4;f(d!^OMHB$^Qsxgikk=;42_I;|5l4Q>PZg0*@f-^&B{dR7`V;Zd zg-`KdZ^eHgZ~&t-DLj#h#q7sJLNR{&r+AqEoBD~Nuf$SANzo(F98%E<=5y&jN@*tT 
zWZBK4@g4_A*>N^@;XWYk5;O7^?;{m_NG4NTKwqbPP$~0w{@#MN!ri;@gAWiTyx@j2 z_leD)x)eNg!THz^WTo9ilq0R@*R!F^V)1!r;|3a5wvRya+Z`oOKhd=cq6Ykc#|x22!j zNtOqMZN+P!obzy((Y&(;kGZ)ea8iW%$cueX|Dw*sFxLHg1TnwBf(y+{QV^u6dlEg_7oY+}XPVC5G z9ipXlg$Hsb*N-m-(8B66gJ^Z4g;z9@BRZxBe6^b7TE+YWz9KD&M`}WvGvM>#^hBzoKKuMyB1(=pprJbl`pvLRa)?H+S|PL zHmki2w+nzagEbF7yb`uYEcBX<&#z-GaO6a992MNH34ZXB;AOAe50~d7GOh1}OG$|F*rFwO5OA z&vCX<-Zsh-BOb(DXV1e)%ylrF?FMf{?f95dnLR9vR93xt`a7fF8s#bv^Oc8J&n5x= zsO^WHn+sg)2;T~yTsa3O6|}HxlK}`gIT39!_25SD4^RJaG;Ny=J8dwa$PWqd3;<5E za#v0z|H!Vb3YroLUmT3zZtk^H@7e7`)r$A(`dWs{74H}JY6mOjKdX>I;AfQwha9TE zw#&i)*LgDZJCx`zl0k~UE?0pK()5U;84Fl&)IM|$*lVaa1jH9Q=ioh2BO=zH;tLrQ zK_?GCWQynzK4hw&o)*5|4SwVF^u1-*eEf}OdYWFGnVuGI90MJI#d*dF$r+@U7?fZa z6abL)5N<#F59qsx)_+6`DJy0fEfia3{xe#?L<GLpKgujS#kW~)Cp1YzWQqdJDERC6~Rrn=Lv>)f@$5bQ; z!`d3t5U-ZXD~Rcc7xuEwn1OhWR0(`Klz3tPo}UUufR(`>leMdI)_gGvoOLU4U`Mam z8sE~s3!fQ+r|H%+GRjdLIe0jx3&~Y5uh4#+l^s)&AZY&BV;bVsQr1GYu#wMcibvH` z`mB|i)z+0eFP6j%$2{uqVs1{4Y1P21+oR_o@ScT0qD2HQy$mB)0s zsxfFdv>&79ED1uHY%vY-YAI+=Arh7*W=O^tQpN2KDz6xB0*e{iREJ{}SOgjd)Reg( zra_+;!RpYbr*aC`Dqb!Yq8n4IX~OpiAjO)OSycuC5L^HVSBh!Sr%g?$!zYx~Iz*LJ zMv5yUdEGG+Eyu1w`?1{-gG(+TsK(l9h!;+j3S&Cr)uY!yyhbX=9#5Ku)i;x;1d0GF zZjV4G49V6>Ck)9p;x0D&5VUTJ?W78!(W`MBfJN$r0r-AZ9uC!9s1uZqM3DA=A_+;i zr$shFWZu-~F^_Q$y@3|4cu@_0jTpFK41*yf?6BbD!%gJgJoqFr3Bmn>_mOogOm!tD zC9|ANdR+a;8XL7HDS&CvR0q4d5%LXWW(Qy6ZjO}hyOdDU zgYe)}ZMT4ljx+xZK|}&R1=xU8WpEXaLZ*Zh7O?b`EbDm_{zWLm^W-l=Wj#;+Ql3~@ zDsCcFE4x4Qm@+?4{$fg%3{E|MHw#8c;YuE_D_HB`bd|iW@_BVkc~>WczRqmJD))rE L`pq5@NF)6}m5D7X literal 0 HcmV?d00001 diff --git a/template_automation/__pycache__/gitlab_provider.cpython-311.pyc b/template_automation/__pycache__/gitlab_provider.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c00c21df63a2b6ebe769039bf606f095731ae684 GIT binary patch literal 16066 zcmcILZEPDycDp2(pUV%Cl&Ej7EXfufONwL1c4RAwKje=)OLipLiIuysGOK`f!}KSzI}JwlN#=nK3)~)|6w~Ip&;pjk%`XWA16sm}iS!>TluIp0QdBb(rGqZ&SQO 
z@PBNDGXPV1U;9;WhVu$T#2~=ml-FCYgV1k3gUP?A@sOE#~uwu8P#?$naHZP zV@WBiI*-p_oAH!t-IuJ56V9?-a3^=TNWqeE`b{{bT(g+>Ly{CHO2_RVH9(**k; z)y0kQi^3amB#oX#W_na*<1@+FnYbjxX2q1sWHV=lw9010iFi7hBW2YfWSmZBV^GCp zMvSH7(}Ef@e-ozT$&~6oCy2@M3o%JbB^(A`P?QtGNV^a21NaO2DwQ>;oTAJqwosJ< znafX}&Rg=gDag+W+#DNy*`^1 z1)h`2#NHFNi#5=k7({?PQ4AxfMNkJI=h!dAMc`_=KzwE79}?>0G0};N6u4 z?>-ivKEubOR@Dn1VcE)ynPbZwrF-j0lnhTugOHdSFtB1D};t`|J!4=%%`26Jlm|CYI!(Dw`yv#veZ>C=g4e z;*u1LNr)nx_FQ~4-3#@_dx6J_@$rl}-FrSGo|R^R-%7o4F`Y?m-?giEBAM+?kwJE6 z%Lw0%yt*3(BZzU>3!nmx5D;_Agn-3T@iQ@fJ$hy?h>bYE5i<$)=u^-HiPF0O9$uur z3{x)OpE@o%?mFw2o%O|r&SEfvaKpADx4jr?D|snb!xxmp<^DZ_5`r%SR6VCeHWwmM zB@&&tU1Ez)_7bZ&o9{Z?m!0j!mY#)KxupkOvAL(%xU=N2bwW`9Ur;ue=l2L83cyV@ zwn7EIdH6kmE>Wso6tc5oI#EqE2DSYXa$KbV+c7J7E`BRdtxaJ(V0hM`ifTGqsQRkc zLt(83NqDsfDaQO|j?UA(y~2orYV^#e9uoT5by&z&Z_EU(xT6QRa*23i z5;jLJndJa6IBE8bgb13>a#HzFPk0ux zNuvNRQYAaZ1};4>d)kWO`m2NQ4K8%b(X;=!^V7X|_Y5xY8N9z5m$+NrEj4k*uQ6vVt@e3W7D=T1$6?nVW*wdSzpnh+4ucuHdAkUih?R^YB zZCQa&Ypx zecy?rJ?`?>&l!Pz;)K*^#4uT7b$#5C(a{s-2-gMdek*t4_z1$22=#OvR1JFxi(^3}IN1W-@6>;848b;u6P$%7{&f7mR{9R3kDRPlB{%o(H|%9^s9I zK-5gBXIPWc2J|m|-0`$A-Qysd&x(RkscM_dW@l8pCakMAB13vK@rQ%e;Bu@{7RHAlujx#>`&%Q&hakT;vOZ4ri1G8Wgr>IUnyRWTcbvoi z2v83Sms+WzoPR(RVM%x)3yCoKxy=L9GuaCqNPzHJ&!=@n0)>87;u0BNfQ$`8dLPVr z`?zSG>V~{xR>J;kq9}^esy&sNm;f=?mP~^Xu?s&j{Ot6MB(`AKQSPW35OMlrkSC_K zsoJu_8(GzlLp~^A2S~S72cS$E#1avOMb%wd7&WkRWmJZAQ?*K2QFTnlrFb?giqDZg zz^CMl>e1SQDddHygUBD^GmtKB$4qt&J+%_Fpn6GlvGT-8xSJsc!9&CpaVvaSC86k& zPd_9|Y1r98LTRP~p?BYW=gn*51%IdF?<`TYtEX7opww=XgWZc(0N~~i+;7@&{e^k^ z{2K*l{e4f(Bh(p-sRynQ8$t){Qo)c)iC_xu0qh3hZK;VlpE zMVg?d5@o6BtAKmq#;Zf`4aw~<-RipC4Ia2ccvuM!%i-Z-b98a{jefc5IXPT*B?gK= zeE1M#yl~z8(TDf^5yihz@oz6ttSk5h0usU*I`+;nx#5LhNlV*)aqg3I5Cmro@SeZ! 
z^6S^O=+p1=o}d@oZfudayjZe0YfjMjBlTBuSN`a3Wb1NdYa!C3M0!9is7=zvuILT- z&;3917rLHPx}Lk{y;tA*WA=Sk?i~JA_LJPL?!vAU%B~X-1y`saQR+uz;{HR=H*x3n z-{q9!g4{U)u24Uz)Waf8J~U>ChV}s?42Y9-?7d@h+tAWWpN@SRg8;Zf__z{2F6*vj z#R4!j_t_eN4@(U&7vM|Z0lfQ+?L8QzKHD2Q5TJe&=sW1O-geXQ_si z1;j^cJ+T->VjJ01p@oX_v`Qc}1i*^03Zmh^%`vb|8$#=pA)@m3v!uu3CwYtb%e+xX~7DPc{f85HfYS+B3 zC_?i!DDI3<)#5H6B+5?c!xg5Yw`eVQm2eb}PXm&nt_ypdhN(nK(oNY!-N0;&7R00;sJ=NE)01X@Bw*jag4Vz*p z)T$1B7LhUGdg7y5OnehU7A8kuvRumwi<>z)4dtavun-`dY-)A&-wQQd*?TwCxg6?T z3@gz-dD8)KOQV{9XQLeIEQAJ?(BS;RVkmO8|GoZ&TDfcg(m?>=3ZVl^=zz=~F!txI zC928QUjapb;8G3*pT*F!f2-`@db18dF|q--y&d;9Y%6Tou58$T`AEr0wX_wRxnf7x zM}x(UTa=B@7JHs4dF&ftp9gT+Q}R*aW+k*4OCF+&+n!Ul4P5^Ag0~Rvx!>6Ovn z7wz)X`|mVe>n=2&R2olScA)UkeC34_Wvz)+nTU=py?kr*)9=dBW8jMQ+(Oq!y(Oz< zl&-rx_yuLDjr<+~$S$>!FWpp4?G^X6Z9no}@m}^8BTZN5-kV!EDQ`V~%W=nc+Y143 zg~$mdazYNQboYYIa?75@9RM^}2<}yadu4Vn*|~SYRN>Bzdiq=Yto!WLukGQ3JFUNN z-2eQ+ZPwfMG(>K}SMyIx%T7P#JsgGSmnM0s!^r|83{Beyv(9 zUwiMiqB)D~;5PH#K#Z85B4dwp+Ss{qF*E)3nY%HECJwxc!)tByar?m%FVX(>rvs{lMK~fu5CLkC`fUHCHXk}x_T_rS= zRdEV8vH~qV*RTec(2{-vg`^AsVwexqUCQ6}w=MhI7DkoMo$|(m;FcOR|IRbAzpdaO zQv5^nwtIEWS5kNDx|Zv@7VB=Fk?Xn&b2Dn6hbpbD zB8#fE0os5gPQ72IoGVfNGz?!u_3&D#4o3or#Hv*k#>FuR<^0;vdA;_Wje~*b>>AAv zfeybOX^4BUEtH>E6UkXjLP}Xl9NzMFTc(fZ74%_B{W+Gu4gg4`k$Q3xffK5Cz3+H0 zkISvcZgv3BT(PdX7-}ss)=?S`s1VM3zo4vmMg;)AxIAAvDVP6U&pRGDd;(PY@Ck4? 
zqndwfkLDMgFDcHKWZjkNX&kIVssn^9DLx@2kR#|1>L!Gi#y~3ihG>cTdV59X0C|mF z!wv@?3_0+y1I5_p9AL1f);2Z6{^0=gy%n^WRrJZb=A7vrV9Yi2G0;kJ-y}~(^8;P5 zWzLm%RW!hgNt}0|#hIf-niyxDptG=Zt~Xt2r{-6*k!(b(7hy#M%v&m&--4mBRdRyn zXPNTrakGA9jx88^n4TMb-fPY^=U#87^3FWWQ{J7o&4lt!-c{CdExa#H^M07^0QAJr z?qS}od=2Qec&JVu2nY2rAG~hA5i;{b;9}u;8_1h4i%GaT0UDWhstp_rF%wBTUZrq6 zk#6>c+c(zV$Kh=sFlv(PK16{5gLmAtDdP^QQN6L%gD!WT^f#L^_HN8pCZj&?0J*xM z$4z;R$eKFZNphyoYlAm+V}R$F+ptI?7X{`pP#?%0pD0LK;k^poFTP>I*7t zIYMrx5Jnvht;vXy$F9(v#Mkf}8f7(|=>mjQ*O{F=NK=BC!??X1$&(0A%)ymQ8&OY{ z)Pq8bCi=`^2wPNLI7vkMA%Cc7eb4|%~Ol9`N0C)udw|x+h440oIf;ws9NQ+x|(sH(7;NSs~D&));3`g z{YR7!2jqvqmn;=(P=at2{?XpM?K_v-cizl?GFNCHP}&Cy!9z;$(B0tRa&Yj@L?L)e z37(pF+%L;>rxr$V>x#4%TRY{}#FG71=Rf$D{EJ5xkKB1hjwaB(@*U-sxE2OQtJt(b zX?g~e&?+)*eT0dh$`#Y?k zbubV%={qQEAt!!t8^AZThglzM&m!6i%7c}Q8xS_+J_s9o3xrXAC68tUyy05KE!{a; zBUa#x(|9=%xFfkM^#92kbz@T>NA5rBO&Is43?7DbVno(tXj*4^b6?H&r>V;#ciVmZOMPWK~PKcQ@X*|ibCQrgs z8}W}35c|vzF!Uz?qP8loavZ-B2@$tFoB)kWgv%nJsmHnYwdF)prvHL^q&WZ}F2Z@x z6Khnu-gHXt99jwk&|I;$<;rVt7%hw6<(lS~o5&LInH1$Hi;s(ct8U~`wp?b>rhY``ho`gZ{(5N9)-+{q= zLk!9OwS?-G2P$~?XsrVk$79(Xh}=Z&1*MkDXKpl_F%C>E)ni**5o)SFM9RC({f3sC zi4mrS+{jyTs|fCQ)s2uh+8T!&W7L6ECT-{vlEx_TOT}`Bhd^;SPJr{EC)%#7m`fiw zLO4xzZ6<>X?nG7mr;i&Yo8e>gn|4N{1x?vmZO=6Au15Xq<^C#NXyvZI8PG}F^#j#P zO;!|y(a(8Eg#7@u8Z>byWh-M5sD*(eLF*J$q9b*!ZO9A)uGujQi0hsQDr#2li?1TE}9?;<+1J zmo_bLc@b_RTn+=!9NC+Q{E=gNCg3u*C_-5>Gem{SF|*n6?%m|b2(F>qvH(4>Y!Ha=@r>*(72^WK$W^d+`IztEr2)_$o^@ z$Fe$1rjne)Nyvkc_NpFDAZ$pihX%Q_5OkvaKct>@b9xauz7Kc@qTxm=(qdT94=P*x z<>(-|TOQ4qM^6!(`pZh)%k!RUYx`H_g;TIVShK(v+5QC&sF0c~Hg(<1-U-Q}L2xC8 z3berKSKv4e$6o;FZE!QbWth3GAC^pt?})yrbCATB5?P1_I`f7X9_t z>J&eB*ZwZ_K#R`F8B`_=phGE#3o!XD++YD3#0hm-48gjb6Ll?_R{w8_jshej3 zXs+0}xwtV}Y}#1d(5`IQS>!qtZa2vGuhBk` zb_nUefjl2x1PaxDMcaUE8-T;;_T9zyJz&u&`Rf@V1pxQ!8cGaCfZ#$U2Zo$fq_O0} zkeh1ZN*)ZcR4e?Y1w%fnwxLp-)UgumpzOYqg<)E+wZnxwC}V&xtdR8fDt%x1 zq!I<*B48JIOFM2^Z;jk?e)>F#t^|`bRo_zF_H4$GW6712kxhc1AK5=>b{N$z9l1DE{N^4-8W|=ee!U4FN2mVdy3HnRnrNpi6OOu~M 
z?=ijx?4X)AmRvz*7!ncQ57m_z5{2G`OAZorQZT_=NkQ1wQVaQDd8?UurY0M&0M=-Hxi&0Eh?dYW8pgD{y`hQ-ZTy8ybYZd^0U>9kWYq=cpJU{j zvG_NbvIhYn*T2M22ZDA0kDPloe)GKe2}F^$CERP_?j%i@ELPeE=28G|iuPUf;;%?~ zE|Oo7@?9jqsxKj#D!U@JU4AlmpYmTMzmkoiEkL&aD*y}DG#^o1jn{S;Tpfz5MW*Y201^u~RR910 literal 0 HcmV?d00001 diff --git a/template_automation/__pycache__/repository_provider.cpython-311.pyc b/template_automation/__pycache__/repository_provider.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d5722c8c4632e7bf60c897ac7b48e7481b8f4db6 GIT binary patch literal 6684 zcmbstOKcm*bx1CM%a1;!BvXp4)z1=BTZ-!>c48-uKaoF?mFQEXqKgtM?nv5(X{PSWG1FO#S2TwiPGj-&mY4f;ZzW_%c2Z z)_r-Q;LrFAflQzf%mfReOo+D&`EWtZh=oWd;vrsgn<)OrL z7wW*pI4%r0u>%+9xCr2))O(`O9woL)((%ggR%l7nRYPMtQi*8~RfRH1EgF<<$~h_( zG==8%IZ<5OR&}hE^Hfskrdp)BWNcF@rxglXv4z9fmJLbH=OxzJu`bQtR*gI5jq?)x zFUlKfsZiDpNiCG}v_Ol-ocJG{bJ}b9uHU$6#pMm%U~r}t4}%fFz~ypnS@$o~Yla{NaqJRy{1Jg{EgpTi1ed|pBY z#jE(f5x(?-?%{KujGqQ*Q1M@bDyIo3!48@b4bwoX&O(4b)IuNbpcjEY(#p}sDp?_2CZCdb|*bl zb#+6{tHzEr>*CKl%Lp1hFO|w0c{P`Q=Cy*b+;TZ@SP_NlIi_-@TVj)csWRBoZwnFY zg_*P=4Xvc+^m#+x(yf3~Nk^?vHd~YnG@G@=Y}T-)s1pNpC0G^R!faKNZ^NMW9^I+Rd2Hh3rA8G6}y##iy z&&#Z+9UGr+u9Mr0V+R>o4ckjSw3L3msL930Q zdphuyVP0Q1$1~OO%+Yve@1uh=hm*e_HH8ekRUuQGn0Y$=tzh2Bm=h1G6Az9i9_%gb z-#J(Vs`aDjm?=Dfx5^yD!;#C42CXX%>RDun+2r1!j#1pP2a#xEu zG!P7<5V&F|Xd+4+9HYmIEU0;UQ-e5FG%CYbfk?z_j`dSR#uxAh}L|tr~zCD1#=NL`3sc#HOiWlbTd)41E&p8lmQ{7ryN((Fj3@nLY z0Dv_lvkes@CELjwJ0+@TApmQSsKPL^IEy`kbOEOeHVKtM_;dtKi>ytYGecv}>u^m2 zekmP2F&;xU#}mh}G=$&`0trC~!8ihBKOqAf#Wn=!sX7MZrT}}SCQO;aRIPX5z<-c4 zXFf7}7puLCN4<-CxArGZVG-V{uvkl+Ie6CRVD6m7~PU-tGNWQ&@qwDy-C! zse=!n&X|`!F_X*Hp6tqRMw;PKDcq!YZrwwGNHjyh26wiOhh0iXYG zQ|az8xbZYS#;b%nyb87*pb0m;N>n1>L{E5(<5e-fsW`rqyieH{U7??ssczIalS;Z? 
z;@e6AN#;vszK?T4slL5{PmQgq1~`)IAf2@AEU3it8%R}o{Z@0^nBUNpo$uk}8-}#1 zm06BT8%!?dwz*%+Y7spvw=Ea9sBXn{PLXveETI7S%E+1wG8=U^@OUm5?rECigfW9t zxNJ-5$mwQ13lnHDYrd(ZvF#@a+P#X+U>gE#*AD@-Tl6JUxK!&KJQz68&DpzV-@R(z zy`#Q+dmrut<{rFN;a+X%wS)5EsyUZ2haOai9vlrlfFPe}#O+$@?BT$nZoaW%rdF$| z)uYtv-u(mMUxl|StaAL*hvwB^n5p$@YW*m+?%>x=VZACKgx9fR6t&HYF+|~W{C9{* z?Qtkf@TW{Een^l4@CD%u!54;4r2fsQ5_#;;fE{wWX(bNrJ=CxCQi%D@#760(A{A(4 z)7xy@3@QDO{a?bsU-RJ+WTIfv3FspUUpJnBGVoY9!4t#Lm2(ZgA>>tJ(Epi^{^PEs zGn_g_d-zDhAg$1ptn0|4GSbmouhSchKC6tvsAu6DgHM8Q7(SFDs*HE=_3_brX@A#| z0?Nc=A(L=5aXQsxhbEFdtxvZmj-R+$edQXa$3Hq#KarTfO02nAk956}^}HHvW|=2676ji z96JxGCS~g8PFC0RIZW*84<_k8_>MCGo{(M7F3Hw$WY-I|`u<9GeNby8^}7Ppkdr^V zeyG*sIN1$At$wV710C$bo~3kvy#ciBB7&bHxP;(M1aARI2Q5LT`OWXA(ouNHc^Kc0 zA@PO4W@NLKk!A*6U$413auV3|w*l-CyPu4v_l2WSsx~mZFMJ`^UYm8Q6K~@GSZ(rc z{FiFOQ-JQ-hadO(g(XYCOxg|+0z(}Y#u3Ai=;auwhHFzr{ zw;{Yj2IO=h$t@|=+!7CpkeOTt4)zX$cM)6#pkvoKZ)etVh|<|+zFZmaGWT{8)b?>0 z0BE3>gk$>xpOh8GUL4B_ID0{^v8+JdeHLRl3%R~cY%;CTbtXP%CLcgQ>^cA)dv44@ zL;;;4E2F2#@1Wm9URU-&Q+D<`v|AAz-tFf{EWp%FJmOK1w@4q=X8@_JeELd!0K;(k z*)0^LeI;VfO4cVLb(u&T#SQH2X9)28UXXK|$!QZk3w{cWl2X+U5j$^@M)J;l!gdJzGv#!h0L@y$@yBz(^<#;uD7aUQQ z^LPpJYnn48@Q-py;^t95?^zKy??AJ&B5pQ;71qcNa_BMjr@I#BZsxUOlAOR_!Eg%*jlpS@+_hc|GJ@Z@~Qy z-2&@@3n!%d;y;kCx%w#_*dEbX(_m_&%0yB3MAc4~6%z^f3a=P}wR1oZHEK`vSHhkl@q* z0{i-2<5v?FOku%}lgTT!$*Z-IF}o-3pSB5r=YuKRkCg!FP1r#!g~-sb9mbMKMkPCf zr6@T&ZpW|`Cj+S_JMY5*5(y-M_snSR;#+n&=#SY1!1H*Y?Z--h^bOcSEQLs7#2mSB z)IaBtiA0R==MFCIZ+~&tj?@`B_X{6%`uJD27hC&r0I$~{dwSC*@b_(k|9zMIF88-f z-);Z(EUz{}gBfUHP+%6xYZEP^B?cR!CF`PrI=Jj8)DH?HlA+Ps__Q4f!3YGvbL9`Uf1QI8Lbt+mM}u#z@#O81TQxCjkG@X=26>Vky)r zXc${aJLu)>5^!e^{|#o8sAX+|8HTNc#@O`+j^g2f+woGmmtUz^K6xX@(6_ncz!m9E z3C_KYyA1|-$CdDY#R`>n6uD@qIfmZsT*h!8)LCOh96IdA^ZTO-qEx7QW~&o4bT=?)K5FF599D?S?Z xz&%YTI7q+1)|X9Gzu%LAzPrQ2cV2MJ%T|1R#p8iR>JASRFF5AqDR{2Z{{p6Omwo^M literal 0 HcmV?d00001 diff --git a/template_automation/app.py b/template_automation/app.py new file mode 100644 index 00000000..75ad569c --- /dev/null +++ 
b/template_automation/app.py @@ -0,0 +1,705 @@ +"""AWS Lambda function for repository automation from CloudFormation Custom Resources. + +This module provides a Lambda function handler that automates the creation of new repositories +from a template repository when invoked as a CloudFormation Custom Resource. It validates input +from CloudFormation parameters, writes configuration files, and creates pull requests to set up +the new repository. +""" + +import os +import json +import logging +import time +from typing import Dict, Any, Optional +from urllib.request import urlopen, Request, HTTPError + +import boto3 +import requests +from pydantic import BaseModel, Field + +from .repository_provider import MergeRequestSettings, FileContent +from .github_provider import GitHubProvider +from .gitlab_provider import GitLabProvider + +# Set up enhanced logging with more detailed format +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s' +) +logger = logging.getLogger(__name__) + +# Also enable debug logging for our modules +logging.getLogger('template_automation').setLevel(logging.DEBUG) + +# Enable debug logging for requests library to see HTTP details +logging.getLogger('urllib3.connectionpool').setLevel(logging.DEBUG) +logging.getLogger('requests.packages.urllib3').setLevel(logging.DEBUG) + +VERIFY_SSL = os.environ.get("VERIFY_SSL", "true").lower() != "false" + +class CloudFormationResourceInput(BaseModel): + """Input validation model for CloudFormation Custom Resource parameters.""" + project_name: str = Field(..., description="Name for the new repository") + owning_team: Optional[str] = Field(default="tf-module-admins", description="Team that should own the repository") + + # Allow any additional parameters from CloudFormation + model_config = {"extra": "allow"} # Pydantic v2 syntax for allowing extra fields + + def to_template_settings(self) -> Dict[str, Any]: + """Convert 
CloudFormation parameters to template settings format.""" + # Extract all fields except the known top-level ones + exclude_fields = {'project_name', 'owning_team'} + + # Build attrs from all other fields + attrs = {} + tags = {} + + # Get all model fields including extra ones (Pydantic v2) + all_fields = self.dict() if hasattr(self, 'dict') else dict(self) + + for field_name, field_value in all_fields.items(): + if field_name not in exclude_fields: + # Handle tags specially if provided as dict + if field_name == 'tags' and isinstance(field_value, dict): + tags.update(field_value) + else: + attrs[field_name] = field_value + + return { + "attrs": attrs, + "tags": tags + } + +def log_api_call(method: str, url: str, headers: Dict = None, data: Any = None, response: requests.Response = None): + """Log detailed API call information for debugging.""" + logger.info(f"API Call: {method} {url}") + + # Log headers (without sensitive tokens) + if headers: + safe_headers = {k: v if k.lower() not in ['authorization', 'private-token'] + else f"[REDACTED - length: {len(v)}]" for k, v in headers.items()} + logger.info(f"Request headers: {json.dumps(safe_headers, indent=2)}") + + # Log request data/body (truncated if too long) + if data: + data_str = json.dumps(data, default=str) if isinstance(data, dict) else str(data) + if len(data_str) > 1000: + data_str = data_str[:1000] + "... [TRUNCATED]" + logger.info(f"Request data: {data_str}") + + # Log response details + if response: + logger.info(f"Response status: {response.status_code} {response.reason}") + logger.info(f"Response headers: {dict(response.headers)}") + + try: + response_text = response.text + if len(response_text) > 2000: + response_text = response_text[:2000] + "... 
[TRUNCATED]" + logger.info(f"Response body: {response_text}") + + # Try to parse as JSON for better formatting + if response.headers.get('content-type', '').startswith('application/json'): + try: + response_json = response.json() + logger.info(f"Response JSON (formatted): {json.dumps(response_json, indent=2)}") + except Exception: + pass # Already logged as text + except Exception as e: + logger.warning(f"Could not read response body: {str(e)}") + +def get_provider(): + """Get the appropriate repository provider based on environment configuration.""" + logger.info("=== PROVIDER INITIALIZATION ===") + logger.info("Determining repository provider from environment variables...") + + # Log which environment variables are set (without sensitive values) + env_check = { + 'GITHUB_API': 'GITHUB_API' in os.environ, + 'GITLAB_API': 'GITLAB_API' in os.environ, + 'GITHUB_TOKEN_SECRET_NAME': 'GITHUB_TOKEN_SECRET_NAME' in os.environ, + 'GITLAB_TOKEN_SECRET_NAME': 'GITLAB_TOKEN_SECRET_NAME' in os.environ, + 'GITHUB_ORG_NAME': 'GITHUB_ORG_NAME' in os.environ, + 'GITLAB_GROUP_NAME': 'GITLAB_GROUP_NAME' in os.environ, + 'TEMPLATE_REPO_NAME': 'TEMPLATE_REPO_NAME' in os.environ, + 'VERIFY_SSL': f"VERIFY_SSL={VERIFY_SSL}", + } + logger.info(f"Environment variables check: {json.dumps(env_check, indent=2)}") + + # Log actual environment values (non-sensitive) + if 'GITHUB_API' in os.environ: + logger.info(f"GitHub API URL: {os.environ['GITHUB_API']}") + if 'GITLAB_API' in os.environ: + logger.info(f"GitLab API URL: {os.environ['GITLAB_API']}") + if 'GITHUB_ORG_NAME' in os.environ: + logger.info(f"GitHub organization: {os.environ['GITHUB_ORG_NAME']}") + if 'GITLAB_GROUP_NAME' in os.environ: + logger.info(f"GitLab group: {os.environ['GITLAB_GROUP_NAME']}") + if 'TEMPLATE_REPO_NAME' in os.environ: + logger.info(f"Template repository: {os.environ['TEMPLATE_REPO_NAME']}") + + # Determine which provider to use based on environment variables + if "GITHUB_API" in os.environ: + 
logger.info("Selected provider: GitHub") + try: + token_secret = os.environ["GITHUB_TOKEN_SECRET_NAME"] + logger.info(f"Retrieving GitHub token from secret: {token_secret}") + + token = get_secret(token_secret) + logger.info(f"Successfully retrieved GitHub token (length: {len(token) if token else 0})") + + provider_config = { + 'api_base_url': os.environ["GITHUB_API"], + 'organization': os.environ["GITHUB_ORG_NAME"], + 'verify_ssl': VERIFY_SSL + } + logger.info(f"GitHub provider configuration: {json.dumps(provider_config, indent=2)}") + + provider = GitHubProvider( + api_base_url=os.environ["GITHUB_API"], + token=token, + organization=os.environ["GITHUB_ORG_NAME"], + verify_ssl=VERIFY_SSL + ) + logger.info("GitHub provider initialized successfully") + + # Test API connectivity - using a more appropriate endpoint for GitHub App tokens + logger.info("Testing GitHub API connectivity...") + # For GitHub App tokens, it's better to use an endpoint that works with both user and app tokens + test_url = f"{os.environ['GITHUB_API']}/repos/{os.environ['GITHUB_ORG_NAME']}" + try: + import requests + response = requests.get( + test_url, + headers={'Authorization': f'Bearer {token}'}, + verify=VERIFY_SSL, + timeout=10 + ) + logger.info(f"API test response: {response.status_code} {response.reason}") + if response.status_code == 200: + logger.info("GitHub API connection test successful") + org_info = response.json() + if isinstance(org_info, list) and len(org_info) > 0: + logger.info(f"Found {len(org_info)} repositories in organization: {os.environ['GITHUB_ORG_NAME']}") + else: + logger.info(f"Connected to GitHub API as expected") + elif response.status_code == 404: + # For GitHub App tokens, try an alternative endpoint + logger.info("First endpoint returned 404, trying alternative endpoint for GitHub App token...") + alt_test_url = f"{os.environ['GITHUB_API']}/app" + alt_response = requests.get( + alt_test_url, + headers={'Authorization': f'Bearer {token}'}, + verify=VERIFY_SSL, 
+ timeout=10 + ) + logger.info(f"Alternative API test response: {alt_response.status_code} {alt_response.reason}") + if alt_response.status_code == 200: + logger.info("GitHub App authentication successful") + else: + logger.warning(f"Alternative API test failed: {alt_response.text}") + else: + logger.warning(f"API test failed: {response.text}") + except Exception as e: + logger.warning(f"API connectivity test failed: {str(e)}") + + return provider + + except Exception as e: + logger.error(f"Failed to initialize GitHub provider: {str(e)}") + logger.error(f"Exception type: {type(e).__name__}") + import traceback + logger.error(f"Full traceback: {traceback.format_exc()}") + raise + + elif "GITLAB_API" in os.environ: + logger.info("Selected provider: GitLab") + try: + token_secret = os.environ["GITLAB_TOKEN_SECRET_NAME"] + logger.info(f"Retrieving GitLab token from secret: {token_secret}") + + token = get_secret(token_secret) + logger.info(f"Successfully retrieved GitLab token (length: {len(token) if token else 0})") + + provider_config = { + 'api_base_url': os.environ["GITLAB_API"], + 'organization': os.environ["GITLAB_GROUP_NAME"], + 'verify_ssl': VERIFY_SSL + } + logger.info(f"GitLab provider configuration: {json.dumps(provider_config, indent=2)}") + + provider = GitLabProvider( + api_base_url=os.environ["GITLAB_API"], + token=token, + organization=os.environ["GITLAB_GROUP_NAME"], + verify_ssl=VERIFY_SSL + ) + logger.info("GitLab provider initialized successfully") + + # Test API connectivity + logger.info("Testing GitLab API connectivity...") + test_url = f"{os.environ['GITLAB_API']}/user" + try: + import requests + response = requests.get( + test_url, + headers={'Private-Token': token}, + verify=VERIFY_SSL, + timeout=10 + ) + logger.info(f"API test response: {response.status_code} {response.reason}") + if response.status_code == 200: + user_info = response.json() + logger.info(f"Connected as user: {user_info.get('username', 'unknown')}") + else: + 
def get_secret(secret_name: str) -> str:
    """Get a secret from AWS Secrets Manager.

    Accepts either a plain-text secret or a JSON document. For JSON secrets
    the token is located by checking a 'token' key, then a single-key
    document, then common token field names, then the first value.

    Args:
        secret_name: Name or ARN of the secret in Secrets Manager.

    Returns:
        The secret token as a string.

    Raises:
        ValueError: If the response carries no 'SecretString' field.
        Exception: Any Secrets Manager client error is logged and re-raised.
    """
    logger.info(f"=== RETRIEVING SECRET: {secret_name} ===")

    # Log AWS region and other relevant info
    session = boto3.Session()
    region = session.region_name or os.environ.get('AWS_DEFAULT_REGION', 'us-east-1')
    logger.info(f"AWS region: {region}")
    # Fix: dropped pointless f-prefix on a placeholder-free string.
    logger.info("Using SSL verification for AWS: True (always enforced)")

    # Always use SSL verification for AWS services, regardless of VERIFY_SSL setting
    client = boto3.client('secretsmanager', use_ssl=True, verify=True)

    try:
        logger.info(f"Calling get_secret_value for: {secret_name}")
        response = client.get_secret_value(SecretId=secret_name)
        logger.info(f"Secret retrieval successful, response keys: {list(response.keys())}")

        if 'SecretString' not in response:
            logger.error("Secret response does not contain 'SecretString' field")
            logger.error(f"Available fields: {list(response.keys())}")
            raise ValueError("Secret value not found")

        secret_string = response['SecretString']
        logger.info(f"Retrieved secret string, length: {len(secret_string)}")

        # Handle different secret formats: JSON documents vs. plain text.
        try:
            logger.info("Attempting to parse secret as JSON...")
            secret_data = json.loads(secret_string)
            logger.info(f"Secret is valid JSON, type: {type(secret_data)}")
        except json.JSONDecodeError as e:
            # If not JSON, treat as plain text token
            logger.info(f"Secret is not JSON format (error: {str(e)}), treating as plain text")
            return secret_string.strip()

        if isinstance(secret_data, dict):
            logger.info(f"Secret JSON keys: {list(secret_data.keys())}")
            if 'token' in secret_data:
                logger.info("Found 'token' key in JSON secret")
                return secret_data['token']
            if len(secret_data) == 1:
                # If it's a dict with one key, return that value
                key, value = next(iter(secret_data.items()))
                logger.info(f"Using single key '{key}' from JSON secret")
                return value
            # Multiple keys, try common token field names
            for token_field in ['access_token', 'api_token', 'github_token', 'gitlab_token', 'pat']:
                if token_field in secret_data:
                    logger.info(f"Found '{token_field}' key in JSON secret")
                    return secret_data[token_field]
            # If no common token fields, return the first value
            key, value = next(iter(secret_data.items()))
            logger.warning(f"No standard token field found, using first key '{key}'")
            return value
        if isinstance(secret_data, str):
            logger.info("Secret JSON contains a string value")
            return secret_data
        logger.warning(f"Unexpected JSON secret format: {type(secret_data)}")
        return str(secret_data)

    except Exception as e:
        logger.error(f"Failed to get secret {secret_name}: {str(e)}")
        logger.error(f"Exception type: {type(e).__name__}")

        # Log additional AWS error details if available
        if hasattr(e, 'response'):
            logger.error(f"AWS error response: {e.response}")

        import traceback
        logger.error(f"Full traceback: {traceback.format_exc()}")
        raise

def send_cfn_response(event: dict, context, status: str, response_data: dict, physical_resource_id: str = None, reason: str = None):
    """Send response to CloudFormation for Custom Resource.

    Errors while PUT-ing the response are logged, never raised — the caller
    must not die on the response path.

    Args:
        event: CloudFormation Custom Resource event
        context: Lambda context
        status: SUCCESS or FAILED
        response_data: Data to return to CloudFormation
        physical_resource_id: Physical resource identifier
        reason: Reason for failure (if status is FAILED)
    """
    response_url = event.get('ResponseURL')
    if not response_url:
        logger.warning("No ResponseURL in event, skipping CloudFormation response")
        return

    # Build response body
    response_body = {
        'Status': status,
        'Reason': reason or f'See CloudWatch Log Stream: {context.log_stream_name}',
        'PhysicalResourceId': physical_resource_id or context.log_stream_name,
        'StackId': event.get('StackId'),
        'RequestId': event.get('RequestId'),
        'LogicalResourceId': event.get('LogicalResourceId'),
        'Data': response_data
    }

    json_response = json.dumps(response_body)
    logger.info(f"Sending CloudFormation response: {json_response}")

    try:
        # Fix: Content-Length must be the byte length of the encoded payload,
        # not the character count of the str — they differ for non-ASCII data.
        payload = json_response.encode('utf-8')
        headers = {
            'Content-Type': '',  # empty Content-Type — the pre-signed URL is signed without one
            'Content-Length': str(len(payload))
        }

        req = Request(response_url, data=payload, headers=headers, method='PUT')
        # Fix: bound the request so a hung endpoint cannot consume the whole
        # Lambda execution time.
        response = urlopen(req, timeout=30)
        logger.info(f"CloudFormation response sent successfully. Status: {response.status}")

    except HTTPError as e:
        logger.error(f"Failed to send CloudFormation response: {e}")
        logger.error(f"Response code: {e.code}, Reason: {e.reason}")
    except Exception as e:
        logger.error(f"Unexpected error sending CloudFormation response: {str(e)}")

def lambda_handler(event: dict, context) -> dict:
    """Process CloudFormation Custom Resource events to create new repositories from templates.

    This handler expects CloudFormation Custom Resource events carrying a
    RequestType (Create, Update, Delete), a ResponseURL, stack identifiers,
    and ResourceProperties with at minimum ProjectName and OwningTeam.

    Delete requests are acknowledged but repositories are never deleted.

    Args:
        event: CloudFormation Custom Resource event containing:
            RequestType (str): Type of CloudFormation request (Create, Update, Delete)
            ResourceProperties (dict): All CloudFormation parameters including
                ProjectName (str): Name for the new repository
                OwningTeam (str): Team that should own the repository
                ...additional CloudFormation parameters...
        context: AWS Lambda context object

    Returns:
        dict: statusCode/body pair; on success the body carries the repository
        URL and the pull/merge request URL.
    """
    request_id = getattr(context, 'aws_request_id', 'unknown')
    logger.info(f"[{request_id}] Lambda function started")
    logger.info(f"[{request_id}] Raw event: {json.dumps(event, default=str)}")

    # Extract request type and properties
    request_type = event.get('RequestType', 'Unknown')
    logger.info(f"[{request_id}] CloudFormation Request Type: {request_type}")

    # Log environment configuration (without sensitive data)
    env_vars = {
        'VERIFY_SSL': VERIFY_SSL,
        'TEMPLATE_REPO_NAME': os.environ.get("TEMPLATE_REPO_NAME", "NOT_SET"),
        'TEMPLATE_CONFIG_FILE': os.environ.get("TEMPLATE_CONFIG_FILE", "config.json"),
        'GITHUB_API': os.environ.get("GITHUB_API", "NOT_SET"),
        'GITLAB_API': os.environ.get("GITLAB_API", "NOT_SET"),
        'GITHUB_ORG_NAME': os.environ.get("GITHUB_ORG_NAME", "NOT_SET"),
        'GITLAB_GROUP_NAME': os.environ.get("GITLAB_GROUP_NAME", "NOT_SET"),
    }
    logger.info(f"[{request_id}] Environment configuration: {json.dumps(env_vars, indent=2)}")

    # Handle Delete requests - we don't delete repositories
    if request_type == 'Delete':
        logger.info(f"[{request_id}] Delete request received - repositories are not deleted automatically")
        send_cfn_response(
            event,
            context,
            'SUCCESS',
            {'Message': 'Repository not deleted - manual cleanup required'},
            physical_resource_id=event.get('PhysicalResourceId', 'none')
        )
        return {
            "statusCode": 200,
            "body": json.dumps({"message": "Delete request acknowledged"})
        }

    try:
        # Extract resource properties from CloudFormation event
        logger.info(f"[{request_id}] Parsing CloudFormation Custom Resource event...")

        if 'ResourceProperties' not in event:
            raise ValueError("Event missing 'ResourceProperties' field - not a valid CloudFormation Custom Resource event")

        resource_properties = event['ResourceProperties']

        # Remove ServiceToken as it's not a user parameter
        resource_params = {k: v for k, v in resource_properties.items() if k != 'ServiceToken'}
        logger.info(f"[{request_id}] Resource properties: {json.dumps(resource_params, default=str)}")

        # Normalize parameter names (CloudFormation uses PascalCase, we need snake_case)
        normalized_params = {}
        for key, value in resource_params.items():
            # Convert PascalCase to snake_case
            snake_key = ''.join(['_' + c.lower() if c.isupper() else c for c in key]).lstrip('_')
            normalized_params[snake_key] = value

        logger.info(f"[{request_id}] Normalized parameters: {json.dumps(normalized_params, default=str)}")

        # Validate input using Pydantic model
        logger.info(f"[{request_id}] Validating CloudFormation parameters...")
        cfn_input = CloudFormationResourceInput(**normalized_params)
        logger.info(f"[{request_id}] Input validation successful:")
        logger.info(f"[{request_id}] - project_name: {cfn_input.project_name}")
        logger.info(f"[{request_id}] - owning_team: {cfn_input.owning_team}")

        # Convert to template settings format
        template_settings = cfn_input.to_template_settings()
        logger.info(f"[{request_id}] - template_settings: {json.dumps(template_settings, default=str)}")

        # Get repository provider
        logger.info(f"[{request_id}] Initializing repository provider...")
        provider = get_provider()
        provider_type = provider.__class__.__name__
        logger.info(f"[{request_id}] Using provider: {provider_type}")
        logger.info(f"[{request_id}] Provider config: API={provider.api_base_url}, Org={provider.organization}, SSL={provider.verify_ssl}")

        # Get or create repository
        logger.info(f"[{request_id}] Getting/creating repository: {cfn_input.project_name}")
        try:
            project = provider.get_repository(cfn_input.project_name, create=True)
            logger.info(f"[{request_id}] Repository operation successful")
            logger.info(f"[{request_id}] Repository details: {json.dumps(project, default=str, indent=2)}")
        except Exception as e:
            logger.error(f"[{request_id}] Repository operation failed: {str(e)}")
            logger.error(f"[{request_id}] Exception type: {type(e).__name__}")
            import traceback
            logger.error(f"[{request_id}] Full traceback: {traceback.format_exc()}")
            raise

        # Add the team as admin to the repository (GitHub only)
        if cfn_input.owning_team and provider_type == "GitHubProvider":
            logger.info(f"[{request_id}] Adding team '{cfn_input.owning_team}' as admin to the repository")
            try:
                provider.set_team_permission(cfn_input.project_name, cfn_input.owning_team, permission="admin")
                logger.info(f"[{request_id}] Team '{cfn_input.owning_team}' added as admin successfully")
            except Exception as e:
                logger.error(f"[{request_id}] Failed to add team as admin: {str(e)}")
                logger.error(f"[{request_id}] Exception type: {type(e).__name__}")
                import traceback
                logger.error(f"[{request_id}] Full traceback: {traceback.format_exc()}")
                # Continue anyway, as team permissions are not critical
                logger.info(f"[{request_id}] Continuing despite team permission error")
        else:
            logger.info(f"[{request_id}] Skipping team assignment (no owning_team or not GitHub provider)")

        # Give newly created repositories a moment to initialize
        if project.get('created_at'):
            logger.info(f"[{request_id}] Checking if repository was recently created...")
            logger.info(f"[{request_id}] Repository created_at: {project.get('created_at')}")
            try:
                from datetime import datetime, timezone

                # Try to parse the creation time - different providers may use different formats
                created_at = project['created_at']
                now = datetime.now(timezone.utc)
                logger.info(f"[{request_id}] Current time: {now.isoformat()}")

                # Try dateutil parser first, fall back to simple ISO format
                try:
                    import dateutil.parser
                    created_time = dateutil.parser.parse(created_at)
                    logger.info(f"[{request_id}] Parsed creation time using dateutil: {created_time.isoformat()}")
                except ImportError:
                    # Fall back to basic ISO format parsing
                    if created_at.endswith('Z'):
                        created_at = created_at[:-1] + '+00:00'
                    created_time = datetime.fromisoformat(created_at)
                    logger.info(f"[{request_id}] Parsed creation time using fromisoformat: {created_time.isoformat()}")

                time_diff = (now - created_time).total_seconds()
                logger.info(f"[{request_id}] Time difference: {time_diff} seconds")

                if time_diff < 10:  # Repository was just created
                    logger.info(f"[{request_id}] Repository was just created, waiting 3 seconds for initialization...")
                    time.sleep(3)
                else:
                    logger.info(f"[{request_id}] Repository is not newly created, proceeding immediately")
            except Exception as e:
                logger.warning(f"[{request_id}] Could not parse creation time: {str(e)}")
                logger.warning(f"[{request_id}] Continuing without delay")
        else:
            logger.info(f"[{request_id}] No created_at field found in repository data")

        # Create branch for configuration first
        config_branch = "repo-init"
        logger.info(f"[{request_id}] Creating configuration branch: {config_branch}")
        try:
            provider.create_branch(cfn_input.project_name, config_branch)
            logger.info(f"[{request_id}] Branch {config_branch} created successfully")
        except Exception as e:
            logger.error(f"[{request_id}] Failed to create branch {config_branch}: {str(e)}")
            logger.error(f"[{request_id}] Exception type: {type(e).__name__}")
            # If we can't create a branch, try using main branch
            config_branch = "main"
            logger.info(f"[{request_id}] Falling back to {config_branch} branch")

        # Clone template contents to the config branch AFTER the branch is created
        template_repo = os.environ["TEMPLATE_REPO_NAME"]
        logger.info(f"[{request_id}] Cloning template contents from {template_repo} to {cfn_input.project_name} on branch {config_branch}")
        try:
            provider.clone_repository_contents(
                source_repo=template_repo,
                target_repo=cfn_input.project_name,
                target_branch=config_branch  # Explicitly specify the target branch
            )
            logger.info(f"[{request_id}] Template cloning completed successfully to {config_branch} branch")
        except Exception as e:
            logger.error(f"[{request_id}] Failed to clone template contents: {str(e)}")
            logger.error(f"[{request_id}] Exception type: {type(e).__name__}")
            import traceback
            logger.error(f"[{request_id}] Full traceback: {traceback.format_exc()}")
            logger.info(f"[{request_id}] Continuing with repository setup despite cloning failure")

        # Write configuration file
        config_file = os.environ.get("TEMPLATE_CONFIG_FILE", "config.json")
        config_content = json.dumps(template_settings, indent=2)
        logger.info(f"[{request_id}] Writing configuration file: {config_file}")
        logger.info(f"[{request_id}] Configuration content: {config_content}")
        logger.info(f"[{request_id}] Target branch: {config_branch}")

        try:
            file_result = provider.write_file(
                cfn_input.project_name,
                file=FileContent(
                    path=config_file,
                    content=config_content
                ),
                branch=config_branch,
                message="Add repository configuration from CloudFormation"
            )
            logger.info(f"[{request_id}] Configuration file written successfully")
            logger.info(f"[{request_id}] Write file result: {json.dumps(file_result, default=str)}")
        except Exception as e:
            logger.error(f"[{request_id}] Failed to write configuration file: {str(e)}")
            logger.error(f"[{request_id}] Exception type: {type(e).__name__}")
            import traceback
            logger.error(f"[{request_id}] Full traceback: {traceback.format_exc()}")
            raise

        # Create merge/pull request
        logger.info(f"[{request_id}] Creating merge/pull request from {config_branch} to main")
        try:
            mr_settings = MergeRequestSettings(
                title=f"Initialize {cfn_input.project_name} from CloudFormation",
                description=f"This pull request contains the initial repository configuration from CloudFormation.\n\nStack: {event.get('StackId', 'N/A')}\nLogical Resource: {event.get('LogicalResourceId', 'N/A')}",
                source_branch=config_branch
            )
            logger.info(f"[{request_id}] MR settings: title='{mr_settings.title}', source='{mr_settings.source_branch}', target='{mr_settings.target_branch}'")

            mr = provider.create_pull_request(cfn_input.project_name, settings=mr_settings)
            logger.info(f"[{request_id}] Merge/pull request created successfully")
            logger.info(f"[{request_id}] MR details: {json.dumps(mr, default=str, indent=2)}")
        except Exception as e:
            logger.error(f"[{request_id}] Failed to create merge/pull request: {str(e)}")
            logger.error(f"[{request_id}] Exception type: {type(e).__name__}")
            import traceback
            logger.error(f"[{request_id}] Full traceback: {traceback.format_exc()}")
            raise

        # Build response data
        response_data = {
            "RepositoryUrl": project["web_url"],
            "RepositoryName": cfn_input.project_name
        }

        # Use pull_request_url for GitHub and merge_request_url for GitLab
        if provider_type == "GitHubProvider":
            # Extract the HTML URL from the pull request response
            if mr and '_links' in mr and 'html' in mr['_links'] and 'href' in mr['_links']['html']:
                pr_url = mr['_links']['html']['href']
                logger.info(f"[{request_id}] Pull request URL from _links: {pr_url}")
                response_data["PullRequestUrl"] = pr_url
            elif mr and 'html_url' in mr:
                # Some GitHub API versions return html_url directly
                response_data["PullRequestUrl"] = mr['html_url']
                logger.info(f"[{request_id}] Pull request URL from html_url: {mr['html_url']}")
            else:
                logger.warning(f"[{request_id}] Could not extract PR URL from response: {json.dumps(mr, default=str)}")
                response_data["PullRequestUrl"] = "N/A"
        else:
            response_data["MergeRequestUrl"] = mr.get("web_url", "N/A")

        logger.info(f"[{request_id}] Operation completed successfully")
        logger.info(f"[{request_id}] Response data: {json.dumps(response_data, indent=2)}")

        # Send success response to CloudFormation
        physical_resource_id = f"{cfn_input.project_name}-repository"
        send_cfn_response(event, context, 'SUCCESS', response_data, physical_resource_id)

        return {
            "statusCode": 200,
            "body": json.dumps(response_data)
        }

    except Exception as e:
        logger.error(f"[{request_id}] Lambda function failed with error: {str(e)}")
        logger.error(f"[{request_id}] Exception type: {type(e).__name__}")
        import traceback
        logger.error(f"[{request_id}] Full traceback: {traceback.format_exc()}")

        # Send failure response to CloudFormation
        error_message = f"Failed to create repository: {str(e)}"
        send_cfn_response(event, context, 'FAILED', {}, reason=error_message)

        return {
            "statusCode": 500,
            "body": json.dumps({"error": str(e)})
        }
class GitLabClient:
    """A client for interacting with GitLab's API in the context of template automation.

    This class provides methods for template repository operations including:
    - Creating projects from templates
    - Managing project contents
    - Setting up group access
    - Configuring project settings

    Attributes:
        api_base_url (str): Base URL for the GitLab API
        token (str): GitLab authentication token
        group_name (str): GitLab group name
        commit_author_name (str): Name to use for automated commits
        commit_author_email (str): Email to use for automated commits
        verify_ssl (bool): Whether to verify SSL certificates

    Example:
        ```python
        client = GitLabClient(
            api_base_url="https://gitlab.example.com",
            token="glpat-...",
            group_name="my-group",
            commit_author_name="Template Bot",
            commit_author_email="bot@example.com"
        )

        project = client.create_project_from_template(
            template_project_name="template-service",
            new_project_name="new-service",
            visibility="private"
        )
        ```
    """

    def __init__(
        self,
        api_base_url: str,
        token: str,
        group_name: str,
        commit_author_name: str = "Template Automation",
        commit_author_email: str = "automation@example.com",
        verify_ssl: bool = True
    ):
        """Initialize a new GitLab client.

        Args:
            api_base_url: Base URL for the GitLab API
            token: GitLab authentication token
            group_name: GitLab group name
            commit_author_name: Name to use for automated commits
            commit_author_email: Email to use for automated commits
            verify_ssl: Whether to verify SSL certificates
        """
        self.api_base_url = api_base_url.rstrip('/')
        self.token = token
        self.group_name = group_name
        self.commit_author_name = commit_author_name
        self.commit_author_email = commit_author_email
        self.verify_ssl = verify_ssl

        # Create session for connection reuse
        self.session = requests.Session()
        self.session.headers.update({
            'Authorization': f'Bearer {token}',
            'Content-Type': 'application/json',
            'User-Agent': 'Template-Automation-Lambda'
        })

        # Cache group ID for API calls (resolved lazily on first use)
        self._group_id = None

        # Log initialization
        logger.info(f"Initialized GitLab client for group: {group_name} (SSL verify: {verify_ssl})")

    def _get_group_id(self) -> int:
        """Get the group ID for the configured group name.

        The lookup result is cached on the instance after the first call.

        Returns:
            Group ID as integer
        """
        if self._group_id is None:
            url = f"{self.api_base_url}/api/v4/groups/{urllib.parse.quote(self.group_name, safe='')}"
            group_data = self._request("GET", url)
            self._group_id = group_data["id"]
            logger.info(f"Found group ID {self._group_id} for group {self.group_name}")
        return self._group_id

    def _request(self, method: str, url: str, **kwargs) -> Dict[str, Any]:
        """Make a request to the GitLab API.

        Args:
            method: HTTP method (GET, POST, PATCH, PUT, DELETE)
            url: URL path or full URL to request
            **kwargs: Additional arguments to pass to requests

        Returns:
            Response data as a dictionary

        Raises:
            requests.exceptions.RequestException: On request errors
        """
        # Prepend base URL if not already an absolute URL
        if not url.startswith('http'):
            url = f"{self.api_base_url}{url}"

        # Set SSL verification
        kwargs['verify'] = self.verify_ssl

        # Log the request
        if 'json' in kwargs:
            logger.info(f"GitLab API {method} request to {url} with payload: {json.dumps(kwargs['json'])}")
        else:
            logger.info(f"GitLab API {method} request to {url}")

        # Make the request
        try:
            response = self.session.request(method, url, **kwargs)

            # Raise exception for error status codes
            if response.status_code >= 400:
                logger.error(f"GitLab API error: {response.status_code} - {response.text}")

            response.raise_for_status()

            # Return JSON data for non-empty responses
            if response.text:
                try:
                    return response.json()
                except json.JSONDecodeError:
                    logger.warning(f"Received non-JSON response: {response.text}")
                    return {"raw_content": response.text}
            return {}
        except requests.exceptions.RequestException as e:
            if hasattr(e, 'response') and e.response is not None:
                try:
                    # Try to parse JSON, but handle case where response is not JSON
                    if e.response.text.strip():
                        try:
                            error_body = e.response.json()
                            logger.error(f"GitLab API error details: {json.dumps(error_body)}")
                        except json.JSONDecodeError:
                            logger.error(f"GitLab API returned non-JSON error: {e.response.text}")
                    else:
                        logger.error(f"GitLab API returned empty error response with status code: {e.response.status_code}")
                except (ValueError, AttributeError):
                    logger.error(f"GitLab API error: Unable to parse response")
            logger.error(f"Request failed: {str(e)}")
            raise

    def get_project(
        self,
        project_name: str,
        create: bool = False,
        owning_group: Optional[str] = None
    ) -> Dict[str, Any]:
        """Get or create a GitLab project with optional group permissions.

        Args:
            project_name: The name of the project to retrieve or create
            create: Whether to create the project if it doesn't exist
            owning_group: The name of the GitLab group to grant developer access

        Returns:
            The project data
        """
        try:
            # Try to get the project
            encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
            url = f"/api/v4/projects/{encoded_path}"
            project = self._request("GET", url)
            logger.info(f"Found existing project: {project_name}")

            if owning_group:
                self.set_group_permission(project_name, owning_group, "developer")

            return project
        except requests.exceptions.HTTPError as e:
            # Fix: guard against a missing response object before reading status_code.
            if e.response is not None and e.response.status_code == 404 and create:
                logger.info(f"Creating project {project_name}")

                group_id = self._get_group_id()

                # Create a new project with minimal parameters
                url = "/api/v4/projects"
                try:
                    project = self._request("POST", url, json={
                        "name": project_name,
                        "namespace_id": group_id,
                        "visibility": "private",
                        "initialize_with_readme": True
                    })
                except requests.exceptions.HTTPError as create_error:
                    # Safe handling of response parsing
                    error_message = str(create_error)
                    logger.error(f"Failed to create project with error: {error_message}")

                    # If we got an HTML response instead of JSON (likely an error page)
                    # NOTE(review): the original check was corrupted in the patch
                    # (angle-bracketed text was stripped); reconstructed as an
                    # HTML-page detection — confirm against version control.
                    if "<html" in error_message.lower() or "<!doctype" in error_message.lower():
                        raise ValueError(
                            f"GitLab API returned an HTML error page while creating project {project_name}"
                        ) from create_error
                    raise

                if owning_group:
                    self.set_group_permission(project_name, owning_group, "developer")

                return project
            raise

    def get_branch(self, project_name: str, branch_name: str) -> Dict[str, Any]:
        # NOTE(review): this signature was destroyed in the corrupted patch and
        # has been reconstructed from the method body — confirm against VCS.
        """Get branch information.

        Args:
            project_name: Name of the project
            branch_name: Name of the branch

        Returns:
            Branch data
        """
        encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
        encoded_branch = urllib.parse.quote(branch_name, safe='')
        url = f"/api/v4/projects/{encoded_path}/repository/branches/{encoded_branch}"
        return self._request("GET", url)

    def get_default_branch(self, project_name: str) -> str:
        """Get the default branch name of a project.

        Args:
            project_name: Name of the project

        Returns:
            Default branch name (usually 'main' or 'master')
        """
        project = self.get_project(project_name)
        return project["default_branch"]

    def create_branch(self, project_name: str, branch_name: str, from_ref: str = "main") -> None:
        """Create a new branch in the project.

        Args:
            project_name: Name of the project
            branch_name: Name of the branch to create
            from_ref: Reference to create branch from
        """
        encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
        url = f"/api/v4/projects/{encoded_path}/repository/branches"
        self._request("POST", url, json={
            "branch": branch_name,
            "ref": from_ref
        })

        logger.info(f"Created branch {branch_name} in {project_name}")

    def write_file(
        self,
        project: Dict[str, Any],
        path: str,
        content: str,
        branch: str = "main",
        commit_message: Optional[str] = None
    ) -> Dict[str, Any]:
        """Write or update a file in a project.

        Probes for the file first: a successful GET means update (PUT), a 404
        means create (POST).

        Args:
            project: The project object
            path: Path where to create/update the file
            content: Content to write to the file
            branch: Branch to commit to
            commit_message: Commit message to use

        Returns:
            The created/updated file content
        """
        project_name = project["name"]

        # Try to get the existing file to check if it exists
        try:
            # Fix: result was bound to an unused local; the call is only a
            # existence probe, so discard the return value.
            self.get_file_contents(project_name, path, branch)
            # Update existing file
            encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
            encoded_file_path = urllib.parse.quote(path, safe='')
            url = f"/api/v4/projects/{encoded_path}/repository/files/{encoded_file_path}"
            result = self._request("PUT", url, json={
                "commit_message": commit_message or f"Update {path}",
                "content": content,
                "branch": branch,
                "author_name": self.commit_author_name,
                "author_email": self.commit_author_email
            })
            logger.info(f"Updated file {path} in project {project_name}")
            return result
        except requests.exceptions.HTTPError as e:
            if e.response is not None and e.response.status_code == 404:
                # Create new file
                encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
                encoded_file_path = urllib.parse.quote(path, safe='')
                url = f"/api/v4/projects/{encoded_path}/repository/files/{encoded_file_path}"
                result = self._request("POST", url, json={
                    "commit_message": commit_message or f"Create {path}",
                    "content": content,
                    "branch": branch,
                    "author_name": self.commit_author_name,
                    "author_email": self.commit_author_email
                })
                logger.info(f"Created new file {path} in project {project_name}")
                return result
            raise

    def get_file_contents(self, project_name: str, path: str, ref: str = "main") -> Dict[str, Any]:
        """Get the contents of a file in a project.

        Args:
            project_name: Name of the project
            path: Path to the file
            ref: Branch, tag, or commit SHA

        Returns:
            File data
        """
        encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
        encoded_file_path = urllib.parse.quote(path, safe='')
        url = f"/api/v4/projects/{encoded_path}/repository/files/{encoded_file_path}"
        params = {"ref": ref}
        return self._request("GET", url, params=params)

    def read_file(self, project: Dict[str, Any], path: str, ref: str = "main") -> str:
        """Read a file from a project.

        Args:
            project: The project object
            path: Path to the file to read
            ref: Git reference (branch, tag, commit) to read from

        Returns:
            The file contents as a string
        """
        project_name = project["name"]
        file = self.get_file_contents(project_name, path, ref)
        # GitLab returns file content base64-encoded
        content = base64.b64decode(file["content"]).decode("utf-8")
        return content

    def create_merge_request(
        self,
        project_name: str,
        title: str,
        description: str,
        source_branch: str,
        target_branch: str = "main"
    ) -> Dict[str, Any]:
        """Create a merge request in a project.

        Args:
            project_name: Name of the project
            title: Title of the merge request
            description: Description/body of the merge request
            source_branch: Branch containing the changes
            target_branch: Branch to merge into

        Returns:
            The created merge request object
        """
        encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
        url = f"/api/v4/projects/{encoded_path}/merge_requests"
        mr = self._request("POST", url, json={
            "title": title,
            "description": description,
            "source_branch": source_branch,
            "target_branch": target_branch,
            "remove_source_branch": True
        })

        logger.info(f"Created MR !{mr['iid']} in {project_name}: {title}")
        return mr

    def trigger_pipeline(
        self,
        project_name: str,
        ref: str,
        variables: Optional[Dict[str, str]] = None
    ) -> None:
        """Trigger a GitLab CI/CD pipeline.

        Args:
            project_name: Name of the project
            ref: Git reference to run the pipeline on
            variables: Pipeline variables
        """
        encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
        url = f"/api/v4/projects/{encoded_path}/pipeline"
        pipeline_variables = []

        if variables:
            pipeline_variables = [{"key": k, "value": v} for k, v in variables.items()]

        self._request("POST", url, json={
            "ref": ref,
            "variables": pipeline_variables
        })

        logger.info(f"Triggered pipeline in {project_name} on {ref}")

    def set_group_permission(self, project_name: str, group_name: str, access_level: str) -> None:
        """Set a group's permission on a project.

        Args:
            project_name: Name of the project
            group_name: Name of the group
            access_level: Access level ('guest', 'reporter', 'developer', 'maintainer', 'owner')
        """
        # Map access level names to GitLab access level numbers
        access_levels = {
            "guest": 10,
            "reporter": 20,
            "developer": 30,
            "maintainer": 40,
            "owner": 50
        }

        if access_level not in access_levels:
            raise ValueError(f"Invalid access level: {access_level}. Must be one of: {list(access_levels.keys())}")

        try:
            # Get the group ID
            group_url = f"/api/v4/groups/{urllib.parse.quote(group_name, safe='')}"
            group = self._request("GET", group_url)
            group_id = group["id"]
            logger.info(f"Found group: {group_name} with ID: {group_id}")

            # Share project with group
            encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
            url = f"/api/v4/projects/{encoded_path}/share"
            self._request("POST", url, json={
                "id": group_id,
                "group_access": access_levels[access_level]
            })
            logger.info(f"Set {group_name} permission on {project_name} to {access_level}")

        except requests.exceptions.HTTPError as e:
            if e.response is not None and e.response.status_code == 404:
                logger.warning(f"Group {group_name} not found, skipping permission assignment")
            elif e.response is not None and e.response.status_code == 409:
                logger.info(f"Group {group_name} already has access to {project_name}")
            else:
                logger.error(f"Failed to set group permission: {str(e)}")
                raise

    def update_project_topics(self, project_name: str, topics: List[str]) -> None:
        """Update the topics of a project.

        Args:
            project_name: Name of the project
            topics: List of topics to set
        """
        encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
        url = f"/api/v4/projects/{encoded_path}"

        self._request("PUT", url, json={"topics": topics})

        logger.info(f"Updated topics for {project_name}: {topics}")

    def create_project_from_template(
        self,
        template_project_name: str,
        new_project_name: str,
        visibility: str = "private",
        description: Optional[str] = None,
        topics: Optional[List[str]] = None
    ) -> Dict[str, Any]:
        """Create a new project from a template using GitLab's fork and template features.

        Args:
            template_project_name: Name of the template project
            new_project_name: Name for the new project
            visibility: Visibility level ("private", "internal", "public")
            description: Description for the new project
            topics: List of topics to add to the project

        Returns:
            The newly created project
        """
        group_id = self._get_group_id()

        # Create project from template by forking and then renaming
        encoded_template_path = urllib.parse.quote(f"{self.group_name}/{template_project_name}", safe='')
        fork_url = f"/api/v4/projects/{encoded_template_path}/fork"

        # Fork the template project
        fork_data = {
            "namespace_id": group_id,
            "name": new_project_name,
            "path": new_project_name
        }

        if description:
            fork_data["description"] = description

        new_project = self._request("POST", fork_url, json=fork_data)

        # Update visibility if needed
        if visibility != "private":
            encoded_path = urllib.parse.quote(f"{self.group_name}/{new_project_name}", safe='')
            url = f"/api/v4/projects/{encoded_path}"
            self._request("PUT", url, json={"visibility": visibility})

        # Add topics if provided
        if topics:
            self.update_project_topics(new_project_name, topics)

        logger.info(f"Created new project: {new_project_name} from template: {template_project_name}")
        return new_project

    def create_readme_file(self, project_name: str) -> Dict[str, Any]:
        """Create a README.md file in an empty project to initialize it.

        Args:
            project_name: Name of the project

        Returns:
            The created file content data
        """
        content = f"""# {project_name}

This project was created automatically by the template automation system.
"""

        encoded_path = urllib.parse.quote(f"{self.group_name}/{project_name}", safe='')
        url = f"/api/v4/projects/{encoded_path}/repository/files/README.md"
        result = self._request("POST", url, json={
            "commit_message": "Initialize project with README",
            "content": content,
            "branch": "main",
            "author_name": self.commit_author_name,
            "author_email": self.commit_author_email
        })

        logger.info(f"Created README.md in project {project_name} to initialize it")
        return result

    # NOTE(review): clone_repository_contents continues beyond this patch
    # chunk (it is cut off mid-statement in the source); it is intentionally
    # not reproduced here — restore it from version control.
self._request("GET", tree_url, params=params) + + if not tree_data: + break + + # Filter out directories, only keep files + files = [item for item in tree_data if item["type"] == "blob"] + all_files.extend(files) + + # Check if there are more pages + if len(tree_data) < params["per_page"]: + break + page += 1 + + logger.info(f"Found {len(all_files)} files to copy from {source_repo_name}") + + # Create actions for batch commit + actions = [] + + # Process each file + for file_item in all_files: + file_path = file_item["path"] + + # Skip .git directory and other metadata files if they exist + if file_path.startswith(".git/") or file_path == ".git": + continue + + # Get the file content + try: + file_content = self.get_file_contents(source_repo_name, file_path, source_branch) + content = base64.b64decode(file_content["content"]).decode("utf-8") + + # Add action to create this file + actions.append({ + "action": "create", + "file_path": file_path, + "content": content + }) + except Exception as file_err: + logger.error(f"Failed to get content for file {file_path}: {str(file_err)}") + # Continue with other files + + # Create all files in a single commit + if actions: + logger.info(f"Creating {len(actions)} files in {target_repo_name}") + encoded_target_path = urllib.parse.quote(f"{self.group_name}/{target_repo_name}", safe='') + commit_url = f"/api/v4/projects/{encoded_target_path}/repository/commits" + + commit_data = { + "branch": target_branch, + "commit_message": commit_message, + "actions": actions, + "author_name": self.commit_author_name, + "author_email": self.commit_author_email + } + + self._request("POST", commit_url, json=commit_data) + logger.info(f"Successfully cloned all files from {source_repo_name} to {target_repo_name} in a single commit") + else: + logger.warning(f"No files found to copy from {source_repo_name}") + + except requests.exceptions.HTTPError as project_err: + logger.error(f"Failed to get source project {source_repo_name}: 
{str(project_err)}") + raise ValueError(f"Source project {source_repo_name} does not exist") + except Exception as e: + logger.error(f"Unexpected error during project cloning: {str(e)}") + raise diff --git a/template_automation/github_provider.py b/template_automation/github_provider.py new file mode 100644 index 00000000..e1931653 --- /dev/null +++ b/template_automation/github_provider.py @@ -0,0 +1,643 @@ +"""GitHub repository provider implementation. + +This module provides the GitHub implementation of the repository provider interface. +""" + +import base64 +import json +import logging +import time +import urllib.parse +from typing import Dict, List, Optional, Any, Union + +import requests + +from .repository_provider import ( + RepositoryProvider, + RepositorySettings, + FileContent, + MergeRequestSettings +) + +logger = logging.getLogger(__name__) + +class GitHubProvider(RepositoryProvider): + """GitHub implementation of the repository provider interface.""" + + def __init__( + self, + api_base_url: str, + token: str, + organization: str, + commit_author_name: str = "Template Automation", + commit_author_email: str = "automation@example.com", + verify_ssl: bool = True + ): + """Initialize GitHub provider with required settings.""" + super().__init__( + api_base_url=api_base_url, + token=token, + organization=organization, + commit_author_name=commit_author_name, + commit_author_email=commit_author_email, + verify_ssl=verify_ssl + ) + self.session = requests.Session() + + # Use Bearer format for GitHub App tokens, and token format for personal access tokens + # GitHub App tokens usually start with "ghs_" or are longer JWT tokens + if token.startswith(('ghs_', 'ghu_', 'github_pat_')) or len(token) > 50: + logger.info("Using Bearer token format (for GitHub Apps or fine-grained PATs)") + auth_header = f'Bearer {token}' + else: + logger.info("Using token format (for classic PATs)") + auth_header = f'token {token}' + + self.session.headers.update({ + 
'Authorization': auth_header, + 'Accept': 'application/vnd.github.v3+json', + 'Content-Type': 'application/json', + 'User-Agent': 'Template-Automation-Lambda' + }) + + # Disable SSL verification warnings if needed + if not verify_ssl: + import urllib3 + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + + def _request(self, method: str, url: str, **kwargs) -> Dict[str, Any]: + """Make a request to the GitHub API. + + Args: + method: HTTP method (GET, POST, PUT, PATCH, DELETE) + url: URL path or full URL + **kwargs: Additional arguments to pass to requests + + Returns: + Response data as dictionary + + Raises: + requests.exceptions.RequestException: On request failure + """ + # Simple URL handling - just prepend base URL if needed + if not url.startswith('http'): + # For GitHub Enterprise, check if we need to add /api/v3 + if self.api_base_url and '.github.com' not in self.api_base_url: + # GitHub Enterprise API requires /api/v3 prefix + if not url.startswith('/api/v3'): + url = f"/api/v3{url}" if url.startswith('/') else f"/api/v3/{url}" + + url = f"{self.api_base_url}{url}" + + kwargs['verify'] = self.verify_ssl + + if 'json' in kwargs: + logger.info(f"GitHub API {method} request to {url} with payload: {json.dumps(kwargs['json'])}") + else: + logger.info(f"GitHub API {method} request to {url}") + + try: + response = self.session.request(method, url, **kwargs) + response.raise_for_status() + return response.json() if response.text else {} + except requests.exceptions.RequestException as e: + logger.error(f"GitHub API request failed: {str(e)}") + if hasattr(e, 'response') and e.response is not None: + status_code = e.response.status_code + logger.error(f"Error status code: {status_code}") + + # Check if response text is empty before attempting to parse JSON + if e.response.text and e.response.text.strip(): + try: + error_details = e.response.json() + logger.error(f"Error details: {json.dumps(error_details)}") + except json.JSONDecodeError: + 
logger.error(f"Non-JSON error response: {e.response.text}") + # For non-JSON responses, don't try to re-parse as JSON later + e.response._content = b'{"message": "Non-JSON response received"}' + else: + logger.error(f"Empty error response with status code: {status_code}") + # Provide a default JSON response to avoid JSONDecodeError + e.response._content = b'{"message": "Empty response"}' + raise + + def get_repository( + self, + name: str, + create: bool = False, + settings: Optional[RepositorySettings] = None + ) -> Dict[str, Any]: + """Get or create a repository. + + Args: + name: Repository name + create: Whether to create if it doesn't exist + settings: Repository settings if creating new + + Returns: + Repository data + + Raises: + ValueError: If repository doesn't exist and create=False + """ + try: + logger.info(f"Checking if repository {name} exists in {self.organization}") + repo_data = self._request('GET', f'/repos/{self.organization}/{name}') + # Add web_url field for compatibility with other providers + repo_data['web_url'] = self.get_repository_url(name) + return repo_data + except requests.exceptions.RequestException as e: + # Check if this is a 404 error (repository not found) + is_404 = (hasattr(e, 'response') and + e.response is not None and + e.response.status_code == 404) + + if is_404 and create: + logger.info(f"Repository {name} not found, creating new repository") + if not settings: + settings = RepositorySettings() + + # Set up repository creation data + create_data = { + 'name': name, + 'private': settings.visibility == 'private', + 'auto_init': True, # Ensure GitHub creates a default branch with a README + 'visibility': settings.visibility or 'private' + } + if settings.description: + create_data['description'] = settings.description + + logger.info(f"Creating repository with data: {json.dumps(create_data)}") + + try: + # Try to create in the organization + try: + logger.info(f"Attempting to create repository in organization: 
{self.organization}") + repo = self._request('POST', f'/orgs/{self.organization}/repos', json=create_data) + logger.info(f"Repository {name} created successfully in organization {self.organization}") + + # Wait for repository creation to complete + max_retries = 10 # Increase the number of retries + for i in range(max_retries): + try: + # Try to get the repository to confirm it's available + repo_data = self._request('GET', f'/repos/{self.organization}/{name}') + + # Check for default branch - first try the default_branch from repo data + default_branch = repo_data.get('default_branch', 'main') + try: + # Try to get the default branch + branch_data = self.get_branch(name, default_branch) + logger.info(f"Default branch '{default_branch}' found in repository {name}") + + # Since we have a valid branch, add web_url and return + repo_data['web_url'] = self.get_repository_url(name) + return repo_data + except requests.exceptions.RequestException: + # Try alternate branch names if the default wasn't found + alternate_branches = ['main', 'master'] + branch_found = False + + for alt_branch in alternate_branches: + if alt_branch != default_branch: # Skip if already tried + try: + branch_data = self.get_branch(name, alt_branch) + logger.info(f"Alternate branch '{alt_branch}' found in repository {name}") + # Update repo data with correct branch info + repo_data['default_branch'] = alt_branch + repo_data['web_url'] = self.get_repository_url(name) + branch_found = True + return repo_data + except requests.exceptions.RequestException: + logger.info(f"Alternate branch '{alt_branch}' not found in repository {name}") + continue + + # If we get here, no branch was found + if i < max_retries - 1: + logger.info(f"Waiting for repository initialization to complete (attempt {i+1}/{max_retries})") + time.sleep(2) # Longer sleep to allow GitHub to initialize the repo + else: + logger.warning(f"Repository {name} created but no default branch found after {max_retries} attempts.") + 
logger.warning("Repository may not be properly initialized. Will attempt to create a default branch.") + + # As a last resort, try to create a default branch with a README + try: + # First create a file to ensure there's content + self.write_file( + name, + FileContent( + path="README.md", + content=f"# {name}\n\nRepository created by template automation.", + encoding="utf-8" + ), + branch="main", # Attempt to use main as the default branch + message=f"Initial commit for {name}" + ) + logger.info(f"Created README.md in repository {name}") + + # Try once more to get the repository with the newly created branch + repo_data = self._request('GET', f'/repos/{self.organization}/{name}') + repo_data['web_url'] = self.get_repository_url(name) + return repo_data + except Exception as create_file_err: + logger.error(f"Failed to create initial file in repository: {str(create_file_err)}") + # Continue with what we have + repo_data['web_url'] = self.get_repository_url(name) + return repo_data + except requests.exceptions.RequestException: + if i < max_retries - 1: + logger.info(f"Waiting for repository creation to complete (attempt {i+1}/{max_retries})") + time.sleep(2) # Increased sleep time to allow GitHub to complete the operation + else: + logger.warning("Repository creation may not be complete, continuing anyway") + repo['web_url'] = self.get_repository_url(name) + return repo + + repo['web_url'] = self.get_repository_url(name) + return repo + except requests.exceptions.RequestException as org_err: + # If org creation fails, try creating in user's account + logger.warning(f"Failed to create repo in organization: {str(org_err)}") + logger.warning("Creating repository might require additional permissions") + + # Retry with the original error + raise org_err + + # Set topics if provided + if settings.topics: + try: + logger.info(f"Setting repository topics: {settings.topics}") + self._request('PUT', f'/repos/{self.organization}/{name}/topics', + json={'names': 
settings.topics}) + except Exception as topic_err: + logger.warning(f"Failed to set repository topics: {str(topic_err)}") + logger.warning("Continuing without topics") + + logger.info(f"Successfully created repository {name}") + return repo + except Exception as create_err: + logger.error(f"Failed to create repository {name}: {str(create_err)}") + if hasattr(create_err, 'response') and create_err.response is not None: + logger.error(f"Status code: {create_err.response.status_code}") + logger.error(f"Response headers: {dict(create_err.response.headers)}") + try: + if create_err.response.text: + if create_err.response.headers.get('content-type', '').startswith('application/json'): + try: + error_data = create_err.response.json() + logger.error(f"Response JSON: {json.dumps(error_data)}") + + # Check for specific error messages + if 'message' in error_data: + error_msg = error_data['message'] + if 'exists' in error_msg.lower(): + logger.warning(f"Repository {name} already exists, trying to retrieve it") + # Repository exists but we couldn't access it earlier - try getting it again + try: + return self._request('GET', f'/repos/{self.organization}/{name}') + except Exception as retry_err: + logger.error(f"Failed to retrieve existing repository: {str(retry_err)}") + except json.JSONDecodeError: + logger.error(f"Could not parse error response as JSON: {create_err.response.text}") + else: + logger.error(f"Response body: {create_err.response.text}") + except Exception as parse_err: + logger.error(f"Error parsing error response: {str(parse_err)}") + + raise ValueError(f"Failed to create repository {name}: {str(create_err)}") + elif is_404: + raise ValueError(f"Repository {name} not found") + else: + # Re-raise other errors + logger.error(f"Failed to get repository {name}: {str(e)}") + raise + + def get_branch(self, repo_name: str, branch: str) -> Dict[str, Any]: + """Get a branch from a repository. 
+ + Args: + repo_name: Repository name + branch: Branch name + + Returns: + Branch data + """ + return self._request('GET', f'/repos/{self.organization}/{repo_name}/branches/{branch}') + + def create_branch( + self, + repo_name: str, + branch: str, + from_branch: str = "main" + ) -> None: + """Create a new branch in a repository. + + Args: + repo_name: Repository name + branch: New branch name + from_branch: Base branch name + """ + # Get the SHA of the base branch + base = self.get_branch(repo_name, from_branch) + sha = base['commit']['sha'] + + # Create the new branch + self._request('POST', f'/repos/{self.organization}/{repo_name}/git/refs', json={ + 'ref': f'refs/heads/{branch}', + 'sha': sha + }) + logger.info(f"Created branch {branch} in {repo_name} from {from_branch}") + + def write_file( + self, + repo_name: str, + file: FileContent, + branch: str = "main", + message: Optional[str] = None + ) -> Dict[str, Any]: + """Write or update a file in a repository. + + Args: + repo_name: Repository name + file: File content and metadata + branch: Branch to write to + message: Commit message + + Returns: + Updated file data + """ + url = f'/repos/{self.organization}/{repo_name}/contents/{file.path}' + + # Check if file exists + try: + existing = self._request('GET', url, params={'ref': branch}) + method = 'PUT' + data = {'sha': existing['sha']} + except requests.exceptions.RequestException as e: + # Check if this is a 404 error (file not found) + is_404 = (hasattr(e, 'response') and + e.response is not None and + e.response.status_code == 404) + + if is_404: + method = 'PUT' if branch != 'main' else 'POST' + data = {} + else: + # Re-raise other errors + raise + + # Prepare commit data + data.update({ + 'message': message or f"{'Update' if method == 'PUT' else 'Create'} {file.path}", + 'branch': branch + }) + + # Handle content encoding based on type + if isinstance(file.content, bytes): + # Binary content already provided as bytes + data['content'] = 
base64.b64encode(file.content).decode('ascii') + else: + # Text content provided as string + data['content'] = base64.b64encode(file.content.encode(file.encoding)).decode('ascii') + + return self._request(method, url, json=data) + + def create_pull_request( + self, + repo_name: str, + settings: MergeRequestSettings + ) -> Dict[str, Any]: + """Create a pull request. + + Args: + repo_name: Repository name + settings: Pull request settings + + Returns: + Created pull request data + """ + return self._request('POST', f'/repos/{self.organization}/{repo_name}/pulls', json={ + 'title': settings.title, + 'body': settings.description, + 'head': settings.source_branch, + 'base': settings.target_branch + }) + + def clone_repository_contents( + self, + source_repo: str, + target_repo: str, + source_branch: str = "main", + target_branch: str = "main", + message: str = "Initial project setup from template" + ) -> None: + """Clone contents from one repository to another. + + Args: + source_repo: Source repository name + target_repo: Target repository name + source_branch: Source branch name + target_branch: Target branch name + message: Commit message + """ + # Get the source repository tree + try: + # First, check if source repository exists + try: + logger.info(f"Checking if source repository {source_repo} exists in {self.organization}") + source_repo_data = self._request('GET', f'/repos/{self.organization}/{source_repo}') + logger.info(f"Source repository {source_repo} found with default branch: {source_repo_data.get('default_branch', 'main')}") + + # If source_branch is not the default branch, verify it exists + default_branch = source_repo_data.get('default_branch', 'main') + if source_branch != default_branch: + try: + logger.info(f"Checking if branch {source_branch} exists in source repository {source_repo}") + self.get_branch(source_repo, source_branch) + except requests.exceptions.RequestException: + # If requested branch doesn't exist, try the default branch instead + 
logger.warning(f"Branch {source_branch} not found in source repository, falling back to default branch {default_branch}") + source_branch = default_branch + except requests.exceptions.RequestException as e: + if hasattr(e, 'response') and e.response is not None and e.response.status_code == 404: + logger.error(f"Source repository {source_repo} not found in organization {self.organization}") + logger.error("Check that the repository exists and the token has access to it") + raise ValueError(f"Source repository {source_repo} not found: {str(e)}") + else: + logger.error(f"Error checking source repository {source_repo}: {str(e)}") + raise + + logger.info(f"Getting repository tree for {source_repo} on branch {source_branch}") + source = self._request('GET', + f'/repos/{self.organization}/{source_repo}/git/trees/{source_branch}', + params={'recursive': 1}) + + # Check if we got a truncated response + if source.get('truncated', False): + logger.warning(f"Repository tree for {source_repo} is truncated. 
Some files may not be copied.") + + # Get the number of files to be processed + files = [item for item in source.get('tree', []) if item.get('type') == 'blob'] + logger.info(f"Found {len(files)} files to copy from {source_repo} to {target_repo}") + + if len(files) == 0: + logger.warning(f"No files found in source repository {source_repo} on branch {source_branch}") + logger.warning("Check that the branch contains files and the token has access to them") + return + + # Get the latest commit on the target branch to use as base + try: + target_branch_data = self.get_branch(target_repo, target_branch) + base_tree = target_branch_data['commit']['sha'] + logger.info(f"Using base tree {base_tree} from target repository") + except Exception as e: + logger.error(f"Failed to get base tree: {str(e)}") + logger.warning("Will attempt to create files without base tree") + base_tree = None + + # Create a batch of blobs for all files + blobs = [] + file_count = len(files) + success_count = 0 + skipped_count = 0 + + logger.info(f"Creating blobs for {file_count} files") + for index, item in enumerate(files): + if item['path'].startswith('.git/'): + logger.debug(f"Skipping git file: {item['path']}") + skipped_count += 1 + continue + + try: + # Get file content + if (index + 1) % 10 == 0 or index + 1 == file_count: + logger.info(f"Processing file {index + 1}/{file_count} - {item['path']}") + else: + logger.debug(f"Processing file {index + 1}/{file_count} - {item['path']}") + + blob = self._request('GET', + f'/repos/{self.organization}/{source_repo}/git/blobs/{item["sha"]}') + + # Check if content is available + if 'content' not in blob: + logger.warning(f"No content found for {item['path']}, skipping") + skipped_count += 1 + continue + + # For GitHub API batch operations, we can use the content directly + blobs.append({ + 'path': item['path'], + 'mode': '100644', # Regular file + 'type': 'blob', + 'content': base64.b64decode(blob['content']).decode('utf-8', errors='replace') + }) + + 
success_count += 1 + except Exception as e: + logger.error(f"Failed to process file {item['path']}: {str(e)}") + skipped_count += 1 + continue + + logger.info(f"Successfully created {success_count} blobs ({skipped_count} files skipped)") + + if not blobs: + logger.warning("No files to commit, skipping commit creation") + return + + # Create a new tree with all blobs + logger.info("Creating tree with all files") + tree_data = { + 'base_tree': base_tree, + 'tree': blobs + } + + tree_response = self._request('POST', + f'/repos/{self.organization}/{target_repo}/git/trees', + json=tree_data) + + logger.info(f"Tree created with SHA: {tree_response['sha']}") + + # Create a commit with the new tree + logger.info("Creating commit with all files") + commit_data = { + 'message': message, + 'tree': tree_response['sha'], + 'parents': [base_tree] if base_tree else [] + } + + commit_response = self._request('POST', + f'/repos/{self.organization}/{target_repo}/git/commits', + json=commit_data) + + logger.info(f"Commit created with SHA: {commit_response['sha']}") + + # Update branch reference to point to new commit + logger.info(f"Updating branch {target_branch} reference to new commit") + ref_data = { + 'sha': commit_response['sha'], + 'force': True + } + + ref_response = self._request('PATCH', + f'/repos/{self.organization}/{target_repo}/git/refs/heads/{target_branch}', + json=ref_data) + + logger.info(f"Branch {target_branch} updated to commit {commit_response['sha']}") + logger.info(f"Successfully copied {success_count} of {file_count} files ({skipped_count} skipped) from {source_repo} to {target_repo} in a single commit") + + except Exception as e: + logger.error(f"Error cloning repository contents: {str(e)}") + raise + + def get_repository_url(self, repo_name: str) -> str: + """Get the web interface URL for a repository. 
+ + Args: + repo_name: Repository name + + Returns: + Web interface URL for the repository + """ + # For GitHub Enterprise, we need a different URL format than API URLs + return f"{self.api_base_url}/{self.organization}/{repo_name}" + + def set_team_permission( + self, + repo_name: str, + team_name: str, + permission: str = "admin" + ) -> Dict[str, Any]: + """Set team permissions for a repository. + + Args: + repo_name: Repository name + team_name: Team name (slug) + permission: Permission level (pull, push, admin, maintain, triage) + + Returns: + Response data + """ + logger.info(f"Setting {permission} permissions for team {team_name} on repository {repo_name}") + try: + return self._request('PUT', + f'/orgs/{self.organization}/teams/{team_name}/repos/{self.organization}/{repo_name}', + json={'permission': permission}) + except requests.exceptions.RequestException as e: + logger.error(f"Failed to set team permissions: {str(e)}") + if hasattr(e, 'response') and e.response is not None: + status_code = e.response.status_code + if status_code == 404: + logger.error(f"Team {team_name} not found or does not have access to repository") + elif status_code == 403: + logger.error("Insufficient permissions to set team access") + else: + logger.error(f"Error setting team permissions: status code {status_code}") + + try: + error_details = e.response.json() + logger.error(f"Error details: {json.dumps(error_details)}") + except Exception: + logger.error(f"Error response: {e.response.text}") + + # Return empty dict on failure rather than raising an exception + return {} \ No newline at end of file diff --git a/template_automation/gitlab_client.py b/template_automation/gitlab_client.py new file mode 100644 index 00000000..dd747772 --- /dev/null +++ b/template_automation/gitlab_client.py @@ -0,0 +1,634 @@ +"""GitLab client module for template automation. 
+ +This module provides the GitLabClient class which handles all interactions with the GitLab API +for template repository automation using the requests library directly. +""" + +import base64 +import json +import logging +import time +import urllib.parse +from typing import List, Optional, Dict, Any, Union + +import requests + +logger = logging.getLogger(__name__) + +class GitLabClient: + """A client for interacting with GitLab's API in the context of template automation. + + This class provides methods for template repository operations including: + - Creating projects from templates + - Managing project contents + - Setting up group access + - Configuring project settings + + Attributes: + api_base_url (str): Base URL for the GitLab API + token (str): GitLab authentication token + group_name (str): GitLab group name + commit_author_name (str): Name to use for automated commits + commit_author_email (str): Email to use for automated commits + verify_ssl (bool): Whether to verify SSL certificates + + Example: + ```python + client = GitLabClient( + api_base_url="https://gitlab.example.com", + token="glpat-...", + group_name="my-group", + commit_author_name="Template Bot", + commit_author_email="bot@example.com" + ) + + project = client.create_project_from_template( + template_project_name="template-service", + new_project_name="new-service", + visibility="private" + ) + ``` + """ + + def __init__( + self, + api_base_url: str, + token: str, + group_name: str, + commit_author_name: str = "Template Automation", + commit_author_email: str = "automation@example.com", + verify_ssl: bool = True + ): + """Initialize a new GitLab client. 
+ + Args: + api_base_url: Base URL for the GitLab API + token: GitLab authentication token + group_name: GitLab group name + commit_author_name: Name to use for automated commits + commit_author_email: Email to use for automated commits + verify_ssl: Whether to verify SSL certificates + """ + self.api_base_url = api_base_url.rstrip('/') + self.token = token + self.group_name = group_name + self.commit_author_name = commit_author_name + self.commit_author_email = commit_author_email + self.verify_ssl = verify_ssl + + # Create session for connection reuse + self.session = requests.Session() + self.session.headers.update({ + 'Authorization': f'Bearer {token}', + 'Content-Type': 'application/json', + 'User-Agent': 'Template-Automation-Lambda' + }) + + # Get group ID for API calls + self.group_id = self._get_group_id(group_name) + + # Log initialization + logger.info(f"Initialized GitLab client for group: {group_name} (SSL verify: {verify_ssl})") + + def _get_group_id(self, group_name: str) -> int: + """Get the GitLab group ID from the group name. + + Args: + group_name: Name of the GitLab group + + Returns: + The group ID + """ + url = f"{self.api_base_url}/api/v4/groups" + params = {"search": group_name} + response = self._request("GET", url, params=params) + + for group in response: + if group["name"] == group_name or group["path"] == group_name: + return group["id"] + + raise ValueError(f"Group '{group_name}' not found") + + def _request(self, method: str, url: str, **kwargs) -> Dict[str, Any]: + """Make a request to the GitLab API. 
+ + Args: + method: HTTP method (GET, POST, PATCH, PUT, DELETE) + url: URL path or full URL to request + **kwargs: Additional arguments to pass to requests + + Returns: + Response data as a dictionary + + Raises: + requests.exceptions.RequestException: On request errors + """ + # Prepend base URL if not already an absolute URL + if not url.startswith('http'): + url = f"{self.api_base_url}{url}" + + # Set SSL verification + kwargs['verify'] = self.verify_ssl + + # Log the request + if 'json' in kwargs: + logger.info(f"GitLab API {method} request to {url} with payload: {json.dumps(kwargs['json'])}") + else: + logger.info(f"GitLab API {method} request to {url}") + + # Make the request + try: + response = self.session.request(method, url, **kwargs) + + # Raise exception for error status codes + if response.status_code >= 400: + logger.error(f"GitLab API error: {response.status_code} - {response.text}") + + response.raise_for_status() + + # Return JSON data for non-empty responses + if response.text: + try: + return response.json() + except json.JSONDecodeError: + logger.warning(f"Received non-JSON response: {response.text}") + return {"raw_content": response.text} + return {} + except requests.exceptions.RequestException as e: + if hasattr(e, 'response') and e.response is not None: + try: + # Try to parse JSON, but handle case where response is not JSON + if e.response.text.strip(): + try: + error_body = e.response.json() + logger.error(f"GitLab API error details: {json.dumps(error_body)}") + except json.JSONDecodeError: + logger.error(f"GitLab API returned non-JSON error: {e.response.text}") + else: + logger.error(f"GitLab API returned empty error response with status code: {e.response.status_code}") + except (ValueError, AttributeError): + logger.error(f"GitLab API error: Unable to parse response") + logger.error(f"Request failed: {str(e)}") + raise + + def get_project( + self, + project_name: str, + create: bool = False, + owning_group: Optional[str] = None + ) -> 
Dict[str, Any]: + """Get or create a GitLab project with optional group permissions. + + Args: + project_name: The name of the project to retrieve or create + create: Whether to create the project if it doesn't exist + owning_group: The name of the GitLab group to grant developer access + + Returns: + The project data + """ + try: + # Try to get the project + url = f"/api/v4/projects/{self.group_name}%2F{project_name}" + project = self._request("GET", url) + logger.info(f"Found existing project: {project_name}") + + if owning_group: + self.set_group_permission(project_name, owning_group, "developer") + + return project + except requests.exceptions.HTTPError as e: + if e.response.status_code == 404 and create: + logger.info(f"Creating project {project_name}") + + # Create a new project with minimal parameters + url = f"/api/v4/projects" + try: + # Try with minimal parameters first + project = self._request("POST", url, json={ + "name": project_name, + "path": project_name, + "namespace_id": self.group_id, + "visibility": "private", + "initialize_with_readme": True + }) + except requests.exceptions.HTTPError as create_error: + # Safe handling of response parsing + error_message = str(create_error) + logger.error(f"Failed to create project with error: {error_message}") + + # If we got an HTML response instead of JSON (likely an error page) + if "" in error_message or " Dict[str, Any]: + """Get branch information. + + Args: + project_name: Name of the project + branch_name: Name of the branch + + Returns: + Branch data + """ + url = f"/api/v4/projects/{self.group_name}%2F{project_name}/repository/branches/{branch_name}" + return self._request("GET", url) + + def get_default_branch(self, project_name: str) -> str: + """Get the default branch name of a project. 
+ + Args: + project_name: Name of the project + + Returns: + Default branch name (usually 'main' or 'master') + """ + project = self.get_project(project_name) + return project["default_branch"] + + def create_branch(self, project_name: str, branch_name: str, from_ref: str = "main") -> None: + """Create a new branch in the project. + + Args: + project_name: Name of the project + branch_name: Name of the branch to create + from_ref: Reference to create branch from + """ + url = f"/api/v4/projects/{self.group_name}%2F{project_name}/repository/branches" + self._request("POST", url, json={ + "branch": branch_name, + "ref": from_ref + }) + + logger.info(f"Created branch {branch_name} in {project_name}") + + def write_file( + self, + project: Dict[str, Any], + path: str, + content: str, + branch: str = "main", + commit_message: Optional[str] = None + ) -> Dict[str, Any]: + """Write or update a file in a project. + + Args: + project: The project object + path: Path where to create/update the file + content: Content to write to the file + branch: Branch to commit to + commit_message: Commit message to use + + Returns: + The created/updated file content + """ + project_name = project["name"] + content_base64 = base64.b64encode(content.encode("utf-8")).decode("utf-8") + + # Try to get the existing file to check if it exists + try: + file = self.get_file_contents(project_name, path, branch) + # Update existing file + url = f"/api/v4/projects/{self.group_name}%2F{project_name}/repository/files/{urllib.parse.quote(path, safe='')}" + result = self._request("PUT", url, json={ + "branch": branch, + "content": content_base64, + "commit_message": commit_message or f"Update {path}", + "encoding": "base64", + "author_name": self.commit_author_name, + "author_email": self.commit_author_email + }) + logger.info(f"Updated file {path} in project {project_name}") + return result + except requests.exceptions.HTTPError as e: + if e.response.status_code == 404: + # Create new file + url = 
f"/api/v4/projects/{self.group_name}%2F{project_name}/repository/files/{urllib.parse.quote(path, safe='')}" + result = self._request("POST", url, json={ + "branch": branch, + "content": content_base64, + "commit_message": commit_message or f"Create {path}", + "encoding": "base64", + "author_name": self.commit_author_name, + "author_email": self.commit_author_email + }) + logger.info(f"Created new file {path} in project {project_name}") + return result + raise + + def get_file_contents(self, project_name: str, path: str, ref: str = "main") -> Dict[str, Any]: + """Get the contents of a file in a project. + + Args: + project_name: Name of the project + path: Path to the file + ref: Branch, tag, or commit SHA + + Returns: + File data + """ + url = f"/api/v4/projects/{self.group_name}%2F{project_name}/repository/files/{urllib.parse.quote(path, safe='')}" + params = {"ref": ref} + return self._request("GET", url, params=params) + + def read_file(self, project: Dict[str, Any], path: str, ref: str = "main") -> str: + """Read a file from a project. + + Args: + project: The project object + path: Path to the file to read + ref: Git reference (branch, tag, commit) to read from + + Returns: + The file contents as a string + """ + project_name = project["name"] + file = self.get_file_contents(project_name, path, ref) + content = base64.b64decode(file["content"]).decode("utf-8") + return content + + def create_merge_request( + self, + repo_name: str, + title: str, + description: str, + source_branch: str, + target_branch: str = "main" + ) -> Dict[str, Any]: + """Create a merge request in a project. 
+ + Args: + repo_name: Name of the project + title: Title of the merge request + description: Description/body of the merge request + source_branch: Branch containing the changes + target_branch: Branch to merge into + + Returns: + The created merge request object + """ + url = f"/api/v4/projects/{self.group_name}%2F{repo_name}/merge_requests" + mr = self._request("POST", url, json={ + "title": title, + "description": description, + "source_branch": source_branch, + "target_branch": target_branch, + "remove_source_branch": True + }) + + logger.info(f"Created MR !{mr['iid']} in {repo_name}: {title}") + return mr + + def trigger_pipeline( + self, + project_name: str, + ref: str, + variables: Optional[Dict[str, Any]] = None + ) -> None: + """Trigger a GitLab CI/CD pipeline. + + Args: + project_name: Name of the project + ref: Git reference to run the pipeline on + variables: Pipeline variables + """ + url = f"/api/v4/projects/{self.group_name}%2F{project_name}/pipeline" + pipeline_variables = [] + + if variables: + pipeline_variables = [{"key": k, "value": v} for k, v in variables.items()] + + self._request("POST", url, json={ + "ref": ref, + "variables": pipeline_variables + }) + + logger.info(f"Triggered pipeline in {project_name} on {ref}") + + def set_group_permission(self, project_name: str, group_name: str, access_level: str) -> None: + """Share a project with a group. 
+ + Args: + project_name: Name of the project + group_name: Name of the group + access_level: Access level ('guest', 'reporter', 'developer', 'maintainer', 'owner') + """ + # Map access level names to GitLab integers + access_level_map = { + "guest": 10, + "reporter": 20, + "developer": 30, + "maintainer": 40, + "owner": 50 + } + + access_level_int = access_level_map.get(access_level.lower()) + if not access_level_int: + raise ValueError(f"Invalid access level: {access_level}") + + # First get the target group ID + try: + target_group_id = self._get_group_id(group_name) + + # Share project with group + url = f"/api/v4/projects/{self.group_name}%2F{project_name}/share" + self._request("POST", url, json={ + "group_id": target_group_id, + "group_access": access_level_int + }) + logger.info(f"Shared {project_name} with group {group_name} at {access_level} level") + except requests.exceptions.HTTPError as e: + logger.error(f"Failed to share project with group {group_name}: {str(e)}") + if e.response.status_code == 404: + logger.warning(f"Group {group_name} not found, skipping permission assignment") + else: + raise + + def update_project_topics(self, project_name: str, topics: List[str]) -> None: + """Update the topics of a project. + + Args: + project_name: Name of the project + topics: List of topics to set + """ + url = f"/api/v4/projects/{self.group_name}%2F{project_name}" + + self._request("PUT", url, json={"topics": topics}) + + logger.info(f"Updated topics for {project_name}: {topics}") + + def create_project_from_template( + self, + template_project_name: str, + new_project_name: str, + visibility: str = "private", + description: Optional[str] = None, + topics: Optional[List[str]] = None + ) -> Dict[str, Any]: + """Create a new project from a template. 
+ + Args: + template_project_name: Name of the template project + new_project_name: Name for the new project + visibility: Visibility level ('private', 'internal', 'public') + description: Description for the new project + topics: List of topics to add to the project + + Returns: + The newly created project + """ + # GitLab doesn't have direct template creation like GitHub, so we'll fork and rename + template_project = self.get_project(template_project_name) + + # Fork the template project + url = f"/api/v4/projects/{template_project['id']}/fork" + new_project = self._request("POST", url, json={ + "name": new_project_name, + "path": new_project_name, + "namespace_id": self.group_id, + "visibility": visibility + }) + + # Update description if provided + if description: + update_url = f"/api/v4/projects/{new_project['id']}" + self._request("PUT", update_url, json={"description": description}) + + # Add topics if provided + if topics: + self.update_project_topics(new_project_name, topics) + + logger.info(f"Created new project: {new_project_name} from template: {template_project_name}") + return new_project + + def create_readme_file(self, project_name: str) -> Dict[str, Any]: + """Create a README.md file in an empty project to initialize it. + + Args: + project_name: Name of the project + + Returns: + The created file content data + """ + content = f"""# {project_name} + +This project was created automatically by the template automation system. 
+ """ + content_base64 = base64.b64encode(content.encode("utf-8")).decode("utf-8") + + url = f"/api/v4/projects/{self.group_name}%2F{project_name}/repository/files/README.md" + result = self._request("POST", url, json={ + "branch": "main", + "content": content_base64, + "commit_message": "Initialize project with README", + "encoding": "base64", + "author_name": self.commit_author_name, + "author_email": self.commit_author_email + }) + + logger.info(f"Created README.md in project {project_name} to initialize it") + return result + + def clone_repository_contents( + self, + source_repo_name: str, + target_repo_name: str, + source_branch: str = "main", + target_branch: str = "main", + commit_message: str = "Initial project setup from template" + ) -> None: + """Clone all files from a source project to a target project. + + This method copies all files from the source project to the target project, + effectively implementing project templating by copying file content. + + Args: + source_repo_name: Name of the source/template project + target_repo_name: Name of the target project where files will be copied + source_branch: Branch to copy files from in the source project + target_branch: Branch to copy files to in the target project + commit_message: Commit message for the file creation commit + + Raises: + ValueError: If source project or branch doesn't exist + """ + logger.info(f"Cloning contents from {source_repo_name}:{source_branch} to {target_repo_name}:{target_branch}") + + try: + # Get the source project info + source_project = self.get_project(source_repo_name) + + # Get all files from the source project repository tree + tree_url = f"/api/v4/projects/{source_project['id']}/repository/tree" + params = {"ref": source_branch, "recursive": True, "per_page": 100} + tree_data = self._request("GET", tree_url, params=params) + + # Filter out directories, only keep files + files = [item for item in tree_data if item["type"] == "blob"] + logger.info(f"Found {len(files)} 
files to copy from {source_repo_name}") + + # Copy each file to the target project + actions = [] + for file_item in files: + file_path = file_item["path"] + + # Skip .git directory and other metadata files if they exist + if file_path.startswith(".git/") or file_path == ".git": + continue + + try: + # Get file content from source + file_content = self.get_file_contents(source_repo_name, file_path, source_branch) + content = file_content["content"] + + # Add to commit actions + actions.append({ + "action": "create", + "file_path": file_path, + "content": base64.b64decode(content).decode("utf-8"), + "encoding": "text" + }) + except Exception as file_err: + logger.error(f"Failed to get content for file {file_path}: {str(file_err)}") + continue + + # Create a single commit with all files + if actions: + commit_url = f"/api/v4/projects/{self.group_name}%2F{target_repo_name}/repository/commits" + commit_data = { + "branch": target_branch, + "commit_message": commit_message, + "actions": actions, + "author_name": self.commit_author_name, + "author_email": self.commit_author_email + } + + self._request("POST", commit_url, json=commit_data) + logger.info(f"Successfully copied {len(actions)} files from {source_repo_name} to {target_repo_name}") + else: + logger.warning(f"No files were copied from {source_repo_name} to {target_repo_name}") + + except requests.exceptions.HTTPError as e: + logger.error(f"Failed to clone repository contents: {str(e)}") + raise ValueError(f"Could not clone contents from {source_repo_name} to {target_repo_name}") + except Exception as e: + logger.error(f"Unexpected error during repository cloning: {str(e)}") + raise \ No newline at end of file diff --git a/template_automation/gitlab_provider.py b/template_automation/gitlab_provider.py new file mode 100644 index 00000000..9f4bf1e0 --- /dev/null +++ b/template_automation/gitlab_provider.py @@ -0,0 +1,330 @@ +"""GitLab repository provider implementation. 
"""GitLab repository provider implementation.

This module provides the GitLab implementation of the repository provider interface.
"""

import base64
import json
import logging
import time
import urllib.parse
from typing import Dict, List, Optional, Any, Union

import requests

from .repository_provider import (
    RepositoryProvider,
    RepositorySettings,
    FileContent,
    MergeRequestSettings
)

logger = logging.getLogger(__name__)

class GitLabProvider(RepositoryProvider):
    """GitLab implementation of the repository provider interface."""

    def __init__(
        self,
        api_base_url: str,
        token: str,
        organization: str,
        commit_author_name: str = "Template Automation",
        commit_author_email: str = "automation@example.com",
        verify_ssl: bool = True
    ):
        """Initialize GitLab provider with required settings."""
        super().__init__(
            api_base_url=api_base_url,
            token=token,
            organization=organization,
            commit_author_name=commit_author_name,
            commit_author_email=commit_author_email,
            verify_ssl=verify_ssl
        )
        # Reuse one session (connection pooling + shared auth headers).
        self.session = requests.Session()
        self.session.headers.update({
            'Authorization': f'Bearer {token}',
            'Content-Type': 'application/json',
            'User-Agent': 'Template-Automation-Lambda'
        })
        self._group_id = None

    @property
    def group_id(self) -> int:
        """Get the GitLab group ID, caching it for subsequent use."""
        if self._group_id is None:
            group = self._request('GET', f'/api/v4/groups/{urllib.parse.quote(self.organization, safe="")}')
            self._group_id = group['id']
        return self._group_id

    def _request(self, method: str, url: str, **kwargs) -> Dict[str, Any]:
        """Make a request to the GitLab API.

        Args:
            method: HTTP method (GET, POST, PUT, PATCH, DELETE)
            url: URL path or full URL
            **kwargs: Additional arguments to pass to requests

        Returns:
            Response data as dictionary; ``{"raw_content": ...}`` for
            non-JSON bodies, ``{}`` for empty bodies

        Raises:
            requests.exceptions.RequestException: On request failure
        """
        if not url.startswith('http'):
            url = f"{self.api_base_url}{url}"

        kwargs['verify'] = self.verify_ssl

        if 'json' in kwargs:
            logger.info(f"GitLab API {method} request to {url} with payload: {json.dumps(kwargs['json'])}")
        else:
            logger.info(f"GitLab API {method} request to {url}")

        try:
            response = self.session.request(method, url, **kwargs)
            response.raise_for_status()

            if response.text:
                try:
                    return response.json()
                except json.JSONDecodeError:
                    logger.warning(f"Received non-JSON response: {response.text}")
                    return {"raw_content": response.text}
            return {}
        except requests.exceptions.RequestException as e:
            logger.error(f"GitLab API request failed: {str(e)}")
            # Best-effort logging of the server-side error details.
            if hasattr(e, 'response') and e.response is not None:
                try:
                    if e.response.text.strip():
                        error_details = e.response.json()
                        logger.error(f"Error details: {json.dumps(error_details)}")
                    else:
                        logger.error(f"Empty error response with status code: {e.response.status_code}")
                except json.JSONDecodeError:
                    logger.error(f"Non-JSON error response: {e.response.text}")
            raise

    def get_repository(
        self,
        name: str,
        create: bool = False,
        settings: Optional[RepositorySettings] = None
    ) -> Dict[str, Any]:
        """Get or create a repository.

        Args:
            name: Repository name
            create: Whether to create if it doesn't exist
            settings: Repository settings if creating new

        Returns:
            Repository data

        Raises:
            ValueError: If repository doesn't exist and create=False
            requests.exceptions.HTTPError: For API failures other than a
                404 on the initial lookup
        """
        path = urllib.parse.quote(f"{self.organization}/{name}", safe="")
        try:
            return self._request('GET', f'/api/v4/projects/{path}')
        except requests.exceptions.HTTPError as e:
            # BUG FIX: only a 404 means "repository not found". Previously
            # every HTTP error (403, 500, ...) fell through to the ValueError
            # below, masking real API failures as a missing repository.
            if e.response.status_code != 404:
                raise
            if not create:
                raise ValueError(f"Repository {name} not found")

            if not settings:
                settings = RepositorySettings()

            create_data = {
                'name': name,
                'path': name,
                'namespace_id': self.group_id,
                'visibility': settings.visibility,
                'initialize_with_readme': True
            }
            if settings.description:
                create_data['description'] = settings.description

            project = self._request('POST', '/api/v4/projects', json=create_data)

            if settings.topics:
                self._request('PUT', f'/api/v4/projects/{project["id"]}', json={
                    'topics': settings.topics
                })

            # Give GitLab a moment to finish provisioning before re-reading.
            time.sleep(2)
            return self._request('GET', f'/api/v4/projects/{project["id"]}')

    def get_branch(self, repo_name: str, branch: str) -> Dict[str, Any]:
        """Get a branch from a repository.

        Args:
            repo_name: Repository name
            branch: Branch name

        Returns:
            Branch data
        """
        path = urllib.parse.quote(f"{self.organization}/{repo_name}", safe="")
        branch_path = urllib.parse.quote(branch, safe="")
        return self._request('GET', f'/api/v4/projects/{path}/repository/branches/{branch_path}')

    def create_branch(
        self,
        repo_name: str,
        branch: str,
        from_branch: str = "main"
    ) -> None:
        """Create a new branch in a repository.

        Args:
            repo_name: Repository name
            branch: New branch name
            from_branch: Base branch name
        """
        path = urllib.parse.quote(f"{self.organization}/{repo_name}", safe="")
        self._request('POST', f'/api/v4/projects/{path}/repository/branches', json={
            'branch': branch,
            'ref': from_branch
        })
        logger.info(f"Created branch {branch} in {repo_name} from {from_branch}")

    def write_file(
        self,
        repo_name: str,
        file: FileContent,
        branch: str = "main",
        message: Optional[str] = None
    ) -> Dict[str, Any]:
        """Write or update a file in a repository.

        Args:
            repo_name: Repository name
            file: File content and metadata
            branch: Branch to write to
            message: Commit message

        Returns:
            Updated file data
        """
        path = urllib.parse.quote(f"{self.organization}/{repo_name}", safe="")
        file_path = urllib.parse.quote(file.path, safe="")
        url = f'/api/v4/projects/{path}/repository/files/{file_path}'

        # Probe for the file: 404 => create (POST), otherwise update (PUT).
        try:
            self._request('GET', url, params={'ref': branch})
            method = 'PUT'
        except requests.exceptions.HTTPError as e:
            if e.response.status_code != 404:
                raise
            method = 'POST'

        content = base64.b64encode(file.content.encode(file.encoding)).decode('ascii')

        return self._request(method, url, json={
            'branch': branch,
            'content': content,
            'commit_message': message or f"{'Update' if method == 'PUT' else 'Create'} {file.path}",
            'encoding': 'base64',
            'author_name': self.commit_author_name,
            'author_email': self.commit_author_email
        })

    def create_merge_request(
        self,
        repo_name: str,
        settings: MergeRequestSettings
    ) -> Dict[str, Any]:
        """Create a merge request.

        Args:
            repo_name: Repository name
            settings: Merge request settings

        Returns:
            Created merge request data
        """
        path = urllib.parse.quote(f"{self.organization}/{repo_name}", safe="")
        return self._request('POST', f'/api/v4/projects/{path}/merge_requests', json={
            'title': settings.title,
            'description': settings.description,
            'source_branch': settings.source_branch,
            'target_branch': settings.target_branch,
            'remove_source_branch': True
        })

    def create_pull_request(
        self,
        repo_name: str,
        settings: MergeRequestSettings
    ) -> Dict[str, Any]:
        """Create a pull request (GitLab calls them merge requests)."""
        return self.create_merge_request(repo_name, settings)

    def clone_repository_contents(
        self,
        source_repo: str,
        target_repo: str,
        source_branch: str = "main",
        target_branch: str = "main",
        message: str = "Initial project setup from template"
    ) -> None:
        """Clone contents from one repository to another in a single commit.

        Args:
            source_repo: Source repository name
            target_repo: Target repository name
            source_branch: Source branch name
            target_branch: Target branch name
            message: Commit message
        """
        source_path = urllib.parse.quote(f"{self.organization}/{source_repo}", safe="")
        target_path = urllib.parse.quote(f"{self.organization}/{target_repo}", safe="")

        # Get all files from source repository.
        # TODO(review): per_page=100 without pagination truncates templates
        # with more than 100 tree entries -- confirm template sizes.
        tree = self._request('GET',
            f'/api/v4/projects/{source_path}/repository/tree',
            params={'ref': source_branch, 'recursive': True, 'per_page': 100})

        # Filter for files only
        files = [item for item in tree if item['type'] == 'blob']

        # Batch all file operations into a single commit
        actions = []
        for file_item in files:
            if file_item['path'].startswith('.git/'):
                continue

            try:
                file_content = self._request('GET',
                    f'/api/v4/projects/{source_path}/repository/files/{urllib.parse.quote(file_item["path"], safe="")}',
                    params={'ref': source_branch})

                # BUG FIX: forward the API's base64 payload unchanged with
                # encoding='base64' instead of decoding to UTF-8 text; the
                # decode raised UnicodeDecodeError on binary files, which
                # were then silently skipped.
                actions.append({
                    'action': 'create',
                    'file_path': file_item['path'],
                    'content': file_content['content'],
                    'encoding': 'base64'
                })
            except Exception as e:
                logger.error(f"Failed to copy file {file_item['path']}: {str(e)}")
                continue

        # Commit all files at once
        if actions:
            self._request('POST', f'/api/v4/projects/{target_path}/repository/commits', json={
                'branch': target_branch,
                'commit_message': message,
                'actions': actions,
                'author_name': self.commit_author_name,
                'author_email': self.commit_author_email
            })
            logger.info(f"Cloned {len(actions)} files from {source_repo} to {target_repo}")
+ """ + api_base_url: str + token: str + org_name: str + commit_author_name: str = "Template Automation" + commit_author_email: str = "automation@example.com" + source_version: Optional[str] = None + template_repo_name: Optional[str] = None + config_file_name: str = "config.json" + +class WorkflowConfig(BaseModel): + """Configuration for GitHub Actions workflow files. + + This class defines the structure for configuring GitHub Actions workflow files, + including the workflow name, template source and destination paths, and any + variables needed for template rendering. + + Attributes: + name (str): Name of the workflow, used for identification and logging. + template_path (str): Path to the workflow template file, relative to the + template root directory. + output_path (str): Destination path where the rendered workflow file should + be written in the target repository. + variables (Dict[str, Any]): Variables to use when rendering the workflow + template with Jinja2. Keys are variable names and values can be any + type that Jinja2 can handle. Defaults to an empty dict. + + Example: + >>> workflow = WorkflowConfig( + ... name="CI/CD", + ... template_path="workflows/ci.yml.j2", + ... output_path=".github/workflows/ci.yml", + ... variables={ + ... "runner": "ubuntu-latest", + ... "python_version": "3.9" + ... } + ... ) + """ + name: str + template_path: str + output_path: str + variables: Dict[str, Any] = Field(default_factory=dict) + +class PRConfig(BaseModel): + """Specifies the configuration for creating pull requests. + + This class defines the structure and default values for pull request creation, + including templates for title and body, branch configuration, and PR metadata + like labels and reviewers. + + Attributes: + title_template (str): Jinja2 template for the pull request title. Variables + available include: repo_name, template_repo. + body_template (str): Jinja2 template for the pull request body. 
Variables + available include: repo_name, template_repo, workflow_files. + base_branch (str): The target branch for the pull request. Defaults to "main". + branch_prefix (str): Prefix for the feature branch name. The final branch name + will be {prefix}-{repo_name}. + labels (List[str]): Labels to automatically apply to the pull request. + Defaults to ["automated"]. + reviewers (List[str]): GitHub usernames of reviewers to assign. + assignees (List[str]): GitHub usernames of users to assign to the PR. + + Example: + >>> pr_config = PRConfig( + ... title_template="Initialize {{ repo_name }} from template", + ... labels=["infrastructure", "automated"], + ... reviewers=["alice", "bob"] + ... ) + """ + title_template: str = "Initialize {{ repo_name }} from template" + body_template: str = """ + Automated pull request for initializing {{ repo_name }} from template {{ template_repo }}. + + This PR was created by the Template Automation system. + + ## Changes + - Initial repository setup from template + - Configuration files added + {% if workflow_files %} + - Added workflow files: + {% for workflow in workflow_files %} + - {{ workflow }} + {% endfor %} + {% endif %} + """ + base_branch: str = "main" + branch_prefix: str = "init" + labels: List[str] = Field(default_factory=lambda: ["automated"]) + reviewers: List[str] = Field(default_factory=list) + assignees: List[str] = Field(default_factory=list) + +class TemplateInput(BaseModel): + """Represents the input data required for template automation. + + This class defines the structure of input data needed to create a new + repository from a template. It includes project metadata, template-specific + settings, and optional configurations for repository ownership and + initialization. + + Attributes: + project_name (str): Name of the project/repository to create. This will + be used as the repository name and in various template substitutions. 
+ template_settings (Dict[str, Any]): Dictionary of template-specific + settings that will be written to the configuration file in the new + repository. The structure depends on the template being used. + trigger_init_workflow (bool): Whether to automatically trigger the + initialization workflow after repository creation. Defaults to False. + owning_team (Optional[str]): The GitHub team slug that should be granted + admin access to the new repository. If None, no team access will be + configured. + + Example: + >>> input_data = TemplateInput( + ... project_name="my-new-service", + ... template_settings={ + ... "environment": "production", + ... "region": "us-west-2" + ... }, + ... trigger_init_workflow=True + ... ) + """ + project_name: str + template_settings: Dict[str, Any] + trigger_init_workflow: bool = False + owning_team: Optional[str] = None + +class TemplateConfig(BaseModel): + """Configuration for template repository automation. + + This class defines the overall configuration for how a template repository + should be processed, including pull request settings and workflow automations. + + Attributes: + pr (PRConfig): Configuration settings for pull request creation, including + templates for title and body, branch names, and PR metadata. + workflows (List[WorkflowConfig]): List of workflow configurations that should be + applied to repositories created from this template. + + Example: + >>> config = TemplateConfig( + ... pr=PRConfig( + ... title_template="Initialize {{ repo_name }}", + ... reviewers=["team-lead"] + ... ), + ... workflows=[ + ... WorkflowConfig( + ... template_path="workflows/ci.yml", + ... variables={"runner": "ubuntu-latest"} + ... ) + ... ] + ... ) + """ + pr: PRConfig = Field( + default_factory=lambda: PRConfig( + title_template="Initialize {{ repo_name }} from template", + body_template=""" + Automated pull request for initializing {{ repo_name }} from template {{ template_repo }}. 
+ + This PR was created by the Template Automation system. + {% if workflow_files %} + ## Added Workflows + {% for workflow in workflow_files %} + - {{ workflow }} + {% endfor %} + {% endif %} + """, + base_branch="main", + branch_prefix="init", + labels=["automated"], + reviewers=[], + assignees=[] + ) + ) + workflows: List[WorkflowConfig] = Field(default_factory=list) + + class Config: + """Pydantic model configuration. + + This inner class defines metadata for the TemplateConfig model, + including example configurations and schema information. + """ + json_schema_extra = { + "example": { + "pr": { + "title_template": "Initialize {{ repo_name }} from template", + "body_template": "Template PR body...", + "base_branch": "main", + "branch_prefix": "init", + "labels": ["automated"], + "reviewers": [], + "assignees": [] + }, + "workflows": [] + } + } diff --git a/template_automation/old.py b/template_automation/old.py new file mode 100644 index 00000000..eb1a1a81 --- /dev/null +++ b/template_automation/old.py @@ -0,0 +1,473 @@ +"""GitHub client module for template automation. + +This module provides the GitHubClient class which handles all interactions with the GitHub API +for template repository automation using the requests library directly. +""" + +import base64 +import json +import logging +import time +import urllib.parse +from typing import List, Optional, Dict, Any, Union + +import requests + +logger = logging.getLogger(__name__) + +class GitHubClient: + """A client for interacting with GitHub's API in the context of template automation. 
+ + This class provides methods for template repository operations including: + - Creating repositories from templates + - Managing repository contents + - Setting up team access + - Configuring repository settings + + Attributes: + api_base_url (str): Base URL for the GitHub API + token (str): GitHub authentication token + org_name (str): GitHub organization name + commit_author_name (str): Name to use for automated commits + commit_author_email (str): Email to use for automated commits + verify_ssl (bool): Whether to verify SSL certificates + + Example: + ```python + client = GitHubClient( + api_base_url="https://api.github.com", + token="ghp_...", + org_name="my-org", + commit_author_name="Template Bot", + commit_author_email="bot@example.com" + ) + + repo = client.create_repository_from_template( + template_repo_name="template-service", + new_repo_name="new-service", + private=True + ) + ``` + """ + + def __init__( + self, + api_base_url: str, + token: str, + org_name: str, + commit_author_name: str = "Template Automation", + commit_author_email: str = "automation@example.com", + verify_ssl: bool = True + ): + """Initialize a new GitHub client. 
+ + Args: + api_base_url: Base URL for the GitHub API + token: GitHub authentication token + org_name: GitHub organization name + commit_author_name: Name to use for automated commits + commit_author_email: Email to use for automated commits + verify_ssl: Whether to verify SSL certificates + """ + self.api_base_url = api_base_url.rstrip('/') + self.token = token + self.org_name = org_name + self.commit_author_name = commit_author_name + self.commit_author_email = commit_author_email + self.verify_ssl = verify_ssl + + # Create session for connection reuse + self.session = requests.Session() + self.session.headers.update({ + 'Authorization': f'token {token}', + 'Accept': 'application/vnd.github.v3+json', + 'User-Agent': 'Template-Automation-Lambda' + }) + + # Log initialization + logger.info(f"Initialized GitHub client for org: {org_name} (SSL verify: {verify_ssl})") + + def _request(self, method: str, url: str, **kwargs) -> Dict[str, Any]: + """Make a request to the GitHub API. + + Args: + method: HTTP method (GET, POST, PATCH, PUT, DELETE) + url: URL path or full URL to request + **kwargs: Additional arguments to pass to requests + + Returns: + Response data as a dictionary + + Raises: + requests.exceptions.RequestException: On request errors + """ + # Prepend base URL if not already an absolute URL + if not url.startswith('http'): + url = f"{self.api_base_url}{url}" + + # Set SSL verification + kwargs['verify'] = self.verify_ssl + + # Log the request + logger.debug(f"GitHub API {method} request: {url}") + + # Make the request + response = self.session.request(method, url, **kwargs) + + # Raise exception for error status codes + response.raise_for_status() + + # Return JSON data for non-empty responses + if response.text: + return response.json() + return {} + + def get_repository( + self, + repo_name: str, + create: bool = False, + owning_team: Optional[str] = None + ) -> Dict[str, Any]: + """Get or create a GitHub repository with optional team permissions. 
+ + Args: + repo_name: The name of the repository to retrieve or create + create: Whether to create the repository if it doesn't exist + owning_team: The name of the GitHub team to grant admin access + + Returns: + The repository data + """ + try: + # Try to get the repository + url = f"/repos/{self.org_name}/{repo_name}" + repo = self._request("GET", url) + logger.info(f"Found existing repository: {repo_name}") + + if owning_team: + self.set_team_permission(repo_name, owning_team, "admin") + + return repo + except requests.exceptions.HTTPError as e: + if e.response.status_code == 404 and create: + logger.info(f"Creating repository {repo_name}") + + # Create a new repository + url = f"/orgs/{self.org_name}/repos" + repo = self._request("POST", url, json={ + "name": repo_name, + "private": True, + "auto_init": True, + "allow_squash_merge": True, + "allow_merge_commit": True, + "allow_rebase_merge": True, + "delete_branch_on_merge": True + }) + + # Wait for repository initialization + max_retries = 100 + retry_delay = 1 + for _ in range(max_retries): + try: + self.get_branch(repo_name, "main") + break + except requests.exceptions.HTTPError: + time.sleep(retry_delay) + else: + raise Exception(f"Repository {repo_name} initialization timed out") + + if owning_team: + self.set_team_permission(repo_name, owning_team, "admin") + + return repo + raise + + def get_branch(self, repo_name: str, branch_name: str) -> Dict[str, Any]: + """Get branch information. + + Args: + repo_name: Name of the repository + branch_name: Name of the branch + + Returns: + Branch data + """ + url = f"/repos/{self.org_name}/{repo_name}/branches/{branch_name}" + return self._request("GET", url) + + def get_default_branch(self, repo_name: str) -> str: + """Get the default branch name of a repository. 
+ + Args: + repo_name: Name of the repository + + Returns: + Default branch name (usually 'main' or 'master') + """ + repo = self.get_repository(repo_name) + return repo["default_branch"] + + def create_branch(self, repo_name: str, branch_name: str, from_ref: str = "main") -> None: + """Create a new branch in the repository. + + Args: + repo_name: Name of the repository + branch_name: Name of the branch to create + from_ref: Reference to create branch from + """ + # Get the SHA of the source branch + source_branch = self.get_branch(repo_name, from_ref) + commit_sha = source_branch["commit"]["sha"] + + # Create the new branch + url = f"/repos/{self.org_name}/{repo_name}/git/refs" + self._request("POST", url, json={ + "ref": f"refs/heads/{branch_name}", + "sha": commit_sha + }) + + logger.info(f"Created branch {branch_name} in {repo_name}") + + def create_reference(self, repo_name: str, ref: str, sha: str) -> None: + """Create a Git reference. + + Args: + repo_name: Name of the repository + ref: The name of the reference + sha: The SHA1 value to set this reference to + """ + url = f"/repos/{self.org_name}/{repo_name}/git/refs" + self._request("POST", url, json={ + "ref": ref, + "sha": sha + }) + + logger.info(f"Created reference {ref} in {repo_name}") + + def update_reference(self, repo_name: str, ref: str, sha: str, force: bool = False) -> None: + """Update a Git reference. 
+ + Args: + repo_name: Name of the repository + ref: The name of the reference without 'refs/' prefix + sha: The SHA1 value to set this reference to + force: Force update if not a fast-forward update + """ + url = f"/repos/{self.org_name}/{repo_name}/git/refs/{ref}" + self._request("PATCH", url, json={ + "sha": sha, + "force": force + }) + + logger.info(f"Updated reference {ref} in {repo_name}") + + def write_file( + self, + repo: Dict[str, Any], + path: str, + content: str, + branch: str = "main", + commit_message: Optional[str] = None + ) -> Dict[str, Any]: + """Write or update a file in a repository. + + Args: + repo: The repository object + path: Path where to create/update the file + content: Content to write to the file + branch: Branch to commit to + commit_message: Commit message to use + + Returns: + The created/updated file content + """ + repo_name = repo["name"] + content_bytes = content.encode("utf-8") + content_base64 = base64.b64encode(content_bytes).decode("utf-8") + + # Try to get the existing file to check if it exists + try: + file = self.get_file_contents(repo_name, path, branch) + # Update existing file + url = f"/repos/{self.org_name}/{repo_name}/contents/{path}" + result = self._request("PUT", url, json={ + "message": commit_message or f"Update {path}", + "content": content_base64, + "sha": file["sha"], + "branch": branch, + "committer": { + "name": self.commit_author_name, + "email": self.commit_author_email + } + }) + logger.info(f"Updated file {path} in repo {repo_name}") + return result["content"] + except requests.exceptions.HTTPError as e: + if e.response.status_code == 404: + # Create new file + url = f"/repos/{self.org_name}/{repo_name}/contents/{path}" + result = self._request("PUT", url, json={ + "message": commit_message or f"Create {path}", + "content": content_base64, + "branch": branch, + "committer": { + "name": self.commit_author_name, + "email": self.commit_author_email + } + }) + logger.info(f"Created new file {path} in repo 
{repo_name}") + return result["content"] + raise + + def get_file_contents(self, repo_name: str, path: str, ref: str = "main") -> Dict[str, Any]: + """Get the contents of a file in a repository. + + Args: + repo_name: Name of the repository + path: Path to the file + ref: Branch, tag, or commit SHA + + Returns: + File data + """ + url = f"/repos/{self.org_name}/{repo_name}/contents/{path}" + params = {"ref": ref} + return self._request("GET", url, params=params) + + def read_file(self, repo: Dict[str, Any], path: str, ref: str = "main") -> str: + """Read a file from a repository. + + Args: + repo: The repository object + path: Path to the file to read + ref: Git reference (branch, tag, commit) to read from + + Returns: + The file contents as a string + """ + repo_name = repo["name"] + file = self.get_file_contents(repo_name, path, ref) + content = base64.b64decode(file["content"]).decode("utf-8") + return content + + def create_pull_request( + self, + repo_name: str, + title: str, + body: str, + head_branch: str, + base_branch: str = "main" + ) -> Dict[str, Any]: + """Create a pull request in a repository. + + Args: + repo_name: Name of the repository + title: Title of the pull request + body: Description/body of the pull request + head_branch: Branch containing the changes + base_branch: Branch to merge into + + Returns: + The created pull request object + """ + url = f"/repos/{self.org_name}/{repo_name}/pulls" + pr = self._request("POST", url, json={ + "title": title, + "body": body, + "head": head_branch, + "base": base_branch, + "maintainer_can_modify": True + }) + + logger.info(f"Created PR #{pr['number']} in {repo_name}: {title}") + return pr + + def trigger_workflow( + self, + repo_name: str, + workflow_id: str, + ref: str, + inputs: Optional[Dict[str, Any]] = None + ) -> None: + """Trigger a GitHub Actions workflow. 
+ + Args: + repo_name: Name of the repository + workflow_id: ID or filename of the workflow + ref: Git reference to run the workflow on + inputs: Input parameters for the workflow + """ + url = f"/repos/{self.org_name}/{repo_name}/actions/workflows/{workflow_id}/dispatches" + workflow_inputs = inputs if inputs is not None else {} + + self._request("POST", url, json={ + "ref": ref, + "inputs": workflow_inputs + }) + + logger.info(f"Triggered workflow {workflow_id} in {repo_name} on {ref}") + + def set_team_permission(self, repo_name: str, team_name: str, permission: str) -> None: + """Set a team's permission on a repository. + + Args: + repo_name: Name of the repository + team_name: Name of the team + permission: Permission level ('pull', 'push', 'admin', 'maintain', 'triage') + """ + url = f"/orgs/{self.org_name}/teams/{team_name}/repos/{self.org_name}/{repo_name}" + self._request("PUT", url, json={"permission": permission}) + + logger.info(f"Set {team_name} permission on {repo_name} to {permission}") + + def update_repository_topics(self, repo_name: str, topics: List[str]) -> None: + """Update the topics of a repository. + + Args: + repo_name: Name of the repository + topics: List of topics to set + """ + # GitHub API requires a special media type for repository topics + headers = {"Accept": "application/vnd.github.mercy-preview+json"} + url = f"/repos/{self.org_name}/{repo_name}/topics" + + self._request("PUT", url, json={"names": topics}, headers=headers) + + logger.info(f"Updated topics for {repo_name}: {topics}") + + def create_repository_from_template( + self, + template_repo_name: str, + new_repo_name: str, + private: bool = True, + description: Optional[str] = None, + topics: Optional[List[str]] = None + ) -> Dict[str, Any]: + """Create a new repository from a template. 
+ + Args: + template_repo_name: Name of the template repository + new_repo_name: Name for the new repository + private: Whether the new repository should be private + description: Description for the new repository + topics: List of topics to add to the repository + + Returns: + The newly created repository + """ + url = f"/repos/{self.org_name}/{template_repo_name}/generate" + + # Create repository from template + new_repo = self._request("POST", url, json={ + "name": new_repo_name, + "owner": self.org_name, + "description": description or f"Repository created from template: {template_repo_name}", + "private": private + }) + + # Add topics if provided + if topics: + self.update_repository_topics(new_repo_name, topics) + + logger.info(f"Created new repository: {new_repo_name} from template: {template_repo_name}") + return new_repo \ No newline at end of file diff --git a/template_automation/pytest.ini b/template_automation/pytest.ini new file mode 100644 index 00000000..a2bd95ba --- /dev/null +++ b/template_automation/pytest.ini @@ -0,0 +1,11 @@ +[pytest] +markers = + integration: marks tests as integration tests (deselect with '-m "not integration"') +testpaths = + tests +python_files = + test_*.py + *_test.py +addopts = + -v + --strict-markers \ No newline at end of file diff --git a/template_automation/repository_provider.py b/template_automation/repository_provider.py new file mode 100644 index 00000000..5483843f --- /dev/null +++ b/template_automation/repository_provider.py @@ -0,0 +1,114 @@ +"""Repository provider interface models. + +This module defines the common interface that all repository providers (GitHub, GitLab) must implement. 
+""" + +from abc import ABC, abstractmethod +from typing import Dict, List, Optional, Any +from pydantic import BaseModel, Field + +class RepositorySettings(BaseModel): + """Settings for repository creation and management.""" + visibility: str = Field(default="private", description="Repository visibility (private, internal, public)") + description: Optional[str] = Field(default=None, description="Repository description") + topics: Optional[List[str]] = Field(default=None, description="Repository topics/tags") + +class FileContent(BaseModel): + """File content and metadata.""" + path: str = Field(..., description="Path to the file within repository") + content: Any = Field(..., description="Content of the file (string or bytes)") + encoding: str = Field(default="utf-8", description="Encoding of the content") + + class Config: + arbitrary_types_allowed = True + +class MergeRequestSettings(BaseModel): + """Settings for merge/pull request creation.""" + title: str = Field(..., description="Title of the merge/pull request") + description: str = Field(..., description="Description/body of the merge/pull request") + source_branch: str = Field(..., description="Source branch containing changes") + target_branch: str = Field(default="main", description="Target branch to merge into") + +class RepositoryProvider(ABC): + """Base interface that all repository providers must implement.""" + + def __init__( + self, + api_base_url: str, + token: str, + organization: str, + commit_author_name: str = "Template Automation", + commit_author_email: str = "automation@example.com", + verify_ssl: bool = True + ): + self.api_base_url = api_base_url + self.token = token + self.organization = organization + self.commit_author_name = commit_author_name + self.commit_author_email = commit_author_email + self.verify_ssl = verify_ssl + + @abstractmethod + def get_repository( + self, + name: str, + create: bool = False, + settings: Optional[RepositorySettings] = None + ) -> Dict[str, Any]: + 
"""Get or create a repository.""" + pass + + @abstractmethod + def get_branch(self, repo_name: str, branch: str) -> Dict[str, Any]: + """Get a branch from a repository.""" + pass + + @abstractmethod + def create_branch( + self, + repo_name: str, + branch: str, + from_branch: str = "main" + ) -> None: + """Create a new branch in a repository.""" + pass + + @abstractmethod + def write_file( + self, + repo_name: str, + file: FileContent, + branch: str = "main", + message: Optional[str] = None + ) -> Dict[str, Any]: + """Write or update a file in a repository.""" + pass + + @abstractmethod + def clone_repository_contents( + self, + source_repo: str, + target_repo: str, + source_branch: str = "main", + target_branch: str = "main", + message: str = "Initial project setup from template" + ) -> None: + """Clone contents from one repository to another.""" + pass + + def create_merge_request( + self, + repo_name: str, + settings: MergeRequestSettings + ) -> Dict[str, Any]: + """Create a merge request (default implementation delegates to create_pull_request).""" + return self.create_pull_request(repo_name, settings) + + @abstractmethod + def create_pull_request( + self, + repo_name: str, + settings: MergeRequestSettings + ) -> Dict[str, Any]: + """Create a pull request.""" + pass \ No newline at end of file diff --git a/template_automation/requirements.txt b/template_automation/requirements.txt new file mode 100644 index 00000000..e06cdc01 --- /dev/null +++ b/template_automation/requirements.txt @@ -0,0 +1,12 @@ +# pylint +# black +# pre-commit + +boto3 +requests + +# Testing dependencies +pytest>=7.0.0 +pytest-mock>=3.10.0 +requests-mock>=1.11.0 +coverage>=7.2.0 diff --git a/template_automation/template_manager.py b/template_automation/template_manager.py new file mode 100644 index 00000000..eea8986f --- /dev/null +++ b/template_automation/template_manager.py @@ -0,0 +1,122 @@ +"""Template management and configuration using Jinja2.""" + +import os +import json +from typing 
import Dict, Any, List, Optional +from jinja2 import Environment, FileSystemLoader, Template +from pydantic import ValidationError +from .models import WorkflowConfig, PRConfig, TemplateConfig + +class TemplateManager: + """Handles the management and rendering of templates for workflows and pull requests. + + This class provides utilities to load template configurations, render workflow files, + and generate pull request details based on templates and user-defined variables. + + Attributes: + env (Environment): The Jinja2 environment for rendering templates. + template_repo_name (str): The name of the template repository. + config (TemplateConfig): The loaded template configuration. + """ + + def __init__(self, template_root: Optional[str] = None, template_repo_name: Optional[str] = None): + """Initialize the TemplateManager with optional template root and repository name. + + Args: + template_root (str, optional): The root directory for templates. Defaults to the + 'templates' directory in the same location as this file. + template_repo_name (str, optional): The name of the template repository. + """ + default_template_path = os.path.join(os.path.dirname(__file__), "templates") + + effective_template_root: str + if isinstance(template_root, str): + effective_template_root = template_root + elif template_root is None: + effective_template_root = default_template_path + else: + # template_root is not a string and not None (e.g., a tuple was passed) + print( + f"Warning: TemplateManager's template_root argument expected str or None, " + f"but received type {type(template_root)}. 
Using default template path: " + f"'{default_template_path}'" + ) + effective_template_root = default_template_path + + self.env = Environment( + loader=FileSystemLoader(effective_template_root), + trim_blocks=True, + lstrip_blocks=True + ) + self.template_repo_name = template_repo_name + self.config = self._load_template_config() + + def _load_template_config(self) -> TemplateConfig: + """Load the template configuration from a .template-config.json file. + + Returns: + TemplateConfig: The loaded configuration with validation. + + Raises: + ValidationError: If the configuration is invalid. + """ + try: + config_path = os.path.join(os.getcwd(), ".template-config.json") + if os.path.exists(config_path): + with open(config_path, "r") as f: + template_config = json.load(f) + return TemplateConfig(**template_config) + return TemplateConfig() # Use defaults if no config file exists + except ValidationError as e: + print(f"Warning: Template config validation failed: {str(e)}") + return TemplateConfig() # Use defaults on validation error + except Exception as e: + print(f"Warning: Could not load template config: {str(e)}") + return TemplateConfig() # Use defaults on any other error + + def render_workflow(self, workflow: WorkflowConfig) -> str: + """Render a GitHub Actions workflow template. + + Args: + workflow (WorkflowConfig): The workflow configuration containing template details. + + Returns: + str: The rendered workflow content as a string. + """ + template = self.env.get_template(workflow.template_path) + return template.render(**workflow.variables) + + def render_pr_details(self, repo_name: str, workflow_files: Optional[List[str]] = None) -> Dict[str, Any]: + """Generate pull request details by rendering templates and configurations. + + Args: + repo_name (str): The name of the repository being created. + workflow_files (List[str], optional): A list of workflow files being added. 
+ + Returns: + Dict[str, Any]: A dictionary containing the rendered pull request details. + """ + pr_config = self.config.pr + variables = { + "repo_name": repo_name, + "template_repo": self.template_repo_name, + "workflow_files": workflow_files + } + + return { + "title": self.env.from_string(pr_config.title_template).render(**variables), + "body": self.env.from_string(pr_config.body_template).render(**variables), + "base_branch": pr_config.base_branch, + "branch_name": f"{pr_config.branch_prefix}-{repo_name}", + "labels": pr_config.labels, + "reviewers": pr_config.reviewers, + "assignees": pr_config.assignees + } + + def get_workflow_configs(self) -> List[WorkflowConfig]: + """Retrieve workflow configurations from the template configuration. + + Returns: + List[WorkflowConfig]: A list of workflow configurations. + """ + return self.config.workflows diff --git a/template_automation/templates/workflows/ansible.yml.j2 b/template_automation/templates/workflows/ansible.yml.j2 new file mode 100644 index 00000000..8f0d0b4d --- /dev/null +++ b/template_automation/templates/workflows/ansible.yml.j2 @@ -0,0 +1,42 @@ +name: "Ansible Playbook" + +on: + workflow_dispatch: + pull_request: + paths: + - 'ansible/**' + - '.github/workflows/ansible.yml' + +jobs: + ansible: + runs-on: {{ runner | default('ubuntu-latest') }} + permissions: + id-token: write + contents: read + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: {{ python_version | default('3.9') }} + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: {{ aws_role }} + aws-region: {{ aws_region | default('us-west-2') }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install ansible + if [ -f ansible/requirements.txt ]; then + pip install -r ansible/requirements.txt + fi + + - name: Run Ansible Playbook + run: | + cd ansible + ansible-playbook {{ 
playbook_path | default('generate_hcl_files.yml') }}
diff --git a/template_automation/templates/workflows/terraform.yml.j2 b/template_automation/templates/workflows/terraform.yml.j2
new file mode 100644
index 00000000..b854588a
--- /dev/null
+++ b/template_automation/templates/workflows/terraform.yml.j2
@@ -0,0 +1,52 @@
+name: "Terraform {{ environment | default('Production') }}"
+
+on:
+  pull_request:
+    branches: [ {{ target_branch | default('main') }} ]
+    paths:
+      - '**.tf'
+      - '**.tfvars'
+      - '**.hcl'
+  push:
+    branches: [ {{ target_branch | default('main') }} ]
+    paths:
+      - '**.tf'
+      - '**.tfvars'
+      - '**.hcl'
+
+jobs:
+  terraform:
+    runs-on: {{ runner | default('ubuntu-latest') }}
+    environment: {{ environment | default('Production') }}
+    permissions:
+      id-token: write
+      contents: read
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          role-to-assume: {{ aws_role }}
+          aws-region: {{ aws_region | default('us-west-2') }}
+
+      - name: Setup Terraform
+        uses: hashicorp/setup-terraform@v2
+        with:
+          terraform_version: {{ terraform_version | default('1.5.0') }}
+
+      - name: Terraform Format
+        run: terraform fmt -check -recursive
+
+      - name: Terraform Init
+        run: terraform init
+
+      - name: Terraform Plan
+        if: github.event_name == 'pull_request'
+        run: terraform plan -no-color
+        continue-on-error: true
+
+      - name: Terraform Apply
+        # github.ref is always the full ref ('refs/heads/<branch>'), while target_branch
+        # is a bare branch name everywhere else in this template (default 'main').
+        # Build the full ref here; otherwise a caller-supplied target_branch like 'main'
+        # would never match and auto-apply would silently never run.
+        if: github.ref == 'refs/heads/{{ target_branch | default('main') }}' && github.event_name == 'push'
+        run: terraform apply -auto-approve
diff --git a/template_automation/test_payload.json b/template_automation/test_payload.json
new file mode 100644
index 00000000..01b2e4fd
--- /dev/null
+++ b/template_automation/test_payload.json
@@ -0,0 +1,26 @@
+{
+  "project_name": "eks-automation-lambda-test1",
+  "eks_settings": {
+    "attrs": {
+      "account_name": "dev-account",
+      "aws_region": "us-east-1",
+      "cluster_mailing_list": "someone@example.com",
+      "cluster_name":
"example-cluster-dev", + "eks_instance_disk_size": 100, + "eks_ng_desired_size": 2, + "eks_ng_max_size": 10, + "eks_ng_min_size": 2, + "environment": "development", + "environment_abbr": "dev", + "organization": "example:dept:team", + "finops_project_name": "example_project", + "finops_project_number": "fp00000001", + "finops_project_role": "example_project_app", + "vpc_domain_name": "dev.example.com", + "vpc_name": "vpc-dev" + }, + "tags": { + "slim:schedule": "8:00-17:00" + } + } +} \ No newline at end of file diff --git a/template_automation/tests/__init__.py b/template_automation/tests/__init__.py new file mode 100644 index 00000000..739954cb --- /dev/null +++ b/template_automation/tests/__init__.py @@ -0,0 +1 @@ +# Tests package \ No newline at end of file diff --git a/template_automation/tests/conftest.py b/template_automation/tests/conftest.py new file mode 100644 index 00000000..4ae8ce7c --- /dev/null +++ b/template_automation/tests/conftest.py @@ -0,0 +1,122 @@ +import os +import pytest +from github import Github +import time + +@pytest.fixture(scope="session") +def github_client(): + """Create a GitHub client for integration tests.""" + token = os.environ.get("GITHUB_TOKEN") + if not token: + pytest.skip("GITHUB_TOKEN environment variable not set") + + api_url = os.environ.get("GITHUB_API", "https://api.github.com") + return Github(base_url=api_url, login_or_token=token) + +@pytest.fixture(scope="session") +def cleanup_mode(): + """Determine if repositories should be deleted or just archived.""" + return os.environ.get("INTEGRATION_TEST_DELETE_REPOS", "").lower() in ("true", "1", "yes") + +@pytest.fixture +def test_repo(github_client, cleanup_mode, request): + """Create a test repository and clean it up after the test.""" + org_name = os.environ.get("GITHUB_ORG") + if not org_name: + pytest.skip("GITHUB_ORG environment variable not set") + + # Create a unique repo name for this test + repo_name = f"test-repo-{pytest.config.getoption('--timestamp', 
default='')}-{id(request)}" + + org = github_client.get_organization(org_name) + repo = org.create_repo( + repo_name, + description="Temporary repository for integration testing", + private=True + ) + + yield repo + + # Clean up after the test + if cleanup_mode: + # Delete the repository + repo.delete() + else: + # Archive the repository (the original behavior) + repo.edit(archived=True) + +@pytest.fixture +def github_client_params(): + """Fixture providing standard GitHubClient parameters""" + return { + "api_base_url": "https://api.github.example.com", + "token": "test-token", + "org_name": "test-org", + "commit_author_name": "Test Author", + "commit_author_email": "test@example.com", + "source_version": "v1.0.0", + "template_repo_name": "template-repo", + "config_file_name": "config.json" + } + +@pytest.fixture +def mock_repository_response(): + """Fixture providing a standard repository API response""" + return { + "id": 1234, + "name": "test-repo", + "default_branch": "main", + "private": True, + "description": "Test repository" + } + +@pytest.fixture +def mock_tree_response(): + """Fixture providing a standard tree API response""" + return { + "sha": "test-tree-sha", + "tree": [ + { + "path": "test.txt", + "mode": "100644", + "type": "blob", + "sha": "test-blob-sha", + "size": 100 + } + ] + } + +@pytest.fixture +def mock_blob_response(): + """Fixture providing a standard blob API response""" + return { + "sha": "test-blob-sha", + "content": "SGVsbG8gV29ybGQh", # Base64 encoded "Hello World!" 
+ "encoding": "base64" + } + +@pytest.fixture +def mock_commit_response(): + """Fixture providing a standard commit API response""" + return { + "sha": "test-commit-sha", + "tree": { + "sha": "test-tree-sha" + } + } + +@pytest.fixture +def mock_reference_response(): + """Fixture providing a standard reference API response""" + return { + "ref": "refs/heads/main", + "object": { + "sha": "test-commit-sha", + "type": "commit" + } + } + +def pytest_addoption(parser): + """Add custom command line options.""" + timestamp = int(time.time()) + parser.addoption("--timestamp", action="store", default=str(timestamp)) \ No newline at end of file diff --git a/template_automation/tests/integration/test_github_operations.py b/template_automation/tests/integration/test_github_operations.py new file mode 100644 index 00000000..caf95fb7 --- /dev/null +++ b/template_automation/tests/integration/test_github_operations.py @@ -0,0 +1,16 @@ +import pytest +import os + +@pytest.mark.integration +def test_repository_operations(test_repo, cleanup_mode): + """Test basic repository operations.""" + # Your test code here that uses the test_repo + + # This is just an example verification + assert test_repo.name.startswith("test-repo-") + + # Log what will happen to this repository + if cleanup_mode: + print(f"Repository {test_repo.name} will be DELETED after this test") + else: + print(f"Repository {test_repo.name} will be ARCHIVED after this test") diff --git a/template_automation/tests/pytest.ini b/template_automation/tests/pytest.ini new file mode 100644 index 00000000..948fe8b2 --- /dev/null +++ b/template_automation/tests/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +markers = + integration: marks tests as integration tests (requires GitHub API access) +addopts = --timestamp=%(timestamp)s +python_functions = test_* diff --git a/template_automation/tests/test_github_client.py b/template_automation/tests/test_github_client.py new file mode 100644 index 00000000..89bcd656 --- /dev/null +++ 
"""Unit tests for GitHubClient.

All HTTP traffic is intercepted with the ``requests_mock`` pytest fixture,
so these tests never touch the network. Fixtures such as
``github_client_params`` and the ``mock_*_response`` payloads are defined in
the package conftest (not visible here).
"""
import os
import pytest
import base64        # NOTE(review): unused in this module — confirm before removing
import tempfile      # NOTE(review): unused here; tmp_path fixture is used instead
import shutil        # NOTE(review): unused in this module
from datetime import datetime          # NOTE(review): unused in this module
from urllib.parse import urljoin       # NOTE(review): unused in this module

import requests
import requests_mock  # imported for side effects; the fixture of the same name shadows it in tests

from ..app import GitHubClient

class TestGitHubClient:
    """Test suite for GitHubClient class"""

    def test_init(self, github_client_params):
        """Test GitHubClient initialization"""
        client = GitHubClient(**github_client_params)
        assert client.api_base_url == github_client_params["api_base_url"]
        assert client.token == github_client_params["token"]
        assert client.org_name == github_client_params["org_name"]
        assert client.commit_author_name == github_client_params["commit_author_name"]
        assert client.commit_author_email == github_client_params["commit_author_email"]
        assert "Authorization" in client.headers
        # GitHub classic token scheme: "token <PAT>"
        assert client.headers["Authorization"] == f"token {github_client_params['token']}"

    def test_get_repository_existing(self, requests_mock, github_client_params, mock_repository_response):
        """Test getting an existing repository"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"

        # Mock the API response
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}",
            json=mock_repository_response
        )

        repo = client.get_repository(repo_name)
        assert repo["name"] == mock_repository_response["name"]
        assert repo["default_branch"] == mock_repository_response["default_branch"]

    def test_get_repository_create_new(self, requests_mock, github_client_params, mock_repository_response):
        """Test creating a new repository"""
        client = GitHubClient(**github_client_params)
        repo_name = "new-test-repo"

        # Mock 404 for get request and success for create
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}",
            status_code=404
        )
        requests_mock.post(
            f"{github_client_params['api_base_url']}/orgs/{github_client_params['org_name']}/repos",
            json=mock_repository_response
        )

        repo = client.get_repository(repo_name, create=True)
        assert repo["name"] == mock_repository_response["name"]

    def test_get_default_branch(self, requests_mock, github_client_params, mock_repository_response):
        """Test getting repository default branch"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"

        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}",
            json=mock_repository_response
        )

        branch = client.get_default_branch(repo_name)
        assert branch == mock_repository_response["default_branch"]

    def test_create_blob(self, requests_mock, github_client_params, mock_blob_response):
        """Test creating a blob"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"
        content = b"Hello World!"

        requests_mock.post(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/blobs",
            json=mock_blob_response
        )

        blob_sha = client.create_blob(repo_name, content)
        assert blob_sha == mock_blob_response["sha"]

    def test_create_tree(self, requests_mock, github_client_params, mock_tree_response):
        """Test creating a tree"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"
        # Single-entry git tree: a regular file (mode 100644) pointing at a blob
        tree_items = [{
            "path": "test.txt",
            "mode": "100644",
            "type": "blob",
            "sha": "test-blob-sha"
        }]

        requests_mock.post(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/trees",
            json=mock_tree_response
        )

        tree_sha = client.create_tree(repo_name, tree_items)
        assert tree_sha == mock_tree_response["sha"]

    def test_create_commit(self, requests_mock, github_client_params, mock_commit_response):
        """Test creating a commit"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"
        message = "Test commit"
        tree_sha = "test-tree-sha"
        parent_shas = ["parent-sha"]

        requests_mock.post(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/commits",
            json=mock_commit_response
        )

        commit_sha = client.create_commit(repo_name, message, tree_sha, parent_shas)
        assert commit_sha == mock_commit_response["sha"]

    def test_update_reference(self, requests_mock, github_client_params):
        """Test updating a reference"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"
        ref = "heads/main"
        sha = "test-commit-sha"

        requests_mock.patch(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/refs/{ref}",
            status_code=200
        )

        # Should not raise an exception
        client.update_reference(repo_name, ref, sha)

    def test_create_reference(self, requests_mock, github_client_params):
        """Test creating a reference"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"
        ref = "refs/heads/main"
        sha = "test-commit-sha"

        requests_mock.post(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/refs",
            status_code=201
        )

        # Should not raise an exception
        client.create_reference(repo_name, ref, sha)

    def test_clone_repository_contents(self, requests_mock, github_client_params, mock_repository_response,
                                       mock_reference_response, mock_tree_response, mock_blob_response, tmp_path):
        """Test cloning repository contents"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"
        target_dir = str(tmp_path)

        # Mock all required API calls: repo lookup, ref resolution,
        # recursive tree listing, then blob download.
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}",
            json=mock_repository_response
        )
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/refs/heads/main",
            json=mock_reference_response
        )
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/trees/{mock_reference_response['object']['sha']}?recursive=1",
            json=mock_tree_response
        )
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/blobs/{mock_tree_response['tree'][0]['sha']}",
            json=mock_blob_response
        )

        default_branch = client.clone_repository_contents(repo_name, target_dir)
        assert default_branch == mock_repository_response["default_branch"]
        # The cloned file must land at the tree entry's relative path
        assert os.path.exists(os.path.join(target_dir, mock_tree_response["tree"][0]["path"]))

    def test_commit_repository_contents(self, requests_mock, github_client_params, mock_repository_response,
                                        mock_reference_response, mock_tree_response, mock_commit_response, tmp_path):
        """Test committing repository contents"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"
        work_dir = str(tmp_path)

        # Create a test file
        test_file = os.path.join(work_dir, "test.txt")
        with open(test_file, "w") as f:
            f.write("test content")

        # Mock all required API calls for the commit round-trip:
        # repo lookup, ref + parent commit reads, then blob/tree/commit
        # creation and the ref update.
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}",
            json=mock_repository_response
        )
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/refs/heads/main",
            json=mock_reference_response
        )
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/commits/{mock_reference_response['object']['sha']}",
            json=mock_commit_response
        )
        requests_mock.post(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/blobs",
            json={"sha": "new-blob-sha"}
        )
        requests_mock.post(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/trees",
            json={"sha": "new-tree-sha"}
        )
        requests_mock.post(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/commits",
            json={"sha": "new-commit-sha"}
        )
        requests_mock.patch(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}/git/refs/heads/main",
            status_code=200
        )

        default_branch = client.commit_repository_contents(repo_name, work_dir, "Test commit")
        assert default_branch == mock_repository_response["default_branch"]

    def test_error_handling(self, requests_mock, github_client_params):
        """Test error handling in GitHubClient methods"""
        client = GitHubClient(**github_client_params)
        repo_name = "test-repo"

        # Test error on repository creation: lookup 404s, create 500s
        requests_mock.get(
            f"{github_client_params['api_base_url']}/repos/{github_client_params['org_name']}/{repo_name}",
            status_code=404
        )
        requests_mock.post(
            f"{github_client_params['api_base_url']}/orgs/{github_client_params['org_name']}/repos",
            status_code=500,
            text="Internal Server Error"
        )

        with pytest.raises(Exception) as exc_info:
            client.get_repository(repo_name, create=True)
        assert "Failed to create repository" in str(exc_info.value)
pytest.mark.integration +] + +class TestGitHubClientIntegration: + """Integration tests for GitHubClient class""" + + @pytest.fixture(autouse=True) + def setup_client(self): + """Setup GitHubClient instance for tests""" + self.client = GitHubClient( + os.environ["GITHUB_API"], + os.environ["GITHUB_TOKEN"], + os.environ["GITHUB_ORG"], + "Integration Test", + "test@example.com" + ) + + @pytest.fixture + def cleanup_repo(self): + """Fixture to track and cleanup test repositories""" + created_repos = [] + + def _register_repo(repo_name): + created_repos.append(repo_name) + return repo_name + + yield _register_repo + + # Cleanup: Archive all created test repositories + for repo in created_repos: + try: + archive_url = f"{os.environ['GITHUB_API']}/repos/{os.environ['GITHUB_ORG']}/{repo}" + response = requests.patch( + archive_url, + headers={ + "Authorization": f"token {os.environ['GITHUB_TOKEN']}", + "Accept": "application/vnd.github.v3+json" + }, + json={"archived": True}, + verify=False + ) + if response.status_code != 200: + logging.warning(f"Failed to archive repository {repo}: {response.status_code}") + except Exception as e: + logging.warning(f"Error archiving repository {repo}: {str(e)}") + + @pytest.fixture + def temp_repo_name(self): + """Generate a unique temporary repository name""" + return f"temp-test-repo-{uuid.uuid4().hex[:8]}" + + def test_repository_creation(self, temp_repo_name, cleanup_repo): + """Test repository creation""" + repo_name = cleanup_repo(temp_repo_name) + + # Create new repository + repo = self.client.get_repository(repo_name, create=True) + + assert repo["name"] == repo_name + assert not repo["archived"] + assert repo["private"] + + def test_file_operations(self, temp_repo_name, cleanup_repo): + """Test file operations""" + repo_name = cleanup_repo(temp_repo_name) + + # Create new repository + repo = self.client.get_repository(repo_name, create=True) + + # Create a test file + test_content = { + "test": True, + "timestamp": 
datetime.utcnow().isoformat() + } + + # Create temporary directory + with tempfile.TemporaryDirectory() as work_dir: + test_file = os.path.join(work_dir, "test-config.json") + + # Write test content + with open(test_file, "w") as f: + json.dump(test_content, f, indent=2) + + # Commit the file + branch = self.client.commit_repository_contents( + repo_name, + work_dir, + "Test commit from integration tests" + ) + assert branch == "main" + + # Add a small delay to ensure GitHub API has processed the commit + time.sleep(2) + + # Verify we can clone the repository with the file + output_dir = os.path.join(work_dir, "clone") + cloned_branch = self.client.clone_repository_contents(repo_name, output_dir) + + assert cloned_branch == "main" + assert os.path.exists(os.path.join(output_dir, "test-config.json")) + + def test_branch_operations(self, temp_repo_name, cleanup_repo): + """Test branch operations""" + repo_name = cleanup_repo(temp_repo_name) + + # Create new repository + repo = self.client.get_repository(repo_name, create=True) + + # Create a test file in main branch + with tempfile.TemporaryDirectory() as work_dir: + # Initial commit on main branch + main_file = os.path.join(work_dir, "test.txt") + with open(main_file, "w") as f: + f.write("main branch content") + + self.client.commit_repository_contents( + repo_name, + work_dir, + "Initial commit on main" + ) + + # Create and switch to a test branch + test_branch = "test-branch" + # Clean directory for test branch changes + for file in os.listdir(work_dir): + file_path = os.path.join(work_dir, file) + if os.path.isfile(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + + # Create different content in test branch + with open(main_file, "w") as f: + f.write("test branch content") + + self.client.commit_repository_contents( + repo_name, + work_dir, + "Commit on test branch", + branch=test_branch + ) + + # Clone and verify main branch content + main_output = 
os.path.join(work_dir, "clone-main") + os.makedirs(main_output, exist_ok=True) + self.client.clone_repository_contents(repo_name, main_output, branch="main") + + with open(os.path.join(main_output, "test.txt")) as f: + assert f.read().strip() == "main branch content" + + # Clone and verify test branch content + test_output = os.path.join(work_dir, "clone-test") + os.makedirs(test_output, exist_ok=True) + self.client.clone_repository_contents(repo_name, test_output, branch=test_branch) + + with open(os.path.join(test_output, "test.txt")) as f: + assert f.read().strip() == "test branch content" diff --git a/test_service_catalog.py b/test_service_catalog.py new file mode 100755 index 00000000..18435f23 --- /dev/null +++ b/test_service_catalog.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 +"""Test script for Service Catalog event parsing.""" + +import json +import sys +from pathlib import Path + +# Add parent directory to path +sys.path.insert(0, str(Path(__file__).parent)) + +from template_automation.app import ServiceCatalogInput + + +def test_service_catalog_parsing(): + """Test parsing of Service Catalog event.""" + + # Load test event + with open('events/service-catalog-event.json', 'r') as f: + event = json.load(f) + + print("Testing Service Catalog event parsing...") + print("=" * 60) + + # Extract provisioning parameters + if 'detail' not in event: + print("❌ ERROR: Event missing 'detail' field") + return False + + detail = event['detail'] + + if 'provisioningParameters' not in detail: + print("❌ ERROR: Event detail missing 'provisioningParameters' field") + return False + + provisioning_params = detail['provisioningParameters'] + print(f"✓ Found {len(provisioning_params)} provisioning parameters") + print() + + # Parse with Pydantic model + try: + service_catalog_input = ServiceCatalogInput(**provisioning_params) + print("✓ ServiceCatalogInput validation successful") + print(f" - project_name: {service_catalog_input.project_name}") + print(f" - owning_team: 
{service_catalog_input.owning_team}") + print() + except Exception as e: + print(f"❌ ERROR: Validation failed: {e}") + return False + + # Convert to template settings + try: + template_settings = service_catalog_input.to_template_settings() + print("✓ Converted to template settings format:") + print(f" - attrs keys: {list(template_settings['attrs'].keys())}") + print(f" - tags keys: {list(template_settings['tags'].keys())}") + print() + print("Full template_settings:") + print(json.dumps(template_settings, indent=2)) + print() + except Exception as e: + print(f"❌ ERROR: Conversion failed: {e}") + return False + + print("=" * 60) + print("✓ All tests passed!") + return True + + +if __name__ == "__main__": + success = test_service_catalog_parsing() + sys.exit(0 if success else 1) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..d4d2672a --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,15 @@ +import os +import pytest + +@pytest.fixture(autouse=True) +def clean_environment(): + """Clean environment variables before each test""" + # Save original environment + env_orig = dict(os.environ) + + # Run test + yield + + # Restore original environment + os.environ.clear() + os.environ.update(env_orig) diff --git a/tests/integration/test_github_client_integration.py b/tests/integration/test_github_client_integration.py new file mode 100644 index 00000000..b3744d9a --- /dev/null +++ b/tests/integration/test_github_client_integration.py @@ -0,0 +1,143 @@ +import os +import pytest +from template_automation.github_client import GitHubClient +from github import GithubException + +# Configuration from environment variables +GITHUB_TOKEN = os.getenv("GITHUB_TOKEN") +GITHUB_API_URL = os.getenv("GITHUB_API_URL", "https://api.github.com") +GITHUB_ORG = os.getenv("GITHUB_ORG", "test-organization") +TEST_REPO_PREFIX = "test-automation-" + +def is_integration_test_enabled(): + """Check if integration tests should run based on environment variables.""" 
"""Integration tests for the PyGithub-backed GitHubClient.

Driven entirely by environment variables; every test is marked
``integration`` and skips when GITHUB_TOKEN / GITHUB_ORG are absent.
Test repositories all share TEST_REPO_PREFIX so cleanup can find them.
"""
import os
import pytest
from template_automation.github_client import GitHubClient
from github import GithubException

# Configuration from environment variables
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
GITHUB_API_URL = os.getenv("GITHUB_API_URL", "https://api.github.com")
GITHUB_ORG = os.getenv("GITHUB_ORG", "test-organization")
TEST_REPO_PREFIX = "test-automation-"

def is_integration_test_enabled():
    """Check if integration tests should run based on environment variables."""
    return bool(GITHUB_TOKEN and GITHUB_ORG)

@pytest.fixture(scope="session")
def github_client():
    """Create a GitHub client for testing."""
    if not is_integration_test_enabled():
        pytest.skip("Integration tests disabled - missing required environment variables")

    return GitHubClient(
        api_base_url=GITHUB_API_URL,
        token=GITHUB_TOKEN,
        org_name=GITHUB_ORG
    )

@pytest.fixture(autouse=True)
def cleanup_test_repos(github_client):
    """Cleanup test repositories before and after tests.

    Deletes every org repo whose name starts with TEST_REPO_PREFIX, both
    before and after each test, so runs start from a clean slate.
    """
    if not is_integration_test_enabled():
        # FIX: this is a generator fixture — a bare ``return`` before the
        # yield means the generator never yields, which pytest reports as a
        # fixture error. Yield exactly once, then stop.
        yield
        return

    # Cleanup before test (best-effort: API failures only warn)
    try:
        repos = github_client.org.get_repos()
        for repo in repos:
            if repo.name.startswith(TEST_REPO_PREFIX):
                repo.delete()
    except GithubException as e:
        print(f"Cleanup warning: {e}")

    yield

    # Cleanup after test
    try:
        repos = github_client.org.get_repos()
        for repo in repos:
            if repo.name.startswith(TEST_REPO_PREFIX):
                repo.delete()
    except GithubException as e:
        print(f"Cleanup warning: {e}")

@pytest.mark.integration
def test_repository_creation(github_client):
    """Test basic repository creation and deletion."""
    repo_name = f"{TEST_REPO_PREFIX}basic"

    # Test repository creation
    repo = github_client.get_repository(repo_name, create=True)
    assert repo.name == repo_name
    assert repo.private is True

    # Verify repository exists
    repo = github_client.get_repository(repo_name)
    assert repo.name == repo_name

@pytest.mark.integration
def test_branch_operations(github_client):
    """Test branch creation and management."""
    repo_name = f"{TEST_REPO_PREFIX}branches"
    branch_name = "test-branch"

    # Create repository and branch
    repo = github_client.get_repository(repo_name, create=True)
    github_client.create_branch(repo_name, branch_name)

    # Verify branch exists
    repo = github_client.get_repository(repo_name)
    branch = repo.get_branch(branch_name)
    assert branch.name == branch_name

@pytest.mark.integration
def test_file_operations(github_client):
    """Test file creation, reading, and updating."""
    repo_name = f"{TEST_REPO_PREFIX}files"
    test_file = "test.txt"
    initial_content = "Hello, World!"
    updated_content = "Updated content"

    # Create repository and file
    repo = github_client.get_repository(repo_name, create=True)
    github_client.write_file(repo, test_file, initial_content)

    # Read and verify content
    content = github_client.read_file(repo, test_file)
    assert content == initial_content

    # Update and verify
    github_client.write_file(repo, test_file, updated_content)
    content = github_client.read_file(repo, test_file)
    assert content == updated_content

@pytest.mark.integration
def test_pull_request_workflow(github_client):
    """Test pull request creation workflow."""
    repo_name = f"{TEST_REPO_PREFIX}pr"
    branch_name = "feature-branch"

    # Setup repository and branch
    repo = github_client.get_repository(repo_name, create=True)
    github_client.create_branch(repo_name, branch_name)

    # Create PR
    pr = github_client.create_pull_request(
        repo_name=repo_name,
        title="Test PR",
        body="Testing pull request creation",
        head_branch=branch_name
    )

    assert pr.title == "Test PR"
    assert pr.head.ref == branch_name
    assert pr.base.ref == "main"

@pytest.mark.integration
def test_team_permissions(github_client):
    """Test team permission management."""
    repo_name = f"{TEST_REPO_PREFIX}team-perms"
    team_name = os.getenv("GITHUB_TEST_TEAM")

    if not team_name:
        pytest.skip("Skipping team permission test - GITHUB_TEST_TEAM not set")

    # Create repository
    repo = github_client.get_repository(repo_name, create=True)

    # Set and verify team permissions
    github_client.set_team_permission(repo_name, team_name, "admin")

    # Verify team has access (this will raise an exception if access is not granted)
    team = github_client.org.get_team_by_slug(team_name)
    assert team.has_in_repos(repo)
"""Unit tests for eks_automation.app parameter resolution and GitHub ops.

These tests mutate os.environ directly; the autouse ``clean_environment``
fixture in tests/conftest.py restores the environment after each test.
"""
import os
import pytest
import json  # NOTE(review): unused in this module — confirm before removing
from unittest.mock import patch, MagicMock
from botocore.exceptions import ClientError
from eks_automation.app import get_parameter, operate_github

@pytest.fixture
def mock_ssm():
    # Patch boto3.client so get_parameter talks to a MagicMock SSM client
    with patch('boto3.client') as mock_client:
        ssm_client = MagicMock()
        mock_client.return_value = ssm_client
        yield ssm_client

@pytest.fixture
def mock_secrets():
    # Stub the module-level github_token helper so no real secret is fetched
    with patch('eks_automation.app.github_token') as mock_token:
        mock_token.return_value = 'fake-token'
        yield mock_token

def test_get_parameter_from_ssm(mock_ssm):
    """SSM value wins when the parameter exists."""
    # Setup
    mock_ssm.get_parameter.return_value = {
        'Parameter': {'Value': 'param-value'}
    }

    # Test
    result = get_parameter('test-param')

    # Assert: name is prefixed with the /template-automation/ namespace
    assert result == 'param-value'
    mock_ssm.get_parameter.assert_called_once_with(
        Name='/template-automation/test-param',
        WithDecryption=False
    )

def test_get_parameter_from_env(mock_ssm):
    """Falls back to the environment when SSM has no such parameter."""
    # Setup
    mock_ssm.get_parameter.side_effect = ClientError(
        {'Error': {'Code': 'ParameterNotFound'}},
        'GetParameter'
    )
    os.environ['test-param'] = 'env-value'

    # Test
    result = get_parameter('test-param')

    # Assert
    assert result == 'env-value'

def test_get_parameter_with_default(mock_ssm):
    """Falls back to the supplied default when SSM and env both miss."""
    # Setup
    mock_ssm.get_parameter.side_effect = ClientError(
        {'Error': {'Code': 'ParameterNotFound'}},
        'GetParameter'
    )

    # Test
    result = get_parameter('missing-param', default='default-value')

    # Assert
    assert result == 'default-value'

@patch('eks_automation.app.GitHubClient')
def test_operate_github_success(mock_github_client, mock_secrets):
    """Happy path: repo fetched, contents committed, topics updated."""
    # Setup
    mock_client = MagicMock()
    mock_github_client.return_value = mock_client

    # Set required environment variables
    os.environ['GITHUB_API'] = 'https://api.github.com'
    os.environ['GITHUB_ORG_NAME'] = 'test-org'
    os.environ['TEMPLATE_REPO_NAME'] = 'template-repo'

    # Test data
    new_repo_name = 'test-repo'
    template_settings = {'key': 'value'}

    # Test
    operate_github(new_repo_name, template_settings)

    # Assert
    mock_client.get_repository.assert_called()
    mock_client.commit_repository_contents.assert_called()
    mock_client.update_repository_topics.assert_called()

@pytest.mark.parametrize('missing_param', ['GITHUB_API', 'GITHUB_ORG_NAME', 'TEMPLATE_REPO_NAME'])
def test_operate_github_missing_required_params(missing_param, mock_secrets):
    """Each required env var missing must raise ValueError naming it."""
    # Setup
    required_params = {
        'GITHUB_API': 'https://api.github.com',
        'GITHUB_ORG_NAME': 'test-org',
        'TEMPLATE_REPO_NAME': 'template-repo'
    }

    # Remove one required parameter
    test_params = required_params.copy()
    del test_params[missing_param]

    # Set environment variables
    for key, value in test_params.items():
        os.environ[key] = value
    if missing_param in os.environ:
        del os.environ[missing_param]

    # Test
    with pytest.raises(ValueError) as exc_info:
        operate_github('test-repo', {'key': 'value'})

    assert missing_param in str(exc_info.value)
'SecretString': 'fake-token' + } + + # Test + client = GitHubClient() + + # Assert + assert client.token == 'fake-token' + assert client.org_name == 'test-org' + assert client.headers['Authorization'] == 'Bearer fake-token' + mock_secrets_manager.get_secret_value.assert_called_once_with( + SecretId='test/github-token' + ) + +def test_github_client_missing_secret_name(): + # Test + with pytest.raises(ValueError, match="GITHUB_TOKEN_SECRET_NAME environment variable is required"): + GitHubClient() + +def test_github_client_secret_not_found(mock_secrets_manager, github_client_env): + # Setup + mock_secrets_manager.get_secret_value.side_effect = ClientError( + {'Error': {'Code': 'ResourceNotFoundException', 'Message': 'Secret not found'}}, + 'GetSecretValue' + ) + + # Test + with pytest.raises(Exception, match="Failed to retrieve GitHub token from Secrets Manager"): + GitHubClient() + +def test_github_client_trigger_workflow_success(mock_secrets_manager, github_client_env): + # Setup + mock_secrets_manager.get_secret_value.return_value = { + 'SecretString': 'fake-token' + } + + with patch('requests.post') as mock_post: + mock_post.return_value.status_code = 204 + client = GitHubClient() + + # Test + result = client.trigger_workflow('test-repo') + + # Assert + assert result == {"status": "success"} + mock_post.assert_called_once() + assert mock_post.call_args[1]['headers']['Authorization'] == 'Bearer fake-token' diff --git a/tests/test_integration.py b/tests/test_integration.py new file mode 100644 index 00000000..2d554ba6 --- /dev/null +++ b/tests/test_integration.py @@ -0,0 +1,62 @@ +import os +import json +import pytest +import uuid +from eks_automation.app import lambda_handler + +# Test environment variables +os.environ["GITHUB_TOKEN_SECRET_NAME"] = "github-token" # Uses AWS Secrets Manager +os.environ["GITHUB_API"] = "https://api.github.com" +os.environ["GITHUB_ORG_NAME"] = "your-org-name" # Replace with test org +os.environ["TEMPLATE_REPO_NAME"] = 
"template-eks-cluster" +os.environ["TEMPLATE_SOURCE_VERSION"] = "main" # Or specific tag/SHA for testing + +@pytest.fixture +def test_event(): + """Create test event with unique repository name""" + repo_name = f"test-eks-cluster-{uuid.uuid4().hex[:8]}" + return { + "body": { + "project_name": repo_name, + "eks_settings": { + "cluster_name": "test-cluster", + "kubernetes_version": "1.27", + "region": "us-west-2", + "vpc_config": { + "vpc_id": "vpc-test123", + "subnet_ids": ["subnet-test1", "subnet-test2"] + }, + "nodegroups": [{ + "name": "test-ng", + "instance_types": ["t3.medium"], + "desired_size": 2, + "min_size": 1, + "max_size": 3 + }] + } + } + } + +@pytest.fixture +def lambda_context(): + """Mock Lambda context object""" + class MockContext: + def __init__(self): + self.aws_request_id = "test-request-id" + def get_remaining_time_in_millis(self): + return 30000 + return MockContext() + +def test_lambda_handler_creates_repository(test_event, lambda_context): + """Test that Lambda handler creates repository with correct settings""" + # Execute Lambda handler + response = lambda_handler(test_event, lambda_context) + + assert response["statusCode"] == 200 + assert "Success" in response["body"] + + # Additional assertions could verify: + # - Repository was created in GitHub + # - Config file contains correct settings + # - Topics were set correctly + # But these require GitHub API access diff --git a/varfiles/default.json b/varfiles/default.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/varfiles/default.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/varfiles/default.tfvars b/varfiles/default.tfvars new file mode 100644 index 00000000..08e90254 --- /dev/null +++ b/varfiles/default.tfvars @@ -0,0 +1,16 @@ +aws_region = "us-east-1" +repository_name = "template-automation-lambda" + +catalog_data = { + about_text = "Template Automation Lambda Image" + architectures = ["x86_64"] + description = "Lambda container image for 
template automation" + operating_systems = ["AmazonLinux2"] + usage_text = "Creates a Template Automation Lambda container image" +} + +tags = { + env = "production" + managed_by = "terraform" + project = "template-automation" +} diff --git a/varfiles/packer.pkrvars.hcl b/varfiles/packer.pkrvars.hcl new file mode 100644 index 00000000..e69de29b diff --git a/variables.tf b/variables.tf new file mode 100644 index 00000000..53651e9e --- /dev/null +++ b/variables.tf @@ -0,0 +1,51 @@ + +# Note: GitHub-specific variables (github_api, github_org_name, template_repo_name, etc.) +# have been moved to the terraform-aws-template-automation module. +# They are now configured as SSM parameters in that module. +# +# This file contains only variables related to the container image and +# ECR repository setup. + +variable "aws_region" { + description = "AWS region where resources will be created" + type = string + default = "us-east-1" +} + +variable "repository_name" { + description = "Name of the ECR public repository" + type = string + default = "template-automation-lambda" +} + +variable "environment" { + description = "Environment tag value" + type = string + default = "production" +} + +variable "catalog_data" { + description = "Configuration for the ECR repository catalog data" + type = object({ + about_text = string + architectures = list(string) + description = string + operating_systems = list(string) + usage_text = string + }) + default = { + about_text = "Template Automation Lambda Image" + architectures = ["x86_64"] + description = "Lambda container image for template automation" + operating_systems = ["AmazonLinux2"] + usage_text = "Creates a Template Automation Lambda container image" + } +} + +variable "tags" { + description = "Tags to apply to all resources" + type = map(string) + default = { + env = "production" + } +} \ No newline at end of file