Skip to content

Commit

Permalink
initial
Browse files Browse the repository at this point in the history
  • Loading branch information
badra001 committed Mar 10, 2026
1 parent 2e82a98 commit 2572d10
Show file tree
Hide file tree
Showing 2 changed files with 148 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
#!/usr/bin/env python
import json, argparse, sys, os, glob
from collections import Counter, defaultdict

__version__ = "1.0.0"

def find_latest_file(pattern):
    """Return the newest file matching *pattern*, or None when nothing matches.

    "Newest" is judged by filesystem creation time (``os.path.getctime``).
    """
    candidates = glob.glob(pattern)
    if not candidates:
        return None
    return max(candidates, key=os.path.getctime)

def main():
    """Load a scheduling-audit JSON dump and print a PowerSchedule
    compliance report broken down by Environment tag.

    The input file comes from ``--input`` or, failing that, the newest
    ``audit_results.check_scheduling.*.json`` in the working directory.
    Exits with status 1 when no input file can be located.
    """
    parser = argparse.ArgumentParser(description="PowerSchedule & FinOps Assessor")
    parser.add_argument("--input", help="JSON audit file")
    args = parser.parse_args()

    input_file = args.input or find_latest_file("audit_results.check_scheduling.*.json")
    if not input_file:
        print("Error: No file found.")
        sys.exit(1)

    # JSON is UTF-8 by spec; be explicit rather than relying on the platform default.
    with open(input_file, 'r', encoding="utf-8") as f:
        data = json.load(f)

    # env -> schedule_value -> count
    matrix = defaultdict(Counter)
    total_resources = 0
    env_totals = Counter()

    for account in data:
        checks = account.get("data", {})
        for key, val in checks.items():
            # Resource entries are keyed "region:resource-id"; skip the
            # per-account summary record and anything without that shape.
            if key == "account_summary" or ":" not in key:
                continue

            total_resources += 1
            tags = val.get("tags", {})

            # Normalize Environment key (tagging in the wild is mixed-case).
            env = tags.get('Environment') or tags.get('environment') or "Undefined"
            schedule = tags.get('PowerSchedule', "No Schedule")

            matrix[env][schedule] += 1
            env_totals[env] += 1

    report_width = 120
    print("-" * report_width)
    print(f"POWERSCHEDULE COMPLIANCE ASSESSMENT | Input: {os.path.basename(input_file)}")
    print("-" * report_width)

    # Sort environments for consistent output
    for env in sorted(matrix):
        print(f"\nEnvironment: {env}")
        print(f" {'Schedule Value':<30} | {'Count':<10} | {'Percentage'}")
        print(f" {'-'*30} | {'-'*10} | {'-'*10}")

        for sched, count in matrix[env].items():
            pct = (count / env_totals[env]) * 100
            print(f" {sched:<30} | {count:<10} | {pct:.1f}%")

    print("\n" + "=" * report_width)
    print("ORGANIZATION SUMMARY")
    print(f" Total Resources Scanned: {total_resources}")
    print(f" Environments Found: {len(matrix)}")
    print("=" * report_width)

if __name__ == "__main__": main()
86 changes: 86 additions & 0 deletions local-app/python-tools/cross-organization/check_scheduling.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
import boto3
from datetime import datetime

# --- VERSIONING ---
__version__ = "1.0.0"

def get_tags(tag_list):
    """Reduce an AWS ``[{'Key': ..., 'Value': ...}]`` tag list to a dict
    containing only scheduling-relevant keys and ``finops_*`` tags.

    Returns an empty dict for a missing or empty tag list.
    """
    if not tag_list:
        return {}
    wanted = {'Name', 'Environment', 'environment', 'PowerSchedule'}
    return {
        tag['Key']: tag['Value']
        for tag in tag_list
        if tag['Key'] in wanted or tag['Key'].lower().startswith('finops_')
    }

def account_task(account_session, account_id, account_name, region):
    """Inventory EC2 instances and RDS databases across every region of one
    account, recording scheduling/FinOps-relevant tags per resource.

    Parameters:
        account_session: boto3 session holding credentials for the account.
        account_id: AWS account id, used to synthesize EC2 instance ARNs.
        account_name: kept for the caller's task signature (unused here).
        region: home region used for the describe_regions call.

    Returns a dict ``{"alias": ..., "data": {"<region>:<id>": {...}, ...}}``
    plus an ``account_summary`` entry; on a top-level failure the dict
    carries an ``"error"`` key with the exception text instead.
    """
    results = {"alias": "N/A", "data": {}}
    try:
        ec2_global = account_session.client('ec2', region_name=region)
        regions = [r['RegionName'] for r in ec2_global.describe_regions()['Regions']]

        for reg in regions:
            ec2 = account_session.client('ec2', region_name=reg)
            asg = account_session.client('autoscaling', region_name=reg)
            rds = account_session.client('rds', region_name=reg)

            # 1. Map ASGs to Instances
            asg_map = {}
            try:
                asg_paginator = asg.get_paginator('describe_auto_scaling_groups')
                for page in asg_paginator.paginate():
                    for group in page['AutoScalingGroups']:
                        for inst in group['Instances']:
                            asg_map[inst['InstanceId']] = group['AutoScalingGroupName']
            except Exception:
                # Best-effort: a region without ASG access simply yields no mapping.
                # (Narrowed from a bare except so Ctrl-C / SystemExit still propagate.)
                pass

            # 2. Process EC2 Instances
            try:
                ec2_paginator = ec2.get_paginator('describe_instances')
                for page in ec2_paginator.paginate():
                    for res in page['Reservations']:
                        for inst in res['Instances']:
                            i_id = inst['InstanceId']
                            raw_tags = inst.get('Tags', [])
                            tags = get_tags(raw_tags)

                            # BUGFIX: detect EKS nodes from the RAW tag list.
                            # get_tags() filters out kubernetes.io/cluster/*
                            # keys, so checking the filtered dict (as before)
                            # could never classify a node as EKS.
                            is_eks = any(
                                t['Key'].startswith('kubernetes.io/cluster/')
                                for t in raw_tags
                            )
                            asg_name = asg_map.get(i_id)

                            i_type = "plain"
                            if is_eks:
                                i_type = "eks_node"
                            elif asg_name:
                                i_type = "asg_member"

                            results["data"][f"{reg}:{i_id}"] = {
                                "resource": f"arn:aws:ec2:{reg}:{account_id}:instance/{i_id}",
                                "type": i_type,
                                "asg_name": asg_name or "N/A",
                                "region": reg,
                                "tags": tags
                            }
            except Exception:
                # Best-effort: skip regions where EC2 enumeration fails.
                pass

            # 3. Process RDS
            try:
                rds_paginator = rds.get_paginator('describe_db_instances')
                for page in rds_paginator.paginate():
                    for db in page['DBInstances']:
                        db_id = db['DBInstanceIdentifier']
                        # RDS Tags require a separate call per resource
                        t_resp = rds.list_tags_for_resource(ResourceName=db['DBInstanceArn'])
                        tags = get_tags(t_resp.get('TagList', []))

                        results["data"][f"{reg}:{db_id}"] = {
                            "resource": db['DBInstanceArn'],
                            "type": "rds",
                            "region": reg,
                            "tags": tags
                        }
            except Exception:
                # Best-effort: skip regions where RDS enumeration fails.
                pass

        # Summary is added last so the count reflects only real resources.
        results["data"]["account_summary"] = {"_summary": f"RESOURCES:{len(results['data'])}"}
    except Exception as e:
        results["error"] = str(e)
    return results

0 comments on commit 2572d10

Please sign in to comment.