feat(compliance): New AWS Performance Pillar Framework #3170

Open · wants to merge 10 commits into base: v3
@@ -0,0 +1,30 @@
{
"Framework": "AWS-Well-Architected-Framework-Performance-Efficiency-Pillar",
"Version": "",
"Provider": "AWS",
"Description": "Best Practices for the AWS Well-Architected Framework Performance Efficiency Pillar encompass the ability of a workload to use computing resources efficiently and meet requirements. Performance efficiency focuses on the efficient use of cloud resources to meet requirements and avoid unnecessary costs.",
"Requirements": [
{
"Id": "PERF03-BP02",
"Description": "Optimize the use of computing resources by selecting the right instance types and sizes for your workload. Regularly review and update your choices based on the workload's evolving requirements.",
"Attributes": [
{
"Name": "PERF03-BP02 Optimize instance types and sizes",
"WellArchitectedQuestionId": "selecting-right-instance-types",
"WellArchitectedPracticeId": "perf_selecting_right_instance_types",
"Section": "Resource optimization",
"SubSection": "Selecting the right instance types",
"LevelOfRisk": "Medium",
"AssessmentMethod": "Manual",
"Description": "Optimize the use of computing resources by selecting the right instance types and sizes for your workload. Regularly review and update your choices based on the workload's evolving requirements.",
"ImplementationGuidanceUrl": "https://docs.aws.amazon.com/wellarchitected/latest/performance-pillar/perf_selecting_right_instance_types.html#implementation-guidance"
}
],
"Checks": [
"ec2_instance_type_optimized",
"autoscaling_group_scaling_enabled",
"awslambda_function_serverless_architecture"
]
}
]
}
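For reference, a minimal sketch (not Prowler's actual loader) of how this framework file can be read and its requirement-to-check mapping enumerated; the file path below is illustrative:

import json

# Illustrative path; the real location of the framework file in the repo may differ.
framework_path = "aws_well_architected_framework_performance_pillar_aws.json"

with open(framework_path) as f:
    framework = json.load(f)

print(f"Framework: {framework['Framework']} ({framework['Provider']})")
for requirement in framework["Requirements"]:
    # Each requirement maps a Well-Architected best practice to Prowler check IDs.
    print(f"- {requirement['Id']}: {requirement['Description']}")
    for check_id in requirement["Checks"]:
        print(f"    check: {check_id}")
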
13 changes: 4 additions & 9 deletions prowler/lib/banner.py
@@ -4,15 +4,10 @@


def print_banner(args):
banner = f"""{banner_color} _
_ __ _ __ _____ _| | ___ _ __
| '_ \| '__/ _ \ \ /\ / / |/ _ \ '__|
| |_) | | | (_) \ V V /| | __/ |
| .__/|_| \___/ \_/\_/ |_|\___|_|v{prowler_version}
|_|{Fore.BLUE} the handy cloud security tool

{Fore.YELLOW}Date: {timestamp.strftime("%Y-%m-%d %H:%M:%S")}{Style.RESET_ALL}
"""
banner = f"""{banner_color}
The handy cloud security tool
{Fore.YELLOW}Date: {timestamp.strftime("%Y-%m-%d %H:%M:%S")}{Style.RESET_ALL}
"""
print(banner)

if args.verbose or args.quiet:
51 changes: 40 additions & 11 deletions prowler/lib/outputs/file_descriptors.py
@@ -9,28 +9,32 @@
json_file_suffix,
json_ocsf_file_suffix,
)

from prowler.lib.logger import logger
from prowler.lib.outputs.html import add_html_header
from prowler.lib.outputs.models import (
Aws_Check_Output_CSV,
Azure_Check_Output_CSV,
Check_Output_CSV_AWS_CIS,
Check_Output_CSV_AWS_ISO27001_2013,
Check_Output_CSV_AWS_Well_Architected,
Check_Output_CSV_ENS_RD2022,
Check_Output_CSV_GCP_CIS,
Check_Output_CSV_Generic_Compliance,
Check_Output_MITRE_ATTACK,
Gcp_Check_Output_CSV,
generate_csv_fields,
)
from prowler.lib.utils.utils import file_exists, open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.common.outputs import get_provider_output_model
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info


def initialize_file_descriptor(
filename: str,
output_mode: str,
audit_info: Any,
audit_info: AWS_Audit_Info,
format: Any = None,
) -> TextIOWrapper:
"""Open/Create the output file. If needed include headers or the required format"""
@@ -72,15 +76,27 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
for output_mode in output_modes:
if output_mode == "csv":
filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
output_model = get_provider_output_model(
audit_info.__class__.__name__
)
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
output_model,
)
if isinstance(audit_info, AWS_Audit_Info):
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Aws_Check_Output_CSV,
)
if isinstance(audit_info, Azure_Audit_Info):
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Azure_Check_Output_CSV,
)
if isinstance(audit_info, GCP_Audit_Info):
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Gcp_Check_Output_CSV,
)
file_descriptors.update({output_mode: file_descriptor})

elif output_mode == "json":
@@ -172,6 +188,19 @@
)
file_descriptors.update({output_mode: file_descriptor})

elif (
output_mode
== "aws_well_architected_framework_performance_pillar_aws"
):
filename = f"{output_directory}/{output_filename}_aws_well_architected_framework_performance_pillar_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Check_Output_CSV_AWS_Well_Architected,
)
file_descriptors.update({output_mode: file_descriptor})

elif output_mode == "iso27001_2013_aws":
filename = f"{output_directory}/{output_filename}_iso27001_2013_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(
@@ -0,0 +1,31 @@
{
"Provider": "aws",
"CheckID": "autoscaling_group_scaling_enabled",
"CheckTitle": "Ensure Auto Scaling group has scaling enabled",
"CheckType": ["Service"],
"ServiceName": "autoscaling",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Ensure Auto Scaling group has scaling enabled.",
"Risk": "If Auto Scaling group does not have scaling enabled, it may not effectively respond to changes in demand, leading to suboptimal resource utilization.",
"RelatedUrl": "https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-scale-based-on-demand.html",
"Remediation": {
"Code": {
"CLI": "aws autoscaling update-auto-scaling-group --auto-scaling-group-name <group_name> --min-size <min_size> --max-size <max_size> --desired-capacity <desired_capacity>",
"NativeIaC": "",
"Other": "",
"Terraform": "resource \"aws_autoscaling_group\" \"example\" {\n desired_capacity = <desired_capacity>\n min_size = <min_size>\n max_size = <max_size>\n}"
},
"Recommendation": {
"Text": "We recommend enabling scaling for Auto Scaling groups to effectively respond to changes in demand.",
"Url": "https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-scale-based-on-demand.html"
}
},
"Categories": ["autoscaling"],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}
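In practice, enabling scaling on a group means attaching at least one scaling policy in addition to setting capacity bounds. A hedged boto3 sketch of the target-tracking variant of the remediation above; the group name and target value are placeholders:

import boto3

autoscaling = boto3.client("autoscaling")

# Attach a target-tracking policy so the group scales on average CPU utilization.
autoscaling.put_scaling_policy(
    AutoScalingGroupName="<group_name>",
    PolicyName="cpu-target-tracking",
    PolicyType="TargetTrackingScaling",
    TargetTrackingConfiguration={
        "PredefinedMetricSpecification": {
            "PredefinedMetricType": "ASGAverageCPUUtilization"
        },
        "TargetValue": 50.0,  # placeholder target; tune per workload
    },
)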

@@ -0,0 +1,30 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.autoscaling.autoscaling_client import autoscaling_client

class autoscaling_group_scaling_enabled(Check):
def execute(self):
findings = []

# Fetch the audit configuration value from prowler config.yaml
max_autoscaling_group_size = autoscaling_client.audit_config.get(
"max_autoscaling_group_size", 10
)

for autoscaling_group in autoscaling_client.groups:
report = Check_Report_AWS(self.metadata())
report.region = autoscaling_group.region
report.resource_id = autoscaling_group.name
report.resource_arn = autoscaling_group.arn
report.resource_tags = autoscaling_group.tags

report.status = "PASS"
report.status_extended = f"Auto Scaling group {autoscaling_group.name} has scaling enabled."

# Check if scaling is enabled
if not autoscaling_group.scaling_enabled:
report.status = "FAIL"
report.status_extended = f"Auto Scaling group {autoscaling_group.name} does not have scaling enabled."

findings.append(report)

return findings
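
The check assumes a scaling_enabled attribute on the group model, which the existing AutoScaling service model does not expose, and the PR leaves its population open. A minimal sketch, using plain boto3 and a hypothetical helper name, of one way it could be derived (a group counts as "scaling enabled" if any scaling policy is attached):

import boto3

def group_has_scaling_policies(group_name: str, region: str) -> bool:
    """Hypothetical helper: True if the Auto Scaling group has at least one scaling policy."""
    autoscaling = boto3.client("autoscaling", region_name=region)
    response = autoscaling.describe_policies(AutoScalingGroupName=group_name)
    return len(response.get("ScalingPolicies", [])) > 0
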
@@ -0,0 +1,31 @@
{
"Provider": "aws",
"CheckID": "awslambda_function_serverless_architecture",
"CheckTitle": "Ensure AWS Lambda functions use serverless architecture",
"CheckType": ["Service"],
"ServiceName": "awslambda",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Ensure AWS Lambda functions use serverless architecture.",
"Risk": "Using non-serverless architecture for Lambda functions may require infrastructure provisioning and maintenance, reducing the efficiency of resource usage.",
"RelatedUrl": "https://docs.aws.amazon.com/lambda/latest/dg/serverless_app_arch.html",
"Remediation": {
"Code": {
"CLI": "N/A",
"NativeIaC": "",
"Other": "",
"Terraform": "N/A"
},
"Recommendation": {
"Text": "We recommend leveraging serverless architecture for AWS Lambda functions to eliminate the need for infrastructure provisioning and maintenance.",
"Url": "https://docs.aws.amazon.com/lambda/latest/dg/serverless_app_arch.html"
}
},
"Categories": ["compute"],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

@@ -0,0 +1,28 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client # Update with the correct import path

class awslambda_function_serverless_architecture(Check):
"""awslambda_function_serverless_architecture verifies if an AWS Lambda function uses a serverless architecture"""

def execute(self):
findings = []

for lambda_function in awslambda_client.functions:
report = Check_Report_AWS(self.metadata())

report.region = lambda_function.region
report.resource_id = lambda_function.name
report.resource_arn = lambda_function.arn
report.resource_tags = lambda_function.tags

report.status = "PASS"
report.status_extended = f"AWS Lambda function {lambda_function.name} is not using a serverless architecture."

# Replace the condition with the actual logic to check if the Lambda function uses a serverless architecture
if not lambda_function.serverless_architecture:
report.status = "FAIL"
report.status_extended = f"AWS Lambda function {lambda_function.name} is using a serverless architecture."

findings.append(report)

return findings
@@ -0,0 +1,25 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client

class ec2_instance_type_optimized(Check):
def execute(self):
findings = []

for instance in ec2_client.instances:
report = Check_Report_AWS(self.metadata())
report.region = instance.region
report.resource_id = instance.id
report.resource_arn = instance.arn
report.resource_tags = instance.tags

report.status = "PASS"
report.status_extended = f"EC2 instance {instance.id} is using an optimized instance type."

# Check if instance type is optimized
if not instance.is_instance_type_optimized:
report.status = "FAIL"
report.status_extended = f"EC2 instance {instance.id} is not using an optimized instance type."

findings.append(report)

return findings
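
Similarly, is_instance_type_optimized is a placeholder attribute on the instance model. A hedged sketch of one possible interpretation, using plain boto3 and treating current-generation instance types as "optimized" (this criterion is an assumption, not the definitive one):

import boto3

def is_current_generation(instance_type: str, region: str) -> bool:
    """Hypothetical helper: True if AWS reports the instance type as current generation."""
    ec2 = boto3.client("ec2", region_name=region)
    response = ec2.describe_instance_types(InstanceTypes=[instance_type])
    return response["InstanceTypes"][0].get("CurrentGeneration", False)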