Aller au contenu

Prowler Cheat Sheet

Overview

Prowler is an Open Source security tool to perform AWS, Azure, and GCP security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOX, AWS FTR, ENS and custom security frameworks. Prowler is designed to be run in a CI/CD pipeline or as a standalone tool for security assessments.

⚠️ Warning: Only use Prowler against cloud environments you own or have explicit permission to audit. Unauthorized cloud security scanning may violate terms of service or local laws.

Installation

Python Package Installation

# Install via pip (recommended)
pip3 install prowler

# Install with all cloud providers
pip3 install prowler[aws,azure,gcp]

# Install specific cloud provider
pip3 install prowler[aws]
pip3 install prowler[azure]
pip3 install prowler[gcp]

# Verify Installation
prowler --version

Docker Installation

# Pull Prowler Docker image
docker pull toniblyx/prowler:latest

# Run Prowler in Docker for AWS
docker run -it --rm \
    -v ~/.aws:/root/.aws \
    -v $(pwd):/prowler/output \
    toniblyx/prowler:latest aws

# Create alias for easier use
echo 'alias prowler="docker run -it --rm -v ~/.aws:/root/.aws -v $(pwd):/prowler/output toniblyx/prowler:latest"' >> ~/.bashrc
source ~/.bashrc

Manual Installation

# Clone repository
git clone https://github.com/prowler-cloud/prowler.git
cd prowler

# Install dependencies
pip3 install -r requirements.txt

# Install Prowler
pip3 install .

# Or run directly
python3 prowler.py --help

Homebrew Installation (macOS)

# Install via Homebrew
brew install prowler

# Verify Installation
prowler --version

AWS Configuration

AWS Credentials Setup

# Install AWS CLI
pip3 install awscli

# Configure AWS credentials
aws configure
# Enter Access Key ID, Secret Access Key, Region, Output format

# Use environment variables
export AWS_ACCESS_KEY_ID="your_access_key"
export AWS_SECRET_ACCESS_KEY="your_secret_key"
export AWS_DEFAULT_REGION="us-east-1"

# Use AWS profiles
aws configure --profile production
aws configure --profile development

# Use IAM roles (recommended)
aws sts assume-role --role-arn arn:aws:iam::123456789012:role/ProwlerRole --role-session-name prowler-session

IAM Permissions for Prowler

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "access-analyzer:List*",
                "account:Get*",
                "acm:Describe*",
                "acm:List*",
                "apigateway:GET",
                "application-autoscaling:Describe*",
                "appstream:Describe*",
                "appstream:List*",
                "autoscaling:Describe*",
                "backup:List*",
                "cloudformation:Describe*",
                "cloudformation:Get*",
                "cloudformation:List*",
                "cloudfront:Get*",
                "cloudfront:List*",
                "cloudtrail:Describe*",
                "cloudtrail:Get*",
                "cloudtrail:List*",
                "cloudwatch:Describe*",
                "cloudwatch:Get*",
                "cloudwatch:List*",
                "codebuild:List*",
                "config:Describe*",
                "config:Get*",
                "config:List*",
                "dax:Describe*",
                "dax:List*",
                "directconnect:Describe*",
                "dms:Describe*",
                "dms:List*",
                "ds:Describe*",
                "ds:Get*",
                "ds:List*",
                "dynamodb:Describe*",
                "dynamodb:List*",
                "ec2:Describe*",
                "ec2:Get*",
                "ecr:Describe*",
                "ecr:Get*",
                "ecr:List*",
                "ecs:Describe*",
                "ecs:List*",
                "efs:Describe*",
                "eks:Describe*",
                "eks:List*",
                "elasticache:Describe*",
                "elasticbeanstalk:Describe*",
                "elasticfilesystem:Describe*",
                "elasticloadbalancing:Describe*",
                "elasticmapreduce:Describe*",
                "elasticmapreduce:List*",
                "es:Describe*",
                "es:List*",
                "events:Describe*",
                "events:List*",
                "firehose:Describe*",
                "firehose:List*",
                "fsx:Describe*",
                "fsx:List*",
                "glue:Get*",
                "glue:List*",
                "guardduty:Get*",
                "guardduty:List*",
                "iam:Generate*",
                "iam:Get*",
                "iam:List*",
                "iam:Simulate*",
                "inspector:Describe*",
                "inspector:Get*",
                "inspector:List*",
                "kinesis:Describe*",
                "kinesis:List*",
                "kms:Describe*",
                "kms:Get*",
                "kms:List*",
                "lambda:Get*",
                "lambda:List*",
                "logs:Describe*",
                "logs:Get*",
                "logs:List*",
                "macie2:Get*",
                "macie2:List*",
                "organizations:Describe*",
                "organizations:List*",
                "rds:Describe*",
                "rds:List*",
                "redshift:Describe*",
                "route53:Get*",
                "route53:List*",
                "route53domains:Get*",
                "route53domains:List*",
                "s3:Get*",
                "s3:List*",
                "sagemaker:Describe*",
                "sagemaker:List*",
                "secretsmanager:Describe*",
                "secretsmanager:Get*",
                "secretsmanager:List*",
                "securityhub:Describe*",
                "securityhub:Get*",
                "securityhub:List*",
                "ses:Get*",
                "ses:List*",
                "shield:Describe*",
                "shield:Get*",
                "shield:List*",
                "sns:Get*",
                "sns:List*",
                "sqs:Get*",
                "sqs:List*",
                "ssm:Describe*",
                "ssm:Get*",
                "ssm:List*",
                "sts:Get*",
                "support:Describe*",
                "trustedadvisor:Describe*",
                "waf:Get*",
                "waf:List*",
                "wafv2:Get*",
                "wafv2:List*",
                "workspaces:Describe*"
            ],
            "Resource": "*"
        }
    ]
}

Basic AWS Scanning

# Basic AWS scan
prowler aws

# Scan with specific profile
prowler aws --profile production

# Scan specific regions
prowler aws --region us-east-1,us-west-2

# Scan all regions
prowler aws --region all

# Exclude specific regions
prowler aws --excluded-regions us-gov-east-1,us-gov-west-1

# Scan specific services
prowler aws --services s3,iam,ec2

# Exclude specific services
prowler aws --excluded-services cloudformation,organizations

Azure Configuration

Azure Credentials Setup

# Install Azure CLI
curl -sL https://aka.ms/InstallAzureCLIDeb|sudo bash

# Login to Azure
az login

# List subscriptions
az account list --output table

# Set default subscription
az account set --subscription "subscription-id"

# Create service principal for Prowler
az ad sp create-for-rbac --name "Prowler" --role "Reader" --scopes "/subscriptions/subscription-id"

# Use service principal
export AZURE_CLIENT_ID="client-id"
export AZURE_CLIENT_SECRET="client-secret"
export AZURE_TENANT_ID="tenant-id"
export AZURE_SUBSCRIPTION_ID="subscription-id"

Basic Azure Scanning

# Basic Azure scan
prowler azure

# Scan with service principal
prowler azure --sp-env-auth

# Scan specific subscription
prowler azure --subscription-id subscription-id

# Scan all subscriptions
prowler azure --subscription-id all

# Scan specific services
prowler azure --services keyvault,storage,compute

# Exclude specific services
prowler azure --excluded-services monitor,network

Google Cloud Platform Configuration

GCP Credentials Setup

# Install Google Cloud SDK
curl https://sdk.cloud.google.com|bash
exec -l $SHELL

# Initialize gcloud
gcloud init

# Authenticate
gcloud auth login

# Set default project
gcloud config set project PROJECT_ID

# Create service account for Prowler
gcloud iam service-accounts create prowler \
    --display-name="Prowler service Account"

# Grant necessary roles
gcloud projects add-iam-policy-binding PROJECT_ID \
    --member="serviceAccount:prowler@PROJECT_ID.iam.gserviceaccount.com" \
    --role="roles/viewer"

gcloud projects add-iam-policy-binding PROJECT_ID \
    --member="serviceAccount:prowler@PROJECT_ID.iam.gserviceaccount.com" \
    --role="roles/security.securityReviewer"

# Create and download key
gcloud iam service-accounts keys create prowler-key.json \
    --iam-account=prowler@PROJECT_ID.iam.gserviceaccount.com

# Set environment variable
export GOOGLE_APPLICATION_CREDENTIALS="prowler-key.json"

Basic GCP Scanning

# Basic GCP scan
prowler gcp

# Scan with service account key
prowler gcp --credentials-file prowler-key.json

# Scan specific project
prowler gcp --project-id PROJECT_ID

# Scan all projects
prowler gcp --project-id all

# Scan specific services
prowler gcp --services compute,storage,iam

# Exclude specific services
prowler gcp --excluded-services logging,monitoring

Advanced Scanning options

Compliance Frameworks

# CIS Benchmark
prowler aws --compliance cis_1.5_aws

# PCI-DSS
prowler aws --compliance pci_3.2.1_aws

# ISO 27001
prowler aws --compliance iso27001_2013_aws

# GDPR
prowler aws --compliance gdpr_aws

# HIPAA
prowler aws --compliance hipaa_aws

# SOX
prowler aws --compliance soc2_aws

# Multiple compliance frameworks
prowler aws --compliance cis_1.5_aws,pci_3.2.1_aws

# List available compliance frameworks
prowler aws --list-compliance

Custom Checks and Filters

# Run specific checks
prowler aws --check s3_bucket_public_access_block,iam_root_access_key_check

# Exclude specific checks
prowler aws --excluded-checks cloudtrail_encryption_enabled

# Run checks by severity
prowler aws --severity critical,high

# Run checks by category
prowler aws --categories secrets,encryption

# Custom check file
prowler aws --checks-file custom_checks.txt

# List all available checks
prowler aws --list-checks

Output and Reporting

# Specify output directory
prowler aws --output-directory /tmp/prowler-results

# Custom output formats
prowler aws --output-formats json,csv,html

# Specific output filename
prowler aws --output-filename aws-security-audit

# Include compliance mapping
prowler aws --compliance cis_1.5_aws --output-formats json,html

# Quiet mode
prowler aws --quiet

# Verbose mode
prowler aws --verbose

# No banner
prowler aws --no-banner

Automation Scripts

Multi-Account AWS Security Assessment

#!/bin/bash
# Comprehensive multi-account AWS security assessment with Prowler.
#
# Reads a pipe-delimited list of AWS CLI profiles from ACCOUNTS_FILE and runs
# a Prowler compliance assessment against each, writing per-account reports
# under OUTPUT_BASE_DIR.

ACCOUNTS_FILE="aws_accounts.txt"
OUTPUT_BASE_DIR="prowler_assessments_$(date +%Y%m%d_%H%M%S)"
COMPLIANCE_FRAMEWORKS="cis_1.5_aws,pci_3.2.1_aws,iso27001_2013_aws"
PARALLEL_JOBS=3

# Seed a template accounts file on first run so the user knows the format
# (pipe-delimited: matches the IFS='|' reader used when generating reports).
if [ ! -f "$ACCOUNTS_FILE" ]; then
    cat > "$ACCOUNTS_FILE" << 'EOF'
# AWS Accounts Configuration
# Format: PROFILE_NAME|ACCOUNT_ID|ENVIRONMENT|DESCRIPTION
production|123456789012|prod|Production Environment
staging|123456789013|staging|Staging Environment
development|123456789014|dev|Development Environment
security|123456789015|security|Security Tools Account
EOF
    echo "Created $ACCOUNTS_FILE - please configure with your AWS accounts"
    exit 1
fi

mkdir -p "$OUTPUT_BASE_DIR"

# Function to assess single account
# Assess a single AWS account with Prowler.
# Arguments: $1 profile name, $2 account id, $3 environment, $4 description.
# Writes reports and a prowler.log under $OUTPUT_BASE_DIR/<profile>.
# Returns: 0 if the assessment succeeded, 1 otherwise.
assess_account() {
    local profile="$1"
    local account_id="$2"
    local environment="$3"
    local description="$4"
    local output_dir="$OUTPUT_BASE_DIR/$profile"

    echo "[+] Assessing account: $profile ($account_id) - $environment"

    # Create account-specific output directory
    mkdir -p "$output_dir"

    # Run Prowler assessment, keeping a full log alongside the reports.
    prowler aws \
        --profile "$profile" \
        --compliance "$COMPLIANCE_FRAMEWORKS" \
        --output-directory "$output_dir" \
        --output-filename "$profile-assessment" \
        --output-formats json,csv,html \
        --severity critical,high,medium \
        --quiet \
        2>&1 | tee "$output_dir/prowler.log"

    # PIPESTATUS[0] is prowler's exit code; plain $? would be tee's.
    local exit_code=${PIPESTATUS[0]}

    if [ "$exit_code" -eq 0 ]; then
        echo "  ✓ Assessment completed: $profile"

        # Generate account summary
        generate_account_summary "$profile" "$account_id" "$environment" "$output_dir"

        return 0
    else
        echo "  ✗ Assessment failed: $profile (exit code: $exit_code)"
        return 1
    fi
}

# Function to generate account summary
# Summarize one account's Prowler JSON output into a plain-text report.
# Arguments: $1 profile, $2 account id, $3 environment, $4 output directory.
# Reads <output_dir>/<profile>-assessment.json; writes account_summary.txt.
generate_account_summary() {
    local profile="$1"
    local account_id="$2"
    local environment="$3"
    local output_dir="$4"

    echo "[+] Generating summary for $profile"

    local json_file="$output_dir/${profile}-assessment.json"
    local summary_file="$output_dir/account_summary.txt"

    if [ -f "$json_file" ]; then
        # Unquoted heredoc: shell expands $json_file/$profile/etc. before
        # Python runs, so the script below sees literal paths and values.
        python3 << EOF
import json
import sys
from collections import defaultdict

try:
    with open('$json_file', 'r') as f:
        data = json.load(f)

    # Count findings by status and severity
    status_counts = defaultdict(int)
    severity_counts = defaultdict(int)
    service_counts = defaultdict(int)
    compliance_counts = defaultdict(lambda: defaultdict(int))

    for finding in data.get('findings', []):
        status = finding.get('status', 'unknown')
        severity = finding.get('severity', 'unknown')
        service = finding.get('service_name', 'unknown')

        status_counts[status] += 1
        severity_counts[severity] += 1
        service_counts[service] += 1

        # Count compliance framework results
        # NOTE(review): assumes 'compliance' is a list of {framework: [reqs]}
        # mappings — confirm against the Prowler JSON schema in use.
        for compliance in finding.get('compliance', {}):
            for framework, requirements in compliance.items():
                for requirement in requirements:
                    compliance_counts[framework][status] += 1

    # Generate summary
    summary = f"""
Account Security Assessment Summary
==================================
Profile: $profile
Account ID: $account_id
Environment: $environment
Assessment Date: $(date)

Finding Status Summary:
"""

    for status, count in sorted(status_counts.items()):
        summary += f"  {status.upper()}: {count}\n"

    summary += "\nSeverity Breakdown:\n"
    for severity, count in sorted(severity_counts.items()):
        summary += f"  {severity.upper()}: {count}\n"

    summary += "\nTop 10 services by Findings:\n"
    sorted_services = sorted(service_counts.items(), key=lambda x: x[1], reverse=True)[:10]
    for service, count in sorted_services:
        summary += f"  {service}: {count}\n"

    summary += "\nCompliance Framework Results:\n"
    for framework, results in compliance_counts.items():
        summary += f"  {framework.upper()}:\n"
        for status, count in sorted(results.items()):
            summary += f"    {status.upper()}: {count}\n"

    with open('$summary_file', 'w') as f:
        f.write(summary)

    print(f"Summary generated: $summary_file")

except Exception as e:
    print(f"Error generating summary: {e}")
    sys.exit(1)
EOF
    else
        echo "  ⚠ JSON file not found: $json_file"
    fi
}

# Function to generate consolidated report
# Build a single consolidated HTML report across all assessed accounts.
# Reads each account's Prowler JSON output (via jq) and aggregates failed
# check counts per severity, then embeds a Chart.js doughnut summary.
# Globals: OUTPUT_BASE_DIR, ACCOUNTS_FILE, COMPLIANCE_FRAMEWORKS.
generate_consolidated_report() {
    echo "[+] Generating consolidated assessment report"

    local consolidated_dir="$OUTPUT_BASE_DIR/consolidated"
    local report_file="$consolidated_dir/multi_account_security_report.html"

    mkdir -p "$consolidated_dir"

    # Unquoted delimiter so $(date) and $COMPLIANCE_FRAMEWORKS expand in the
    # header; the CSS braces are not special inside a heredoc.
    cat > "$report_file" << EOF
<!DOCTYPE html>
<html>
<head>
    <title>Multi-Account AWS Security Assessment</title>
    <style>
        body { font-family: Arial, sans-serif; margin: 20px; }
        .header { background-color: #f0f0f0; padding: 20px; border-radius: 5px; margin-bottom: 20px; }
        .account { margin: 20px 0; padding: 15px; border: 1px solid #ddd; border-radius: 5px; }
        .critical { border-color: #f44336; background-color: #ffebee; }
        .high { border-color: #ff9800; background-color: #fff3e0; }
        .medium { border-color: #2196f3; background-color: #e3f2fd; }
        .low { border-color: #4caf50; background-color: #e8f5e8; }
        table { border-collapse: collapse; width: 100%; margin: 10px 0; }
        th, td { border: 1px solid #ddd; padding: 8px; text-align: left; }
        th { background-color: #f2f2f2; }
        .fail { color: #d32f2f; font-weight: bold; }
        .pass { color: #388e3c; font-weight: bold; }
        .manual { color: #f57c00; font-weight: bold; }
        .chart { margin: 20px 0; }
    </style>
    <script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
</head>
<body>
    <div class="header">
        <h1>Multi-Account AWS Security Assessment Report</h1>
        <p>Generated: $(date)</p>
        <p>Compliance Frameworks: $COMPLIANCE_FRAMEWORKS</p>
    </div>
EOF

    # Process each account
    local total_critical=0
    local total_high=0
    local total_medium=0
    local total_low=0

    while IFS='|' read -r profile account_id environment description; do
        # Skip comments and empty lines
        [[ "$profile" =~ ^#.*$ || -z "$profile" ]] && continue

        local account_dir="$OUTPUT_BASE_DIR/$profile"
        local json_file="$account_dir/${profile}-assessment.json"

        echo "    <div class=\"account\">" >> "$report_file"
        echo "        <h2>$profile - $description</h2>" >> "$report_file"
        echo "        <p><strong>Account ID:</strong> $account_id</p>" >> "$report_file"
        echo "        <p><strong>Environment:</strong> $environment</p>" >> "$report_file"

        if [ -f "$json_file" ]; then
            # Count failed checks per severity; default to 0 when jq fails.
            local critical high medium low
            critical=$(jq '[.findings[] | select(.severity == "critical" and .status == "FAIL")] | length' "$json_file" 2>/dev/null || echo "0")
            high=$(jq '[.findings[] | select(.severity == "high" and .status == "FAIL")] | length' "$json_file" 2>/dev/null || echo "0")
            medium=$(jq '[.findings[] | select(.severity == "medium" and .status == "FAIL")] | length' "$json_file" 2>/dev/null || echo "0")
            low=$(jq '[.findings[] | select(.severity == "low" and .status == "FAIL")] | length' "$json_file" 2>/dev/null || echo "0")

            total_critical=$((total_critical + critical))
            total_high=$((total_high + high))
            total_medium=$((total_medium + medium))
            total_low=$((total_low + low))

            echo "        <table>" >> "$report_file"
            echo "            <tr><th>Severity</th><th>Failed Checks</th></tr>" >> "$report_file"
            echo "            <tr><td class=\"fail\">Critical</td><td>$critical</td></tr>" >> "$report_file"
            echo "            <tr><td class=\"fail\">High</td><td>$high</td></tr>" >> "$report_file"
            echo "            <tr><td class=\"manual\">Medium</td><td>$medium</td></tr>" >> "$report_file"
            echo "            <tr><td class=\"pass\">Low</td><td>$low</td></tr>" >> "$report_file"
            echo "        </table>" >> "$report_file"

            # Link to detailed reports
            echo "        <p>" >> "$report_file"
            echo "            <a href=\"../$profile/${profile}-assessment.html\" target=\"_blank\">View Detailed HTML Report</a> |" >> "$report_file"
            echo "            <a href=\"../$profile/${profile}-assessment.json\" target=\"_blank\">Download JSON Report</a> |" >> "$report_file"
            echo "            <a href=\"../$profile/${profile}-assessment.csv\" target=\"_blank\">Download CSV Report</a>" >> "$report_file"
            echo "        </p>" >> "$report_file"
        else
            echo "        <p style=\"color: red;\">❌ Assessment failed or report not found</p>" >> "$report_file"
        fi

        echo "    </div>" >> "$report_file"

    done < "$ACCOUNTS_FILE"

    # Add summary statistics and a severity chart
    cat >> "$report_file" << EOF
    <div class="account">
        <h2>Overall Summary</h2>
        <div class="chart">
            <canvas id="severityChart" width="400" height="200"></canvas>
        </div>
        <table>
            <tr><th>Total Critical</th><td>$total_critical</td></tr>
            <tr><th>Total High</th><td>$total_high</td></tr>
            <tr><th>Total Medium</th><td>$total_medium</td></tr>
            <tr><th>Total Low</th><td>$total_low</td></tr>
        </table>
    </div>

    <script>
        const ctx = document.getElementById('severityChart').getContext('2d');
        const chart = new Chart(ctx, {
            type: 'doughnut',
            data: {
                labels: ['Critical', 'High', 'Medium', 'Low'],
                datasets: [{
                    data: [$total_critical, $total_high, $total_medium, $total_low],
                    backgroundColor: ['#f44336', '#ff9800', '#2196f3', '#4caf50']
                }]
            },
            options: {
                responsive: true,
                plugins: {
                    title: {
                        display: true,
                        text: 'Security Findings by Severity'
                    }
                }
            }
        });
    </script>
</body>
</html>
EOF

    echo "[+] Consolidated report generated: $report_file"
}

# Function to generate executive summary
generate_executive_summary() \\\\{
    echo "[+] Generating executive summary"

    local exec_summary="$OUTPUT_BASE_DIR/executive_summary.md"

    cat > "$exec_summary" << EOF
# AWS Multi-Account Security Assessment - Executive Summary

**Assessment Date:** $(date +"%B %d, %Y")
**Compliance Frameworks:** $COMPLIANCE_FRAMEWORKS
**Accounts Assessed:** $(grep -v '^#' "$ACCOUNTS_FILE"|grep -v '^

### Continuous Security Monitoring
```bash
#!/bin/bash
# Continuous cloud security monitoring with Prowler.
#
# Sources a monitoring configuration (created on first run) and loops
# forever, scanning enabled cloud providers every SCAN_INTERVAL seconds.

CONFIG_FILE="prowler_monitoring.conf"
LOG_DIR="prowler_monitoring_logs"
ALERT_EMAIL="security@company.com"
SCAN_INTERVAL=86400  # 24 hours

mkdir -p "$LOG_DIR"

# Create default configuration on first run, then exit so the user edits it.
if [ ! -f "$CONFIG_FILE" ]; then
    cat > "$CONFIG_FILE" << 'EOF'
# Prowler Continuous Monitoring Configuration

# AWS configuration
AWS_ENABLED=true
AWS_PROFILES="production,staging,development"
AWS_COMPLIANCE="cis_1.5_aws,pci_3.2.1_aws"
AWS_SEVERITY="critical,high"

# Azure configuration
AZURE_ENABLED=false
AZURE_SUBSCRIPTIONS="sub1,sub2"
AZURE_COMPLIANCE="cis_1.3.0_azure"

# GCP configuration
GCP_ENABLED=false
GCP_PROJECTS="project1,project2"
GCP_COMPLIANCE="cis_1.2.0_gcp"

# Alerting configuration
ALERT_ON_NEW_FINDINGS=true
ALERT_ON_REGRESSION=true
CRITICAL_THRESHOLD=5
HIGH_THRESHOLD=20

# Reporting
GENERATE_TRENDS=true
KEEP_HISTORICAL_DATA=true
RETENTION_DAYS=90
EOF
    echo "Created $CONFIG_FILE - please configure monitoring settings"
    exit 1
fi

source "$CONFIG_FILE"

# Function to run AWS monitoring
# Scan every configured AWS profile, when AWS monitoring is enabled.
# Globals read: AWS_ENABLED, AWS_PROFILES (comma-separated), AWS_COMPLIANCE,
#               AWS_SEVERITY, LOG_DIR.
monitor_aws() {
    if [ "$AWS_ENABLED" != "true" ]; then
        return 0
    fi

    echo "[+] Running AWS security monitoring"

    local profiles profile timestamp output_dir
    IFS=',' read -ra profiles <<< "$AWS_PROFILES"
    for profile in "${profiles[@]}"; do
        timestamp=$(date +%Y%m%d_%H%M%S)
        output_dir="$LOG_DIR/aws_${profile}_$timestamp"

        echo "[+] Scanning AWS profile: $profile"

        # Test prowler's exit status directly instead of inspecting $?.
        if prowler aws \
            --profile "$profile" \
            --compliance "$AWS_COMPLIANCE" \
            --severity "$AWS_SEVERITY" \
            --output-directory "$output_dir" \
            --output-filename "aws-$profile-$timestamp" \
            --output-formats json,csv \
            --quiet; then
            echo "  ✓ AWS scan completed: $profile"
            analyze_aws_findings "$output_dir" "$profile" "$timestamp"
        else
            echo "  ✗ AWS scan failed: $profile"
        fi
    done
}

# Function to analyze AWS findings
# Analyze one scan's JSON output: count critical/high failures, alert when
# thresholds are exceeded, diff against the previous scan, and record trends.
# Arguments: $1 output dir, $2 profile, $3 timestamp.
# Returns: 1 if the expected JSON report is missing.
analyze_aws_findings() {
    local output_dir="$1"
    local profile="$2"
    local timestamp="$3"

    local json_file="$output_dir/aws-$profile-$timestamp.json"

    if [ ! -f "$json_file" ]; then
        echo "[-] JSON file not found: $json_file"
        return 1
    fi

    echo "[+] Analyzing findings for AWS:$profile"

    # Count failed findings by severity (requires jq).
    local critical_count high_count
    critical_count=$(jq '[.findings[] | select(.severity == "critical" and .status == "FAIL")] | length' "$json_file")
    high_count=$(jq '[.findings[] | select(.severity == "high" and .status == "FAIL")] | length' "$json_file")

    echo "  Critical findings: $critical_count"
    echo "  High findings: $high_count"

    # Check thresholds
    if [ "$critical_count" -ge "$CRITICAL_THRESHOLD" ]; then
        send_alert "CRITICAL" "AWS" "$profile" "$critical_count" "critical findings detected"
    fi

    if [ "$high_count" -ge "$HIGH_THRESHOLD" ]; then
        send_alert "HIGH" "AWS" "$profile" "$high_count" "high-severity findings detected"
    fi

    # Compare with previous scan
    if [ "$ALERT_ON_NEW_FINDINGS" = "true" ]; then
        compare_with_previous "aws" "$profile" "$json_file"
    fi

    # Update trends
    if [ "$GENERATE_TRENDS" = "true" ]; then
        update_trends "aws" "$profile" "$critical_count" "$high_count"
    fi
}

# Function to send alerts
# Send an email alert via mail(1), falling back to stdout when mail is
# unavailable or fails.
# Arguments: $1 severity, $2 cloud provider, $3 account, $4 count, $5 message.
send_alert() {
    local severity="$1"
    local cloud_provider="$2"
    local account="$3"
    local count="$4"
    local message="$5"

    local subject="[$severity] Prowler Security Alert: $cloud_provider:$account"
    local body="Security alert for $cloud_provider account '$account': $count $message at $(date)"

    echo "$body" | mail -s "$subject" "$ALERT_EMAIL" 2>/dev/null || \
        echo "Alert: $subject - $body (email failed)"
}

# Function to compare with previous scan
# Diff the current scan against the most recent previous one and alert on
# newly failing checks and on regressions (checks that flipped PASS -> FAIL).
# Arguments: $1 cloud provider, $2 account, $3 current JSON file.
# Globals read: LOG_DIR, ALERT_ON_REGRESSION.
compare_with_previous() {
    local cloud_provider="$1"
    local account="$2"
    local current_file="$3"

    # Find previous scan file (second newest by lexicographic/timestamp sort).
    local previous_file
    previous_file=$(find "$LOG_DIR" -name "*${cloud_provider}_${account}_*.json" -type f | sort | tail -2 | head -1)

    if [ -f "$previous_file" ] && [ "$previous_file" != "$current_file" ]; then
        echo "[+] Comparing with previous scan"

        # Extract failing check IDs from current and previous scans.
        local current_fails previous_fails
        current_fails=$(jq -r '.findings[] | select(.status == "FAIL") | .check_id' "$current_file" | sort)
        previous_fails=$(jq -r '.findings[] | select(.status == "FAIL") | .check_id' "$previous_file" | sort)

        # New failures: in current but not previous.
        local new_fails
        new_fails=$(comm -23 <(echo "$current_fails") <(echo "$previous_fails"))

        if [ -n "$new_fails" ]; then
            local new_count
            new_count=$(echo "$new_fails" | wc -l)
            send_alert "NEW FINDINGS" "$cloud_provider" "$account" "$new_count" "new security findings since last scan"
        fi

        # Regressions: previously passing checks that no longer pass.
        local current_passes previous_passes
        current_passes=$(jq -r '.findings[] | select(.status == "PASS") | .check_id' "$current_file" | sort)
        previous_passes=$(jq -r '.findings[] | select(.status == "PASS") | .check_id' "$previous_file" | sort)

        local regressions
        regressions=$(comm -23 <(echo "$previous_passes") <(echo "$current_passes"))

        if [ -n "$regressions" ] && [ "$ALERT_ON_REGRESSION" = "true" ]; then
            local regression_count
            regression_count=$(echo "$regressions" | wc -l)
            send_alert "REGRESSION" "$cloud_provider" "$account" "$regression_count" "security regressions detected"
        fi
    fi
}

# Function to update trends
# Append one row of per-account severity counts to the trends CSV.
# Arguments: $1 cloud provider, $2 account, $3 critical count, $4 high count.
# Globals read: LOG_DIR.
update_trends() {
    local cloud_provider="$1"
    local account="$2"
    local critical_count="$3"
    local high_count="$4"

    local trends_file="$LOG_DIR/security_trends.csv"

    # Create header if file doesn't exist
    if [ ! -f "$trends_file" ]; then
        echo "Date,Cloud,Account,Critical,High" > "$trends_file"
    fi

    # Add current data
    echo "$(date +%Y-%m-%d),$cloud_provider,$account,$critical_count,$high_count" >> "$trends_file"
}

# Function to generate trend report
# Render trend charts (matplotlib) and an HTML report from the trends CSV.
# No-op unless GENERATE_TRENDS=true; returns 1 when no trend data exists.
# Globals read: GENERATE_TRENDS, LOG_DIR. Requires python3 + pandas/matplotlib.
generate_trend_report() {
    if [ "$GENERATE_TRENDS" != "true" ]; then
        return 0
    fi

    echo "[+] Generating security trend report"

    local trends_file="$LOG_DIR/security_trends.csv"
    local html_report="$LOG_DIR/security_trends.html"

    if [ ! -f "$trends_file" ]; then
        echo "[-] No trends data available"
        return 1
    fi

    # Unquoted heredoc: $trends_file/$LOG_DIR/$html_report expand before
    # Python runs; Python f-string braces are unaffected.
    python3 << EOF
import csv
import matplotlib.pyplot as plt
import pandas as pd
from datetime import datetime, timedelta
import os

try:
    # Read trends data
    df = pd.read_csv('$trends_file')
    df['Date'] = pd.to_datetime(df['Date'])

    # Create trend charts
    fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(12, 10))

    # Critical findings trend
    for account in df['Account'].unique():
        account_data = df[df['Account'] == account]
        ax1.plot(account_data['Date'], account_data['Critical'],
                marker='o', label=f"{account_data['Cloud'].iloc[0]}:{account}")

    ax1.set_title('Critical Security Findings Trend')
    ax1.set_xlabel('Date')
    ax1.set_ylabel('Critical Findings')
    ax1.legend()
    ax1.grid(True, alpha=0.3)

    # High findings trend
    for account in df['Account'].unique():
        account_data = df[df['Account'] == account]
        ax2.plot(account_data['Date'], account_data['High'],
                marker='s', label=f"{account_data['Cloud'].iloc[0]}:{account}")

    ax2.set_title('High Severity Findings Trend')
    ax2.set_xlabel('Date')
    ax2.set_ylabel('High Findings')
    ax2.legend()
    ax2.grid(True, alpha=0.3)

    plt.tight_layout()
    plt.savefig('$LOG_DIR/security_trends.png', dpi=150, bbox_inches='tight')
    plt.close()

    # Generate HTML report
    html_content = f"""
<!DOCTYPE html>
<html>
<head>
    <title>Security Trends Report</title>

</head>
<body>
    <h1>Security Trends Report</h1>
    <p>Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}</p>

    <div class="chart">
        <img src="security_trends.png" alt="Security Trends Chart" style="max-width: 100%;">
    </div>

    <h2>Recent Data</h2>
    <table>
        <tr><th>Date</th><th>Cloud</th><th>Account</th><th>Critical</th><th>High</th></tr>
"""

    # Add recent data (last 30 days)
    recent_data = df[df['Date'] >= (datetime.now() - timedelta(days=30))]
    for _, row in recent_data.iterrows():
        html_content += f"""
        <tr>
            <td>{row['Date'].strftime('%Y-%m-%d')}</td>
            <td>{row['Cloud']}</td>
            <td>{row['Account']}</td>
            <td>{row['Critical']}</td>
            <td>{row['High']}</td>
        </tr>
"""

    html_content += """
    </table>
</body>
</html>
"""

    with open('$html_report', 'w') as f:
        f.write(html_content)

    print(f"Trend report generated: $html_report")

except Exception as e:
    print(f"Error generating trend report: {e}")
EOF
}

# Function to cleanup old data
# Prune per-scan output directories and trend rows older than RETENTION_DAYS.
# Only runs when KEEP_HISTORICAL_DATA=true (i.e. retention policy enabled).
# Globals read: KEEP_HISTORICAL_DATA, RETENTION_DAYS, LOG_DIR.
cleanup_old_data() {
    if [ "$KEEP_HISTORICAL_DATA" = "true" ]; then
        echo "[+] Cleaning up data older than $RETENTION_DAYS days"
        find "$LOG_DIR" -type d -mtime +"$RETENTION_DAYS" -exec rm -rf {} + 2>/dev/null || true

        # Keep only recent trends data
        if [ -f "$LOG_DIR/security_trends.csv" ]; then
            python3 << EOF
import pandas as pd
from datetime import datetime, timedelta

try:
    df = pd.read_csv('$LOG_DIR/security_trends.csv')
    df['Date'] = pd.to_datetime(df['Date'])

    # Keep only last $RETENTION_DAYS days
    cutoff_date = datetime.now() - timedelta(days=$RETENTION_DAYS)
    recent_df = df[df['Date'] >= cutoff_date]

    recent_df.to_csv('$LOG_DIR/security_trends.csv', index=False)
    print(f"Cleaned trends data, kept {len(recent_df)} records")

except Exception as e:
    print(f"Error cleaning trends data: {e}")
EOF
        fi
    fi
}

# Main monitoring loop: run an assessment cycle, report, clean up,
# then sleep for SCAN_INTERVAL seconds — forever.
printf '%s\n' "[+] Starting continuous security monitoring with Prowler"
printf '%s\n' "[+] Scan interval: $((SCAN_INTERVAL / 3600)) hours"

while :; do
    printf '%s\n' "[+] Starting monitoring cycle at $(date)"

    # Run monitoring for enabled cloud providers
    monitor_aws
    # monitor_azure  # Implement similar to monitor_aws
    # monitor_gcp    # Implement similar to monitor_aws

    # Generate reports and cleanup
    generate_trend_report
    cleanup_old_data

    printf '%s\n' "[+] Monitoring cycle completed at $(date)"
    printf '%s\n' "[+] Next scan in $((SCAN_INTERVAL / 3600)) hours"

    sleep "$SCAN_INTERVAL"
done

Prowler CI/CD Integration

# GitHub Actions example
name: Cloud Security Assessment

on:
  schedule:
    - cron: '0 2 * * *'  # Daily at 2 AM
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  aws-security-assessment:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v3

    - name: Set up Python
      uses: actions/setup-python@v4
      with:
        python-version: '3.9'

    - name: Install Prowler
      run: |
        pip install prowler

    - name: Configure AWS credentials
      uses: aws-actions/configure-aws-credentials@v2
      with:
        aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
        aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
        aws-region: us-east-1

    - name: Run Prowler Assessment
      run: |
        prowler aws \
          --compliance cis_1.5_aws \
          --severity critical,high \
          --output-formats json,html \
          --output-directory ./prowler-results \
          --quiet

    - name: Upload Results
      uses: actions/upload-artifact@v3
      with:
        name: prowler-security-assessment
        path: ./prowler-results/

    - name: Check for Critical Findings
      run: |
        CRITICAL_COUNT=$(jq '[.findings[] | select(.severity == "critical" and .status == "FAIL")] | length' ./prowler-results/*.json)
        if [ "$CRITICAL_COUNT" -gt 0 ]; then
          echo "❌ Found $CRITICAL_COUNT critical security findings"
          exit 1
        else
          echo "✅ No critical security findings detected"
        fi

Troubleshooting

Common Issues

Authentication Problems

# Check AWS credentials
aws sts get-caller-identity

# Check Azure authentication
az account show

# Check GCP authentication
gcloud auth list

# Test Prowler authentication
prowler aws --list-checks
prowler azure --list-checks
prowler gcp --list-checks

Installation Issues

# Update pip
pip3 install --upgrade pip

# Install with verbose output
pip3 install -v prowler

# Install from source
git clone https://github.com/prowler-cloud/prowler.git
cd prowler
pip3 install -e .

# Check dependencies
pip3 check prowler

Permission Issues

# Check IAM permissions
aws iam simulate-principal-policy \
    --policy-source-arn arn:aws:iam::123456789012:user/prowler \
    --action-names s3:GetBucketAcl \
    --resource-arns arn:aws:s3:::example-bucket

# Test specific service access
aws s3 ls
aws iam list-users
aws ec2 describe-instances

Performance Issues

# Limit regions
prowler aws --region us-east-1

# Skip large services
prowler aws --excluded-services organizations,support

# Use specific checks only
prowler aws --check s3_bucket_public_access_block

# Increase timeout
export PROWLER_TIMEOUT=300

Debugging and Logging

# Enable verbose output
prowler aws --verbose

# Enable debug mode
prowler aws --log-level DEBUG

# Custom log file
prowler aws --log-file /tmp/prowler.log

# Check Prowler version
prowler --version

Resources


This cheat sheet provides a comprehensive reference for using Prowler for cloud security assessments and compliance auditing. Always ensure you have proper authorization before using this tool in any environment.

|wc -l)

Key Findings

Security Posture Overview

EOF

# Calculate overall statistics
local total_accounts=0
local successful_assessments=0
local total_findings=0

while IFS='|' read -r profile account_id environment Description; do

| [[ "$profile" =~ ^#.*$ | | -z "$profile" ]] && continue |

    total_accounts=$((total_accounts + 1))

    local json_file="$OUTPUT_BASE_DIR/$profile/$\\\\{profile\\\\}-assessment.json"
    if [ -f "$json_file" ]; then
        successful_assessments=$((successful_assessments + 1))

| local account_findings=$(jq '[.findings[] | select(.status == "FAIL")] | length' "$json_file" 2>/dev/null | | echo "0") | total_findings=$((total_findings + account_findings)) fi done < "$ACCOUNTS_FILE"

cat >> "$exec_summary" << EOF
  • Total Accounts: $total_accounts
  • Successfully Assessed: $successful_assessments
  • Total Security Findings: $total_findings
  • Assessment Coverage: $((successful_assessments * 100 / total_accounts))%

Recommendations

  1. Immediate Actions Required:

    • Address all CRITICAL severity findings
    • Review and remediate HIGH severity findings
    • Implement missing security controls
  2. Short-term Improvements:

    • Establish continuous monitoring
    • Implement automated remediation where possible
    • Regular security assessments
  3. Long-term Strategy:

    • Adopt Infrastructure as Code (IaC) with security scanning
    • Implement security training programs
    • Establish security metrics and KPIs

Next Steps

  1. Review detailed findings in individual account reports
  2. Prioritize remediation based on risk and business Impact
  3. Establish regular assessment schedule
  4. Implement continuous monitoring solutions

This assessment was conducted using Prowler v$(prowler --version 2>/dev/null|head -1) with industry-standard compliance frameworks. EOF

echo "[+] Executive summary generated: $exec_summary"

\\}

# Main execution
echo "[+] Starting multi-account AWS security assessment"
echo "[+] Output directory: $OUTPUT_BASE_DIR"
echo "[+] Compliance frameworks: $COMPLIANCE_FRAMEWORKS"

# Check dependencies — each tool is required for assessment or reporting.
if ! command -v prowler &> /dev/null; then
    echo "[-] Prowler not found. Please install Prowler first."
    exit 1
fi

if ! command -v aws &> /dev/null; then
    echo "[-] AWS CLI not found. Please install AWS CLI first."
    exit 1
fi

if ! command -v jq &> /dev/null; then
    echo "[-] jq not found. Please install jq for JSON processing."
    exit 1
fi

# Create job control for parallel execution
job_count=0
max_jobs=$PARALLEL_JOBS

# Assess each account listed in ACCOUNTS_FILE (pipe-delimited:
# profile|account_id|environment|description), up to $max_jobs in parallel.
while IFS='|' read -r profile account_id environment description; do
    # Skip comments and empty lines
    [[ "$profile" =~ ^#.*$ || -z "$profile" ]] && continue

    # Wait if we've reached max parallel jobs
    while [ "$job_count" -ge "$max_jobs" ]; do
        wait -n  # Wait for any job to complete (bash 4.3+)
        job_count=$((job_count - 1))
    done

    # Start assessment in background
    assess_account "$profile" "$account_id" "$environment" "$description" &
    job_count=$((job_count + 1))
done < "$ACCOUNTS_FILE"

# Wait for all remaining jobs to complete
wait

echo "[+] All assessments completed"

# Generate consolidated reports
generate_consolidated_report
generate_executive_summary

echo "[+] Multi-account assessment completed successfully"
echo "[+] Results available in: $OUTPUT_BASE_DIR"
echo "[+] Open $OUTPUT_BASE_DIR/consolidated/multi_account_security_report.html for overview"

Continuous Security Monitoring

CODE_BLOCK_15

Prowler CI/CD Integration

CODE_BLOCK_16

Troubleshooting

Common Issues

Authentication Problems

CODE_BLOCK_17

Installation Issues

CODE_BLOCK_18

Permission Issues

CODE_BLOCK_19

Performance Issues

CODE_BLOCK_20

Debugging and Logging

CODE_BLOCK_21

Resources


This cheat sheet provides a comprehensive reference for using Prowler for cloud security assessments and compliance auditing. Always ensure you have proper authorization before using this tool in any environment.