KICS (Keeping Infrastructure as Code Secure) Cheat Sheet

Overview

KICS (Keeping Infrastructure as Code Secure) is an open-source static code analysis tool that finds security vulnerabilities, compliance issues, and misconfigurations in Infrastructure as Code (IaC). It supports Terraform, CloudFormation, Ansible, Kubernetes, Docker, and more, providing over 2,000 queries across multiple cloud providers and platforms.

💡 Key Features: 2000+ security queries, multi-IaC support, custom query creation, CI/CD integration, compliance frameworks (CIS, NIST, OWASP), and comprehensive reporting with remediation guidance.
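
As a quick orientation before the detailed sections below, a typical invocation points KICS at a directory and writes reports to an output folder. A minimal sketch using the Docker image (so nothing needs to be installed locally; the same flags appear throughout this sheet):

# Quick start: scan the current directory and write JSON/SARIF reports to ./kics-results
docker run -t -v "$(pwd)":/path checkmarx/kics:latest scan \
  -p /path \
  -o /path/kics-results \
  --output-name results \
  --report-formats json,sarif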

Installation and Setup

Binary Installation

# Download latest release for Linux
curl -sfL 'https://raw.githubusercontent.com/Checkmarx/kics/master/install.sh' | sh

# Manual download for Linux
KICS_TAG=$(curl -s https://api.github.com/repos/Checkmarx/kics/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/')
KICS_VERSION=${KICS_TAG#v}   # release assets are versioned without the leading "v"
curl -L "https://github.com/Checkmarx/kics/releases/download/${KICS_TAG}/kics_${KICS_VERSION}_linux_x64.tar.gz" -o kics.tar.gz

# Extract and install
tar -xzf kics.tar.gz
sudo mv kics /usr/local/bin/
sudo chmod +x /usr/local/bin/kics

# Verify installation
kics version

# Download for macOS
curl -L "https://github.com/Checkmarx/kics/releases/download/${KICS_VERSION}/kics_${KICS_VERSION}_darwin_x64.tar.gz" -o kics.tar.gz

# Download for Windows
curl -L "https://github.com/Checkmarx/kics/releases/download/${KICS_VERSION}/kics_${KICS_VERSION}_windows_x64.zip" -o kics.zip

Package Manager Installation

# Homebrew (macOS/Linux)
brew install kics

# Chocolatey (Windows)
choco install kics

# Scoop (Windows)
scoop install kics

# Arch Linux (AUR)
yay -S kics-bin

# Alpine Linux
apk add kics

# Verify installation
kics version

Docker Installation

# Pull Docker image
docker pull checkmarx/kics:latest

# Create alias for easier usage (single quotes so $(pwd) expands when the alias is used, not when .bashrc is sourced)
cat >> ~/.bashrc << 'EOF'
alias kics='docker run -t -v "$(pwd)":/path checkmarx/kics:latest scan -p /path'
EOF
source ~/.bashrc

# Test installation
docker run checkmarx/kics:latest version

# Run with volume mount
docker run -t -v "$(pwd)":/path checkmarx/kics:latest scan -p /path

# Create Docker wrapper script
cat > kics-docker.sh << 'EOF'
#!/bin/bash
docker run -t \
  -v "$(pwd)":/path \
  -v ~/.kics:/app/.kics \
  checkmarx/kics:latest "$@"
EOF

chmod +x kics-docker.sh
sudo mv kics-docker.sh /usr/local/bin/kics-docker

Source Installation

# Install Go (if not already installed)
wget https://golang.org/dl/go1.19.linux-amd64.tar.gz
sudo tar -C /usr/local -xzf go1.19.linux-amd64.tar.gz
export PATH=$PATH:/usr/local/go/bin

# Clone repository
git clone https://github.com/Checkmarx/kics.git
cd kics

# Build from source
make build

# Install binary
sudo cp bin/kics /usr/local/bin/

# Verify installation
kics version

# Build with specific version
git checkout v1.7.0
make build

Configuration and Setup

# Initialize KICS configuration
mkdir -p ~/.kics

# Create configuration file
cat > ~/.kics/kics.config << 'EOF'
{
  "exclude-categories": [],
  "exclude-paths": [
    ".git/",
    "node_modules/",
    "vendor/",
    ".terraform/"
  ],
  "exclude-queries": [],
  "exclude-results": [],
  "exclude-severities": [],
  "include-queries": [],
  "libraries-path": "",
  "log-file": "",
  "log-format": "pretty",
  "log-level": "INFO",
  "log-path": "",
  "minimal-ui": false,
  "no-color": false,
  "no-progress": false,
  "output-name": "",
  "output-path": "",
  "payload-path": "",
  "preview-lines": 3,
  "queries-path": "",
  "report-formats": ["json"],
  "timeout": 60,
  "type": [],
  "verbose": false
}
EOF

# Set environment variables
export KICS_CONFIG=~/.kics/kics.config
export KICS_QUERIES_PATH=~/.kics/queries

# Create queries directory
mkdir -p ~/.kics/queries

# Download the built-in query library (optional; useful as a base for custom queries)
git clone https://github.com/Checkmarx/kics.git /tmp/kics-queries
cp -r /tmp/kics-queries/assets/queries/* ~/.kics/queries/

# Create custom queries directory
mkdir -p ~/.kics/custom-queries
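
To confirm the configuration above is actually applied, point a scan at it explicitly (KICS also picks it up through the KICS_CONFIG variable exported above); a minimal check, assuming the file created earlier in this section:

# Run a scan against the configuration file created above
kics scan -p . --config ~/.kics/kics.config

# Add --verbose for more detailed log output while validating the setup
kics scan -p . --config ~/.kics/kics.config --verbose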

Basic Usage and Commands

Simple Scanning

# Scan current directory
kics scan -p .

# Scan specific directory
kics scan -p /path/to/iac/files

# Scan specific file
kics scan -p main.tf

# Scan with specific type
kics scan -p . -t Terraform

# Scan with multiple types
kics scan -p . -t Terraform,Kubernetes,Dockerfile

# Write report files to a directory (-o/--output-path takes a directory; --output-name sets the file name)
kics scan -p . -o ./results --output-name results

# Scan with specific report formats
kics scan -p . -o ./results --report-formats json,sarif,html

Advanced Scanning Options

# Scan with severity filtering
kics scan -p . --exclude-severities LOW
kics scan -p . --exclude-severities LOW,MEDIUM

# Scan with category filtering
kics scan -p . --exclude-categories "Access Control"

# Scan with query filtering
kics scan -p . --exclude-queries 89154a9d-a1d5-4b1e-8e5d-4f1e8e5d4f1e

# Scan with path exclusions
kics scan -p . --exclude-paths ".git/,node_modules/,*.test.tf"

# Scan with custom queries
kics scan -p . --queries-path ~/.kics/custom-queries

# Scan with timeout
kics scan -p . --timeout 300

# Scan with verbose output
kics scan -p . --verbose

# Scan with minimal UI
kics scan -p . --minimal-ui

# Scan with no progress bar
kics scan -p . --no-progress

Multi-Platform Scanning

# Terraform scanning
kics scan -p ./terraform/ -t Terraform

# Kubernetes scanning
kics scan -p ./k8s/ -t Kubernetes

# CloudFormation scanning
kics scan -p ./cloudformation/ -t CloudFormation

# Ansible scanning
kics scan -p ./ansible/ -t Ansible

# Dockerfile scanning
kics scan -p . -t Dockerfile

# Docker Compose scanning
kics scan -p . -t DockerCompose

# Helm chart scanning
kics scan -p ./helm/ -t Kubernetes

# Azure Resource Manager scanning
kics scan -p ./arm/ -t AzureResourceManager

# Google Deployment Manager scanning
kics scan -p ./gdm/ -t GoogleDeploymentManager

# Pulumi scanning
kics scan -p ./pulumi/ -t Pulumi

# Scan multiple platforms
kics scan -p . -t Terraform,Kubernetes,Dockerfile,CloudFormation

Advanced Query Management

Custom Query Creation

# Create custom query directory
mkdir -p ~/.kics/custom-queries/terraform

# Create custom Terraform query
cat > ~/.kics/custom-queries/terraform/s3_bucket_custom_tags.rego << 'EOF'
package Cx

import data.generic.common as common_lib
import data.generic.terraform as tf_lib

CxPolicy[result] {
    resource := input.document[i].resource.aws_s3_bucket[name]

    # Check if Environment tag is missing
    not resource.tags.Environment

    result := {
        "documentId": input.document[i].id,
        "resourceType": "aws_s3_bucket",
        "resourceName": tf_lib.get_resource_name(resource, name),
        "searchKey": sprintf("aws_s3_bucket[%s].tags", [name]),
        "issueType": "MissingAttribute",
        "keyExpectedValue": "aws_s3_bucket should have Environment tag",
        "keyActualValue": "aws_s3_bucket does not have Environment tag",
        "searchLine": common_lib.build_search_line(["resource", "aws_s3_bucket", name, "tags"], []),
        "remediation": "Add Environment tag to S3 bucket",
        "remediationType": "addition"
    }
}

CxPolicy[result] {
    resource := input.document[i].resource.aws_s3_bucket[name]

    # Check if Owner tag is missing
    not resource.tags.Owner

    result := {
        "documentId": input.document[i].id,
        "resourceType": "aws_s3_bucket",
        "resourceName": tf_lib.get_resource_name(resource, name),
        "searchKey": sprintf("aws_s3_bucket[%s].tags", [name]),
        "issueType": "MissingAttribute",
        "keyExpectedValue": "aws_s3_bucket should have Owner tag",
        "keyActualValue": "aws_s3_bucket does not have Owner tag",
        "searchLine": common_lib.build_search_line(["resource", "aws_s3_bucket", name, "tags"], []),
        "remediation": "Add Owner tag to S3 bucket",
        "remediationType": "addition"
    }
}
EOF

# Create query metadata
cat > ~/.kics/custom-queries/terraform/s3_bucket_custom_tags.json << 'EOF'
{
  "id": "custom-s3-tags-001",
  "queryName": "S3 Bucket Should Have Custom Tags",
  "severity": "MEDIUM",
  "category": "Best Practices",
  "descriptionText": "S3 buckets should have Environment and Owner tags for proper resource management",
  "descriptionUrl": "https://docs.aws.amazon.com/s3/latest/userguide/object-tagging.html",
  "platform": "Terraform",
  "cloudProvider": "aws"
}
EOF

# Use custom queries
kics scan -p . --queries-path ~/.kics/custom-queries

# Create Kubernetes custom query
cat > ~/.kics/custom-queries/kubernetes/pod_security_context.rego << 'EOF'
package Cx

CxPolicy[result] {
    resource := input.document[i]
    resource.kind == "Pod"

    # Check if securityContext is missing
    not resource.spec.securityContext

    result := {
        "documentId": input.document[i].id,
        "resourceType": resource.kind,
        "resourceName": resource.metadata.name,
        "searchKey": "spec.securityContext",
        "issueType": "MissingAttribute",
        "keyExpectedValue": "Pod should have securityContext defined",
        "keyActualValue": "Pod does not have securityContext defined",
        "remediation": "Add securityContext to Pod specification",
        "remediationType": "addition"
    }
}

CxPolicy[result] {
    resource := input.document[i]
    resource.kind == "Pod"

    # Check if runAsNonRoot is not set to true
    resource.spec.securityContext.runAsNonRoot != true

    result := {
        "documentId": input.document[i].id,
        "resourceType": resource.kind,
        "resourceName": resource.metadata.name,
        "searchKey": "spec.securityContext.runAsNonRoot",
        "issueType": "IncorrectValue",
        "keyExpectedValue": "Pod should run as non-root user",
        "keyActualValue": "Pod may run as root user",
        "remediation": "Set runAsNonRoot to true in securityContext",
        "remediationType": "replacement"
    }
}
EOF

# Create Kubernetes query metadata
cat > ~/.kics/custom-queries/kubernetes/pod_security_context.json << 'EOF'
{
  "id": "custom-k8s-security-001",
  "queryName": "Pod Should Have Security Context",
  "severity": "HIGH",
  "category": "Resource Management",
  "descriptionText": "Pods should have securityContext defined with runAsNonRoot set to true",
  "descriptionUrl": "https://kubernetes.io/docs/tasks/configure-pod-container/security-context/",
  "platform": "Kubernetes",
  "cloudProvider": "common"
}
EOF
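
To verify that the custom Terraform query above fires as expected, scan a small test file that violates it using only the custom query directory (the file name and tag values below are arbitrary examples):

# Create a Terraform file that is missing the Environment and Owner tags
cat > /tmp/kics-query-test.tf << 'EOF'
resource "aws_s3_bucket" "example" {
  bucket = "example-bucket"

  tags = {
    Project = "demo"
  }
}
EOF

# Scan it with only the custom queries loaded; expect the two tag findings
kics scan -p /tmp/kics-query-test.tf --queries-path ~/.kics/custom-queries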

Query Management Scripts

#!/usr/bin/env python3
# Advanced query management for KICS

import json
import subprocess
import argparse
import tempfile
from pathlib import Path

class KICSQueryManager:
    """Advanced query management for KICS"""

    def __init__(self, queries_dir="~/.kics/queries"):
        self.queries_dir = Path(queries_dir).expanduser()
        self.custom_queries_dir = Path("~/.kics/custom-queries").expanduser()
        self.queries_dir.mkdir(parents=True, exist_ok=True)
        self.custom_queries_dir.mkdir(parents=True, exist_ok=True)

    def list_queries(self, platform=None, severity=None):
        """List available queries with filtering"""

        queries = []

        # Search in both default and custom queries
        search_dirs = [self.queries_dir, self.custom_queries_dir]

        for search_dir in search_dirs:
            for metadata_file in search_dir.rglob("*.json"):
                try:
                    with open(metadata_file, 'r') as f:
                        metadata = json.load(f)

                    # Apply filters
                    if platform and metadata.get('platform', '').lower() != platform.lower():
                        continue

                    if severity and metadata.get('severity', '').lower() != severity.lower():
                        continue

                    # Find corresponding .rego file
                    rego_file = metadata_file.with_suffix('.rego')
                    if rego_file.exists():
                        queries.append({
                            'id': metadata.get('id', 'unknown'),
                            'name': metadata.get('queryName', 'Unknown'),
                            'severity': metadata.get('severity', 'Unknown'),
                            'category': metadata.get('category', 'Unknown'),
                            'platform': metadata.get('platform', 'Unknown'),
                            'description': metadata.get('descriptionText', 'No description'),
                            'metadata_file': str(metadata_file),
                            'rego_file': str(rego_file),
                            'custom': 'custom-queries' in str(metadata_file)
                        })

                except Exception as e:
                    print(f"Error reading {metadata_file}: {e}")

        return queries

    def create_query_from_template(self, query_name, platform, severity="MEDIUM", category="Best Practices"):
        """Create a new query from template"""

        # Sanitize query name for file naming
        safe_name = query_name.lower().replace(' ', '_').replace('-', '_')
        platform_dir = self.custom_queries_dir / platform.lower()
        platform_dir.mkdir(parents=True, exist_ok=True)

        # Generate query ID
        query_id = f"custom-{platform.lower()}-{safe_name}"

        # Create Rego template based on platform
        if platform.lower() == 'terraform':
            rego_template = f'''package Cx

import data.generic.common as common_lib
import data.generic.terraform as tf_lib

CxPolicy[result] {{
    resource := input.document[i].resource.RESOURCE_TYPE[name]

    # Add your condition here
    condition := true  # Replace with actual condition
    condition

    result := {{
        "documentId": input.document[i].id,
        "resourceType": "RESOURCE_TYPE",
        "resourceName": tf_lib.get_resource_name(resource, name),
        "searchKey": sprintf("RESOURCE_TYPE[%s].ATTRIBUTE", [name]),
        "issueType": "MissingAttribute",
        "keyExpectedValue": "Expected condition description",
        "keyActualValue": "Actual condition description",
        "searchLine": common_lib.build_search_line(["resource", "RESOURCE_TYPE", name, "ATTRIBUTE"], []),
        "remediation": "Remediation steps",
        "remediationType": "addition"
    }}
}}
'''
        elif platform.lower() == 'kubernetes':
            rego_template = f'''package Cx

CxPolicy[result] {{
    resource := input.document[i]
    resource.kind == "RESOURCE_KIND"

    # Add your condition here
    condition := true  # Replace with actual condition
    condition

    result := {{
        "documentId": input.document[i].id,
        "resourceType": resource.kind,
        "resourceName": resource.metadata.name,
        "searchKey": "spec.ATTRIBUTE",
        "issueType": "MissingAttribute",
        "keyExpectedValue": "Expected condition description",
        "keyActualValue": "Actual condition description",
        "remediation": "Remediation steps",
        "remediationType": "addition"
    }}
}}
'''
        else:
            rego_template = f'''package Cx

CxPolicy[result] {{
    # Add your query logic here
    resource := input.document[i]

    # Add your condition here
    condition := true  # Replace with actual condition
    condition

    result := {{
        "documentId": input.document[i].id,
        "resourceType": "RESOURCE_TYPE",
        "resourceName": "resource_name",
        "searchKey": "search_key",
        "issueType": "MissingAttribute",
        "keyExpectedValue": "Expected condition description",
        "keyActualValue": "Actual condition description",
        "remediation": "Remediation steps",
        "remediationType": "addition"
    }}
}}
'''

        # Create metadata template
        metadata_template = {
            "id": query_id,
            "queryName": query_name,
            "severity": severity,
            "category": category,
            "descriptionText": f"Custom query: {query_name}",
            "descriptionUrl": "https://example.com/documentation",
            "platform": platform,
            "cloudProvider": "common"
        }

        # Write files
        rego_file = platform_dir / f"{safe_name}.rego"
        metadata_file = platform_dir / f"{safe_name}.json"

        with open(rego_file, 'w') as f:
            f.write(rego_template)

        with open(metadata_file, 'w') as f:
            json.dump(metadata_template, f, indent=2)

        print(f"Query created:")
        print(f"  Rego file: {rego_file}")
        print(f"  Metadata file: {metadata_file}")
        print(f"  Query ID: {query_id}")

        return rego_file, metadata_file

    def validate_queries(self):
        """Validate all custom queries"""

        print("Validating custom queries...")

        for rego_file in self.custom_queries_dir.rglob("*.rego"):
            try:
                # Use opa fmt to validate syntax
                result = subprocess.run(
                    ["opa", "fmt", str(rego_file)],
                    capture_output=True,
                    text=True
                )

                if result.returncode == 0:
                    print(f"✅ {rego_file.name}: Valid")
                else:
                    print(f"❌ {rego_file.name}: Invalid - {result.stderr}")

            except FileNotFoundError:
                print("⚠️  OPA not found. Install OPA to validate Rego queries.")
                break

    def test_query(self, query_path, test_file):
        """Test a query against test data"""

        try:
            # Resolve the query ID from the sibling metadata file
            # (--include-queries expects query IDs, not file names)
            metadata_file = query_path.with_suffix('.json')
            with open(metadata_file, 'r') as f:
                query_id = json.load(f).get('id', query_path.stem)

            with tempfile.TemporaryDirectory() as output_dir:
                # Run KICS with the specific query; findings alone should not
                # be treated as a failure, so ignore result-based exit codes
                result = subprocess.run([
                    "kics", "scan",
                    "-p", test_file,
                    "--queries-path", str(query_path.parent),
                    "--include-queries", query_id,
                    "--report-formats", "json",
                    "-o", output_dir,
                    "--output-name", "results",
                    "--ignore-on-exit", "results"
                ], capture_output=True, text=True)

                if result.returncode != 0:
                    print(f"Error testing query: {result.stderr}")
                    return

                report_file = Path(output_dir) / "results.json"
                if not report_file.exists():
                    print("No JSON report produced by KICS")
                    return

                with open(report_file, 'r') as f:
                    output = json.load(f)

                queries_results = output.get("queries", [])

                print(f"Query test results for {query_path.name}:")
                print(f"Issues found: {len(queries_results)}")

                for query_result in queries_results:
                    print(f"- {query_result.get('query_name', 'Unknown')}: {query_result.get('description', 'No description')}")

        except Exception as e:
            print(f"Error testing query: {e}")

    def generate_query_report(self):
        """Generate a comprehensive query report"""

        queries = self.list_queries()

        report = {
            "query_summary": {
                "total_queries": len(queries),
                "custom_queries": len([q for q in queries if q['custom']]),
                "platforms": {},
                "severities": {},
                "categories": {}
            },
            "queries": queries
        }

        # Calculate statistics
        for query in queries:
            platform = query['platform']
            severity = query['severity']
            category = query['category']

            # Count by platform
            if platform in report["query_summary"]["platforms"]:
                report["query_summary"]["platforms"][platform] += 1
            else:
                report["query_summary"]["platforms"][platform] = 1

            # Count by severity
            if severity in report["query_summary"]["severities"]:
                report["query_summary"]["severities"][severity] += 1
            else:
                report["query_summary"]["severities"][severity] = 1

            # Count by category
            if category in report["query_summary"]["categories"]:
                report["query_summary"]["categories"][category] += 1
            else:
                report["query_summary"]["categories"][category] = 1

        # Save report
        report_file = self.custom_queries_dir / "query_report.json"
        with open(report_file, 'w') as f:
            json.dump(report, f, indent=2)

        print(f"Query report generated: {report_file}")
        return report

def main():
    parser = argparse.ArgumentParser(description='KICS Query Manager')
    parser.add_argument('action', choices=['create', 'list', 'validate', 'test', 'report'])
    parser.add_argument('--name', help='Query name for creation')
    parser.add_argument('--platform', help='Platform for query (Terraform, Kubernetes, etc.)')
    parser.add_argument('--severity', default='MEDIUM', help='Query severity')
    parser.add_argument('--category', default='Best Practices', help='Query category')
    parser.add_argument('--query-path', help='Query file path for testing')
    parser.add_argument('--test-file', help='Test file for query testing')
    parser.add_argument('--filter-platform', help='Filter queries by platform')
    parser.add_argument('--filter-severity', help='Filter queries by severity')

    args = parser.parse_args()

    manager = KICSQueryManager()

    if args.action == 'create':
        if not args.name or not args.platform:
            print("Error: --name and --platform required for create action")
            return
        manager.create_query_from_template(args.name, args.platform, args.severity, args.category)

    elif args.action == 'list':
        queries = manager.list_queries(args.filter_platform, args.filter_severity)
        print(f"Found {len(queries)} queries:")
        for query in queries:
            custom_marker = " [CUSTOM]" if query['custom'] else ""
            print(f"- {query['name']} ({query['platform']}, {query['severity']}){custom_marker}")

    elif args.action == 'validate':
        manager.validate_queries()

    elif args.action == 'test':
        if not args.query_path or not args.test_file:
            print("Error: --query-path and --test-file required for test action")
            return
        manager.test_query(Path(args.query_path), args.test_file)

    elif args.action == 'report':
        manager.generate_query_report()

if __name__ == "__main__":
    main()
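
Assuming the script above is saved as kics_query_manager.py (file paths below are illustrative), typical invocations look like this:

# Scaffold a new Terraform query, then list and validate custom queries
python3 kics_query_manager.py create --name "S3 Versioning Required" --platform Terraform --severity HIGH
python3 kics_query_manager.py list --filter-platform Terraform
python3 kics_query_manager.py validate

# Test a generated query against a sample file and produce a summary report
python3 kics_query_manager.py test --query-path ~/.kics/custom-queries/terraform/s3_versioning_required.rego --test-file ./main.tf
python3 kics_query_manager.py report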

CI/CD Integration

GitHub Actions Integration

# .github/workflows/kics.yml
name: KICS Security Scan

on:
  push:
    branches: [ main, develop ]
  pull_request:
    branches: [ main ]

jobs:
  kics-scan:
    runs-on: ubuntu-latest

    steps:
    - name: Checkout code
      uses: actions/checkout@v3

    - name: Run KICS scan
      uses: checkmarx/kics-github-action@v1.6
      with:
        path: '.'
        output_path: 'kics-results'
        output_formats: 'json,sarif,html'
        exclude_paths: '.git/,node_modules/,vendor/'
        exclude_severities: 'info'
        ignore_on_exit: results

    - name: Upload SARIF file
      uses: github/codeql-action/upload-sarif@v2
      with:
        sarif_file: kics-results/results.sarif

    - name: Upload scan results
      uses: actions/upload-artifact@v3
      with:
        name: kics-results
        path: kics-results/

  kics-multi-platform:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform: [Terraform, Kubernetes, Dockerfile, CloudFormation]

    steps:
    - name: Checkout code
      uses: actions/checkout@v3

    - name: Install KICS
      run: |
        curl -sfL 'https://raw.githubusercontent.com/Checkmarx/kics/master/install.sh' | sh
        sudo mv ./bin/kics /usr/local/bin/

    - name: Scan ${{ matrix.platform }}
      run: |
        mkdir -p results
        kics scan \
          -p . \
          -t ${{ matrix.platform }} \
          --report-formats json,html \
          -o results \
          --output-name ${{ matrix.platform }}-results

    - name: Upload results
      uses: actions/upload-artifact@v3
      if: always()
      with:
        name: ${{ matrix.platform }}-scan-results
        path: results/

  kics-custom-queries:
    runs-on: ubuntu-latest

    steps:
    - name: Checkout code
      uses: actions/checkout@v3

    - name: Setup custom queries
      run: |
        mkdir -p ~/.kics/custom-queries
        # Copy custom queries from repository
        if [ -d ".kics/queries" ]; then
          cp -r .kics/queries/* ~/.kics/custom-queries/
        fi

    - name: Install KICS
      run: |
        curl -sfL 'https://raw.githubusercontent.com/Checkmarx/kics/master/install.sh' | sh
        sudo mv ./bin/kics /usr/local/bin/

    - name: Scan with custom queries
      run: |
        kics scan \
          -p . \
          --queries-path ~/.kics/custom-queries \
          --report-formats json,sarif \
          -o . \
          --output-name custom-scan-results \
          --ignore-on-exit results

    - name: Process results
      run: |
        python3 << 'EOF'
import json
import sys

# Load results
with open('custom-scan-results.json', 'r') as f:
    results = json.load(f)

queries = results.get('queries', [])
high_severity = [q for q in queries if q.get('severity') == 'HIGH']
medium_severity = [q for q in queries if q.get('severity') == 'MEDIUM']

print(f"Total issues: {len(queries)}")
print(f"High severity: {len(high_severity)}")
print(f"Medium severity: {len(medium_severity)}")

# Fail build if high severity issues found
if len(high_severity) > 0:
    print("❌ High severity issues found!")
    sys.exit(1)
elif len(medium_severity) > 10:
    print("⚠️  Too many medium severity issues!")
    sys.exit(1)
else:
    print("✅ No critical issues found!")
EOF

GitLab CI Integration

# .gitlab-ci.yml
stages:
  - security-scan
  - report

variables:
  KICS_VERSION: "latest"

kics-scan:
  stage: security-scan
  image:
    name: checkmarx/kics:${KICS_VERSION}
    entrypoint: [""]

  script:
    - |
      # Create results directory
      mkdir -p kics-results

      # Scan different IaC types
      if [ -d "./terraform" ]; then
        kics scan \
          -p ./terraform \
          -t Terraform \
          --report-formats json,html \
          -o kics-results/terraform-results.json \
          --output-name terraform-results
      fi

      if [ -d "./k8s" ]; then
        kics scan \
          -p ./k8s \
          -t Kubernetes \
          --report-formats json,html \
          -o kics-results/k8s-results.json \
          --output-name k8s-results
      fi

      if [ -f "./Dockerfile" ]; then
        kics scan \
          -p . \
          -t Dockerfile \
          --report-formats json,html \
          -o kics-results/dockerfile-results.json \
          --output-name dockerfile-results
      fi

      # Generate combined report
      python3 << 'EOF'
import json
import glob
import os

combined_results = {
    "scan_summary": {
        "total_files": 0,
        "total_queries": 0,
        "high_severity": 0,
        "medium_severity": 0,
        "low_severity": 0
    },
    "results_by_platform": {}
}

for result_file in glob.glob('kics-results/*-results.json'):
    platform = os.path.basename(result_file).replace('-results.json', '')

    try:
        with open(result_file, 'r') as f:
            data = json.load(f)

        queries = data.get('queries', [])
        combined_results["results_by_platform"][platform] = {
            "queries": queries,
            "count": len(queries)
        }

        combined_results["scan_summary"]["total_queries"] += len(queries)

        for query in queries:
            severity = query.get('severity', '').upper()
            if severity == 'HIGH':
                combined_results["scan_summary"]["high_severity"] += 1
            elif severity == 'MEDIUM':
                combined_results["scan_summary"]["medium_severity"] += 1
            elif severity == 'LOW':
                combined_results["scan_summary"]["low_severity"] += 1

    except Exception as e:
        print(f"Error processing {result_file}: {e}")

with open('kics-results/combined-results.json', 'w') as f:
    json.dump(combined_results, f, indent=2)

print(f"Combined results: {combined_results['scan_summary']}")
EOF

  artifacts:
    paths:
      - "kics-results/"
    expire_in: 1 week

  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

kics-report:
  stage: report
  image: python:3.9-slim

  dependencies:
    - kics-scan

  script:
    - |
      pip install jinja2

      python3 << 'EOF'
import json
import jinja2
from datetime import datetime

# Load combined results
with open('kics-results/combined-results.json', 'r') as f:
    results = json.load(f)

summary = results['scan_summary']
platforms = results['results_by_platform']

# HTML template
template = jinja2.Template('''
<!DOCTYPE html>
<html>
<head>
    <title>KICS Security Report</title>
    <style>
        body { font-family: Arial, sans-serif; margin: 20px; }
        .header { background: #f4f4f4; padding: 20px; border-radius: 5px; }
        .stats { display: flex; gap: 20px; margin: 20px 0; }
        .stat { background: #e9e9e9; padding: 15px; border-radius: 5px; text-align: center; }
        .query { border: 1px solid #ddd; margin: 10px 0; padding: 15px; border-radius: 5px; }
        .high { border-left: 5px solid #d32f2f; }
        .medium { border-left: 5px solid #f57c00; }
        .low { border-left: 5px solid #388e3c; }
        .platform { margin: 20px 0; }
    </style>
</head>
<body>
    <div class="header">
        <h1>KICS Security Report</h1>
        <p>Generated on: {{ timestamp }}</p>
    </div>

    <div class="stats">
        <div class="stat">
            <h3>{{ summary.total_queries }}</h3>
            <p>Total Issues</p>
        </div>
        <div class="stat">
            <h3>{{ summary.high_severity }}</h3>
            <p>High Severity</p>
        </div>
        <div class="stat">
            <h3>{{ summary.medium_severity }}</h3>
            <p>Medium Severity</p>
        </div>
        <div class="stat">
            <h3>{{ summary.low_severity }}</h3>
            <p>Low Severity</p>
        </div>
    </div>

    {% for platform, data in platforms.items() %}
    <div class="platform">
        <h2>{{ platform.title() }} ({{ data.count }} issues)</h2>
        {% for query in data.queries %}
        <div class="query {{ query.severity.lower() }}">
            <h4>{{ query.query_name }}</h4>
            <p><strong>Severity:</strong> {{ query.severity }}</p>
            <p><strong>Category:</strong> {{ query.category }}</p>
            <p><strong>Description:</strong> {{ query.description }}</p>
            <p><strong>File:</strong> {{ query.file_name }}:{{ query.line }}</p>
        </div>
        {% endfor %}
    </div>
    {% endfor %}
</body>
</html>
''')

html_content = template.render(
    summary=summary,
    platforms=platforms,
    timestamp=datetime.now().strftime('%Y-%m-%d %H:%M:%S')
)

with open('kics-results/kics-report.html', 'w') as f:
    f.write(html_content)

print("HTML report generated: kics-results/kics-report.html")
EOF

  artifacts:
    paths:
      - "kics-results/kics-report.html"
    expire_in: 1 month

Jenkins Pipeline Integration

// Jenkinsfile
pipeline {
    agent any

    environment {
        KICS_VERSION = 'latest'
    }

    stages {
        stage('Checkout') {
            steps {
                checkout scm
            }
        }

        stage('Install KICS') {
            steps {
                script {
                    sh '''
                        # Download and install KICS
                        curl -sfL 'https://raw.githubusercontent.com/Checkmarx/kics/master/install.sh' | sh
                        sudo mv ./bin/kics /usr/local/bin/

                        # Verify installation
                        kics version
                    '''
                }
            }
        }

        stage('KICS Security Scan') {
            parallel {
                stage('Terraform Scan') {
                    when {
                        expression { fileExists('terraform/') }
                    }
                    steps {
                        script {
                            sh '''
                                mkdir -p kics-results
                                kics scan \
                                    -p ./terraform \
                                    -t Terraform \
                                    --report-formats json,html \
                                    -o kics-results \
                                    --output-name terraform-results \
                                    --exclude-severities INFO \
                                    --ignore-on-exit results
                            '''
                        }
                    }
                }

                stage('Kubernetes Scan') {
                    when {
                        expression { fileExists('k8s/') }
                    }
                    steps {
                        script {
                            sh '''
                                mkdir -p kics-results
                                kics scan \
                                    -p ./k8s \
                                    -t Kubernetes \
                                    --report-formats json,html \
                                    -o kics-results \
                                    --output-name k8s-results \
                                    --exclude-severities INFO \
                                    --ignore-on-exit results
                            '''
                        }
                    }
                }

                stage('Dockerfile Scan') {
                    when {
                        expression { fileExists('Dockerfile') }
                    }
                    steps {
                        script {
                            sh '''
                                mkdir -p kics-results
                                kics scan \
                                    -p . \
                                    -t Dockerfile \
                                    --report-formats json,html \
                                    -o kics-results \
                                    --output-name dockerfile-results \
                                    --exclude-severities INFO \
                                    --ignore-on-exit results
                            '''
                        }
                    }
                }
            }
        }

        stage('Process Results') {
            steps {
                script {
                    sh '''
                        python3 << 'EOF'
import json
import glob
import os

# Process all result files
total_issues = 0
high_issues = 0
medium_issues = 0
critical_files = []

for result_file in glob.glob('kics-results/*-results.json'):
    if os.path.getsize(result_file) > 0:
        try:
            with open(result_file, 'r') as f:
                data = json.load(f)

            queries = data.get('queries', [])
            total_issues += len(queries)

            for query in queries:
                severity = query.get('severity', '').upper()
                if severity == 'HIGH':
                    high_issues += 1
                    critical_files.append(query.get('file_name', 'unknown'))
                elif severity == 'MEDIUM':
                    medium_issues += 1

        except json.JSONDecodeError:
            print(f"Error parsing {result_file}")

# Create summary
summary = {
    'total_issues': total_issues,
    'high_issues': high_issues,
    'medium_issues': medium_issues,
    'critical_files': list(set(critical_files))
}

with open('kics-results/summary.json', 'w') as f:
    json.dump(summary, f, indent=2)

print(f"Total issues: {total_issues}")
print(f"High severity: {high_issues}")
print(f"Medium severity: {medium_issues}")

# Set build status
if high_issues > 0:
    print("FAILURE: High severity issues found")
    exit(1)
elif medium_issues > 20:
    print("UNSTABLE: Too many medium severity issues")
    exit(2)
else:
    print("SUCCESS: No critical issues found")
EOF
                    '''
                }
            }
        }
    }

    post {
        always {
            // Archive results
            archiveArtifacts artifacts: 'kics-results/**/*', 
                           allowEmptyArchive: true

            // Publish HTML reports
            publishHTML([
                allowMissing: false,
                alwaysLinkToLastBuild: true,
                keepAll: true,
                reportDir: 'kics-results',
                reportFiles: '*.html',
                reportName: 'KICS Security Report'
            ])
        }

        failure {
            emailext (
                subject: "KICS Security Scan Failed: ${env.JOB_NAME} - ${env.BUILD_NUMBER}",
                body: "High severity security issues found. Check the build logs for details.",
                to: "${env.CHANGE_AUTHOR_EMAIL}"
            )
        }

        unstable {
            emailext (
                subject: "KICS Security Scan Unstable: ${env.JOB_NAME} - ${env.BUILD_NUMBER}",
                body: "Medium severity security issues found. Review recommended.",
                to: "${env.CHANGE_AUTHOR_EMAIL}"
            )
        }
    }
}

Advanced Configuration and Automation

Comprehensive Configuration Management

#!/bin/bash
# Advanced KICS configuration management

setup_kics_environment() {
    echo "Setting up KICS environment..."

    # Create directory structure
    mkdir -p ~/.kics/{config,queries,custom-queries,reports,cache,templates}

    # Create advanced configuration
    cat > ~/.kics/kics.config << 'EOF'
{
  "exclude-categories": [],
  "exclude-paths": [
    ".git/",
    "node_modules/",
    "vendor/",
    ".terraform/",
    "target/",
    "build/",
    "dist/"
  ],
  "exclude-queries": [],
  "exclude-results": [],
  "exclude-severities": ["INFO"],
  "include-queries": [],
  "libraries-path": "",
  "log-file": "~/.kics/kics.log",
  "log-format": "json",
  "log-level": "INFO",
  "log-path": "~/.kics/logs/",
  "minimal-ui": false,
  "no-color": false,
  "no-progress": false,
  "output-name": "",
  "output-path": "~/.kics/reports/",
  "payload-path": "",
  "preview-lines": 5,
  "queries-path": "~/.kics/custom-queries",
  "report-formats": ["json", "html", "sarif"],
  "timeout": 300,
  "type": [],
  "verbose": false
}
EOF

    # Set environment variables
    cat >> ~/.bashrc << 'EOF'
# KICS environment variables
export KICS_CONFIG=~/.kics/kics.config
export KICS_QUERIES_PATH=~/.kics/queries
export KICS_CUSTOM_QUERIES_PATH=~/.kics/custom-queries
export KICS_REPORTS_PATH=~/.kics/reports
EOF

    source ~/.bashrc

    echo "KICS environment setup complete"
}

# Advanced scanning function
advanced_kics_scan() {
    local scan_dir="${1:-.}"
    local output_dir="${2:-./kics-results}"
    local config_file="${3:-$HOME/.kics/kics.config}"

    echo "Starting advanced KICS scan..."
    echo "Scan directory: $scan_dir"
    echo "Output directory: $output_dir"

    # Create output directory
    mkdir -p "$output_dir"

    # Detect IaC types in directory
    iac_types=()

    if find "$scan_dir" -name "*.tf" -type f | head -1 | grep -q .; then
        iac_types+=("Terraform")
    fi

    if find "$scan_dir" -name "*.yaml" -o -name "*.yml" | xargs grep -l "apiVersion\|kind" 2>/dev/null | head -1 | grep -q .; then
        iac_types+=("Kubernetes")
    fi

    if find "$scan_dir" -name "Dockerfile*" -type f | head -1 | grep -q .; then
        iac_types+=("Dockerfile")
    fi

    if find "$scan_dir" -name "docker-compose*.yml" -o -name "docker-compose*.yaml" | head -1 | grep -q .; then
        iac_types+=("DockerCompose")
    fi

    if find "$scan_dir" -name "*.json" | xargs grep -l "AWSTemplateFormatVersion\|Resources" 2>/dev/null | head -1 | grep -q .; then
        iac_types+=("CloudFormation")
    fi

    if find "$scan_dir" -name "*.yml" -o -name "*.yaml" | xargs grep -l "tasks:\|hosts:" 2>/dev/null | head -1 | grep -q .; then
        iac_types+=("Ansible")
    fi

    echo "Detected IaC types: ${iac_types[*]}"

    # Scan each IaC type
    for iac_type in "${iac_types[@]}"; do
        echo "Scanning $iac_type files..."

        kics scan \
            -p "$scan_dir" \
            -t "$iac_type" \
            --config "$config_file" \
            --report-formats json,html,sarif \
            -o "$output_dir" \
            --output-name "${iac_type}-results" \
            --exclude-severities INFO \
            --ignore-on-exit results
    done

    # Generate combined report
    python3 << EOF
import json
import glob
import os
from datetime import datetime

output_dir = "$output_dir"
results_files = glob.glob(f"{output_dir}/*-results.json")

combined_results = {
    "scan_info": {
        "timestamp": datetime.now().isoformat(),
        "scan_directory": "$scan_dir",
        "iac_types": ${iac_types[@]@Q},
        "total_files_scanned": 0,
        "total_queries": 0
    },
    "results_by_type": {},
    "summary": {
        "high_severity": 0,
        "medium_severity": 0,
        "low_severity": 0,
        "categories": {},
        "platforms": {}
    }
}

for results_file in results_files:
    iac_type = os.path.basename(results_file).replace("-results.json", "")

    try:
        with open(results_file, 'r') as f:
            data = json.load(f)

        queries = data.get("queries", [])
        combined_results["results_by_type"][iac_type] = {
            "queries": queries,
            "count": len(queries)
        }

        combined_results["scan_info"]["total_queries"] += len(queries)

        # Count by severity
        for query in queries:
            severity = query.get("severity", "").upper()
            if severity == "HIGH":
                combined_results["summary"]["high_severity"] += 1
            elif severity == "MEDIUM":
                combined_results["summary"]["medium_severity"] += 1
            elif severity == "LOW":
                combined_results["summary"]["low_severity"] += 1

            # Count by category
            category = query.get("category", "Unknown")
            if category in combined_results["summary"]["categories"]:
                combined_results["summary"]["categories"][category] += 1
            else:
                combined_results["summary"]["categories"][category] = 1

            # Count by platform
            platform = query.get("platform", "Unknown")
            if platform in combined_results["summary"]["platforms"]:
                combined_results["summary"]["platforms"][platform] += 1
            else:
                combined_results["summary"]["platforms"][platform] = 1

    except Exception as e:
        print(f"Error processing {results_file}: {e}")

# Save combined results
with open(f"{output_dir}/combined-results.json", 'w') as f:
    json.dump(combined_results, f, indent=2)

print(f"Combined results saved to {output_dir}/combined-results.json")
print(f"Total queries: {combined_results['scan_info']['total_queries']}")
print(f"High severity: {combined_results['summary']['high_severity']}")
print(f"Medium severity: {combined_results['summary']['medium_severity']}")
print(f"Low severity: {combined_results['summary']['low_severity']}")
EOF

    echo "Advanced scan completed. Results saved to: $output_dir"
}

# Automated remediation suggestions
generate_remediation_report() {
    local results_file="$1"
    local output_file="${2:-remediation-report.md}"

    echo "Generating remediation report..."

    python3 << EOF
import json
from datetime import datetime

# Load results
with open("$results_file", 'r') as f:
    data = json.load(f)

queries = data.get("queries", [])

# Group by category and severity
categories = {}
for query in queries:
    category = query.get("category", "Unknown")
    severity = query.get("severity", "Unknown")

    if category not in categories:
        categories[category] = {"HIGH": [], "MEDIUM": [], "LOW": []}

    # Tolerate severities other than HIGH/MEDIUM/LOW (e.g., INFO)
    categories[category].setdefault(severity, []).append(query)

# Generate markdown report
report = f"""# KICS Remediation Report

**Generated:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
**Total Issues:** {len(queries)}

## Executive Summary

"""

# Add summary statistics
high_count = sum(len(cats.get("HIGH", [])) for cats in categories.values())
medium_count = sum(len(cats.get("MEDIUM", [])) for cats in categories.values())
low_count = sum(len(cats.get("LOW", [])) for cats in categories.values())
total = len(queries) or 1  # avoid division by zero when there are no findings

report += f"""
| Severity | Count | Percentage |
|----------|-------|------------|
| High     | {high_count} | {high_count/total*100:.1f}% |
| Medium   | {medium_count} | {medium_count/total*100:.1f}% |
| Low      | {low_count} | {low_count/total*100:.1f}% |

## Remediation Recommendations

"""

# Add remediation suggestions by category
remediation_suggestions = {
    "Access Control": {
        "description": "Identity and access management issues",
        "common_fixes": [
            "Implement principle of least privilege",
            "Use IAM roles instead of access keys",
            "Enable MFA for sensitive operations",
            "Regularly rotate credentials"
        ]
    },
    "Encryption": {
        "description": "Data encryption and protection issues",
        "common_fixes": [
            "Enable encryption at rest",
            "Use TLS for data in transit",
            "Implement proper key management",
            "Use strong encryption algorithms"
        ]
    },
    "Networking and Firewall": {
        "description": "Network security configuration issues",
        "common_fixes": [
            "Restrict inbound traffic to necessary ports",
            "Use security groups instead of 0.0.0.0/0",
            "Implement network segmentation",
            "Enable VPC flow logs"
        ]
    },
    "Resource Management": {
        "description": "Resource configuration and management issues",
        "common_fixes": [
            "Implement proper resource tagging",
            "Set resource limits and quotas",
            "Enable monitoring and logging",
            "Use managed services when possible"
        ]
    }
}

for category, queries_by_severity in categories.items():
    total_category_queries = sum(len(v) for v in queries_by_severity.values())

    report += f"""
### {category} ({total_category_queries} issues)

"""

    if category in remediation_suggestions:
        report += f"{remediation_suggestions[category]['description']}\\n\\n"
        report += "**Common Fixes:**\\n"
        for fix in remediation_suggestions[category]['common_fixes']:
            report += f"- {fix}\\n"
        report += "\\n"

    # Add specific issues
    for severity in ["HIGH", "MEDIUM", "LOW"]:
        if queries_by_severity[severity]:
            report += f"#### {severity} Severity ({len(queries_by_severity[severity])} issues)\\n\\n"

            for query in queries_by_severity[severity][:5]:  # Limit to first 5
                report += f"""
**Query:** {query.get('query_name', 'Unknown')}
**Description:** {query.get('description', 'No description')}
**File:** {query.get('file_name', 'Unknown')}:{query.get('line', 'Unknown')}
**Platform:** {query.get('platform', 'Unknown')}

"""

            if len(queries_by_severity[severity]) > 5:
                report += f"... and {len(queries_by_severity[severity]) - 5} more issues\\n\\n"

# Add next steps
report += """
## Next Steps

1. **Prioritize High Severity Issues:** Address all high severity issues first
2. **Review Medium Severity Issues:** Evaluate business impact and fix accordingly
3. **Implement Custom Queries:** Create organization-specific security policies
4. **Automate Scanning:** Integrate KICS into CI/CD pipelines
5. **Regular Reviews:** Schedule periodic security reviews and scans

## Resources

- [KICS Documentation](https://docs.kics.io/)
- [KICS Query Library](https://docs.kics.io/latest/queries/)
- [Infrastructure Security Best Practices](https://docs.kics.io/latest/getting-started/)
- [Custom Query Development](https://docs.kics.io/latest/creating-queries/)
"""

# Save report
with open("$output_file", 'w') as f:
    f.write(report)

print(f"Remediation report generated: $output_file")
EOF
}

# Performance optimization
optimize_kics_performance() {
    echo "Optimizing KICS performance..."

    # Create performance configuration
    cat > ~/.kics/performance-config.json << 'EOF'
{
  "exclude-paths": [
    ".git/",
    "node_modules/",
    "vendor/",
    ".terraform/",
    "target/",
    "build/",
    "dist/",
    "*.min.js",
    "*.min.css",
    "*.map"
  ],
  "exclude-severities": ["INFO"],
  "timeout": 600,
  "minimal-ui": true,
  "no-progress": false,
  "log-level": "WARN",
  "preview-lines": 1
}
EOF

    # Create parallel scanning script
    cat > ~/.kics/parallel-scan.sh << 'EOF'
#!/bin/bash
# Parallel KICS scanning for large codebases

parallel_scan() {
    local base_dir="$1"
    local output_dir="$2"
    local max_parallel="${3:-4}"

    echo "Starting parallel KICS scan..."
    echo "Base directory: $base_dir"
    echo "Output directory: $output_dir"
    echo "Max parallel jobs: $max_parallel"

    mkdir -p "$output_dir"

    # Find all directories with IaC files
    find "$base_dir" -type f \( -name "*.tf" -o -name "*.yaml" -o -name "*.yml" -o -name "Dockerfile*" \) \
        -exec dirname {} \; | sort -u > /tmp/iac_dirs.txt

    # Split directories into chunks
    split -l 10 /tmp/iac_dirs.txt /tmp/chunk_

    # Process chunks in parallel
    for chunk in /tmp/chunk_*; do
        {
            chunk_name=$(basename "$chunk")
            echo "Processing chunk: $chunk_name"

            while IFS= read -r dir; do
                if [ -d "$dir" ]; then
                    safe_name=$(echo "$dir" | sed 's/[^a-zA-Z0-9]/_/g')
                    kics scan \
                        -p "$dir" \
                        --config ~/.kics/performance-config.json \
                        --report-formats json \
                        -o "$output_dir/${safe_name}-results.json" \
                        --output-name "${safe_name}-results" \
                        2>/dev/null
                fi
            done < "$chunk"

            echo "Completed chunk: $chunk_name"
        } &

        # Limit parallel jobs
        while [ $(jobs -r | wc -l) -ge "$max_parallel" ]; do
            sleep 1
        done
    done

    # Wait for all jobs to complete
    wait

    echo "Parallel scanning completed"

    # Clean up
    rm -f /tmp/iac_dirs.txt /tmp/chunk_*
}

# Usage: parallel_scan /path/to/codebase ./results 4
EOF

    chmod +x ~/.kics/parallel-scan.sh

    echo "Performance optimizations applied"
}

# Run setup
setup_kics_environment

# Example usage
# advanced_kics_scan ./infrastructure ./scan-results
# generate_remediation_report ./scan-results/combined-results.json
# optimize_kics_performance

Performance Optimization and Troubleshooting

Performance Tuning

#!/bin/bash
# KICS performance optimization and troubleshooting

optimize_kics_performance() {
    echo "Optimizing KICS performance..."

    # 1. System-level optimizations
    echo "Applying system optimizations..."

    # Increase file descriptor limits
    ulimit -n 65536
    echo "* soft nofile 65536" | sudo tee -a /etc/security/limits.conf
    echo "* hard nofile 65536" | sudo tee -a /etc/security/limits.conf

    # Optimize memory settings
    echo 'vm.max_map_count=262144' | sudo tee -a /etc/sysctl.conf
    sudo sysctl -p

    # 2. KICS-specific optimizations
    echo "Configuring KICS optimizations..."

    # Create optimized configuration
    cat > ~/.kics/optimized-config.json << 'EOF'
{
  "exclude-paths": [
    ".git/",
    "node_modules/",
    "vendor/",
    ".terraform/",
    "target/",
    "build/",
    "dist/",
    "*.min.js",
    "*.min.css",
    "*.map",
    "*.lock",
    "*.log"
  ],
  "exclude-severities": ["INFO"],
  "timeout": 900,
  "minimal-ui": true,
  "no-progress": true,
  "log-level": "ERROR",
  "preview-lines": 0,
  "report-formats": ["json"]
}
EOF

    # 3. Create performance monitoring script
    cat > ~/.kics/monitor-performance.sh << 'EOF'
#!/bin/bash
# Monitor KICS performance

monitor_scan() {
    local scan_dir="$1"
    local output_file="kics-performance-$(date +%s).log"

    echo "Monitoring KICS performance for: $scan_dir"

    # Start monitoring
    {
        echo "Timestamp,CPU%,Memory(MB),Files_Processed"
        start_time=$(date +%s)

        while pgrep -f "kics" > /dev/null; do
            local pid=$(pgrep -f "kics")
            local cpu=$(ps -p $pid -o %cpu --no-headers 2>/dev/null || echo "0")
            local mem=$(ps -p $pid -o rss --no-headers 2>/dev/null | awk '{print $1/1024}' || echo "0")
            local timestamp=$(date +%s)

            echo "$timestamp,$cpu,$mem,N/A"
            sleep 2
        done
    } > "$output_file" &

    local monitor_pid=$!

    # Run KICS with timing
    echo "Starting KICS scan..."
    time kics scan -p "$scan_dir" --config ~/.kics/optimized-config.json

    # Stop monitoring
    kill $monitor_pid 2>/dev/null

    echo "Performance monitoring completed: $output_file"
}

# Usage: monitor_scan /path/to/iac/files
EOF

    chmod +x ~/.kics/monitor-performance.sh

    echo "Performance optimizations applied"
}

# Benchmark different configurations
benchmark_kics() {
    local test_dir="$1"

    echo "Benchmarking KICS configurations..."

    # Test different timeout settings
    timeouts=(60 300 600)

    for timeout in "${timeouts[@]}"; do
        echo "Testing with timeout: ${timeout}s"

        # Create test config
        cat > /tmp/test-config.json << EOF
{
  "timeout": $timeout,
  "exclude-severities": ["INFO"],
  "minimal-ui": true,
  "no-progress": true
}
EOF

        start_time=$(date +%s)
        kics scan -p "$test_dir" --config /tmp/test-config.json > /dev/null 2>&1
        end_time=$(date +%s)

        duration=$((end_time - start_time))
        echo "Timeout ${timeout}s: ${duration}s actual"
    done

    # Test different exclusion patterns
    echo "Testing different exclusion patterns..."

    # Minimal exclusions
    cat > /tmp/minimal-config.json << 'EOF'
{
  "exclude-paths": [".git/"],
  "exclude-severities": [],
  "timeout": 300
}
EOF

    start_time=$(date +%s)
    kics scan -p "$test_dir" --config /tmp/minimal-config.json > /dev/null 2>&1
    end_time=$(date +%s)
    duration=$((end_time - start_time))
    echo "Minimal exclusions: ${duration}s"

    # Comprehensive exclusions
    cat > /tmp/comprehensive-config.json << 'EOF'
{
  "exclude-paths": [
    ".git/", "node_modules/", "vendor/", ".terraform/",
    "target/", "build/", "dist/", "*.min.js", "*.min.css"
  ],
  "exclude-severities": ["INFO", "LOW"],
  "timeout": 300
}
EOF

    start_time=$(date +%s)
    kics scan -p "$test_dir" --config /tmp/comprehensive-config.json > /dev/null 2>&1
    end_time=$(date +%s)
    duration=$((end_time - start_time))
    echo "Comprehensive exclusions: ${duration}s"
}

# Memory optimization for large scans
optimize_memory_usage() {
    echo "Optimizing memory usage for large scans..."

    # Directory chunking for memory optimization
    chunk_large_directory() {
        local input_dir="$1"
        local chunk_size="${2:-50}"
        local output_dir="${3:-./scan_chunks}"

        mkdir -p "$output_dir"

        # Find all IaC files
        find "$input_dir" -type f \( -name "*.tf" -o -name "*.yaml" -o -name "*.yml" -o -name "*.json" \) > /tmp/iac_files.txt

        # Split into chunks
        split -l "$chunk_size" /tmp/iac_files.txt "$output_dir/chunk_"

        echo "Directory split into chunks in: $output_dir"
    }

    # Process chunks with memory monitoring
    process_chunks_with_monitoring() {
        local chunk_dir="$1"
        local output_file="$2"
        local memory_limit="${3:-2048}"  # MB

        echo "Processing chunks with memory monitoring..."

        for chunk in "$chunk_dir"/chunk_*; do
            echo "Processing chunk: $(basename "$chunk")"

            # Check available memory
            available_memory=$(free -m | awk 'NR==2{printf "%.0f", $7}')
            if [ "$available_memory" -lt "$memory_limit" ]; then
                echo "Warning: Low memory ($available_memory MB available, $memory_limit MB required)"
                echo "Waiting for memory to free up..."
                sleep 10
                continue
            fi

            # Create temporary directory for chunk
            temp_dir="/tmp/kics_chunk_$$"
            mkdir -p "$temp_dir"

            # Copy files from chunk to temp directory
            while IFS= read -r file; do
                if [ -f "$file" ]; then
                    cp "$file" "$temp_dir/"
                fi
            done < "$chunk"

            # Scan chunk with memory-optimized config
            # (-o/--output-path takes a directory; --output-name sets the report file name)
            kics scan \
                -p "$temp_dir" \
                --config ~/.kics/optimized-config.json \
                --report-formats json \
                -o /tmp \
                --output-name "chunk_result_$$"

            # Append results (this concatenates JSON documents; merge them afterwards, e.g. with jq)
            if [ -f "/tmp/chunk_result_$$.json" ]; then
                cat "/tmp/chunk_result_$$.json" >> "$output_file"
                rm "/tmp/chunk_result_$$.json"
            fi

            # Clean up
            rm -rf "$temp_dir"

            # Small delay to prevent memory buildup
            sleep 2
        done

        echo "All chunks processed with memory monitoring"
    }

    # Example usage
    # chunk_large_directory "./large_infrastructure" 30 "./chunks"
    # process_chunks_with_monitoring "./chunks" "chunked_results.json" 1024
}

# Run optimizations
optimize_kics_performance
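
The chunked approach above appends every chunk's JSON report to a single output file, which produces a stream of JSON documents rather than one valid report. The sketch below shows one way to merge the reports with jq instead, assuming you keep each chunk's report as a separate file (for example by pointing -o at a dedicated report directory) and that the default KICS JSON report exposes its findings under a top-level "queries" array; the paths and file names are illustrative.

#!/bin/bash
# Merge per-chunk KICS JSON reports into one consolidated report (illustrative sketch).
# Assumes each chunk report was written as its own file, e.g. /tmp/chunk_reports/chunk_result_*.json,
# and that the default KICS JSON layout keeps findings in a top-level "queries" array.

merge_chunk_reports() {
    local report_dir="${1:-/tmp/chunk_reports}"   # directory holding the chunk reports
    local merged_file="${2:-merged_results.json}"

    # jq -s slurps all reports into one array, then concatenates every "queries"
    # array into a single combined findings list.
    jq -s '{ queries: map(.queries // []) | add }' "$report_dir"/chunk_result_*.json > "$merged_file"

    echo "Merged $(ls "$report_dir"/chunk_result_*.json | wc -l) chunk reports into $merged_file"
}

# Example usage (illustrative paths):
# merge_chunk_reports /tmp/chunk_reports merged_results.json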

Troubleshooting Common Issues

#!/bin/bash
# KICS troubleshooting guide

troubleshoot_kics() {
    echo "KICS Troubleshooting Guide"
    echo "========================="

    # Check if KICS is installed
    if ! command -v kics &> /dev/null; then
        echo "❌ KICS not found"
        echo "Solution: Install KICS using one of these methods:"
        echo "  # Binary installation"
        echo "  curl -sfL 'https://raw.githubusercontent.com/Checkmarx/kics/master/install.sh' | sh"
        echo "  sudo mv ./bin/kics /usr/local/bin/"
        echo ""
        echo "  # Package manager"
        echo "  brew install kics  # macOS/Linux"
        echo "  choco install kics # Windows"
        return 1
    fi

    echo "✅ KICS found: $(kics version 2>/dev/null || echo 'Version unknown')"

    # Check configuration
    if [ ! -f ~/.kics/kics.config ]; then
        echo "⚠️  Configuration file not found"
        echo "Solution: Create default configuration"
        echo "  mkdir -p ~/.kics"
        echo "  # Create configuration file as shown in setup section"
    else
        echo "✅ Configuration file exists"
    fi

    # Check queries
    queries_count=$(find ~/.kics -name "*.rego" 2>/dev/null | wc -l)
    if [ "$queries_count" -eq 0 ]; then
        echo "⚠️  No queries found"
        echo "Solution: KICS includes built-in queries, but you can add custom ones"
        echo "  mkdir -p ~/.kics/custom-queries"
    else
        echo "✅ Found $queries_count query files"
    fi

    # Check system resources
    available_memory=$(free -m | awk 'NR==2{printf "%.1f", $7/1024}')
    if (( $(echo "$available_memory < 1.0" | bc -l) )); then
        echo "⚠️  Low available memory: ${available_memory}GB"
        echo "Solution: Free up memory or use chunked scanning"
    else
        echo "✅ Available memory: ${available_memory}GB"
    fi

    # Check file descriptor limits
    fd_limit=$(ulimit -n)
    if [ "$fd_limit" -lt 1024 ]; then
        echo "⚠️  Low file descriptor limit: $fd_limit"
        echo "Solution: Increase file descriptor limit"
        echo "  ulimit -n 65536"
    else
        echo "✅ File descriptor limit: $fd_limit"
    fi

    # Test basic functionality
    echo "Testing basic functionality..."

    # Create test Terraform file
    cat > /tmp/test.tf << 'EOF'
resource "aws_s3_bucket" "test" {
  bucket = "test-bucket"
}
EOF

    # Test scan (--ignore-on-exit results keeps the exit code at 0 even when findings
    # are reported; KICS otherwise exits non-zero whenever the scan produces results)
    if kics scan -p /tmp/test.tf --ignore-on-exit results > /dev/null 2>&1; then
        echo "✅ Basic scan functionality working"
    else
        echo "❌ Basic scan functionality failed"
        echo "Solution: Check KICS installation and configuration"
    fi

    # Clean up
    rm -f /tmp/test.tf

    echo "Troubleshooting completed"
}

# Common error solutions
fix_common_kics_errors() {
    echo "Common KICS Errors and Solutions"
    echo "==============================="

    cat << 'EOF'
1. "kics: command not found"
   Solution: 
   - Download and install KICS binary
   - Add installation directory to PATH
   - Verify installation with: kics version

2. "failed to load queries"
   Solution:
   - Check queries directory exists
   - Verify query file syntax with OPA
   - Use built-in queries if custom queries fail

3. "timeout exceeded" during scan
   Solution:
   - Increase timeout in config: "timeout": 900
   - Split large directories into smaller chunks
   - Use performance-optimized configuration

4. "out of memory" errors
   Solution:
   - Increase system memory
   - Use chunked scanning for large directories
   - Exclude unnecessary file types and directories
   - Use minimal UI and reduce preview lines

5. "invalid configuration file" error
   Solution:
   - Validate JSON syntax in configuration file
   - Check file permissions
   - Use default configuration as template

6. "no results found" (false negatives)
   Solution:
   - Check file type detection
   - Verify include/exclude filters
   - Use --verbose for detailed output
   - Check severity filters

7. "query execution failed"
   Solution:
   - Validate custom query syntax with OPA
   - Check query metadata format
   - Update to latest KICS version
   - Review query logic and dependencies

8. "permission denied" errors
   Solution:
   - Check file and directory permissions
   - Run with appropriate user privileges
   - Verify write permissions for output directory

9. Slow scanning performance
   Solution:
   - Use optimized configuration
   - Exclude unnecessary paths and file types
   - Reduce log verbosity
   - Use parallel scanning for multiple directories

10. "unsupported file type" warnings
    Solution:
    - Check supported IaC types in documentation
    - Use correct file extensions
    - Specify IaC type explicitly with -t flag
    - Update KICS to latest version for new format support
EOF
}
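
# Illustrative commands for some of the fixes listed above (not invoked by main();
# the paths are examples and should be replaced with your own project directories)
demo_common_fixes() {
    # 3. Raise the query execution timeout for large scans (value in seconds)
    kics scan -p ./infrastructure --timeout 900

    # 6. Investigate missing results with verbose output and debug logging
    kics scan -p ./infrastructure -v --log-level DEBUG

    # 10. Force the platform type when automatic file detection is ambiguous
    kics scan -p ./manifests -t Kubernetes

    # Keep a zero exit code in CI scripts even when findings are reported
    kics scan -p ./infrastructure --ignore-on-exit results
}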

# Performance diagnostics
diagnose_performance_issues() {
    echo "Diagnosing KICS Performance Issues"
    echo "================================="

    # Check system load
    load_avg=$(uptime | awk -F'load average:' '{print $2}' | awk '{print $1}' | sed 's/,//')
    echo "System load average: $load_avg"

    # Check available CPU cores
    cpu_cores=$(nproc)
    echo "Available CPU cores: $cpu_cores"

    # Check memory usage
    memory_info=$(free -h | grep "Mem:")
    echo "Memory info: $memory_info"

    # Check disk I/O
    if command -v iostat &> /dev/null; then
        echo "Disk I/O statistics:"
        iostat -x 1 1 | tail -n +4
    fi

    # Check file system performance
    echo "Testing file system performance..."
    test_dir="/tmp/kics_perf_test"
    mkdir -p "$test_dir"

    # Create test files
    for i in {1..100}; do
        echo "resource \"aws_s3_bucket\" \"test_$i\" { bucket = \"test-bucket-$i\" }" > "$test_dir/test_$i.tf"
    done

    # Time file operations
    start_time=$(date +%s.%N)
    find "$test_dir" -name "*.tf" | wc -l > /dev/null
    end_time=$(date +%s.%N)

    file_op_time=$(echo "$end_time - $start_time" | bc)
    echo "File operation time: ${file_op_time}s"

    # Clean up
    rm -rf "$test_dir"

    # Recommendations based on findings
    echo ""
    echo "Performance Recommendations:"
    echo "- Optimal memory: >= 4GB for large scans"
    echo "- Use SSD storage for better I/O performance"
    echo "- Consider chunked scanning for very large codebases"
    echo "- Exclude unnecessary file types and directories"
    echo "- Use minimal UI and reduce log verbosity"
}

# Main troubleshooting function
main() {
    troubleshoot_kics
    echo ""
    fix_common_kics_errors
    echo ""
    diagnose_performance_issues
}

# Run troubleshooting
main
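
The recommendations above also suggest parallel scanning for multiple directories. The following is a minimal sketch using GNU xargs -P to run one KICS process per top-level directory; the directory layout, report names, and job count are illustrative, and the number of parallel jobs should stay below the available CPU cores to avoid the memory pressure discussed earlier.

#!/bin/bash
# Scan several independent IaC directories in parallel (illustrative sketch).
# Each immediate subdirectory of the parent gets its own KICS process and JSON report.

parallel_kics_scan() {
    local parent_dir="${1:-./environments}"   # e.g. ./environments/dev, ./environments/prod
    local report_dir="${2:-./kics-reports}"
    local jobs="${3:-4}"                      # keep below $(nproc) to limit memory pressure

    mkdir -p "$report_dir"

    # Run up to $jobs scans at a time; each report is named after its directory.
    find "$parent_dir" -mindepth 1 -maxdepth 1 -type d -print0 |
        xargs -0 -P "$jobs" -I {} bash -c '
            dir="$1"; report_dir="$2"
            name=$(basename "$dir")
            kics scan -p "$dir" \
                --report-formats json \
                -o "$report_dir" \
                --output-name "$name" \
                --ignore-on-exit results \
                --minimal-ui --no-progress
        ' _ {} "$report_dir"
}

# Example usage (illustrative paths):
# parallel_kics_scan ./environments ./kics-reports 4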

Resources and Documentation

Official Resources

Community Resources

Integration Examples