Skip to content

CloudBrute

CloudBrute is a Python-based cloud infrastructure enumerator that discovers misconfigured or exposed cloud resources across major cloud providers. It automates the discovery of storage buckets, web applications, databases, and other cloud services from AWS, Microsoft Azure, Google Cloud Platform, and DigitalOcean. This tool is invaluable for security researchers conducting reconnaissance during bug bounty programs and penetration testing engagements.

# Python 3.6+ required
python3 --version

# Install system dependencies
sudo apt-get install python3-pip python3-dev

# Upgrade pip
pip3 install --upgrade pip
# Clone from GitHub
git clone https://github.com/0xsha/CloudBrute.git
cd CloudBrute

# Install dependencies
pip3 install -r requirements.txt

# Make executable
chmod +x cloudbrute.py

# Verify installation
python3 cloudbrute.py --help
# Install directly from pip
pip3 install cloudbrute

# Verify installation
cloudbrute --help
| Command | Description |
|---------|-------------|
| `python3 cloudbrute.py -k company` | Basic enumeration with keyword |
| `python3 cloudbrute.py -k company -p aws,azure,gcp` | Enumerate specific providers |
| `python3 cloudbrute.py -k company -o results.txt` | Save results to file |
| `python3 cloudbrute.py -k company -t 50` | Set thread count to 50 |
| `python3 cloudbrute.py -k company -w wordlist.txt` | Use custom wordlist |
# Keyword enumeration
python3 cloudbrute.py -k targetcompany

# Specify output file
python3 cloudbrute.py -k target -o results.txt

# Set thread count (default: 10)
python3 cloudbrute.py -k target -t 50

# Custom wordlist
python3 cloudbrute.py -k target -w wordlist.txt

# Verbose output
python3 cloudbrute.py -k target -v

# Version check
python3 cloudbrute.py --version
# Enumerate S3 buckets
python3 cloudbrute.py -k company -p aws

# Common S3 patterns discovered:
# company-bucket
# company-data
# company-logs
# company-backups
# company-prod

# Verify S3 bucket access
aws s3 ls s3://discovered-bucket/
aws s3 ls s3://discovered-bucket/ --recursive
# Check public access
aws s3api get-bucket-acl --bucket company-bucket

# List bucket contents
aws s3 ls s3://company-bucket/ --recursive

# Check for open ACLs
aws s3api get-object-acl --bucket company-bucket --key object-name

# Download file from bucket
aws s3 cp s3://company-bucket/file.txt ./file.txt
# Enumerate CloudFront distributions
python3 cloudbrute.py -k company | grep cloudfront

# Check CloudFront access
curl -I https://dxxxxx.cloudfront.net

# Look for AppSync endpoints
python3 cloudbrute.py -k company | grep appsync
# Enumerate Azure blob storage accounts
python3 cloudbrute.py -k company -p azure

# Common Azure patterns:
# company.blob.core.windows.net
# companydata.blob.core.windows.net
# companybackup.blob.core.windows.net
# companylogs.blob.core.windows.net

# Test blob storage access
curl https://company.blob.core.windows.net/
curl -I https://company.blob.core.windows.net/
# Check if container is public
curl https://company.blob.core.windows.net/container/file.txt

# List containers (if public)
curl "https://company.blob.core.windows.net/?comp=list"

# Check for storage account keys in responses
strings response.html | grep -i "storagekey\|authorization"
# Enumerate Azure App Services
python3 cloudbrute.py -k company | grep "azurewebsites"

# Access App Service
curl -I https://company.azurewebsites.net

# Check for admin panels
curl -I https://company.scm.azurewebsites.net
# Enumerate Google Cloud Storage buckets
python3 cloudbrute.py -k company -p gcp

# Common GCS patterns:
# company
# company-storage
# company-backups
# company-data
# company-logs
# company-prod

# Test GCS bucket access
gsutil ls gs://company/
gsutil ls -L gs://company/
# WARNING: 'acl ch -d AllUsers' REMOVES the AllUsers grant — this MODIFIES
# the bucket ACL rather than checking it, and requires owner permissions.
# For a read-only inspection of public access, use 'gsutil acl get' below.
gsutil acl ch -d AllUsers gs://company/

# List bucket contents recursively (-m enables parallel operation)
gsutil -m ls -r gs://company/**

# Dump the bucket ACL to check for open permissions (e.g. AllUsers/AllAuthenticatedUsers entries)
gsutil acl get gs://company/

# Download files from bucket (-m enables parallel transfers)
gsutil -m cp gs://company/file.txt ./file.txt
# Enumerate Firebase instances
python3 cloudbrute.py -k company | grep firebase

# Test Firebase access
curl https://company.firebaseio.com/.json

# Look for Cloud Run services
python3 cloudbrute.py -k company | grep "run.app"

# Test Cloud Run endpoints
curl -I https://company-xxxxx-uc.a.run.app
# Enumerate DigitalOcean Spaces
python3 cloudbrute.py -k company -p digitalocean

# Common DigitalOcean patterns:
# company.nyc3.digitaloceanspaces.com
# company-backup.sfo3.digitaloceanspaces.com
# company-data.sgp1.digitaloceanspaces.com

# Test Spaces access
curl https://company.nyc3.digitaloceanspaces.com
# Enumerate DigitalOcean Apps
python3 cloudbrute.py -k company | grep "ondigitalocean"

# Check app access
curl -I https://company-app-xxxxx.ondigitalocean.app

# Test application endpoints
curl -I https://app.ondigitalocean.app
# AWS only
python3 cloudbrute.py -k target -p aws

# Azure only
python3 cloudbrute.py -k target -p azure

# GCP only
python3 cloudbrute.py -k target -p gcp

# DigitalOcean only
python3 cloudbrute.py -k target -p digitalocean

# All providers
python3 cloudbrute.py -k target -p aws,azure,gcp,digitalocean
# Create custom wordlist — one candidate name per line
printf '%s\n' \
  company \
  companyx \
  company-dev \
  company-prod \
  company-data \
  company-backup \
  company-logs \
  company-archive > wordlist.txt

# Use custom wordlist
python3 cloudbrute.py -k target -w wordlist.txt
# Generate candidate names from domain + common environment suffixes.
# "" in the suffix list yields the bare domain itself.
domain="company"
for suffix in "" -dev -prod -data -backup -logs -archive -staging; do
  printf '%s%s\n' "$domain" "$suffix"
done > wordlist.txt

# Generate underscore variations per base name.
# printf '%s\n' with quoted expansions avoids the word-splitting/globbing
# risk of unquoted 'echo $name'.
for name in company testing data backup; do
  printf '%s\n' "$name" "${name}_dev" "${name}_prod" "${name}_backup"
done > extended_wordlist.txt

# Combine and de-duplicate; sort takes the files directly (no 'cat |' needed)
sort -u wordlist.txt extended_wordlist.txt > final_wordlist.txt
# Enumerate multiple keywords, one output file per keyword
for kw in company subsidiary branch division; do
  python3 cloudbrute.py -k "$kw" -o "results_${kw}.txt"
  printf 'Completed: %s\n' "$kw"
done
| Thread Count | Speed | Stealth | Use Case |
|--------------|-------|---------|----------|
| 5-10 | Slow | High | Stealth/IDS evasion |
| 20-50 | Medium | Medium | Standard enumeration |
| 100+ | Fast | Low | Quick scans |
# High speed enumeration
python3 cloudbrute.py -k target -t 100

# Stealth enumeration
python3 cloudbrute.py -k target -t 5

# Balanced enumeration
python3 cloudbrute.py -k target -t 30
# Distributed scanning over time: run five keyword batches,
# pausing 5 minutes after each to spread out the traffic
for (( batch = 1; batch <= 5; batch++ )); do
  python3 cloudbrute.py -k "keyword${batch}" -t 20
  sleep 300  # wait 5 minutes between batches
done
# Extract all found resources
grep "Found:" results.txt

# Filter by cloud provider
grep -i "amazon" results.txt
grep -i "azure" results.txt
grep -i "google" results.txt
grep -i "digitalocean" results.txt

# Count findings
grep "Found:" results.txt | wc -l
grep "Found:" results.txt | cut -d: -f2 | sort | uniq -c
# Organize results by resource type into per-category files
mkdir -p findings
grep -i "s3\|bucket" results.txt > findings/s3.txt
grep -i "blob" results.txt > findings/azure_blob.txt
grep -i "gcs" results.txt > findings/gcs.txt

# Generate summary. Total is counted across ALL category files
# (the original counted only findings/s3.txt, under-reporting the total).
# 'grep -c .' yields a clean count of non-empty lines.
total=$(cat findings/s3.txt findings/azure_blob.txt findings/gcs.txt | grep -c .)
echo "=== Enumeration Summary ===" > findings/summary.txt
echo "Total findings: $total" >> findings/summary.txt
echo "S3 buckets: $(grep -c 's3' findings/s3.txt)" >> findings/summary.txt
# Pipe to httpx for live checking
python3 cloudbrute.py -k target -o resources.txt
cat resources.txt | httpx -o live_resources.txt

# Integration with Nuclei
python3 cloudbrute.py -k target -o cloud_resources.txt
nuclei -l cloud_resources.txt -t cloud_misconfiguration

# Combine with curl: probe each discovered hostname for a response header.
# 'IFS= read -r' preserves leading/trailing whitespace and backslashes in
# each line (plain 'read' would mangle both).
grep "azurewebsites\|cloudfront" results.txt | while IFS= read -r url; do
  curl -I "https://$url" 2>/dev/null
done
# Test for public access
python3 cloudbrute.py -k target -o resources.txt

# Validate each resource: print the status line of a HEAD request,
# giving up after 3 seconds per host.
# 'IFS= read -r' preserves whitespace/backslashes; printf is safe even if
# a resource name begins with '-' (echo would misparse it as an option).
while IFS= read -r resource; do
  printf 'Testing: %s\n' "$resource"
  timeout 3 curl -I "$resource" 2>/dev/null | head -n 1
done < resources.txt
# Check S3 bucket permissions
aws s3api get-bucket-acl --bucket bucket-name 2>&1

# Check Azure blob permissions
curl "https://account.blob.core.windows.net/?comp=list" 2>&1

# Check GCS permissions
gsutil acl get gs://bucket-name 2>&1
| Issue | Solution |
|-------|----------|
| No results | Try different keywords; check wordlist |
| Slow enumeration | Increase thread count with `-t` |
| Import errors | Run `pip3 install -r requirements.txt` |
| Connection timeouts | Reduce thread count; check network |
# Enable verbose output
python3 cloudbrute.py -k target -v

# Check Python installation
python3 --version
which python3

# Verify dependencies
pip3 list | grep -E "boto3|azure|google"

# Test cloud connectivity
ping s3.amazonaws.com
ping blob.core.windows.net
ping storage.googleapis.com
# Document findings
python3 cloudbrute.py -k target -o findings_$(date +%Y%m%d).txt

# Review before sharing
cat findings.txt | head -20

# Assess impact
grep -c "Found" findings.txt
# Ensure written authorization
# Verify scope aligns with bug bounty program
# Document all findings
# Follow responsible disclosure guidelines
# Official repository
# https://github.com/0xsha/CloudBrute

# Get help
python3 cloudbrute.py --help

# Check for updates
cd CloudBrute && git pull origin main