Skip to content

cloud_enum

cloud_enum is a multi-cloud OSINT enumeration tool designed to find exposed resources across major cloud providers including AWS, Azure, and Google Cloud Platform (GCP). It enumerates cloud storage buckets, static websites, databases, and other public resources that may have been misconfigured. This tool is essential for security researchers, penetration testers, and bug bounty hunters assessing cloud infrastructure exposure.

# --- Install from source ---
# Python 3.6+ required
python3 --version

# Required dependencies
pip3 install -r requirements.txt
# Clone the repository
git clone https://github.com/initstring/cloud_enum.git
cd cloud_enum

# Install dependencies (same command as above — run it from inside the clone)
pip3 install -r requirements.txt

# Make executable
chmod +x cloud_enum.py

# Verify installation
python3 cloud_enum.py --help
# --- Alternative: install as a package ---
# Using pip
pip3 install cloud-enum

# Or clone and install
git clone https://github.com/initstring/cloud_enum
cd cloud_enum
pip3 install .
| Command | Description |
| --- | --- |
| `python3 cloud_enum.py -k company` | Enumerate based on keyword "company" |
| `python3 cloud_enum.py -k target -b 50` | Use 50 brute-force threads |
| `python3 cloud_enum.py -k target --disable-aws` | Enumerate excluding AWS |
| `python3 cloud_enum.py -k target -o results.txt` | Save results to file |
| `python3 cloud_enum.py -k target -l /path/to/wordlist.txt` | Use custom wordlist |
# Basic enumeration with single keyword
python3 cloud_enum.py -k companyname

# Enumeration with multiple keywords
# Quote the variable so keywords containing spaces or glob characters
# are passed to the tool intact (shellcheck SC2086).
for keyword in company subsidiary division product; do
  python3 cloud_enum.py -k "$keyword"
done

# Long-running enumeration; tee keeps a live copy of stdout+stderr on disk
# NOTE(review): the thread flag is shown as -b elsewhere in this sheet and
# as --threads here — confirm the correct flag with `cloud_enum.py --help`
python3 cloud_enum.py -k target --threads 100 2>&1 | tee enumeration.log
# Increase threads for faster enumeration (default: 5)
python3 cloud_enum.py -k target -b 50

# Reduce threads for stealth (fewer concurrent requests, slower footprint)
python3 cloud_enum.py -k target -b 5

# Custom thread configuration
# NOTE(review): this sheet uses both -b and --threads for thread count —
# confirm the correct flag with `cloud_enum.py --help`
python3 cloud_enum.py -k target --threads 25
# Enumerate AWS S3 buckets
python3 cloud_enum.py -k company --enable-aws

# Check specific S3 bucket patterns
python3 cloud_enum.py -k target
# Results may include: target-bucket, target-data, target-backups

# Verify S3 bucket access (requires the AWS CLI to be installed/configured)
aws s3 ls s3://discovered-bucket/
aws s3api head-bucket --bucket discovered-bucket
# Enumerate EC2 instances (filter the tool's output for EC2 hostnames)
python3 cloud_enum.py -k target | grep "ec2"

# Look for CloudFront distributions
python3 cloud_enum.py -k target | grep "cloudfront"

# Check for RDS instances
python3 cloud_enum.py -k target | grep "rds"
# Enumerate Azure blob storage accounts
python3 cloud_enum.py -k company --enable-azure

# Common Azure patterns found:
# company.blob.core.windows.net
# companydata-blob.core.windows.net
# company-backup.blob.core.windows.net

# Test Azure blob access (any HTTP response indicates the account resolves)
curl https://company.blob.core.windows.net/
# Check for additional Azure resources
python3 cloud_enum.py -k target | grep "azure"

# Look for Azure App Services
python3 cloud_enum.py -k target | grep "azurewebsites"

# Check for Azure Storage Tables
python3 cloud_enum.py -k target | grep "table.core.windows.net"
# Enumerate Google Cloud Storage buckets
python3 cloud_enum.py -k company --enable-gcp

# Common GCS patterns:
# company-storage
# company-backups
# company-data
# company-logs

# Test GCS bucket access (requires the gsutil CLI; -L prints extended metadata)
gsutil ls gs://discovered-bucket/
gsutil ls -L gs://discovered-bucket/
# Enumerate Firebase instances
python3 cloud_enum.py -k target | grep "firebase"

# Look for Cloud Functions
python3 cloud_enum.py -k target | grep "cloudfunctions"

# Check for Cloud Run services
python3 cloud_enum.py -k target | grep "run.app"
# Specify custom wordlist
python3 cloud_enum.py -k target -l custom_words.txt

# Create wordlist from company info (one candidate name per line)
printf '%s\n' company companyx company-dev company-prod > wordlist.txt

# Use extended wordlist
python3 cloud_enum.py -k target -l wordlist.txt
# Generate from domain registrations (strip the TLD to get base names).
# sed reads the file directly — no need for `cat | sed`.
sed 's/\.com//' domains.txt > base_names.txt

# Add common suffixes.
# Read line-by-line instead of word-splitting `$(cat …)` output: safe for
# names containing whitespace and immune to accidental glob expansion
# (shellcheck SC2013).
while IFS= read -r name; do
  printf '%s\n' "$name" "$name-dev" "$name-prod" "$name-data" "$name-backup"
done < base_names.txt > expanded_list.txt

# Use expanded list
python3 cloud_enum.py -k target -l expanded_list.txt
# Basic output to file
python3 cloud_enum.py -k target -o results.txt

# Verbose output with logging (2>&1 folds stderr into the captured stream)
python3 cloud_enum.py -k target -v -o detailed_results.txt 2>&1

# Append to existing results (>> appends instead of overwriting)
python3 cloud_enum.py -k newkeyword >> results.txt 2>&1
# Extract found resources
grep "Found" results.txt

# Filter by cloud provider
grep "amazon" results.txt
grep "microsoft" results.txt
grep "google" results.txt

# Count findings by type (tally of S3 / Blob / GCS matches)
grep -o "S3\|Blob\|GCS" results.txt | sort | uniq -c
| Option | Description |
| --- | --- |
| `--enable-aws` | Enable AWS enumeration |
| `--enable-azure` | Enable Azure enumeration |
| `--enable-gcp` | Enable Google Cloud enumeration |
| `--disable-aws` | Disable AWS enumeration |
| `--disable-azure` | Disable Azure enumeration |
| `--disable-gcp` | Disable GCP enumeration |
# Restrict the scan to a single provider by disabling the other two.
# Only AWS enumeration
python3 cloud_enum.py -k target --enable-aws --disable-azure --disable-gcp

# Only Azure enumeration
python3 cloud_enum.py -k target --enable-azure --disable-aws --disable-gcp

# Only GCP enumeration
python3 cloud_enum.py -k target --enable-gcp --disable-aws --disable-azure
# Verify discovered resources via DNS.
# IFS= preserves leading/trailing whitespace and -r stops read from
# interpreting backslashes (always use read -r).
while IFS= read -r resource; do
  nslookup "$resource" 2>/dev/null | grep -i "address"
done < results.txt
# Test HTTP access to discovered resources.
# read -r (with IFS=) reads each matched line verbatim; -I sends a HEAD
# request and the first response line carries the HTTP status.
grep "cloudfront\|azurewebsites" results.txt | while IFS= read -r url; do
  curl -I "https://$url" 2>/dev/null | head -n 1
done
# Combine with httpx for live checking
# (httpx and nuclei are separate external tools — install them first)
python3 cloud_enum.py -k target -o cloud_resources.txt
cat cloud_resources.txt | httpx -o live_cloud_resources.txt

# Combine with nuclei for vulnerability scanning
python3 cloud_enum.py -k target -o resources.txt
nuclei -l resources.txt -t cloud_misconfiguration
# Batch enumeration with multiple keywords.
# Redirect the file into the loop instead of `cat | while` — avoids a
# useless cat and a subshell; IFS=/read -r reads each keyword verbatim.
while IFS= read -r keyword; do
  python3 cloud_enum.py -k "$keyword" -o "results_$keyword.txt"
  echo "Completed: $keyword"
done < keywords.txt

# Merge all results
cat results_*.txt > combined_results.txt
# Spread scans over time to reduce the chance of rate limiting/detection.
# Low-frequency enumeration
python3 cloud_enum.py -k target -b 2

# Distributed across time
for i in {1..10}; do
  python3 cloud_enum.py -k "target$i" -b 5
  sleep 300  # Wait 5 minutes between scans
done
# Schedule regular enumeration (crontab entry, runs daily at midnight;
# % is special in crontab and must be escaped as \%)
0 0 * * * cd /path/to/cloud_enum && python3 cloud_enum.py -k company -o results_$(date +\%Y\%m\%d).txt

# Compare daily results (shows resources appearing/disappearing day to day)
diff results_20260430.txt results_20260501.txt
# Test S3 bucket access
aws s3 ls s3://bucket-name/
aws s3api get-bucket-acl --bucket bucket-name

# Test Azure blob access
curl -I "https://account.blob.core.windows.net/container"

# Test GCS bucket access
gsutil ls gs://bucket-name/
# Read the ACL instead of changing it: `gsutil acl ch -u AllUsers:R` GRANTS
# public read access (a write operation) — never modify a target's ACL
# while testing access.
gsutil acl get gs://bucket-name/
# Check for false positives.
# Iterate line-by-line: a for-loop over $(cat | grep) splits on every word
# and glob-expands results (shellcheck SC2013); grep reads the file itself.
grep "Found" results.txt | while IFS= read -r resource; do
  echo "Testing: $resource"
  curl -I "$resource" 2>/dev/null | head -n 1
done
| Issue | Solution |
| --- | --- |
| No results found | Verify keyword is relevant; try variations |
| Slow enumeration | Increase threads with the `-b` flag |
| Connection errors | Check internet connection; verify firewall |
| Rate limited | Reduce thread count; add delays |
# Enable verbose output
python3 cloud_enum.py -k target -v

# Check Python version
python3 --version

# Verify dependencies
pip3 list | grep -E "boto3|azure|google"

# Test cloud connectivity
# NOTE(review): many cloud endpoints drop ICMP, so a failed ping is not
# conclusive — fall back to `curl -I` against the endpoint if ping fails
ping s3.amazonaws.com
ping blob.core.windows.net
ping storage.googleapis.com
# Document findings carefully
python3 cloud_enum.py -k target -o findings.txt
# Review before sharing
cat findings.txt

# Calculate impact assessment (count of discovered resources)
grep -c "Found" findings.txt
# Ethics checklist:
# Ensure authorization before enumeration
# Document scope and approval
# Follow bug bounty program guidelines
# Report findings responsibly
# Official documentation
# https://github.com/initstring/cloud_enum

# View help menu
python3 cloud_enum.py --help

# Check for updates
# (older clones may track `master` instead of `main` — check `git branch -r`)
cd cloud_enum && git pull origin main