
hashcat-utils

# Build from source
git clone https://github.com/hashcat/hashcat-utils.git
cd hashcat-utils/src
make
# Download from hashcat releases
wget https://github.com/hashcat/hashcat-utils/releases/download/v1.9.1/hashcat-utils-1.9.1.7z
7z x hashcat-utils-1.9.1.7z
cd hashcat-utils-1.9.1
# Arch Linux
pacman -S hashcat-utils

# Ubuntu/Debian (if in repos)
apt-get install hashcat-utils
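
A quick sanity check that the build produced working binaries (a sketch; it assumes you are still in hashcat-utils/src and that each tool prints a usage line when run without arguments):

# Each tool should exit with a short usage message; || true ignores the non-zero exit code
./combinator.bin || true
./len.bin || true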
Utility        | Purpose                                    | Common Use
---------------|--------------------------------------------|------------------------------------
combinator.bin | Combine two wordlists                      | Merge password lists
cap2hccapx.bin | Convert wireless captures                  | HCCAPX format conversion
hcstat2gen.bin | Generate Markov statistics                 | Create .hcstat2 files
keyspace.bin   | Calculate attack keyspace                  | Determine mask attack size
len.bin        | Filter wordlist by length                  | Extract passwords in a length range
maskprocessor  | Generate mask patterns (separate project)  | Create custom mask lists
rli.bin        | Remove lines found in another list         | Deduplicate against known lists
rpr.bin        | Remove right progressives                  | Trim duplicate suffixes
splitlen.bin   | Split by password length                   | Organize by character count
permute.bin    | Permute wordlist entries                   | Generate variations
./combinator.bin wordlist1.txt wordlist2.txt > combined.txt
# For three wordlists, use combinator3.bin, which takes all three directly
./combinator3.bin wordlist1.txt wordlist2.txt wordlist3.txt > final.txt
# Combine password list with common suffixes
./combinator.bin passwords.txt suffixes.txt > hybrid.txt

# Use with hashcat
hashcat -m 0 hashes.txt hybrid.txt
wc -l wordlist1.txt wordlist2.txt
# Multiply line counts to estimate output size
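
The expected output size is simply the product of the two line counts; a one-liner sketch:

# Combined candidates = (lines in wordlist1) * (lines in wordlist2)
echo $(( $(wc -l < wordlist1.txt) * $(wc -l < wordlist2.txt) ))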

cap2hccapx.bin - Wireless Capture Conversion

./cap2hccapx.bin input.cap input.hccapx
for cap_file in *.cap; do
  ./cap2hccapx.bin "$cap_file" "${cap_file%.cap}.hccapx"
done
# Check file was created successfully
ls -lah input.hccapx

# Use with hashcat (-m 2500 is the legacy hccapx mode)
hashcat -m 2500 input.hccapx wordlist.txt
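
Newer hashcat releases (6.x) deprecate the hccapx path; if hcxtools is available, converting straight to the 22000 format is usually the better route (a sketch assuming hcxpcapngtool is installed):

# Convert the capture with hcxpcapngtool and crack in WPA-PBKDF2-PMKID+EAPOL mode
hcxpcapngtool -o input.hc22000 input.cap
hashcat -m 22000 input.hc22000 wordlist.txt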

hcstat2gen.bin - Generate Markov Statistics

./hcstat2gen.bin output.hcstat2 < wordlist.txt
# Use the statistics to reorder mask-attack candidates
hashcat -m 0 -a 3 hashes.txt "?l?l?l?l?l?l?l?l" \
  --markov-hcstat2 output.hcstat2

# Specify the Markov threshold (limits how many characters are tried per position)
hashcat -m 0 -a 3 hashes.txt "?l?l?l?l?l?l?l?l" \
  --markov-hcstat2 output.hcstat2 -t 40
# Combine multiple wordlists for stats
cat wordlist1.txt wordlist2.txt wordlist3.txt | \
  ./hcstat2gen.bin combined.hcstat2
# Use password breaches from the same vertical
cat linkedin_passwords.txt rockyou.txt > common.txt
./hcstat2gen.bin password.hcstat2 < common.txt
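
Hashcat's bundled hashcat.hcstat2 is LZMA-compressed, and some builds reject a raw hcstat2gen output file; compressing it first is worth trying (a sketch assuming the lzma command from xz-utils is installed):

# Compress the raw statistics; -k keeps the original, then point --markov-hcstat2 at the compressed file
lzma -9 -k output.hcstat2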
./keyspace.bin -a 3 "?u?l?l?l?d"
# Raw candidates: 26 * 26 * 26 * 26 * 10 = 4,569,760
# (the reported keyspace may be smaller if the tool divides out the per-kernel amplifier)

# Estimate time needed
# keyspace / GPU speed (H/s) = time in seconds
./keyspace.bin wordlist.txt
# Count total passwords in the list

# Estimate keyspace with rule application:
# (lines in wordlist) * (number of rules in the rule file)
wc -l wordlist.txt
# Dictionary + mask
./keyspace.bin -a 6 wordlist.txt "?d?d?d"
# (wordlist count) * (mask keyspace)
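
The dictionary + mask arithmetic is easy to script; here ?d?d?d contributes 10^3 = 1,000 combinations per dictionary word:

# Hybrid keyspace = (lines in wordlist) * (mask keyspace)
echo $(( $(wc -l < wordlist.txt) * 1000 ))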
# Extract only 8-character passwords (len.bin takes a minimum and a maximum length)
./len.bin 8 8 < wordlist.txt > len8.txt

# Get passwords 6-12 characters
./len.bin 6 12 < wordlist.txt > range_6_12.txt

Filter for Specific Hash Type Requirements

# NTLM targets: drop candidates shorter than 4 chars (the upper bound of 64 is just a generous cap)
./len.bin 4 64 < rockyou.txt > ntlm_valid.txt

# Oracle DB passwords are at least 3 chars
./len.bin 3 30 < rockyou.txt > oracle_valid.txt
# Get 8-12 char passwords, deduplicated
./len.bin 8 12 < rockyou.txt | sort -u > eightto12.txt
./mp64.bin "?u?l?l?l"
# Output: Aaaa, Aaab, Aaac, ... Zzzz
Placeholder   | Represents              | Example
--------------|-------------------------|---------------
?l            | Lowercase               | a-z
?u            | Uppercase               | A-Z
?d            | Digits                  | 0-9
?s            | Special chars           | !@#$%^&*
?a            | All (l+u+d+s)           | Mixed chars
?b            | Bytes 0-255             | Binary range
Custom (?1-?4)| User defined            | ?1?1?1
# Uppercase + lowercase + digit + digit
./mp64.bin "?u?l?d?d"

# Common pattern: word + year
./mp64.bin "?u?l?l?l201?d"
# Generate only the first 1 million candidates
./mp64.bin "?u?l?l?l?l?l?l?l" | head -n 1000000

# Resume from a specific candidate (-s/--start-at takes a starting word, not an offset)
./mp64.bin -s "Baaaaaaa" "?u?l?l?l?l?l?l?l"
# Define custom set: numbers and common symbols
./mp64.bin -1 "0123456789!@#" "?1?1?1?1"

# Multiple custom sets
./mp64.bin -1 "aeiou" -2 "bcdfg" "?1?1?2?2"
./mp64.bin "?u?l?l?l?d?d" > patterns.txt

# Use with hashcat
hashcat -m 0 hashes.txt patterns.txt
./rli.bin wordlist1.txt cleaned.txt wordlist2.txt
# Writes to cleaned.txt every line of wordlist1 that does NOT appear in wordlist2
# Remove common passwords from a targeted list
./rli.bin targeted_passwords.txt novel.txt rockyou.txt
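
For very large lists, rli2.bin is the lower-memory sibling; my understanding is that it takes a single remove-file, expects both inputs pre-sorted, and writes to stdout (treat the exact invocation as an assumption):

# Sort both lists, then subtract rockyou entries from the targeted list
sort targeted_passwords.txt > targeted.sorted
sort rockyou.txt > rockyou.sorted
./rli2.bin targeted.sorted rockyou.sorted > novel.txt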
# splitlen.bin reads stdin and writes one file per password length into an output directory
mkdir -p by_len
./splitlen.bin by_len < wordlist.txt
# Creates one file per length in by_len/ (file names are the lengths, e.g. 08)

# Inspect how large each length bucket is
ls -lah by_len/ | awk '{print $9, $5}'

# Get only 8-character passwords
cat by_len/08
# Generate Markov stats
./hcstat2gen.bin rockyou.hcstat2 < rockyou.txt

# Combine wordlists
./combinator.bin rockyou.txt suffixes.txt > hybrid.txt

# Attack with rules
hashcat -m 0 -r rules/best64.rule hashes.txt hybrid.txt
# 1. Combine wordlists
./combinator.bin passwords.txt years.txt > hybrid.txt

# 2. Generate mask patterns
./mp64.bin "?d?d?d" > nums.txt

# 3. Estimate total keyspace: (lines in hybrid.txt) * (mask keyspace of ?d?d?d = 1,000)
wc -l hybrid.txt nums.txt

# 4. Run hybrid attack
hashcat -m 0 -a 6 hashes.txt hybrid.txt "?d?d?d"
# 1. Generate stats from target-domain passwords
./hcstat2gen.bin linkedin.hcstat2 < linkedin_breaches.txt

# 2. Run a mask attack with the generated stats (Markov ordering applies to mask attacks)
hashcat -m 0 -a 3 hashes.txt "?l?l?l?l?l?l?l?l" \
  --markov-hcstat2 linkedin.hcstat2 \
  -t 50

# 3. Monitor progress and adjust the threshold as needed
# 1. Calculate keyspace first
./keyspace.bin -a 3 "?u?l?l?l?d?d?d?d"
# Raw candidates: 26^4 * 10^4 = 4,569,760,000 (use this to estimate time)

# 2. Limit to practical range
hashcat -m 0 hashes.txt \
  -a 3 "?u?l?l?l?d?d?d?d" \
  -l 100000000 # --limit: process only the first 100M candidates of the keyspace

# 3. Split attack with incrementing limits
for offset in 0 100000000 200000000; do
  hashcat -m 0 hashes.txt -a 3 "?u?l?l?l?d?d?d?d" \
    -s $offset -l 100000000
done
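
The number of --skip/--limit chunks follows directly from the keyspace; for the mask above, 4,569,760,000 candidates at 100M per run is 46 chunks (note that hashcat measures --skip/--limit against the value it reports for --keyspace, which for masks can be smaller than the raw count):

# Ceiling division: chunks = (keyspace + limit - 1) / limit
echo $(( (4569760000 + 100000000 - 1) / 100000000 ))   # 46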
# 1. Get only viable password lengths
./len.bin 8 8 < rockyou.txt > candidates_8.txt
./len.bin 9 9 < rockyou.txt > candidates_9.txt

# 2. Crack each length separately
hashcat -m 0 hashes.txt candidates_8.txt
hashcat -m 0 hashes.txt candidates_9.txt
# 1. Split wordlist by length to target specific patterns
mkdir -p rockyou_by_len
./splitlen.bin rockyou_by_len < rockyou.txt

# 2. Combine with domain-specific words (splitlen names its output files by length, e.g. 08)
./combinator.bin company_names.txt rockyou_by_len/08 > targeted.txt

# 3. Apply rules
hashcat -m 0 -r rules/best64.rule hashes.txt targeted.txt

# 4. Apply additional rules
hashcat -m 0 -r rules/d3ad0ne.rule hashes.txt targeted.txt
# Stream combined candidates straight into hashcat instead of writing them to disk
./combinator.bin wordlist.txt additions.txt | \
  hashcat -m 0 hashes.txt
# Process the wordlist in chunks; each job writes its own file to avoid interleaved output
split -l 1000000 rockyou.txt chunk_
for file in chunk_*; do
  ./len.bin 8 8 < "$file" > "$file.len8" &
done
wait
cat chunk_*.len8 > len8_output.txt
# Keyspace / GPU speed (H/s) = time in seconds
# Example: 1,000,000,000 candidates / 10,000,000 H/s = 100 seconds

./keyspace.bin "?u?l?l?l?d?d" # Get keyspace
# Divide by your GPU's hash rate
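
A worked version of the same estimate for the 8-position mask used earlier, assuming (only as an example) a 50 MH/s hash rate:

# 26^4 * 10^4 candidates at 50,000,000 H/s
echo $(( 26**4 * 10**4 / 50000000 ))   # about 91 seconds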
# Test rule speed on subset first
head -100000 rockyou.txt | \
  hashcat -m 0 -r rules/best64.rule hashes.txt

# Check performance metrics
# If throughput is low, try a higher workload profile (-w 2 or -w 3)
# Verify files are readable
file wordlist1.txt wordlist2.txt

# Check for binary data
file wordlist1.txt | grep -i text
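
If a list contains Windows line endings or other junk, a light cleanup pass usually fixes the "binary data" report:

# Strip carriage returns (CRLF endings are the most common culprit)
tr -d '\r' < wordlist1.txt > wordlist1.clean.txt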
# Write the combined output straight to disk (or pipe it into hashcat) rather than holding it in memory
./combinator.bin huge_wordlist.txt small_list.txt > output.txt
# Use only valid .cap files from airodump-ng
file input.cap
# Should report a pcap/tcpdump capture file, not generic data

# Verify wireless frame headers
strings input.cap | grep -i "wpa\|wep"
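
If tshark happens to be installed (an assumption, it is not part of hashcat-utils), it gives a more reliable check than grepping strings: a capture without EAPOL frames has no handshake to convert.

# List EAPOL (handshake) frames in the capture
tshark -r input.cap -Y eapol | head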
# Check keyspace before generating
./keyspace.bin "?a?a?a?a?a?a?a?a?a" # Likely very large

# Reduce mask complexity
./keyspace.bin "?u?l?l?l?d?d"