Skip to content

PRINCE Processor

PRINCE (Probability-Infinite Chained Elements) Processor is an advanced password generation algorithm that creates intelligent password candidates by chaining dictionary words based on their statistical frequency and occurrence patterns. Unlike traditional rule-based password generation, PRINCE learns from input wordlists to generate probable password combinations, significantly improving crack success rates while reducing computation time. It’s particularly effective when integrated with hashcat for password hash cracking.

# ===== Linux: build from source =====
# Clone PRINCE repository
git clone https://github.com/hashcat/princeprocessor.git
cd princeprocessor

# Build from source
make clean
make

# Verify installation
./pp64 --version
# ===== macOS: build with the Homebrew toolchain =====
# Install dependencies
brew install gcc

# Build PRINCE
git clone https://github.com/hashcat/princeprocessor.git
cd princeprocessor
make -f Makefile clean
make -f Makefile

# Verify
./pp64 --version
# ===== Windows: precompiled binary or Visual Studio build =====
# Download precompiled binary
# From https://github.com/hashcat/princeprocessor/releases
# Or build with Visual Studio Community Edition

# Verify installation (note: Windows path syntax, run from cmd/PowerShell)
.\pp64.exe --version
# Build PRINCE in Docker (the apt-get lines run inside the container shell)
docker run -it --rm hashcat/hashcat:latest bash
# Package lists are empty in fresh images; refresh them before installing
apt-get update
apt-get install -y princeprocessor
Command — Description
./pp64 < wordlist.txt — Generate candidates from wordlist
./pp64 --help — Display help information
./pp64 --version — Show version information
./pp64 < words.txt > candidates.txt — Save output to file
# Deduplicate entries (sort -u is equivalent to sort | uniq)
sort -u wordlist.txt > wordlist_clean.txt

# Fold every entry to lowercase
tr '[:upper:]' '[:lower:]' < wordlist.txt > wordlist_lower.txt

# Drop blank lines
grep -v "^$" wordlist.txt > wordlist_cleaned.txt
# Order words by how often they appear, most frequent first
sort wordlist.txt | uniq -c | sort -rn | awk '{print $2}' > wordlist_sorted.txt

# Keep only words between 4 and 19 characters long
awk 'length($0) > 3 && length($0) < 20' wordlist.txt > wordlist_filtered.txt

# Strip everything except letters, digits, and newlines
tr -cd '[:alnum:]\n' < wordlist.txt > wordlist_alphanumeric.txt
# Merge multiple wordlists
cat wordlist1.txt wordlist2.txt wordlist3.txt > combined.txt

# Merge and remove duplicates
cat *.txt | sort | uniq > merged_wordlist.txt

# Create hybrid wordlist
# (fix: the original passed 'sorted' as a file name; the intended
# pipeline pipes the merged lists through sort before uniq)
cat rockyou.txt custom_words.txt | sort | uniq > hybrid_wordlist.txt
# Emit candidates and preview the first hundred
./pp64 < wordlist.txt | head -n 100

# Total number of candidates produced
./pp64 < wordlist.txt | wc -l

# Persist every candidate to disk
./pp64 < wordlist.txt > candidates.txt
# First 10,000 candidates only
./pp64 < wordlist.txt | head -n 10000

# Candidates on lines 1000 through 2000
./pp64 < wordlist.txt | sed -n '1000,2000p'

# Random sample of 5,000 candidates (same distribution as shuf | head -5000)
./pp64 < wordlist.txt | shuf -n 5000
Flag — Description
--help — Show help menu
--version — Display version
-o — Output file (some versions)
--max-len — Maximum password length
--min-len — Minimum password length
# An "element" is one dictionary word in a chained candidate; the flags
# below bound chain size (--elem-cnt-*) and final length (--pw-*).
# Generate candidates with length constraints
./pp64 --elem-cnt-min 2 --elem-cnt-max 4 < wordlist.txt

# Minimum password length
./pp64 --pw-min 8 < wordlist.txt

# Maximum password length
./pp64 --pw-max 16 < wordlist.txt

# Specific length range
./pp64 --pw-min 8 --pw-max 12 < wordlist.txt
# Generate 2-element chains only
./pp64 --elem-cnt-min 2 --elem-cnt-max 2 < wordlist.txt

# Generate 3-element chains
./pp64 --elem-cnt-min 3 --elem-cnt-max 3 < wordlist.txt

# Variable element chains (2-5 elements)
./pp64 --elem-cnt-min 2 --elem-cnt-max 5 < wordlist.txt
# NOTE(review): confirm your pp64 build supports --separator; not all
# releases ship this flag.
# Generate with separator (space)
./pp64 --separator ' ' < wordlist.txt

# Generate with separator (dash)
./pp64 --separator '-' < wordlist.txt

# Generate with no separator (concatenation)
./pp64 < wordlist.txt
Flag — Description
--elem-cnt-min — Minimum elements per candidate
--elem-cnt-max — Maximum elements per candidate
--pw-min — Minimum password length
--pw-max — Maximum password length
--separator — Character between elements
# Pipe PRINCE output to hashcat (-m 1000 = NTLM, -a 0 = dictionary attack)
./pp64 < wordlist.txt | hashcat -m 1000 -a 0 hashes.txt

# Generate and crack in one command (-m 0 = MD5);
# --potfile-disable skips reading/writing previously cracked results
./pp64 < wordlist.txt | hashcat -m 0 -a 0 hashes.txt --potfile-disable
# Generate candidates with constraints
./pp64 --pw-min 8 --pw-max 16 < wordlist.txt > candidates.txt

# Crack hashes with candidates (-o writes cracked hash:plain pairs to a file)
hashcat -m 1000 -a 0 hashes.txt candidates.txt -o cracked.txt

# Show cracked passwords
cat cracked.txt
# Combine wordlists, deduplicate, generate candidates, and crack in one pipeline
cat dict1.txt dict2.txt dict3.txt | sort | uniq | ./pp64 | \
  hashcat -m 1000 -a 0 hashes.txt --potfile-disable
# Generate candidates and apply hashcat rules
# (each -r chains another mangling-rule file onto every candidate)
./pp64 < wordlist.txt | hashcat -m 1000 -a 0 hashes.txt \
  -r /usr/share/hashcat/rules/dive.rule \
  -r /usr/share/hashcat/rules/d3ad0ne.rule
# Split large wordlist for parallel processing (50,000 lines per chunk,
# output files named wordlist_aa, wordlist_ab, ...)
split -l 50000 wordlist.txt wordlist_

# Generate from each part; quote the loop variable so file names
# containing spaces or glob characters are handled safely (SC2086)
for file in wordlist_*; do
  ./pp64 < "$file" >> candidates_all.txt
done
# Stream candidates directly to hashcat (memory efficient)
./pp64 < wordlist.txt | hashcat -m 1000 -a 0 hashes.txt

# Pipe with progress monitoring (pv reports throughput on stderr)
./pp64 < wordlist.txt | pv | hashcat -m 1000 -a 0 hashes.txt
# Generate candidates in parallel with GNU Parallel
# NOTE(review): --pipe feeds 10M blocks to independent pp64 instances,
# so the global probability ordering of PRINCE output is lost.
cat wordlist.txt | parallel --pipe --block 10M "./pp64" > candidates.txt

# One PRINCE stream fanned out to multiple hashcat instances via tee
# plus process substitution (bash-specific syntax)
./pp64 < wordlist.txt | tee >(hashcat -m 1000 -a 0 hashes1.txt) \
  >(hashcat -m 1000 -a 0 hashes2.txt) > /dev/null
# Common words in corporate environment; printf is portable, unlike
# 'echo -e' whose escape handling varies between shells (POSIX echo)
printf '%s\n' company password summer winter spring fall 2024 2025 > corp_words.txt

# Generate candidates
./pp64 < corp_words.txt | sort | uniq > corp_candidates.txt

# Crack corporate hashes (-m 1000 = NTLM)
hashcat -m 1000 -a 0 corporate_hashes.txt corp_candidates.txt
# Download rockyou wordlist
wget https://github.com/praetorian-inc/Hob0Rules/raw/master/wordlists/rockyou.txt.gz
gunzip rockyou.txt.gz

# Generate PRINCE candidates (cap output at the first million)
./pp64 < rockyou.txt | head -1000000 > rockyou_prince.txt

# Use with hashcat (-w 4: highest workload profile, maximum device load)
hashcat -m 1000 -a 0 hashes.txt rockyou_prince.txt -w 4
# Create custom wordlist from company information; printf is portable,
# unlike 'echo -e' whose escape handling varies between shells
printf '%s\n' acmecorp acme corp2024 employee password123 > custom.txt

# Generate intelligent candidates
./pp64 < custom.txt > custom_candidates.txt

# Estimate cracking speed for this attack; --benchmark ignores the
# supplied hashes/wordlist, --speed-only estimates the given attack
hashcat -m 1000 -a 0 hashes.txt custom_candidates.txt --speed-only
# Merge Spanish and French wordlists into one Romance-language list
cat spanish_words.txt french_words.txt > latin_words.txt

# Generate candidates
./pp64 < latin_words.txt > latin_candidates.txt

# Crack with language wordlist
hashcat -m 1000 -a 0 hashes.txt latin_candidates.txt
# Number of distinct candidates (sort -u is equivalent to sort | uniq)
./pp64 < wordlist.txt | sort -u | wc -l

# Length distribution, most common length first
./pp64 < wordlist.txt | awk '{print length}' | sort | uniq -c | sort -rn

# Per-character frequency table
./pp64 < wordlist.txt | grep -o . | sort | uniq -c | sort -rn
# Keep purely numeric candidates
./pp64 < wordlist.txt | grep '^[0-9]*$'

# Keep purely alphabetic candidates
./pp64 < wordlist.txt | grep '^[a-zA-Z]*$'

# Lowercase everything, then deduplicate
./pp64 < wordlist.txt | tr '[:upper:]' '[:lower:]' | sort -u
# Generate batches and test progressively: the head/tail window selects
# candidates ((batch-1)*100k, batch*100k]. NOTE(review): pp64 regenerates
# the full stream each iteration — fine for small lists, wasteful for big ones.
for batch in {1..10}; do
  ./pp64 < wordlist.txt | head -$((batch * 100000)) | tail -100000 | \
    hashcat -m 1000 -a 0 hashes.txt
  echo "Batch $batch complete"
done
# Time candidate generation and count output lines
time ./pp64 < wordlist.txt | wc -l

# Monitor the generator; pgrep matches by process name, avoiding the
# classic 'ps aux | grep' pitfall of matching the grep process itself
watch -n 1 'pgrep -a pp64'

# Limit virtual memory for this shell and its children
ulimit -v 4000000  # 4GB limit
./pp64 < wordlist.txt
# Estimate output size
wordlist_size=$(wc -c < wordlist.txt)
# NOTE(review): the x2 factor is a rough placeholder; PRINCE output grows
# combinatorially with element count — treat this as a lower bound.
echo "Estimated output: ~$((wordlist_size * 2)) bytes"

# Compress candidates to save space
./pp64 < wordlist.txt | gzip > candidates.txt.gz

# Use compressed candidates with hashcat (stream-decompress, no temp file)
gunzip -c candidates.txt.gz | hashcat -m 1000 -a 0 hashes.txt
# --- Troubleshooting: build failures ---
# Clean build
make clean
make

# Verbose build output
make V=1

# Check for missing dependencies (Debian/Ubuntu)
apt-get install -y gcc g++ make git
# --- Troubleshooting: empty or garbled output ---
# Verify input wordlist
head wordlist.txt
wc -l wordlist.txt

# Check for encoding issues
file wordlist.txt
iconv -f ISO-8859-1 -t UTF-8 wordlist.txt > wordlist_utf8.txt
# Test hashcat with generated candidates (-m 0 = MD5, small sample)
./pp64 < wordlist.txt | head -1000 | hashcat -m 0 -a 0 test_hash.txt

# Look up the NTLM hash mode; -m is ignored when --help is given,
# so grep the full help/mode listing for the algorithm name instead
hashcat --help | grep "NTLM"
  • Wordlist Quality: Use curated wordlists relevant to target
  • Length Constraints: Tailor min/max lengths to target policy
  • Element Count: Balance between diversity and computation
  • Testing: Test with known password before full attack
  • Resources: Monitor CPU and memory during generation
  • Efficiency: Pipe directly to hashcat to save disk space
  • Logging: Record successful cracks and parameters used
  • Authorization: Only crack hashes with explicit authorization
Flag — Description
--help — Display help message
--version — Show version number
--elem-cnt-min — Minimum elements per candidate
--elem-cnt-max — Maximum elements per candidate
--pw-min — Minimum password length
--pw-max — Maximum password length
--separator — Separator between elements
-o — Output file path
  • hashcat — GPU-accelerated password hash cracking
  • John the Ripper — Password cracking tool
  • maskprocessor — Password mask generator
  • combinator — Dictionary combinator tool
  • rockyou — Popular wordlist
  • Weakpass — Wordlist compilation service