HashRat is a cross-platform command-line hashing tool that supports multiple cryptographic hash algorithms including MD5, SHA-1, SHA-256, Whirlpool, and JH. It’s designed for digital forensics, integrity verification, and malware analysis. HashRat can recursively hash directories, process batch operations, and generate detailed audit reports.
sudo apt-get update
sudo apt-get install hashrat
brew install hashrat
git clone https://github.com/ColumPaget/hashrat.git
cd hashrat
./configure
make
sudo make install
hashrat --version
hashrat --help
| Command | Description |
|---|---|
| hashrat file.txt | Hash single file (all algorithms by default) |
| hashrat *.txt | Hash all text files in directory |
| hashrat -r directory/ | Recursively hash all files in directory |
| hashrat -md5 file.txt | Hash using MD5 only |
| hashrat -sha256 file.txt | Hash using SHA-256 only |
# Single file hashing
hashrat myfile.iso
# Recursive directory hashing
hashrat -r /path/to/data/
# Multiple files matching pattern
hashrat /evidence/*.bin
# Specific directory with all files
hashrat -r /home/user/documents/
| Algorithm | Flag | Output Size | Collision History | Use Case |
|---|---|---|---|---|
| MD5 | -md5 | 128-bit (32 hex) | Vulnerable | Legacy only |
| SHA-1 | -sha1 | 160-bit (40 hex) | Deprecated | Legacy forensics |
| SHA-256 | -sha256 | 256-bit (64 hex) | Secure | Recommended standard |
| Whirlpool | -whirlpool | 512-bit (128 hex) | Secure | High-security hashing |
| JH | -jh | 256-bit (64 hex) | Secure | Emerging algorithm |
# MD5 only (fast, legacy)
hashrat -md5 largefile.bin
# SHA-256 only (recommended)
hashrat -sha256 firmware.img
# Whirlpool (strongest)
hashrat -whirlpool evidence.dd
# JH algorithm (modern)
hashrat -jh secure_backup.zip
# Multiple algorithms simultaneously
hashrat -md5 -sha256 -whirlpool file.bin
# Forensic standard (MD5 + SHA-256)
hashrat -md5 -sha256 disk_image.dd
# High-security combination
hashrat -sha256 -whirlpool -jh encrypted_vault.zip
# All algorithms
hashrat -all file.txt
| Flag | Description |
|---|---|
| -r | Recursively hash directory contents |
| -b | Bare output mode |
| -csv | CSV format output |
| -xml | XML format output |
| -t | Tree-style output with indentation |
# Recursive hashing of entire directory
hashrat -r /evidence/
# Recursive with CSV output
hashrat -r -csv /data/ > hashes.csv
# Recursive with XML format
hashrat -r -xml /evidence/ > evidence_hashes.xml
# Tree view of directory structure
hashrat -r -t /important/files/
hashrat -md5 -sha256 file.txt
File: /path/to/file.txt
MD5: d41d8cd98f00b204e9800998ecf8427e
SHA-256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
hashrat -r -csv /data/ > hashes.csv
# Output:
# File,MD5,SHA1,SHA256,Whirlpool
# /data/file1.txt,d41d8cd98f00b204e9800998ecf8427e,...,...
hashrat -r -xml /data/ > hashes.xml
| Command | Description |
|---|---|
| hashrat -r -csv -o output.csv directory/ | Save CSV output to file |
| hashrat -r -xml -o output.xml directory/ | Save XML output to file |
| hashrat -batch filelist.txt | Hash files from list |
| hashrat -parallel 4 -r directory/ | Use 4 threads for parallel hashing |
# Create list of files to hash
cat > files_to_hash.txt << EOF
/path/to/file1.iso
/path/to/file2.dd
/path/to/file3.bin
EOF
# Hash files from batch list
hashrat -batch files_to_hash.txt
# With specific output format
hashrat -batch files_to_hash.txt -csv > batch_hashes.csv
# Use multiple threads for faster hashing
hashrat -parallel 4 -r /large_directory/
# Optimal threading (use available cores)
hashrat -parallel 8 -r /terabyte_storage/
# Default single-threaded
hashrat -r /data/
| Command | Description |
|---|---|
| hashrat -verify hashes.txt file.txt | Verify file against known hash |
| hashrat -check hashes.txt | Verify all files in hash list |
| hashrat -compare new.csv old.csv | Compare two hash files |
# Create baseline hashes
hashrat -r -csv /system/ > system_baseline.csv
# Later verification
hashrat -check system_baseline.csv
# Compare before and after states
hashrat -r -csv /system/ > system_after.csv
hashrat -compare system_after.csv system_baseline.csv
# Hash sensitive files
hashrat -sha256 -whirlpool /etc/passwd /etc/shadow > critical_hashes.txt
# Verify integrity later
hashrat -check critical_hashes.txt
# If changes detected:
hashrat -sha256 -whirlpool /etc/passwd /etc/shadow
| Flag | Description |
|---|---|
| -include *.pdf | Hash only matching patterns |
| -exclude *.tmp | Skip matching patterns |
| -minsize 1M | Hash files larger than threshold |
| -maxsize 100M | Hash files smaller than threshold |
# Hash only PDF files
hashrat -r -include "*.pdf" /documents/
# Exclude temporary files
hashrat -r -exclude "*.tmp" -exclude "*.cache" /data/
# Hash files larger than 1GB
hashrat -r -minsize 1G /archive/
# Hash files between 10MB and 100MB
hashrat -r -minsize 10M -maxsize 100M /media/
# Hash files modified in last 7 days
hashrat -r -mtime 7 /var/www/
# Files modified since specific date
hashrat -r -since "2026-01-01" /evidence/
# Step 1: Initial acquisition hash
hashrat -sha256 -whirlpool disk_image.dd > acquisition_hash.txt
# Step 2: Store securely
gpg -e -r recipient@example.com acquisition_hash.txt
# Step 3: Transport verification
hashrat -sha256 -whirlpool disk_image.dd > transport_verify.txt
# Step 4: Compare
diff acquisition_hash.txt transport_verify.txt
# Create comprehensive hash report
hashrat -r -csv /evidence/ -o evidence_report.csv
# Generate supplementary metadata
hashrat -r -xml /evidence/ -o evidence_metadata.xml
# Document with timestamp
hashrat -r -csv /evidence/ > evidence_$(date +%Y%m%d_%H%M%S).csv
# Hash suspected malware
hashrat -sha256 -md5 suspicious_binary.exe > malware_hashes.txt
# Submit to virustotal/databases
cat malware_hashes.txt
# Track variants
hashrat -r -sha256 /quarantine/ > quarantine_inventory.csv
# Create analysis-ready CSV
hashrat -r -csv /suspect/files/ > analysis.csv
# Filter for specific file types
hashrat -r -csv -include "*.exe" /suspect/ > exes.csv
# Pretty-print XML for integration (xmllint formats XML; it does not emit JSON)
hashrat -r -xml /data/ | xmllint --format - > hashes_formatted.xml
# Hash files and filter by pattern
hashrat -r /data/ | grep -i suspicious
# Count total files hashed
hashrat -r /evidence/ | wc -l
# Extract only SHA-256 hashes
hashrat -r -sha256 /data/ | grep "SHA-256:"
# Import CSV directly to database
hashrat -r -csv /evidence/ > hashes.csv
sqlite3 forensics.db ".import hashes.csv file_hashes"
# Generate SQL statements
hashrat -r -csv /data/ | awk -F',' '{print "INSERT INTO hashes VALUES('"'"'" $1 "'"'"', '"'"'" $2 "'"'"');"}'
# Parallel hashing with 8 threads
hashrat -parallel 8 -r /massive_drive/
# Fast MD5 only for speed
hashrat -md5 -parallel 16 -r /large_dataset/
# Exclude known irrelevant files
hashrat -r -exclude "*.jpg" -exclude "*.mp4" -parallel 8 /data/
# Process in chunks for limited RAM
hashrat -r -include "*.txt" /part1/ > part1.csv
hashrat -r -include "*.txt" /part2/ > part2.csv
# Monitor with watch
watch -n 5 'ps aux | grep hashrat'
# Run with elevated privileges
sudo hashrat -r /root/
# Hash system directories
sudo hashrat -r /etc/ > etc_hashes.csv
# Reduce thread count if system is overwhelmed
hashrat -parallel 2 -r /large_dir/
# Use fastest algorithm only
hashrat -md5 -parallel 8 -r /data/
# Convert CSV to different format
hashrat -check hashes.csv -xml > hashes.xml
# Merge multiple CSV files
cat file1.csv file2.csv file3.csv > merged.csv
#!/bin/bash
# Daily integrity verification: hash key system directories and, when a
# snapshot from the previous day exists, compare the two with hashrat.
set -euo pipefail

# Compute both date stamps once up front.
# NOTE: `date -d yesterday` is GNU-specific; on BSD/macOS use `date -v-1d`.
today=$(date +%Y%m%d)
yesterday=$(date -d yesterday +%Y%m%d)

# Create today's baseline (SHA-256 of core system trees).
hashrat -r -sha256 /usr /lib /bin > "integrity_check_${today}.csv"

# Compare with yesterday's baseline if one was recorded.
if [ -f "integrity_check_${yesterday}.csv" ]; then
  hashrat -compare "integrity_check_${today}.csv" "integrity_check_${yesterday}.csv"
fi
# Create detailed evidence log
echo "Evidence Acquisition Log - $(date)" > evidence.log
echo "Source: /dev/sda1" >> evidence.log
echo "Destination: /media/usb/image.dd" >> evidence.log
hashrat -sha256 -whirlpool /media/usb/image.dd >> evidence.log
# Create malware database
mkdir malware_hashes
hashrat -r -sha256 -csv /quarantine/ > malware_hashes/$(date +%Y%m%d).csv
# Compare samples over time
diff malware_hashes/20260401.csv malware_hashes/20260501.csv
- hashdeep - Similar multi-algorithm hasher with NSRL support
- md5sum/sha256sum - Single-algorithm utilities
- openssl - Cryptographic hashing alternative
- ssdeep - Fuzzy hashing for similarity detection
- md5deep - Single-algorithm recursive hasher