REMnux
Installation
Live USB Installation
# Download the REMnux ISO — copy the direct .iso link from the
# downloads page first (wget on https://remnux.org/download/ itself
# only fetches the HTML page, not an image)
wget <direct-iso-url-from-remnux.org/download>
# Write to USB drive (Linux) — verify the target device with lsblk first;
# dd overwrites it destructively and /dev/sdc may differ on your machine
sudo dd if=remnux-7.20201113-amd64.iso of=/dev/sdX bs=4M status=progress
sudo sync
# Or use Balena Etcher (GUI)
balena-etcher-x.x.x-linux-x64.zip
# Boot from USB and install
Virtual Machine Setup
# Using VirtualBox
# Create new VM: Linux, Ubuntu 64-bit
# Allocate: 4+ CPU cores, 8GB+ RAM, 100GB+ disk
# Boot from ISO and install
# Using VMware
# New Virtual Machine > Linux > Ubuntu 64-bit
# Custom hardware settings
# Install from ISO
Docker Installation (Fastest)
# Pull REMnux Docker image
docker pull remnux/remnux:latest
# Run container
docker run -it --rm remnux/remnux
# With volume mount for samples
docker run -it -v /home/user/samples:/samples remnux/remnux
# With X11 forwarding for GUI tools
docker run -it -e DISPLAY=$DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix remnux/remnux
Tool Categories
Static Analysis Tools
# Strings extraction
strings malware.bin
strings -a -t x malware.bin > strings.txt
# ELF analysis
readelf -h malware # Headers
readelf -l malware # Program headers
readelf -s malware # Symbol table
file malware # File type
# Hexdump/Binary inspection
hexdump -C malware.bin | head
od -x malware.bin | head
xxd malware.bin | head
# YARA rule scanning
yara /usr/share/yara-rules/malware.yar malware.bin
yara -r /usr/share/yara-rules/ /samples/
# Clamav antivirus scanning
clamscan malware.bin
clamscan -r /samples/
freshclam # Update signatures
Dynamic Analysis Tools
# System call tracing
strace -o trace.log ./malware
strace -e trace=open,read,write ./malware
strace -f -p $(pgrep malware) # Trace running process
# Library call tracing
ltrace -o ltrace.log ./malware
ltrace -e malloc,free ./malware
ltrace -c ./malware # Call count summary
# Process monitoring
ps aux | grep malware
top
htop
pgrep -a malware
# Network monitoring
tcpdump -i eth0 -w capture.pcap
tcpdump -i eth0 'dst host 192.168.1.1'
wireshark
tshark -i eth0
Debuggers & Disassemblers
# GDB debugger
gdb ./malware
(gdb) break main
(gdb) run
(gdb) disassemble main
(gdb) print $rax
(gdb) backtrace
# Radare2
r2 malware
> aaa # Analyze
> afl # List functions
> pdf @ main # Print disassembly
> pxr @ 0x4000 # Hexdump with refs
> s 0x4000 # Seek
> iz # Strings
# Objdump disassembly
objdump -d malware | head
objdump -s -j .data malware # Data section
objdump -t malware # Symbols
# Ghidra (interactive)
ghidra # GUI launcher
# File > Import > Select binary > Auto analyze
Reverse Engineering Frameworks
# Radare2 scripting
cat > analyze.r2 <<EOF
aaa
afl
pdf @ main
!echo Analysis complete   # '!' runs a shell command; '.!' would re-interpret its output as r2 commands
EOF
r2 -i analyze.r2 malware
# Frida dynamic instrumentation
frida-server & # Start server
frida -n malware # Attach to process
# Python-based analysis with PyELF
python3 -c "
import struct
with open('malware', 'rb') as f:
elf_magic = f.read(4)
print(f'ELF: {elf_magic.hex()}')
"
Malware Extraction & Unpacking
APK Analysis (Android)
# Extract APK
unzip app.apk -d app_extracted/
# Decompile with apktool
apktool d app.apk -o app_decompiled
# Extract DEX and convert to Java
d2j-dex2jar classes.dex
jd-gui classes-dex2jar.jar
# Analyze manifest
cat AndroidManifest.xml
Container & Archive Extraction
# Unzip
unzip -l archive.zip
unzip archive.zip
# Tar archive
tar -tf archive.tar.gz
tar -xzf archive.tar.gz
# Gzip compression
gunzip file.gz
# 7-Zip
7z x archive.7z
# UPX packed executable
upx -d packed_binary -o unpacked_binary
Memory Dump Analysis
# Volatility memory forensics
volatility -f memory.dump imageinfo
volatility -f memory.dump --profile=<LinuxProfile> linux_pslist  # Linux needs a kernel-specific custom profile and the linux_* plugins
volatility -f memory.dump dumpfiles -D ./
# YARA on memory
volatility -f memory.dump yarascan -y /usr/share/yara-rules/malware.yar
# String extraction from memory
strings memory.dump | grep "suspicious_pattern"
Network Forensics
Traffic Analysis
# Tcpdump with filters
tcpdump -i eth0 -n 'tcp port 80' # HTTP
tcpdump -i eth0 -n 'udp port 53' # DNS
tcpdump -i eth0 -n 'src 192.168.1.1' # Source IP
# Save to PCAP
tcpdump -i eth0 -w capture.pcap 'tcp or udp'
# Read PCAP file
tcpdump -r capture.pcap | head
# Statistics
tcpdump -r capture.pcap -nn | awk '{print $3}' | sort | uniq -c
Wireshark Analysis
# Launch Wireshark
wireshark &
# Command-line analysis
tshark -r capture.pcap -Y 'http' | head
tshark -r capture.pcap -Y 'dns' -T json > dns_queries.json
tshark -r capture.pcap -z conversations,tcp
# Extract objects
tshark -r capture.pcap --export-objects http,./http_objects/
DNS Forensics
# Monitor DNS queries
tcpdump -i eth0 'udp port 53' -w dns.pcap
# Analyze DNS traffic
tshark -r dns.pcap -Y 'dns' -T fields -e dns.qry.name | sort | uniq
# Extract from PCAP with tools
./dns_parser.sh dns.pcap
# Bulk DNS lookup
while IFS= read -r domain; do
  dig "$domain" @8.8.8.8
done < suspicious_domains.txt
File Carving & Recovery
Searching & Extracting
# Search for file signatures (magic bytes)
grep -a -b -o "MZ" malware.bin | head        # PE executable ('^' would only match line starts, not file offsets)
grep -a -b -o -P "PK\x03\x04" disk.img       # ZIP archive (-P so the \x hex escapes are interpreted)
# Using foremost for carving
foremost -i disk.img -o ./recovered
# Scalpel carving
scalpel -c /etc/scalpel/scalpel.conf -i disk.img -o ./carved
# Binwalk for firmware analysis
binwalk malware.bin
binwalk -e malware.bin # Extract
Hash & Integrity
# Calculate hashes
md5sum malware.bin
sha256sum malware.bin
ssdeep malware.bin > fuzzy.hash
# Fuzzy matching similar files
ssdeep -r fuzzy.hash ./samples/
# Create and verify AIDE database
aideinit
aide --check
Scripting & Automation
Bash Analysis Script
#!/bin/bash
# Comprehensive malware analysis script.
#
# Usage: analyze.sh <sample>
# Runs whichever static-analysis tools are installed against <sample>
# and stores each tool's output in a timestamped analysis_* directory.
set -uo pipefail   # not -e: one missing or failing scanner must not abort the rest

# Fail early with a usage message instead of "analyzing" a missing file.
if [[ $# -lt 1 || ! -f "$1" ]]; then
  printf 'Usage: %s <sample-file>\n' "${0##*/}" >&2
  exit 2
fi

readonly SAMPLE=$1
RESULTS_DIR="analysis_$(date +%Y%m%d_%H%M%S)"
readonly RESULTS_DIR
mkdir -p "$RESULTS_DIR" || exit 1

# run_tool <outfile> <cmd> [args...] — capture a tool's stdout+stderr under
# $RESULTS_DIR; tools that are not installed are skipped with a warning so
# the remaining analyses still run.
run_tool() {
  local out=$1
  shift
  if command -v "$1" >/dev/null 2>&1; then
    "$@" > "$RESULTS_DIR/$out" 2>&1
  else
    printf '[!] %s not installed, skipping\n' "$1" >&2
  fi
}

echo "[*] Performing static analysis..."
run_tool file_type.txt file "$SAMPLE"
run_tool strings.txt strings "$SAMPLE"
run_tool headers.txt readelf -h "$SAMPLE"

echo "[*] Calculating hashes..."
run_tool sha256.txt sha256sum "$SAMPLE"
run_tool hashes.txt md5sum "$SAMPLE"
run_tool fuzzy.txt ssdeep "$SAMPLE"

echo "[*] Scanning with YARA..."
run_tool yara_results.txt yara -r /usr/share/yara-rules/ "$SAMPLE"

echo "[*] Scanning with ClamAV..."
run_tool clamscan_results.txt clamscan "$SAMPLE"

echo "[+] Analysis complete. Results in $RESULTS_DIR"
Python Analysis
#!/usr/bin/env python3
"""Collect basic static-analysis facts (size, hashes, type, strings) about a sample."""
import subprocess
import hashlib
import os

# Hash in 1 MiB chunks so large samples (e.g. memory dumps) don't exhaust RAM.
_CHUNK_SIZE = 1 << 20


def _run_tool(cmd):
    """Run an external helper, returning its stdout text.

    Returns a '<tool not available>' placeholder instead of raising when the
    binary is missing, so hashing results are still produced on bare systems.
    Non-UTF-8 bytes in tool output are replaced rather than raising.
    """
    try:
        result = subprocess.run(cmd, capture_output=True)
    except FileNotFoundError:
        return f'<{cmd[0]} not available>'
    return result.stdout.decode(errors='replace')


def analyze_malware(filepath):
    """Analyze the file at *filepath* and return a results dict.

    Keys: 'size' (bytes), 'md5', 'sha256' (hex digests),
    'file_type' (output of `file`), 'strings' (list of lines from `strings`).
    Raises OSError if *filepath* does not exist or is unreadable.
    """
    results = {}
    # File info
    results['size'] = os.path.getsize(filepath)
    # Hashes — streamed, so the whole sample is never held in memory at once.
    md5 = hashlib.md5()
    sha256 = hashlib.sha256()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(_CHUNK_SIZE), b''):
            md5.update(chunk)
            sha256.update(chunk)
    results['md5'] = md5.hexdigest()
    results['sha256'] = sha256.hexdigest()
    # File type (external `file` tool, optional)
    results['file_type'] = _run_tool(['file', filepath])
    # Printable strings (external `strings` tool, optional)
    results['strings'] = _run_tool(['strings', filepath]).split('\n')
    return results


if __name__ == '__main__':
    import sys
    if len(sys.argv) < 2:
        print("Usage: analyze.py <sample>")
        sys.exit(1)
    results = analyze_malware(sys.argv[1])
    for key, value in results.items():
        print(f"{key}: {value}")
Automated Submission & Reporting
Online Analysis
# VirusTotal submission (requires API key)
curl -X POST 'https://www.virustotal.com/api/v3/files' \
-H 'x-apikey: your-api-key' \
-F 'file=@malware.bin'
# Joe Sandbox automated analysis
curl -F 'apikey=your-key' -F 'file=@malware.bin' \
https://joesandbox.com/api/v2/submissions/new
# ANY.RUN automated analysis
curl -F 'file=@malware.bin' \
https://api.any.run/v1/analysis
Report Generation
# Generate HTML report
cat > report.html <<EOF
<html>
<head><title>Malware Analysis Report</title></head>
<body>
<h1>Analysis Report</h1>
<h2>File Information</h2>
<pre>$(file malware.bin)</pre>
<h2>Hashes</h2>
<pre>$(sha256sum malware.bin)</pre>
<h2>Strings</h2>
<pre>$(strings malware.bin | head -20)</pre>
</body>
</html>
EOF
Best Practices
- Always analyze in isolated network environment
- Use air-gapped analysis machine for sensitive samples
- Document all findings with timestamps
- Create clean VM snapshot before each analysis
- Use write-blocking for evidence preservation
- Monitor network and filesystem changes
- Preserve original sample with hash verification
- Remove analysis artifacts before publishing
- Use separate VM for online lookups
References
Last updated: 2026-03-30