DEV Community

Alex Chen
Alex Chen

Posted on

The 30 Linux Commands I Use Every Day on My VPS

The 30 Linux Commands I Use Every Day on My VPS

I manage a $5 VPS that runs 5 services. These are the commands I can't live without.

File Operations

# Find large files (when disk is full)
# NB: -h / sort -rh (human-numeric) is a GNU coreutils feature — it compares 4.2G vs 512M correctly
du -sh * | sort -rh | head -10
# Output:
# 4.2G    node_modules/
# 512M    logs/
# 128M    data.db

# Find files modified in last N days (-mtime -7 = modified within the last 7 days)
find /var/log -mtime -7 -name "*.log" -ls

# Quick file search
fzf # Fuzzy finder — install it, thank me later
locate "config.json" # Instant file search (updatedb runs daily)
# NOTE: locate needs the mlocate/plocate package; results can be up to a day stale

# Watch a file for changes (live log tailing)
tail -f /var/log/nginx/error.log
# With highlights:
tail -f /var/log/app.log | grep --color=always -E "ERROR|WARN"

# Compare two files (-u = unified diff, the format patches use)
diff -u file1.json file2.json
sdiff -s file1 file2 # Side-by-side comparison (-s = suppress identical lines)
Enter fullscreen mode Exit fullscreen mode

Process Management

# What's eating my CPU?
htop # Better top (install it!)
# Or: ps aux --sort=-%cpu | head -10

# What's eating my RAM?
ps aux --sort=-%mem | head -10
free -h # Memory overview

# Find process by port
ss -tlnp | grep :3000
# Output: LISTEN 0  128  *:3000  users:(("node",pid=12345))

# Kill processes by name (-f matches against the full command line, not just the name)
pkill -f "node server.js"
kill $(lsof -ti:3000) # Kill whatever is on port 3000 (-t = PIDs only)

# Process tree
pstree -p # Shows parent-child relationships
pgrep -a node # All Node.js processes with args

# Background & foreground
npm start &
jobs # List background jobs
fg %1 # Bring job 1 to foreground
# Ctrl+Z — keyboard shortcut, not a command: press it to suspend the foreground job
bg %1 # Resume in background

# nohup = survives terminal close (2>&1 sends stderr to the same log)
nohup npm start > app.log 2>&1 &
Enter fullscreen mode Exit fullscreen mode

Network Debugging

# Is the port open?
curl -I http://localhost:3000
ss -tlnp | grep :3000

# DNS check (+short prints just the answer records)
dig example.com +short
nslookup example.com

# Connection test (is this host reachable?)
ping -c 3 google.com # -c 3 = stop after 3 packets
nc -zv github.com 443 # Port scan single port (-z = no data, -v = verbose)
nc -zv example.com 80-100 # Port range

# HTTP debugging (see headers, redirects)
curl -v https://api.example.com/data
curl -I https://example.com # Headers only (sends a HEAD request)
curl -sI -o /dev/null -w "%{http_code} %{time_total}s\n" URL # Status + time

# Follow redirects
curl -L https://bit.ly/something

# POST request
curl -X POST https://api.example.com/users \
  -H "Content-Type: application/json" \
  -d '{"name": "Alex"}'

# Download file
wget -q URL -O output.txt
curl -sL URL -o output.txt

# Network interfaces + IPs
ip addr show
hostname -I # Just IP addresses
Enter fullscreen mode Exit fullscreen mode

Disk & Storage

# Am I running out of space?
df -h # Human-readable disk usage
du -sh /* | sort -rh | head -10 # Biggest directories

# Find large files (>100MB); errors from unreadable paths go to /dev/null
find / -type f -size +100M -exec ls -lh {} \; 2>/dev/null | sort -k5 -rh

# Clean up
rm -rf node_modules/ # Obvious
npm cache clean --force # Free up npm cache
docker system prune -af # Docker cleanup (careful! -a also removes ALL unused images)

# Disk usage by type
ncdu # Interactive disk usage explorer (amazing tool)

# Inode check (lots of small files?)
df -i # If %Used is 100%, you're out of inodes not space
Enter fullscreen mode Exit fullscreen mode

Text Processing (The Unix Way)

# Grep power user
grep -r "TODO" src/ --include="*.ts"  # Recursive, filtered
grep -n "error" app.log | tail -20     # Line numbers
grep -c "" file.txt                    # Count lines
grep -E "(error|warn|fail)" log.txt   # Regex OR
grep -v "^#" config.ini                # Exclude comments
grep -A 5 -B 2 "exception" error.log   # Context lines (After/Before)

# Sed quick edits
# NB: '-i' edits in place — GNU syntax shown; on macOS/BSD use: sed -i '' ...
sed -i 's/old/new/g' file.txt         # Replace all occurrences
sed -i '42d' file.txt                 # Delete line 42
sed -n '10,20p' file.txt              # Print lines 10-20
sed -i '1i\# Header' file.txt        # Insert at line 1

# Awk for column processing
awk '{print $1, $NF}' access.log      # First + last column ($NF = last field)
awk '{sum+=$NF} END {print sum}' nums.txt # Sum last column
awk -F',' '{print $2}' csv.csv       # CSV parsing (-F sets the field separator)
awk 'length > 80' longlines.txt       # Filter long lines

# Sort & unique
sort file.txt | uniq                  # Deduplicate (or just: sort -u)
sort -rn numbers.txt                 # Reverse numeric
sort -k2 -t',' file.csv             # Sort by column 2

# Quick JSON manipulation (with jq)
# jq reads files directly — no 'cat file |' needed
jq '.users[].name' data.json
jq '.[] | select(.age > 30)' data.json
echo '{"key":"value"}' | jq '.key'    # Extract value
Enter fullscreen mode Exit fullscreen mode

System Monitoring

# Real-time monitoring (all four are separate installs; all are interactive)
htop          # Processes (interactive)
iotop         # Disk I/O
iftop         # Network bandwidth
nethogs       # Network per-process

# One-liner stats
echo "=== CPU ===" && nproc && echo "=== RAM ===" && free -h | grep Mem && echo "=== Disk ===" && df -h / && echo "=== Uptime ===" && uptime

# Last reboot
who -b
last reboot | head -5

# Current runlevel / systemd targets
systemctl list-units --state=running --type=service

# Journalctl (systemd logs)
journalctl -u nginx -f           # Follow nginx logs (-u = unit, -f = follow)
journalctl --since "1 hour ago" # Recent logs
journalctl -p err               # Only errors (-p = priority threshold)
Enter fullscreen mode Exit fullscreen mode

Git Quick Commands

# Status super-view
git status -sb                   # Short branch + status

# What changed?
git diff --stat                 # File summary
git diff --name-only            # Just filenames

# Who changed this?
git blame file.ts -L 10,20       # Lines 10-20 only
git log --oneline --author="Alex" # My commits

# Undo mistakes
git checkout -- file.ts         # Discard changes to file
git reset HEAD~1                # Undo last commit (keep changes)
git commit --amend              # Edit last commit message

# Stash management
git stash push -m "WIP feature X"
git stash list
git stash pop

# Branch cleanup
git branch -vv                  # Show tracking info
git branch --merged | grep -v '\*' | xargs git branch -d # Delete merged branches
Enter fullscreen mode Exit fullscreen mode

SSH & Remote

# Quick connection
ssh user@host "command"        # Run command remotely, then disconnect

# Copy files
scp file.txt user@host:/path/
rsync -avz local/ user@host:/remote/ # Sync directories
# NB: the trailing slash on local/ matters — it syncs the *contents*, not the dir itself

# SSH tunnel (access remote port locally)
ssh -L 8080:localhost:3000 user@host
# Now localhost:8080 → remote:3000

# Key-based auth (no passwords!)
ssh-copy-id user@host           # Copy your public key to the server's authorized_keys
Enter fullscreen mode Exit fullscreen mode

My .bashrc Aliases

# Add these to ~/.bashrc
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
alias ..='cd ..'
alias ...='cd ../..'
alias gs='git status -sb'
alias gl='git log --oneline --graph -15'
alias gp='git push'
alias gd='git diff'
alias gc='git checkout'
alias nb='npm run build'
alias ns='npm run start'
alias nt='npm test'
alias dc='docker compose'
alias ports='ss -tlnp'
alias myip='curl -s ifconfig.me'
# Quote the URL: an unquoted '?' is a shell glob character and can match a file in $PWD
alias weather='curl -s "wttr.in/?format=3"'

# Quick functions
# Make a directory (and any parents) and cd into it in one step.
# '--' ends option parsing so names starting with '-' work too.
mkcd() { mkdir -p -- "$1" && cd -- "$1"; }
# Extract an archive into the current directory based on its extension.
# Usage: extract <file>
# Returns non-zero (with a message on stderr) for missing files or unknown formats.
extract() {
  if [ ! -f "$1" ]; then
    printf 'extract: %s: no such file\n' "$1" >&2
    return 1
  fi
  case "$1" in
    *.tar.gz|*.tgz)   tar xzf "$1" ;;
    *.tar.xz)         tar xJf "$1" ;;
    *.tar.bz2|*.tbz2) tar xjf "$1" ;;
    *.tar)            tar xf "$1" ;;
    *.zip)            unzip "$1" ;;
    *.gz)             gunzip "$1" ;;
    *.bz2)            bunzip2 "$1" ;;
    *)
      printf 'extract: %s: unknown format\n' "$1" >&2
      return 1
      ;;
  esac
}
Enter fullscreen mode Exit fullscreen mode

The One Command That Saves Me Daily

# My "server health check" one-liner
# NOTE(review): the format of top's "Cpu(s)" line varies between distros/top
# versions, so '{print $2}' may pick a different field on your system — verify
# once on your box. pgrep -c prints 0-with-nonzero-status when no node
# processes exist; inside $() that status is ignored, so the chain still runs.
echo "=== $(date) ===" && \
echo "Uptime: $(uptime -p)" && \
echo "CPU: $(top -bn1 | grep "Cpu(s)" | awk '{print $2}')" && \
echo "RAM: $(free -h | awk '/Mem:/{print $3"/"$2}') " && \
echo "Disk: $(df -h / | awk 'NR==2{print $3"/"$2" ("$5")"}')" && \
echo "Processes: $(ps aux | wc -l)" && \
echo "Node procs: $(pgrep -c node)" && \
echo "Ports: $(ss -tlnp | wc -l) listening"
Enter fullscreen mode Exit fullscreen mode

Output:

=== Sat May 16 02:49:00 CST 2026 ===
Uptime: up 3 weeks, 2 days, 14 hours
CPU: 2.3
RAM: 1.8G/3.7G
Disk: 37G/59G (65%)
Processes: 187
Node procs: 4
Ports: 12 listening
Enter fullscreen mode Exit fullscreen mode

One glance tells me everything I need to know about my server's health.


What are your must-have Linux commands? Share your favorites!

Follow @armorbreak for more sysadmin tips.

Top comments (0)