Python Database Backup: Automate Your Backups and Never Lose Data Again
Database disasters happen, and companies that lose critical data often never recover. A few lines of Python can automate reliable backups so you never find out the hard way.
Complete Backup System
import gzip
import os
import shutil
import subprocess
from datetime import datetime, timedelta
from pathlib import Path

import boto3


class DatabaseBackup:
    def __init__(self, backup_dir="./backups", retention_days=7):
        self.backup_dir = Path(backup_dir)
        self.backup_dir.mkdir(parents=True, exist_ok=True)
        self.retention_days = retention_days

    def backup_postgresql(self, host, user, password, dbname):
        ts = datetime.now().strftime("%Y%m%d_%H%M%S")
        filepath = self.backup_dir / f"{dbname}_{ts}.sql"
        # PGPASSWORD keeps the password off the command line and out of the process list
        env = {**os.environ, "PGPASSWORD": password}
        result = subprocess.run(
            ["pg_dump", "-h", host, "-U", user, "-d", dbname, "-f", str(filepath)],
            env=env, capture_output=True, text=True,
        )
        if result.returncode != 0:
            raise RuntimeError(f"pg_dump failed: {result.stderr}")
        return filepath

    def backup_sqlite(self, db_path):
        ts = datetime.now().strftime("%Y%m%d_%H%M%S")
        name = Path(db_path).stem
        filepath = self.backup_dir / f"{name}_{ts}.db"
        shutil.copy2(db_path, filepath)
        return filepath

    def compress(self, filepath):
        compressed = Path(str(filepath) + ".gz")
        with open(filepath, "rb") as f_in, gzip.open(compressed, "wb") as f_out:
            shutil.copyfileobj(f_in, f_out)
        original_size = filepath.stat().st_size
        compressed_size = compressed.stat().st_size
        os.remove(filepath)  # keep only the compressed copy
        reduction = (1 - compressed_size / original_size) * 100
        print(f"Compressed: {original_size // 1024}KB → {compressed_size // 1024}KB ({reduction:.0f}% saved)")
        return compressed

    def upload_s3(self, filepath, bucket):
        s3 = boto3.client("s3")
        key = f"backups/{filepath.name}"
        s3.upload_file(str(filepath), bucket, key)
        print(f"Uploaded: s3://{bucket}/{key}")

    def cleanup_old(self):
        cutoff = datetime.now() - timedelta(days=self.retention_days)
        removed = 0
        for f in self.backup_dir.iterdir():
            if f.is_file() and f.stat().st_mtime < cutoff.timestamp():
                f.unlink()
                removed += 1
        print(f"Cleaned up {removed} old backups")

    def run(self, databases, s3_bucket=None):
        results = []
        for db in databases:
            try:
                if db["type"] == "postgresql":
                    path = self.backup_postgresql(db["host"], db["user"], db["password"], db["name"])
                elif db["type"] == "sqlite":
                    path = self.backup_sqlite(db["path"])
                else:
                    raise ValueError(f"Unsupported database type: {db['type']}")
                path = self.compress(path)
                if s3_bucket:
                    self.upload_s3(path, s3_bucket)
                results.append({"db": db.get("name", db.get("path")), "success": True})
            except Exception as e:
                results.append({"db": db.get("name", db.get("path", "?")), "success": False, "error": str(e)})
        self.cleanup_old()
        return results
# Usage
backup = DatabaseBackup("./backups", retention_days=7)
results = backup.run([
    {"type": "postgresql", "name": "prod_db", "host": "localhost", "user": "postgres", "password": "secret"},
    {"type": "sqlite", "path": "/var/app/data.db"},
], s3_bucket="my-company-backups")
print(results)
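A backup you have never restored is just a hope, so it's worth a quick restore drill against a scratch database before you trust the schedule. A minimal sketch, assuming the plain-format dumps produced by backup_postgresql above (restore_postgresql is a hypothetical helper, not part of the class):

import gzip
import os
import subprocess

def restore_postgresql(backup_gz, host, user, password, dbname):
    # Decompress in memory and feed the SQL to psql; fine for modest dumps,
    # switch to streaming through a pipe for very large ones
    env = {**os.environ, "PGPASSWORD": password}
    with gzip.open(backup_gz, "rt") as f:
        sql = f.read()
    result = subprocess.run(
        ["psql", "-h", host, "-U", user, "-d", dbname],
        input=sql, env=env, capture_output=True, text=True,
    )
    if result.returncode != 0:
        raise RuntimeError(f"Restore failed: {result.stderr}")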
Schedule It (Set and Forget)
# Add to crontab - runs every night at 2 AM
0 2 * * * /usr/bin/python3 /home/user/backup.py >> /var/log/backup.log 2>&1
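One caveat before copying the usage example verbatim: cron runs with a minimal environment, and a password like "secret" hardcoded in the script tends to end up in version control. A safer sketch reads credentials from environment variables; the BACKUP_* names are placeholders you can set at the top of the crontab or in a root-only env file:

import os

# Placeholder variable names; define them in the crontab or an env file
databases = [{
    "type": "postgresql",
    "name": os.environ["BACKUP_DB_NAME"],
    "host": os.environ.get("BACKUP_DB_HOST", "localhost"),
    "user": os.environ["BACKUP_DB_USER"],
    "password": os.environ["BACKUP_DB_PASSWORD"],
}]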
Alert on Failure
import smtplib
from email.mime.text import MIMEText

def alert_on_failure(results):
    failed = [r for r in results if not r["success"]]
    if not failed:
        return
    msg = MIMEText(f"BACKUP FAILED: {failed}")
    msg["Subject"] = "⚠️ Database Backup Failed"
    msg["From"] = msg["To"] = "admin@company.com"
    with smtplib.SMTP("smtp.gmail.com", 587) as s:
        s.starttls()
        s.login("admin@company.com", "app-password")  # Gmail requires an app password here, not your account password
        s.send_message(msg)
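To tie the pieces together, the backup.py that cron invokes only needs a few lines of glue. A sketch reusing the env-driven databases list from the scheduling section:

if __name__ == "__main__":
    backup = DatabaseBackup("./backups", retention_days=7)
    results = backup.run(databases, s3_bucket="my-company-backups")
    print(results)  # captured in /var/log/backup.log by the cron redirect
    alert_on_failure(results)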
What This Gets You
- ✅ PostgreSQL + SQLite support (MySQL is easy to add; see the sketch after this list)
- ✅ Typically ~70% compression via gzip (plain-text SQL compresses extremely well)
- ✅ S3 off-site storage for disaster recovery
- ✅ 7-day retention with automatic cleanup
- ✅ Email alerts on failure
- ✅ Zero maintenance once scheduled
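On that first point: a MySQL method along these lines should slot straight into the class. A sketch assuming mysqldump is on the PATH, mirroring the PostgreSQL method's error handling:

    def backup_mysql(self, host, user, password, dbname):
        ts = datetime.now().strftime("%Y%m%d_%H%M%S")
        filepath = self.backup_dir / f"{dbname}_{ts}.sql"
        # MYSQL_PWD keeps the password out of the process list, like PGPASSWORD above
        env = {**os.environ, "MYSQL_PWD": password}
        with open(filepath, "w") as f:
            result = subprocess.run(
                ["mysqldump", "-h", host, "-u", user, dbname],
                env=env, stdout=f, stderr=subprocess.PIPE, text=True,
            )
        if result.returncode != 0:
            raise RuntimeError(f"mysqldump failed: {result.stderr}")
        return filepath

Add a matching elif db["type"] == "mysql": branch in run() and the rest of the pipeline (compression, S3 upload, cleanup) applies unchanged.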
Setup time: 15 minutes. Protection: permanent.
Get the Complete Python Automation Toolkit
Part of the Python Business Automation Toolkit — $9 one-time for database backup + email automation + invoice generation + file organization scripts.
What databases are you running? Share your stack in the comments!