DEV Community

Brad
Brad

Posted on

Python Database Automation: Back Up, Migrate, and Monitor SQLite/PostgreSQL

Python Database Automation: Back Up, Migrate, and Monitor SQLite/PostgreSQL

Database maintenance is critical but tedious. Here are Python scripts that automate backups, migrations, and health monitoring.

Automated SQLite Backup

import sqlite3
import shutil
import os
from datetime import datetime
from pathlib import Path

def backup_sqlite(
    db_path: str,
    backup_dir: str,
    max_backups: int = 7
) -> str:
    """Create a timestamped backup of an SQLite database.

    Uses SQLite's online backup API, which is safe to run even while
    other connections are actively using the source database.

    Args:
        db_path: Path to the source database file.
        backup_dir: Directory to write backups into (created if missing).
        max_backups: Retain at most this many backups; the oldest beyond
            the limit are deleted.

    Returns:
        Path to the newly created backup file, as a string.
    """
    target_dir = Path(backup_dir)
    target_dir.mkdir(parents=True, exist_ok=True)

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    db_name = Path(db_path).stem
    backup_file = target_dir / f"{db_name}_{timestamp}.db"

    # SQLite's backup API copies page-by-page under the database lock.
    # Close both connections even if the copy raises (the original code
    # leaked both handles on failure).
    source = sqlite3.connect(db_path)
    try:
        dest = sqlite3.connect(str(backup_file))
        try:
            with dest:  # commits the backup transaction on success
                source.backup(dest)
        finally:
            dest.close()
    finally:
        source.close()

    print(f"Backup created: {backup_file}")

    # Prune backups beyond the retention limit. Timestamped filenames
    # sort lexicographically in chronological order, so the head of the
    # sorted list is the oldest.
    backups = sorted(target_dir.glob(f"{db_name}_*.db"))
    excess = len(backups) - max_backups
    for oldest in backups[:max(excess, 0)]:
        oldest.unlink()
        print(f"Deleted old backup: {oldest.name}")

    return str(backup_file)

# Example: create one backup now (pair with cron/systemd timers for a daily schedule)
backup_sqlite("myapp.db", "./backups", max_backups=7)
Enter fullscreen mode Exit fullscreen mode

PostgreSQL Backup with pg_dump

import subprocess
import os
from pathlib import Path

def backup_postgres(
    host: str,
    port: int,
    database: str,
    username: str,
    password: str,
    output_dir: str
) -> str:
    """Backup a PostgreSQL database using pg_dump, gzip-compressed.

    Args:
        host: Database server hostname.
        port: Database server port.
        database: Name of the database to dump.
        username: Role to connect as.
        password: Password, passed via PGPASSWORD (never on the command
            line, where it would be visible in `ps` output).
        output_dir: Directory for the .sql.gz file (created if missing).

    Returns:
        Path to the compressed dump file, as a string.

    Raises:
        RuntimeError: If pg_dump or gzip exits with a non-zero status.
    """
    out_dir = Path(output_dir)
    out_dir.mkdir(parents=True, exist_ok=True)

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    output_file = out_dir / f"{database}_{timestamp}.sql.gz"

    env = os.environ.copy()
    env['PGPASSWORD'] = password

    dump_cmd = [
        'pg_dump',
        '-h', host,
        '-p', str(port),
        '-U', username,
        '-d', database,
        '--no-password',
        '-v'
    ]

    gzip_cmd = ['gzip', '-c']

    with open(output_file, 'wb') as f:
        dump_proc = subprocess.Popen(dump_cmd, stdout=subprocess.PIPE, env=env)
        gzip_proc = subprocess.Popen(gzip_cmd, stdin=dump_proc.stdout, stdout=f)
        # Close the parent's copy of the pipe so pg_dump receives SIGPIPE
        # if gzip exits early.
        dump_proc.stdout.close()
        dump_rc = dump_proc.wait()
        gzip_rc = gzip_proc.wait()

    # The original code ignored exit codes, so a failed dump silently
    # produced a truncated (or empty) backup file.
    if dump_rc != 0 or gzip_rc != 0:
        output_file.unlink(missing_ok=True)
        raise RuntimeError(
            f"Backup of '{database}' failed "
            f"(pg_dump exit {dump_rc}, gzip exit {gzip_rc})"
        )

    size_mb = output_file.stat().st_size / (1024 * 1024)
    print(f"Backup saved: {output_file} ({size_mb:.1f} MB)")

    return str(output_file)
Enter fullscreen mode Exit fullscreen mode

Database Health Monitor

Track database performance and alert on issues:

import sqlite3
import time
from dataclasses import dataclass
from typing import Optional

@dataclass
class DBHealth:
    """Snapshot of an SQLite database's health, produced by check_db_health."""
    size_mb: float              # database file size on disk, in megabytes
    table_count: int            # number of tables listed in sqlite_master
    row_counts: dict[str, int]  # table name -> COUNT(*) result
    slow_queries: list[str]     # warnings, e.g. tables with no indexes
    integrity_ok: bool          # True when PRAGMA integrity_check returned "ok"

def check_db_health(db_path: str) -> DBHealth:
    """Comprehensive SQLite health check.

    Args:
        db_path: Path to the SQLite database file.

    Returns:
        A DBHealth snapshot: file size, table/row counts, no-index
        warnings, and the PRAGMA integrity_check result.
    """
    # File size is measured on disk, before opening a connection.
    size_bytes = os.path.getsize(db_path)
    size_mb = size_bytes / (1024 * 1024)

    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()

        # Table list and row counts
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
        tables = [row[0] for row in cursor.fetchall()]

        row_counts = {}
        for table in tables:
            # Quote the identifier: table names can legally contain
            # spaces or quotes, which would break raw interpolation.
            quoted = '"' + table.replace('"', '""') + '"'
            cursor.execute(f"SELECT COUNT(*) FROM {quoted}")
            row_counts[table] = cursor.fetchone()[0]

        # Integrity check ("ok" on the first row means the file is sound)
        cursor.execute("PRAGMA integrity_check")
        integrity_ok = cursor.fetchone()[0] == "ok"

        # Flag tables with no indexes as potential slow-query sources
        slow_queries = []
        for table in tables:
            quoted = '"' + table.replace('"', '""') + '"'
            cursor.execute(f"PRAGMA index_list({quoted})")
            if not cursor.fetchall():
                slow_queries.append(f"Table '{table}' has no indexes")
    finally:
        # Close even if a query raises (the original leaked the connection)
        conn.close()

    return DBHealth(
        size_mb=size_mb,
        table_count=len(tables),
        row_counts=row_counts,
        slow_queries=slow_queries,
        integrity_ok=integrity_ok
    )

def monitor_db(db_path: str, interval_hours: int = 24):
    """Continuously monitor database health.

    Runs forever: prints a health report every `interval_hours` hours
    and emits a CRITICAL line when the integrity check fails.
    """
    while True:
        report = check_db_health(db_path)

        integrity_label = '✓ OK' if report.integrity_ok else '✗ FAILED'
        print(f"\n=== DB Health Report: {datetime.now()} ===")
        print(f"Size: {report.size_mb:.2f} MB")
        print(f"Tables: {report.table_count}")
        print(f"Integrity: {integrity_label}")

        for name, rows in report.row_counts.items():
            print(f"  {name}: {rows:,} rows")

        if report.slow_queries:
            print("\nWarnings:")
            for note in report.slow_queries:
                print(f"{note}")

        if not report.integrity_ok:
            # Hook for real alerting (email, pager, webhook) goes here
            print("CRITICAL: Database integrity check failed!")

        time.sleep(interval_hours * 3600)
Enter fullscreen mode Exit fullscreen mode

Auto-Migration with Schema Versioning

class DatabaseMigrator:
    """Simple schema version management for SQLite.

    Migrations are identified by a monotonically increasing integer
    version; applied versions are recorded in a `schema_versions` table
    inside the target database itself.

    Can be used as a context manager to guarantee the connection is
    closed (the original class never closed it).
    """

    def __init__(self, db_path: str):
        """Open (or create) the database and ensure the version table exists."""
        self.conn = sqlite3.connect(db_path)
        self._ensure_versions_table()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        self.close()

    def close(self):
        """Close the underlying SQLite connection."""
        self.conn.close()

    def _ensure_versions_table(self):
        # Bootstrap: the migrator keeps its own state in the target DB.
        self.conn.execute("""
            CREATE TABLE IF NOT EXISTS schema_versions (
                version INTEGER PRIMARY KEY,
                applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                description TEXT
            )
        """)
        self.conn.commit()

    def current_version(self) -> int:
        """Return the highest applied version, or 0 for a fresh database."""
        cursor = self.conn.execute("SELECT MAX(version) FROM schema_versions")
        result = cursor.fetchone()[0]
        # MAX() returns NULL (None) when the table is empty
        return result or 0

    def apply_migration(self, version: int, description: str, sql: str):
        """Apply `sql` once and record it under `version` (idempotent).

        NOTE(review): executescript() commits any pending transaction and
        runs the script in its own implicit transactions, so the schema
        change and the version bookkeeping are NOT a single atomic unit —
        a crash between them leaves the migration applied but unrecorded.

        Args:
            version: Integer version this migration advances the schema to.
            description: Human-readable summary, stored alongside the version.
            sql: One or more SQL statements, run via executescript().
        """
        if self.current_version() >= version:
            print(f"Migration v{version} already applied")
            return

        print(f"Applying migration v{version}: {description}")
        self.conn.executescript(sql)
        self.conn.execute(
            "INSERT INTO schema_versions (version, description) VALUES (?, ?)",
            (version, description)
        )
        self.conn.commit()
        print(f"Migration v{version} applied successfully")

# Usage — run every pending migration in version order
migrator = DatabaseMigrator("myapp.db")

pending_migrations = [
    (1, "Create users table", """
    CREATE TABLE users (
        id INTEGER PRIMARY KEY,
        email TEXT UNIQUE NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    );
"""),
    (2, "Add user profile", """
    ALTER TABLE users ADD COLUMN name TEXT;
    ALTER TABLE users ADD COLUMN avatar_url TEXT;
"""),
]

for version, description, sql in pending_migrations:
    migrator.apply_migration(version, description, sql)
Enter fullscreen mode Exit fullscreen mode

Schedule Automated Backups with Cron

# Add to crontab (crontab -e); fields are: minute hour day-of-month month day-of-week
# Daily backup at 2:00 AM; stdout and stderr are appended to the log file
0 2 * * * /usr/bin/python3 /path/to/backup_script.py >> /var/log/db_backup.log 2>&1

# Weekly cleanup: Sundays (dow 0) at 3:00 AM, delete .db backups older than 30 days
0 3 * * 0 find /backups -name "*.db" -mtime +30 -delete
Enter fullscreen mode Exit fullscreen mode

Want More Database Automation?

This is part of my Python automation toolkit for managing production systems.

👉 Get 50+ automation scripts — database tools, file managers, email automation, web scrapers, and more — all ready to use.

Stop babysitting your systems. Automate the maintenance.

Top comments (0)