10 Python Automation Scripts That Save Me 10+ Hours Per Week
I'm obsessed with automation. If I do something more than twice, I write a script.
Here are 10 Python scripts I use daily that save me over 10 hours per week. Each one is under 50 lines and ready to use.
1. 📧 Auto-Reply to Emails
import imaplib
import email
import smtplib
from email.mime.text import MIMEText
def auto_reply(imap_server, smtp_server, email_addr, password):
    """Send a canned reply to every unread message in the inbox.

    Args:
        imap_server: Hostname of the IMAP server (SSL).
        smtp_server: Hostname of the SMTP server (SSL).
        email_addr: Account address used for login and as the From header.
        password: Account password (prefer an app-specific password).
    """
    # Connect to inbox
    mail = imaplib.IMAP4_SSL(imap_server)
    try:
        mail.login(email_addr, password)
        mail.select('inbox')

        # Find unread emails
        _, messages = mail.search(None, 'UNSEEN')
        msg_ids = messages[0].split()

        # One SMTP session for the whole batch instead of reconnecting
        # per message; the context manager guarantees QUIT/close.
        with smtplib.SMTP_SSL(smtp_server) as smtp:
            smtp.login(email_addr, password)
            for msg_id in msg_ids:
                _, msg_data = mail.fetch(msg_id, '(RFC822)')
                msg = email.message_from_bytes(msg_data[0][1])
                sender = email.utils.parseaddr(msg['From'])[1]
                subject = msg['Subject']

                # Smart auto-reply
                reply = MIMEText(f"Thanks for your email about '{subject}'. "
                                 f"I'll get back to you within 24 hours.")
                reply['Subject'] = f"Re: {subject}"
                reply['To'] = sender
                # BUG FIX: send_message() raises ValueError when the message
                # has no From/Sender header, so the original never sent a reply.
                reply['From'] = email_addr
                smtp.send_message(reply)

        print(f"Processed {len(msg_ids)} emails")
    finally:
        mail.logout()
Time saved: 1 hour/week
2. 📁 Smart File Organizer
import os
import shutil
from pathlib import Path
# Map destination folder name -> file extensions that belong in it.
RULES = {
    'Images': ['.jpg', '.jpeg', '.png', '.gif', '.svg', '.webp'],
    'Documents': ['.pdf', '.doc', '.docx', '.txt', '.md'],
    'Code': ['.py', '.js', '.ts', '.html', '.css', '.json'],
    'Videos': ['.mp4', '.mov', '.avi', '.mkv'],
    'Archives': ['.zip', '.tar', '.gz', '.rar'],
}

def organize(directory, rules=None):
    """Move files in *directory* into category subfolders by extension.

    Files whose extension matches no rule are left in place.  If a file
    with the same name already exists in the destination folder, a numeric
    suffix is appended instead of silently overwriting it.

    Args:
        directory: Path of the folder to tidy up.
        rules: Optional {folder: [extensions]} mapping; defaults to RULES.

    Returns:
        The number of files moved.
    """
    if rules is None:
        rules = RULES
    # Invert the rules once so each file becomes a single dict lookup
    # instead of a scan over every category.
    ext_to_folder = {ext: folder
                     for folder, exts in rules.items()
                     for ext in exts}
    path = Path(directory)
    moved = 0
    for file in path.iterdir():
        if not file.is_file():
            continue
        folder = ext_to_folder.get(file.suffix.lower())
        if folder is None:
            continue
        dest = path / folder
        dest.mkdir(exist_ok=True)
        target = dest / file.name
        # BUG FIX: shutil.move() overwrites an existing destination file on
        # POSIX; choose a non-clashing name rather than losing data.
        counter = 1
        while target.exists():
            target = dest / f"{file.stem}_{counter}{file.suffix}"
            counter += 1
        shutil.move(str(file), str(target))
        moved += 1
    print(f"Organized {moved} files")
    return moved

# Usage: organize('/path/to/messy/folder')
Time saved: 30 min/week
3. 🔍 Bulk API Health Checker
import requests
import concurrent.futures
from datetime import datetime
# URLs polled by health_check(); replace with your own services.
ENDPOINTS = [
    'https://api.example.com/health',
    'https://api.example.com/v2/status',
    'https://staging.example.com/health',
]
def check_endpoint(url):
    """Probe one endpoint and summarize the outcome as a dict.

    Returns a dict with 'url', 'status', 'response_time' and a boolean
    'healthy' flag; if the request raises, the dict carries 'ERROR' plus
    the exception text instead of timing data.
    """
    try:
        response = requests.get(url, timeout=5)
        status = response.status_code
        return {
            'url': url,
            'status': status,
            'response_time': response.elapsed.total_seconds(),
            'healthy': status == 200,
        }
    except Exception as exc:
        return {'url': url, 'status': 'ERROR', 'error': str(exc), 'healthy': False}
def health_check():
    """Probe every URL in ENDPOINTS concurrently and print a status report.

    Returns:
        The list of per-endpoint result dicts from check_endpoint, so a
        caller (or the alert hook below) can act on failures
        programmatically — the original discarded them.
    """
    # Checks are network-bound, so fan them out across threads.
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
        results = list(executor.map(check_endpoint, ENDPOINTS))
    print(f"\n{'='*60}")
    print(f"Health Check - {datetime.now().strftime('%Y-%m-%d %H:%M')}")
    print(f"{'='*60}")
    for r in results:
        icon = '✅' if r['healthy'] else '❌'
        print(f"{icon} {r['url']} - {r['status']}")
    unhealthy = [r for r in results if not r['healthy']]
    if unhealthy:
        print(f"\n⚠️ {len(unhealthy)} endpoints DOWN!")
        # Send alert (Slack, email, etc.)
    return results

health_check()
Time saved: 2 hours/week
4. 📊 CSV to Beautiful Report
import csv
import json
from collections import Counter
def csv_to_report(csv_file, group_by_col):
    """Print a bar-chart summary of *csv_file* grouped by one column.

    Args:
        csv_file: Path to a CSV file with a header row.
        group_by_col: Name of the column to group and count by.

    Returns:
        A dict with 'total' (row count), 'groups' (value -> count) and
        'columns' (header names; empty list when there are no data rows).
    """
    # newline='' is the documented way to open files for the csv module.
    with open(csv_file, newline='') as f:
        rows = list(csv.DictReader(f))

    # Group and count
    groups = Counter(row[group_by_col] for row in rows)

    print(f"\n📊 Report: {csv_file}")
    print(f"Total records: {len(rows)}")
    print(f"\nBreakdown by {group_by_col}:")
    print("-" * 40)
    # BUG FIX: max() on an empty Counter raises ValueError for a
    # header-only CSV; default=0 is safe because the loop below is then
    # empty.  Also hoists the loop-invariant max() out of the loop.
    biggest = max(groups.values(), default=0)
    for name, count in groups.most_common(10):
        bar = '█' * (count * 30 // biggest)
        print(f" {name:20} {bar} ({count})")
    return {
        'total': len(rows),
        'groups': dict(groups),
        'columns': list(rows[0].keys()) if rows else []
    }
Time saved: 1 hour/week
5. 🔄 Git Multi-Repo Updater
import subprocess
import os
# Local clones kept in sync by update_all_repos(); '~' is expanded at run time.
REPOS = [
    '~/projects/frontend',
    '~/projects/backend',
    '~/projects/mobile',
    '~/projects/docs',
]

def update_all_repos():
    """Run `git pull --rebase` in every repository listed in REPOS.

    Directories that don't exist are skipped with a warning; for each
    repo, git's stdout is echoed on success and stderr on failure.
    """
    for entry in REPOS:
        repo_path = os.path.expanduser(entry)
        if not os.path.exists(repo_path):
            print(f"⚠️ Skipping {repo_path} (not found)")
            continue
        print(f"\n📦 Updating {os.path.basename(repo_path)}...")
        outcome = subprocess.run(
            ['git', 'pull', '--rebase'],
            cwd=repo_path, capture_output=True, text=True
        )
        if outcome.returncode == 0:
            print(f" ✅ {outcome.stdout.strip()}")
        else:
            print(f" ❌ {outcome.stderr.strip()}")

update_all_repos()
Time saved: 30 min/week
6–10: Quick-Fire Scripts
6. Screenshot Webpage
# pip install playwright
from playwright.sync_api import sync_playwright
def screenshot(url, output='screenshot.png'):
    """Save a full-page screenshot of *url* to *output* (1920x1080 viewport).

    Args:
        url: Page to capture.
        output: Destination image path (default 'screenshot.png').
    """
    with sync_playwright() as p:
        browser = p.chromium.launch()
        # BUG FIX: close the browser even if navigation or the capture
        # raises; the original leaked the browser process on error.
        try:
            page = browser.new_page(viewport={'width': 1920, 'height': 1080})
            page.goto(url)
            page.screenshot(path=output, full_page=True)
        finally:
            browser.close()
7. Password Generator
import secrets
import string
def generate_password(length=20):
    """Print and return a cryptographically secure random password.

    Args:
        length: Number of characters to generate (default 20).

    Returns:
        The generated password string.
    """
    # secrets (not random) is the right module for security-sensitive values.
    alphabet = string.ascii_letters + string.digits + '!@#$%^&*'
    picks = [secrets.choice(alphabet) for _ in range(length)]
    password = ''.join(picks)
    print(f"🔐 {password}")
    return password
8. JSON Formatter
import json
import sys
def pretty_json(file_or_string):
    """Pretty-print JSON given either a JSON string or a path to a JSON file.

    The argument is first parsed as JSON text; if that fails it is
    treated as a filename and loaded from disk.

    Returns:
        The parsed Python object.
    """
    try:
        data = json.loads(file_or_string)
    except (ValueError, TypeError):
        # Not valid JSON text -- treat it as a filename.  BUG FIX: the
        # original bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; catch only parse failures.
        with open(file_or_string) as f:
            data = json.load(f)
    print(json.dumps(data, indent=2, ensure_ascii=False))
    return data
9. Duplicate File Finder
import hashlib
from pathlib import Path
from collections import defaultdict
def find_duplicates(directory):
    """Find files with identical content under *directory*, recursively.

    Prints each group of duplicates and returns a mapping of
    content-hash -> list of file paths for every group with 2+ files.
    """
    hashes = defaultdict(list)
    for file in Path(directory).rglob('*'):
        if not file.is_file():
            continue
        # BUG FIX: hash in chunks instead of read_bytes(), which loaded
        # entire files into memory.  MD5 is fine here: we need a content
        # fingerprint, not cryptographic strength.
        digest = hashlib.md5()
        try:
            with file.open('rb') as fh:
                for chunk in iter(lambda: fh.read(1 << 20), b''):
                    digest.update(chunk)
        except OSError:
            # Unreadable file (permissions, broken symlink) -- skip it
            # rather than aborting the whole scan.
            continue
        hashes[digest.hexdigest()].append(str(file))
    dupes = {h: files for h, files in hashes.items() if len(files) > 1}
    for h, files in dupes.items():
        print(f"\n🔄 Duplicates ({len(files)} files):")
        for f in files:
            print(f" {f}")
    return dupes
10. Pomodoro Timer
import time
import os
def pomodoro(work=25, rest=5, cycles=4):
    """Run a Pomodoro timer alternating work and rest periods.

    Args:
        work: Work period length in minutes.
        rest: Rest period length in minutes.
        cycles: Number of work periods to run.

    Note: the audio cue uses macOS `say`; on other platforms the command
    fails harmlessly and only the printed messages appear.
    """
    for i in range(cycles):
        print(f"\n🍅 Cycle {i+1}/{cycles} - WORK ({work} min)")
        time.sleep(work * 60)
        os.system('say "Break time!"')  # macOS
        # BUG FIX: don't force a rest period (and a "Back to work!" cue)
        # after the final work cycle -- the session is over.
        if i == cycles - 1:
            break
        print(f"☕ REST ({rest} min)")
        time.sleep(rest * 60)
        os.system('say "Back to work!"')
🎁 Bonus: The Automation Mindset
The real skill isn't writing scripts — it's recognizing what should be automated.
My rule: If you do it 3 times, automate it.
Want 100+ more automation-ready prompts?
👉 100+ AI Coding Prompts — battle-tested prompts for Python, JavaScript, DevOps, and more.
What's your favorite automation script? Share it in the comments!
Top comments (0)