If you're doing the same repetitive tasks by hand every week, you're losing hours that Python could give back.
Here are 8 Python automation scripts I use regularly — complete, copy-paste ready.
1. Rename files in bulk
from pathlib import Path


def bulk_rename(folder, old_ext, new_ext):
    """Rename every *.old_ext file in *folder* to use *new_ext*.

    Args:
        folder: Directory to scan (str or Path); not recursive.
        old_ext: Extension to match, without the leading dot (e.g. "jpeg").
        new_ext: Replacement extension, without the leading dot.
    """
    folder = Path(folder)
    # Materialize the matches first so renaming can't disturb the glob iterator.
    for file in list(folder.glob(f"*.{old_ext}")):
        target = file.with_suffix(f".{new_ext}")
        if target.exists():
            # Path.rename silently overwrites on POSIX — don't clobber files.
            print(f"Skipped (target exists): {file.name}")
            continue
        file.rename(target)
        print(f"Renamed: {file.name}")


if __name__ == "__main__":
    # Example: rename all .jpeg to .jpg
    bulk_rename("./images", "jpeg", "jpg")
Time saved: 20 minutes per rename session.
2. Send yourself a daily summary email
import smtplib
from email.mime.text import MIMEText
from datetime import datetime


def send_summary(sender, password, recipient, subject, body):
    """Email a plain-text summary through Gmail's SSL SMTP endpoint.

    Args:
        sender: Gmail address used both for login and the From header.
        password: App password for the sender account.
        recipient: Destination address.
        subject: Subject line.
        body: Plain-text message body.
    """
    message = MIMEText(body)
    message["Subject"] = subject
    message["From"] = sender
    message["To"] = recipient
    # SMTP_SSL as a context manager closes the connection even on failure.
    with smtplib.SMTP_SSL("smtp.gmail.com", 465) as smtp:
        smtp.login(sender, password)
        smtp.sendmail(sender, recipient, message.as_string())


# In your cron job:
send_summary(
    "you@gmail.com", "your-app-password",
    "you@gmail.com",
    f"Daily Summary {datetime.now().strftime('%Y-%m-%d')}",
    "Tasks done: ..."
)
3. Monitor a website and alert when it's down
import requests
import time
from datetime import datetime


def check_site(url, check_every=60, max_checks=None, on_alert=None):
    """Poll *url* and report when it is unreachable or returns a non-200 status.

    Args:
        url: Address to poll.
        check_every: Seconds to sleep between polls.
        max_checks: Stop after this many polls; None (default) runs forever,
            matching the original behavior.
        on_alert: Optional callable invoked with a message string on every
            failure — plug in an email/Slack notifier here.
    """
    checks = 0
    while max_checks is None or checks < max_checks:
        try:
            response = requests.get(url, timeout=10)
            if response.status_code != 200:
                message = f"⚠️ {url} returned {response.status_code}"
                print(message)
                if on_alert:
                    on_alert(message)
        except requests.RequestException as e:
            message = f"❌ {url} is DOWN: {e}"
            print(message)
            if on_alert:
                on_alert(message)
        checks += 1
        if max_checks is not None and checks >= max_checks:
            break  # no pointless sleep after the final check
        time.sleep(check_every)


if __name__ == "__main__":
    check_site("https://your-website.com")
4. Organize downloads folder automatically
import shutil
from pathlib import Path

# Destination folder name -> file extensions routed there (lower-case).
RULES = {
    "Images": [".jpg", ".jpeg", ".png", ".gif", ".webp", ".svg"],
    "Documents": [".pdf", ".docx", ".xlsx", ".pptx", ".txt"],
    "Videos": [".mp4", ".mkv", ".avi", ".mov"],
    "Audio": [".mp3", ".wav", ".flac", ".ogg"],
    "Code": [".py", ".js", ".html", ".css", ".json", ".zip"],
}


def organize_downloads(folder=None):
    """Sort loose files in *folder* into category subfolders per RULES.

    Args:
        folder: Directory to organize; defaults to ~/Downloads, matching
            the original behavior.
    """
    downloads = Path(folder) if folder else Path.home() / "Downloads"
    # Snapshot the listing first: we create subfolders and move files while
    # iterating, which must not feed back into the iteration.
    for file in list(downloads.iterdir()):
        if not file.is_file():
            continue
        for category, extensions in RULES.items():
            if file.suffix.lower() in extensions:
                dest_dir = downloads / category
                dest_dir.mkdir(exist_ok=True)
                dest = dest_dir / file.name
                # shutil.move overwrites an existing target — de-duplicate
                # the name with a numeric suffix instead.
                counter = 1
                while dest.exists():
                    dest = dest_dir / f"{file.stem}_{counter}{file.suffix}"
                    counter += 1
                shutil.move(str(file), str(dest))
                print(f"Moved {file.name} → {category}/")
                break


if __name__ == "__main__":
    organize_downloads()
Run this in a cron job every day. Your Downloads folder will never be a disaster again.
5. Convert CSV to clean JSON
import csv
import json
from pathlib import Path


def csv_to_json(csv_file, json_file=None):
    """Convert a CSV file to a JSON array of row objects.

    Args:
        csv_file: Path to the input CSV; the first row is taken as the header.
        json_file: Output path; defaults to the input path with a .json suffix.

    Returns:
        The list of row dicts that was written.
    """
    path = Path(csv_file)
    if not json_file:
        json_file = path.with_suffix(".json")
    # newline="" lets the csv module handle embedded newlines correctly.
    with open(path, newline="", encoding="utf-8") as f:
        data = list(csv.DictReader(f))
    with open(json_file, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
    print(f"Converted: {len(data)} rows → {json_file}")
    return data


if __name__ == "__main__":
    # Guarded so importing this module doesn't crash when data.csv is absent.
    csv_to_json("data.csv")
6. Screenshot a webpage automatically
from playwright.sync_api import sync_playwright


def screenshot_url(url, output="screenshot.png", width=1280, height=720):
    """Capture a full-page screenshot of *url* using headless Chromium.

    Args:
        url: Page to load.
        output: Path for the saved PNG.
        width: Viewport width in pixels.
        height: Viewport height in pixels.
    """
    with sync_playwright() as playwright:
        chromium = playwright.chromium.launch(headless=True)
        tab = chromium.new_page(viewport={"width": width, "height": height})
        # Wait for network idle so lazy-loaded content is in the shot.
        tab.goto(url, wait_until="networkidle")
        tab.screenshot(path=output, full_page=True)
        chromium.close()
        print(f"Screenshot saved: {output}")


screenshot_url("https://github.com", "github.png")
7. Find duplicate files
import hashlib
from pathlib import Path
from collections import defaultdict


def find_duplicates(folder):
    """Group identical files under *folder* (recursively) by content hash.

    Args:
        folder: Root directory to scan.

    Returns:
        Dict mapping MD5 hex digest to the list of Paths sharing that
        content; only groups with two or more files are included.
    """
    hashes = defaultdict(list)
    for file in Path(folder).rglob("*"):
        if file.is_file():
            # Hash in chunks so large files aren't loaded into memory whole.
            digest = hashlib.md5()
            with open(file, "rb") as fh:
                for chunk in iter(lambda: fh.read(1 << 20), b""):
                    digest.update(chunk)
            hashes[digest.hexdigest()].append(file)
    duplicates = {h: files for h, files in hashes.items() if len(files) > 1}
    for h, files in duplicates.items():
        print(f"Duplicate group ({h[:8]}):")
        for f in files:
            print(f" {f}")
    return duplicates


if __name__ == "__main__":
    find_duplicates("./my-folder")
8. Generate a daily report from your git repos
import subprocess
from datetime import datetime, timedelta


def git_report(repo_path, days=1, author="you"):
    """Print a one-line-per-commit summary of recent activity in a repo.

    Args:
        repo_path: Path to the git working tree.
        days: How many days back to look.
        author: Value for git's --author filter (name, email, or regex);
            default preserves the original hard-coded "you".

    Returns:
        The list of "<sha> <subject>" lines git reported.
    """
    since = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
    # Argument-list form (shell=False) avoids shell-quoting issues in paths.
    result = subprocess.run(
        ["git", "log", f"--since={since}", "--oneline", "--all", f"--author={author}"],
        cwd=repo_path,
        capture_output=True, text=True
    )
    commits = [line for line in result.stdout.strip().split("\n") if line]
    print(f"\n📦 {repo_path}")
    print(f" {len(commits)} commits since {since}")
    for c in commits:
        print(f" → {c}")
    return commits


if __name__ == "__main__":
    for repo in ["/path/to/repo1", "/path/to/repo2"]:
        git_report(repo)
Automate everything
These scripts are just the beginning. The rule I follow: if I do something more than 3 times, I automate it.
Want more Python automation examples? Follow me on Dev.to — I post new scripts regularly.
And if you need a quick JSON/CSV converter, Base64 encoder, or similar without writing code: DevToolkit has 31 free tools.
Top comments (0)