Automating repetitive tasks is one of the highest-ROI things you can do as a developer in 2026. Here are 7 ready-to-use Python scripts that will save you hours every week.
1. Bulk Rename Files in One Second
from pathlib import Path

folder = Path("./my_files")

# sorted() keeps the numbering deterministic; the filter skips subdirectories
files = sorted(p for p in folder.iterdir() if p.is_file())
for i, file in enumerate(files, start=1):
    new_name = f"document_{i}{file.suffix}"
    file.rename(folder / new_name)
    print(f"Renamed: {new_name}")
Use cases: client photos, CSV exports, monthly reports.
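Before renaming anything for real, it can help to do a dry run that only prints the planned changes. This is a small variation on the loop above, not part of the original script:

from pathlib import Path

# Dry run: preview the new names without touching any file
files = sorted(p for p in Path("./my_files").iterdir() if p.is_file())
for i, file in enumerate(files, start=1):
    print(f"{file.name} -> document_{i}{file.suffix}")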
2. Send Automated Emails with Attachments
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email import encoders
from pathlib import Path

def send_email(to, subject, body, attachment=None):
    msg = MIMEMultipart()
    msg["From"] = "you@gmail.com"
    msg["To"] = to
    msg["Subject"] = subject
    msg.attach(MIMEText(body, "plain"))

    if attachment:
        with open(attachment, "rb") as f:
            part = MIMEBase("application", "octet-stream")
            part.set_payload(f.read())
        encoders.encode_base64(part)
        # Send only the file name in the header, not the full local path
        part.add_header("Content-Disposition", f"attachment; filename={Path(attachment).name}")
        msg.attach(part)

    # Gmail requires an app password (not your account password) for SMTP
    with smtplib.SMTP_SSL("smtp.gmail.com", 465) as server:
        server.login("you@gmail.com", "your_app_password")
        server.send_message(msg)
    print(f"Email sent to {to}")

send_email("client@example.com", "Your Weekly Report", "Hi, please find the report attached.", "report.pdf")
3. Scrape E-Commerce Prices
import requests
from bs4 import BeautifulSoup

def scrape_price(url):
    headers = {"User-Agent": "Mozilla/5.0"}
    response = requests.get(url, headers=headers, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.text, "html.parser")
    # Generic selectors; adjust them to the target site's markup
    title = soup.find("h1").text.strip()
    price = soup.find(class_="price").text.strip()
    print(f"Product: {title}")
    print(f"Price:   {price}")
    return {"title": title, "price": price}
Use cases: competitive monitoring, price alerts.
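The snippet above only defines the function, so here is a quick test call. The URL and the price CSS class are placeholders; real shops use different markup (and often block bots), so inspect the page and adjust the selectors first:

# Hypothetical product page; replace with a real URL and matching selectors
product = scrape_price("https://example.com/products/wireless-mouse")
print(product)  # {'title': '...', 'price': '...'}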
4. Auto-Generate a PDF Report
from fpdf import FPDF
from datetime import datetime

def generate_report(data: list[dict], filename="report.pdf"):
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Arial", "B", 16)
    # Built-in fonts are Latin-1 only, so stick to ASCII characters in the title
    pdf.cell(0, 10, f"Report - {datetime.now().strftime('%Y-%m-%d')}", ln=True)
    pdf.set_font("Arial", size=12)
    for row in data:
        pdf.cell(0, 8, f"- {row['label']}: {row['value']}", ln=True)
    pdf.output(filename)
generate_report([
    {"label": "Revenue", "value": "$12,340"},
    {"label": "New clients", "value": "47"},
])
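If your numbers already live in a CSV or a DataFrame (see script 6), you can feed them straight into the report. This assumes a hypothetical monthly_kpis.csv with label and value columns:

import pandas as pd

df = pd.read_csv("monthly_kpis.csv")  # hypothetical file with "label" and "value" columns
generate_report(df.to_dict("records"), filename="monthly_report.pdf")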
5. Monitor a Website and Get Alerts
import requests, time

def monitor(url, interval=60):
    while True:
        try:
            r = requests.get(url, timeout=10)
            status = "Online" if r.status_code == 200 else f"Error {r.status_code}"
        except requests.RequestException:
            # Catches connection errors and timeouts alike
            status = "Offline"
        print(f"[{time.strftime('%H:%M:%S')}] {url} — {status}")
        time.sleep(interval)

monitor("https://codes-me.com")
6. Clean and Deduplicate a CSV File
import pandas as pd

df = pd.read_csv("contacts.csv")
before = len(df)

# Normalize emails first so "A@x.com" and " a@x.com " count as duplicates
df["email"] = df["email"].str.lower().str.strip()
df = df.dropna(subset=["email", "name"]).drop_duplicates(subset=["email"])

df.to_csv("contacts_clean.csv", index=False)
print(f"{before - len(df)} rows removed. {len(df)} contacts kept.")
7. Batch Download Files from the Web
import requests
from pathlib import Path

def download_files(urls: list[str], folder="./downloads"):
    Path(folder).mkdir(parents=True, exist_ok=True)
    for url in urls:
        name = url.split("/")[-1]
        r = requests.get(url, stream=True, timeout=30)
        r.raise_for_status()
        # Stream in chunks so large files don't have to fit in memory
        with open(f"{folder}/{name}", "wb") as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
        print(f"Downloaded: {name}")
Conclusion
These 7 scripts cover the most common automation use cases. With just a few lines of Python, you can eliminate hours of manual work every week.
Need something more advanced — scraping pipelines, API integrations, real-time dashboards? Check out codes-me.com — we build custom automation tools tailored to your needs.
Which script would save you the most time? Drop it in the comments!