Pricing is the most powerful lever in business. Knowing your competitors' prices in real time lets you react instantly — adjust margins, match promotions, and spot trends. Here's how to build automated price monitoring that runs 24/7.
Architecture Overview
Our system will:
- Scrape competitor product pages on schedule
- Extract and normalize prices
- Detect changes and alert on significant movements
- Store historical data for trend analysis
Building the Price Scraper
import json
import re
from datetime import datetime, timezone

import requests
from bs4 import BeautifulSoup
# ScraperAPI credential; replace with a real key (ideally read from an
# environment variable rather than hard-coding it) before running.
API_KEY = "YOUR_SCRAPERAPI_KEY"
class PriceMonitor:
    """Scrapes competitor product pages via ScraperAPI and extracts the
    product name, price, and stock status.

    All network access goes through api.scraperapi.com (which handles JS
    rendering and anti-bot measures); this class only parses the HTML.
    """

    def __init__(self, api_key):
        # ScraperAPI key sent with every request.
        self.api_key = api_key
        # Reserved for in-process history; persistent history lives in
        # the PriceAlerts SQLite store.
        self.price_history = {}

    def scrape_price(self, url, selectors):
        """Fetch *url* and return a snapshot dict.

        Parameters
        ----------
        url : str
            Product page URL to scrape.
        selectors : dict
            Optional "price", "name", "stock" CSS selectors; defaults
            (".price", "h1", ".stock") are used for missing keys.

        Returns
        -------
        dict with keys: name, price (float or None), price_raw,
        in_stock (bool or None), scraped_at (ISO-8601 UTC), url.

        Raises
        ------
        requests.HTTPError
            If ScraperAPI responds with an error status.
        """
        params = {
            "api_key": self.api_key,
            "url": url,
            "render": "true",  # enable JS rendering for dynamic pages
        }
        resp = requests.get(
            "https://api.scraperapi.com", params=params, timeout=60
        )
        # Fail loudly on HTTP errors instead of silently parsing an
        # error page into a None price.
        resp.raise_for_status()
        soup = BeautifulSoup(resp.text, "html.parser")
        price_el = soup.select_one(selectors.get("price", ".price"))
        name_el = soup.select_one(selectors.get("name", "h1"))
        stock_el = soup.select_one(selectors.get("stock", ".stock"))
        price_text = price_el.get_text(strip=True) if price_el else None
        return {
            "name": name_el.get_text(strip=True) if name_el else "Unknown",
            "price": self.parse_price(price_text),
            "price_raw": price_text,
            "in_stock": self.check_stock(stock_el),
            # Timezone-aware UTC; datetime.utcnow() is deprecated (3.12+).
            "scraped_at": datetime.now(timezone.utc).isoformat(),
            "url": url,
        }

    @staticmethod
    def parse_price(text):
        """Parse a displayed price string into a float.

        Handles both US ("$1,234.56") and European ("1.234,56 €")
        separator conventions. Returns None for empty input or text
        containing no parsable number.
        """
        if not text:
            return None
        cleaned = re.sub(r"[^\d.,]", "", text)
        if "," in cleaned and "." in cleaned:
            # Both separators present: the right-most one is the decimal
            # point, the other is a thousands separator.
            if cleaned.rfind(",") > cleaned.rfind("."):
                cleaned = cleaned.replace(".", "").replace(",", ".")
            else:
                cleaned = cleaned.replace(",", "")
        elif "," in cleaned:
            # Lone comma: decimal point only in the common "49,99" cents
            # pattern; otherwise treat it as a thousands separator.
            head, _, tail = cleaned.rpartition(",")
            if len(tail) == 2 and "," not in head:
                cleaned = head + "." + tail
            else:
                cleaned = cleaned.replace(",", "")
        try:
            return float(cleaned)
        except ValueError:
            return None

    @staticmethod
    def check_stock(el):
        """Return True/False for availability, or None when no stock
        element was found (selector missed, or the site omits it)."""
        if not el:
            return None
        text = el.get_text(strip=True).lower()
        return "out of stock" not in text and "unavailable" not in text
ScraperAPI handles JS rendering and anti-bot protection across different e-commerce platforms.
Configuring Products to Monitor
# Watch-list: one entry per competitor product page.
# - "selectors": per-site CSS selectors passed to PriceMonitor.scrape_price
#   (missing keys fall back to ".price" / "h1" / ".stock").
# - "our_price": our current price for the same item, kept alongside for
#   comparison in downstream analysis.
PRODUCTS = [
    {
        "name": "Competitor A - Widget Pro",
        "url": "https://competitor-a.com/widget-pro",
        "selectors": {"price": ".product-price .amount", "name": "h1.title"},
        "our_price": 49.99,
    },
    {
        "name": "Competitor B - Widget Pro",
        "url": "https://competitor-b.com/products/widget",
        "selectors": {"price": "[data-price]", "name": ".product-name"},
        "our_price": 49.99,
    },
]
Change Detection and Alerting
import sqlite3
import smtplib
from email.mime.text import MIMEText
class PriceAlerts:
    """Persists price snapshots in SQLite and flags significant moves.

    Parameters
    ----------
    db_path : str
        SQLite file path (":memory:" works for tests).
    threshold_pct : float
        Minimum absolute percent change that triggers an alert.
        Defaults to 3.0, the value previously hard-coded.
    """

    def __init__(self, db_path="prices.db", threshold_pct=3.0):
        self.threshold_pct = threshold_pct
        self.conn = sqlite3.connect(db_path)
        self.conn.execute("""
            CREATE TABLE IF NOT EXISTS prices (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                product_name TEXT, url TEXT,
                price REAL, in_stock BOOLEAN,
                scraped_at TEXT
            )
        """)
        # record_and_check looks up the latest row per URL; index it so
        # the lookup stays fast as history grows.
        self.conn.execute(
            "CREATE INDEX IF NOT EXISTS idx_prices_url ON prices(url, id)"
        )

    def close(self):
        """Release the underlying SQLite connection."""
        self.conn.close()

    def record_and_check(self, data):
        """Store one snapshot and return an alert dict when the price
        moved more than ``threshold_pct`` vs. the previous record.

        Returns None when there is no prior price, either price is
        missing/zero, or the move is under the threshold.
        """
        # Get last recorded price for this URL.
        cursor = self.conn.execute(
            "SELECT price FROM prices WHERE url = ? ORDER BY id DESC LIMIT 1",
            (data["url"],)
        )
        last = cursor.fetchone()
        # Record the new observation (even when price is None, so stock
        # history stays complete).
        self.conn.execute(
            "INSERT INTO prices (product_name, url, price, in_stock, scraped_at) "
            "VALUES (?, ?, ?, ?, ?)",
            (data["name"], data["url"], data["price"], data["in_stock"], data["scraped_at"])
        )
        self.conn.commit()
        # Compare against the previous price; skip None/0 to avoid
        # division by zero and meaningless deltas.
        if last and last[0] and data["price"]:
            old_price = last[0]
            new_price = data["price"]
            change_pct = ((new_price - old_price) / old_price) * 100
            if abs(change_pct) > self.threshold_pct:
                return {
                    "product": data["name"],
                    "old_price": old_price,
                    "new_price": new_price,
                    "change_pct": round(change_pct, 1),
                    "direction": "increased" if change_pct > 0 else "decreased",
                }
        return None

    def send_alert(self, alert, email_to):
        """Email *alert* to *email_to*.

        NOTE: SMTP host and credentials below are placeholders —
        configure your own server/app password before use.
        """
        subject = f"Price Alert: {alert['product']} {alert['direction']} {abs(alert['change_pct'])}%"
        body = (
            f"Product: {alert['product']}\n"
            f"Old Price: ${alert['old_price']:.2f}\n"
            f"New Price: ${alert['new_price']:.2f}\n"
            f"Change: {alert['change_pct']:+.1f}%"
        )
        msg = MIMEText(body)
        msg["Subject"] = subject
        # Added: many SMTP servers reject messages without a From header.
        msg["From"] = "your-email"
        msg["To"] = email_to
        with smtplib.SMTP("smtp.gmail.com", 587) as server:
            server.starttls()
            server.login("your-email", "app-password")
            server.send_message(msg)
Running 24/7 with Scheduling
import schedule
import time as time_module
def monitoring_cycle():
    """Run one full pass over PRODUCTS: scrape, record, and alert."""
    scraper = PriceMonitor(API_KEY)
    store = PriceAlerts()
    for item in PRODUCTS:
        try:
            snapshot = scraper.scrape_price(item["url"], item["selectors"])
            change = store.record_and_check(snapshot)
            if change is not None:
                store.send_alert(change, "team@company.com")
                print(f"ALERT: {change['product']} {change['direction']} {abs(change['change_pct'])}%")
            else:
                print(f"OK: {snapshot['name']} = ${snapshot['price']}")
        except Exception as e:
            # Keep the cycle alive: one failing product must not stop the rest.
            print(f"Error scraping {item['name']}: {e}")
        time_module.sleep(2)  # Spacing between requests
# Register the job (one full cycle every 4 hours) and run the scheduler
# loop forever.
schedule.every(4).hours.do(monitoring_cycle)
print("Price monitor started...")
while True:
    schedule.run_pending()
    time_module.sleep(60)  # poll the schedule once a minute
Analytics Dashboard Data
def get_price_trends(db_path="prices.db", days=30):
    """Return per-product price history for the last *days* days.

    Parameters
    ----------
    db_path : str
        Path to the SQLite database written by PriceAlerts.
    days : int
        Look-back window in days.

    Returns
    -------
    dict mapping product_name -> list of {"price": float, "date": str}
    entries in chronological order.

    NOTE(review): the WHERE clause compares scraped_at strings
    lexicographically against datetime('now', ...); scraped_at values
    must use a format that sorts consistently with SQLite's — verify
    against the writer.
    """
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.execute("""
            SELECT product_name, price, scraped_at
            FROM prices
            WHERE scraped_at > datetime('now', ?)
            ORDER BY product_name, scraped_at
        """, (f"-{days} days",))
        trends = {}
        for name, price, ts in cursor.fetchall():
            # setdefault replaces the explicit membership check.
            trends.setdefault(name, []).append({"price": price, "date": ts})
        return trends
    finally:
        # Original leaked the connection; always release it.
        conn.close()
Scaling with Proxy Infrastructure
E-commerce sites actively block scrapers. ThorData residential proxies rotate IPs automatically, mimicking real shoppers. ScrapeOps monitors success rates across sites and alerts when extraction patterns break.
Automated price monitoring turns competitive intelligence from a manual task into a real-time advantage. The system above handles scraping, change detection, alerting, and historical tracking — everything you need for data-driven pricing decisions.
Happy scraping!
Top comments (0)