Want to know what your competitors are charging? Or when a supplier changes their prices? Python can monitor the web for you 24/7 — here's how.
Why Automate Competitive Intelligence?
Manually checking competitor prices, tracking news mentions, and monitoring supplier sites is time-consuming and inconsistent. A Python script can do it while you sleep.
Here are 4 real scripts I use in my business:
Script 1: Competitor Price Monitor
import httpx
import sqlite3
import smtplib
from bs4 import BeautifulSoup
from datetime import datetime
import schedule
def check_competitor_price(url, css_selector, competitor_name):
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
}
resp = httpx.get(url, headers=headers, timeout=15)
soup = BeautifulSoup(resp.content, 'html.parser')
price_element = soup.select_one(css_selector)
if not price_element:
print(f"Warning: Could not find price for {competitor_name}")
return None
# Extract number from price text like "$29.99" or "€45,00"
price_text = price_element.get_text(strip=True)
price = float(''.join(c for c in price_text if c.isdigit() or c == '.'))
# Store in database
conn = sqlite3.connect('competitor_prices.db')
conn.execute('''CREATE TABLE IF NOT EXISTS prices
(id INTEGER PRIMARY KEY, competitor TEXT, price REAL,
url TEXT, checked_at TIMESTAMP)''')
conn.execute('INSERT INTO prices VALUES (NULL, ?, ?, ?, ?)',
(competitor_name, price, url, datetime.now()))
# Check if price changed
cursor = conn.execute('''
SELECT price FROM prices
WHERE competitor = ?
ORDER BY checked_at DESC
LIMIT 2
''', (competitor_name,))
prices = [row[0] for row in cursor.fetchall()]
conn.commit()
conn.close()
if len(prices) == 2 and prices[0] != prices[1]:
send_price_alert(competitor_name, prices[1], prices[0])
return price
def send_price_alert(competitor, old_price, new_price):
    """Announce a competitor price change (console for now, email later)."""
    pct_change = (new_price - old_price) / old_price * 100
    if new_price > old_price:
        direction = "⬆️ increased"
    else:
        direction = "⬇️ decreased"
    message = f"""
PRICE ALERT: {competitor} {direction}!
Old price: ${old_price:.2f}
New price: ${new_price:.2f}
Change: {pct_change:+.1f}%
Time to review your own pricing?
"""
    # Send email alert (see email automation article)
    print(f"ALERT: {competitor} price changed from ${old_price:.2f} to ${new_price:.2f}")
# Configure your competitors.
# Each entry needs: a display name, the URL of the product/pricing page,
# and the CSS selector that locates the price element on that page.
COMPETITORS = [
    {
        "name": "Competitor A",
        "url": "https://competitora.com/product",
        "selector": ".price .amount"
    },
    {
        "name": "Competitor B",
        "url": "https://competitorb.com/pricing",
        "selector": "#basic-plan-price"
    }
]
def run_price_checks():
    """Check every configured competitor and print its current price."""
    for comp in COMPETITORS:
        price = check_competitor_price(comp['url'], comp['selector'], comp['name'])
        # Compare against None explicitly: a legitimate price of 0.0 is
        # falsy and `if price:` would silently skip it.
        if price is not None:
            print(f"{comp['name']}: ${price:.2f}")
# Check twice daily: once in the morning, once at end of business.
schedule.every().day.at("09:00").do(run_price_checks)
schedule.every().day.at("17:00").do(run_price_checks)
Script 2: Google Mentions Monitor
Know when your brand (or competitors) is mentioned online:
import httpx
from datetime import datetime, timedelta
import urllib.parse
def check_google_mentions(keyword, days_back=1):
    """Return recent Google News items mentioning *keyword*.

    Uses the Google News RSS feed (no API key needed). Items published
    within the last *days_back* days are returned as dicts with 'title',
    'link' and 'date' keys. Items whose publication date cannot be parsed
    are kept rather than dropped, so a malformed feed never hides a mention.
    """
    # Hoisted out of the per-item loop: importing once per call is enough.
    import xml.etree.ElementTree as ET
    from email.utils import parsedate_to_datetime

    # Use Google News RSS - no API key needed
    encoded = urllib.parse.quote(keyword)
    url = f"https://news.google.com/rss/search?q={encoded}&hl=en-US&gl=US&ceid=US:en"
    resp = httpx.get(url, timeout=15)
    root = ET.fromstring(resp.content)

    cutoff = datetime.now() - timedelta(days=days_back)
    recent_items = []
    for item in root.findall('.//item'):
        # findtext tolerates both a missing child element and empty text,
        # where item.find(x).text would need a None check for each.
        title = item.findtext('title', default='')
        link = item.findtext('link', default='')
        pub_date_str = item.findtext('pubDate', default='')
        try:
            pub_date = parsedate_to_datetime(pub_date_str)
        except (TypeError, ValueError):
            # Unparseable date: include the item anyway (best-effort),
            # but catch only parse errors instead of a bare except.
            recent_items.append({'title': title, 'link': link, 'date': pub_date_str})
            continue
        # Feed dates are tz-aware; cutoff is naive local time, so strip
        # tzinfo before comparing (mixing aware/naive raises TypeError).
        if pub_date.replace(tzinfo=None) > cutoff:
            recent_items.append({'title': title, 'link': link, 'date': pub_date_str})
    return recent_items
def daily_mentions_report():
    """Print a daily summary of news mentions for each tracked keyword."""
    keywords = ["Your Brand Name", "Competitor Name", "Your Product Name"]
    parts = [f"📰 Daily Mentions Report — {datetime.now().strftime('%B %d, %Y')}\n\n"]
    for keyword in keywords:
        mentions = check_google_mentions(keyword)
        parts.append(f"## {keyword} ({len(mentions)} mentions)\n")
        # Only the five most recent hits per keyword make the report.
        parts.extend(f" • {m['title']}\n {m['link']}\n" for m in mentions[:5])
        parts.append("\n")
    report = "".join(parts)
    print(report)
    # Or email it: send_email("you@email.com", "Daily Mentions", report)
schedule.every().day.at("08:00").do(daily_mentions_report)
Script 3: Supplier Stock Tracker
Never get caught off-guard when a supplier runs out:
def check_supplier_stock(product_url, product_name, selector):
    """Check a supplier product page for out-of-stock / low-stock conditions.

    Returns a status dict: product, url, in_stock, low_stock, quantity
    (None when the page exposes no quantity field), and checked_at.
    Prints an alert when the product is out of stock or running low.
    """
    headers = {'User-Agent': 'Mozilla/5.0 (compatible; StockChecker/1.0)'}
    resp = httpx.get(product_url, headers=headers, timeout=15)
    soup = BeautifulSoup(resp.content, 'html.parser')

    # Look for out-of-stock indicators anywhere in the page text.
    page_text = soup.get_text().lower()
    out_of_stock_phrases = [
        'out of stock', 'sold out', 'unavailable',
        'currently unavailable', 'notify me'
    ]
    is_out_of_stock = any(phrase in page_text for phrase in out_of_stock_phrases)

    # Check the quantity field, if the page exposes one.
    qty = None
    qty_element = soup.select_one(selector)
    if qty_element:
        digits = ''.join(c for c in qty_element.get_text() if c.isdigit())
        qty = int(digits) if digits else 0
    is_low = qty is not None and qty < 10

    status = {
        "product": product_name,
        "url": product_url,
        "in_stock": not is_out_of_stock,
        # New keys — the old dict computed is_low but never reported it,
        # so callers could not act on a low-stock reading. Existing keys
        # are unchanged, keeping callers backward compatible.
        "low_stock": is_low,
        "quantity": qty,
        "checked_at": datetime.now().isoformat()
    }

    if is_out_of_stock:
        alert_msg = f"🚨 {product_name} is OUT OF STOCK at supplier!"
        print(alert_msg)
        # send_email("you@email.com", "Stock Alert!", alert_msg)
    elif is_low:
        print(f"⚠️ {product_name} is LOW STOCK — order soon")
    return status
Script 4: Job/Contract Board Monitor
Find new contract opportunities before the competition:
def monitor_upwork_rss(keywords):
    """Print recent Upwork job postings from the RSS feed for each keyword."""
    # Hoisted: the old code re-imported inside the loop on every keyword.
    import xml.etree.ElementTree as ET

    for keyword in keywords:
        encoded = urllib.parse.quote(keyword)
        url = f"https://www.upwork.com/ab/feed/jobs/rss?q={encoded}&sort=recency"
        resp = httpx.get(url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=15)
        try:
            root = ET.fromstring(resp.content)
        except ET.ParseError:
            # Guard clause: a broken feed skips this keyword, not the rest.
            print(f"Could not parse feed for: {keyword}")
            continue
        jobs = []
        for item in root.findall('.//item')[:10]:
            # findtext tolerates a missing child element, where the old
            # item.find('title').text raised AttributeError on None.
            title = item.findtext('title', default='') or ''
            link = item.findtext('link', default='') or ''
            description = item.findtext('description', default='') or ''
            jobs.append({'title': title, 'link': link, 'snippet': description[:200]})
        print(f"\n{keyword}: {len(jobs)} new jobs")
        for job in jobs[:3]:
            print(f" • {job['title']}")
            print(f" {job['link']}")
monitor_upwork_rss(["python automation", "python scripting", "business automation"])
Running All Scripts Together
import schedule
import time

# Price monitoring - twice daily
schedule.every().day.at("09:00").do(run_price_checks)
schedule.every().day.at("17:00").do(run_price_checks)

# News monitoring - every morning
schedule.every().day.at("08:00").do(daily_mentions_report)

# Stock checking - every 4 hours during business hours
# NOTE(review): the lambda captures no loop variables (its arguments are
# literals), so the usual late-binding pitfall does not apply here.
for hour in ['09:00', '13:00', '17:00']:
    schedule.every().day.at(hour).do(lambda: check_supplier_stock(
        "https://supplier.com/product", "Widget A", ".stock-count"
    ))

print("Business intelligence bot running...")

# Polling loop: wake once a minute and fire any jobs that are due.
while True:
    schedule.run_pending()
    time.sleep(60)
Install Requirements
pip install httpx beautifulsoup4 schedule
The Full Toolkit
These 4 scripts save me about 5 hours per week on manual research and monitoring. They're part of my Python Business Automation Toolkit — a collection of 5 production-ready scripts for small business owners.
Each script includes:
- Full error handling and logging
- Configuration file for easy customization
- Documentation and setup instructions
- Email alert integration
Download the complete toolkit for $29
What manual tasks are eating your business hours? Drop a comment — I might be able to help automate it.
Top comments (0)