How to Build a Real-Time Sports Betting Odds Tracker
Sports betting odds shift constantly as bookmakers react to wagers, injuries, and breaking news. Tracking these movements in real time gives you an edge in finding arbitrage opportunities, analyzing line movement, or building odds-comparison tools.
Core Odds Scraper
import sqlite3
import time
from dataclasses import dataclass, field
from datetime import datetime
from urllib.parse import urlencode

import requests
from bs4 import BeautifulSoup
@dataclass
class OddsEntry:
    """A single odds snapshot: one price quoted by one bookmaker.

    `timestamp` uses a default_factory so every instance records its own
    creation time (naive local datetime from `datetime.now()`).
    """
    bookmaker: str   # source site the price came from
    sport: str
    event: str       # event identifier; used as the lookup key in the odds table
    market: str
    selection: str   # outcome being priced within the market
    odds: float      # decimal odds
    timestamp: datetime = field(default_factory=datetime.now)
class OddsTracker:
    """Scrapes bookmaker pages and persists odds snapshots to SQLite.

    Parsing is delegated to caller-supplied parser callables so one tracker
    can serve many differently-structured sites.
    """

    def __init__(self, db_path='odds.db', api_key=None):
        """Open (or create) the SQLite store.

        db_path: SQLite file path ('odds.db' by default).
        api_key: optional ScraperAPI key; when set, requests are routed
                 through the rendering proxy.
        """
        self.db = sqlite3.connect(db_path)
        self.api_key = api_key
        self.session = requests.Session()  # reuse connections across fetches
        self._init_db()

    def _init_db(self):
        """Create the odds table if it does not exist (idempotent)."""
        self.db.execute('''CREATE TABLE IF NOT EXISTS odds (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            bookmaker TEXT, sport TEXT, event TEXT,
            market TEXT, selection TEXT, odds REAL,
            timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
        )''')
        self.db.commit()

    def fetch(self, url, timeout=30):
        """GET `url`, optionally via the ScraperAPI rendering proxy.

        Fixes over the original: the target URL is percent-encoded (a raw
        `&` or `?` in `url` previously corrupted the proxy query string),
        the proxy call uses HTTPS so the API key is not sent in cleartext,
        and a timeout prevents a stalled server from hanging the tracker.
        """
        if self.api_key:
            params = urlencode({'api_key': self.api_key,
                                'url': url,
                                'render': 'true'})
            return self.session.get(f"https://api.scraperapi.com/?{params}",
                                    timeout=timeout)
        return self.session.get(url, timeout=timeout)

    def scrape_odds(self, url, sport, parser):
        """Fetch one page, parse it, and persist the resulting entries.

        parser: callable (soup, sport) -> iterable of OddsEntry.
        Returns the parsed entries. The row timestamp comes from the
        table's CURRENT_TIMESTAMP default, not OddsEntry.timestamp.
        """
        resp = self.fetch(url)
        soup = BeautifulSoup(resp.text, 'html.parser')
        entries = parser(soup, sport)
        # Batch insert: one executemany + one commit instead of per-row calls.
        self.db.executemany(
            'INSERT INTO odds (bookmaker,sport,event,market,selection,odds) VALUES (?,?,?,?,?,?)',
            [(e.bookmaker, e.sport, e.event, e.market, e.selection, e.odds)
             for e in entries])
        self.db.commit()
        return entries
Detecting Line Movements
class LineAnalyzer:
    """Flags selections whose odds have swung sharply in the recent window."""

    def __init__(self, db):
        # Reuses an already-open sqlite3 connection (e.g. OddsTracker.db).
        self.db = db

    def significant_moves(self, threshold=0.10):
        """Return moves exceeding `threshold` (fractional, default 10%).

        Looks at the last 6 hours of snapshots, grouped per
        (event, selection, bookmaker), and reports the low/high odds plus
        the move as a percentage of the low.
        """
        rows = self.db.execute('''
            SELECT event, selection, bookmaker,
                   MIN(odds) as low, MAX(odds) as high,
                   (MAX(odds)-MIN(odds))/MIN(odds) as pct
            FROM odds WHERE timestamp > datetime('now','-6 hours')
            GROUP BY event, selection, bookmaker HAVING pct > ?
        ''', (threshold,)).fetchall()
        moves = []
        for event, selection, bookmaker, low, high, pct in rows:
            moves.append({
                'event': event, 'selection': selection,
                'bookmaker': bookmaker, 'low': low, 'high': high,
                'move_pct': round(pct * 100, 2),
            })
        return moves
Finding Arbitrage Opportunities
def find_arbitrage(tracker, event):
    """Look for an arbitrage (sure-bet) opportunity on a single event.

    Takes the best (highest) odds per selection across all bookmakers from
    the last hour of snapshots. If the implied probabilities sum to less
    than 1.0, backing every selection in proportion guarantees a profit.

    Returns {'profit_pct': ..., 'stakes': {selection: {...}}} with stakes
    sized for a 100-unit total outlay, or None when no opportunity exists.
    """
    cursor = tracker.db.execute('''
        SELECT selection, bookmaker, MAX(odds) as best
        FROM odds WHERE event = ? AND timestamp > datetime('now','-1 hour')
        GROUP BY selection
    ''', (event,))
    # NOTE: selecting bare `bookmaker` next to MAX(odds) relies on SQLite's
    # documented behavior of taking bare columns from the row holding the max.
    best = {r[0]: {'bookmaker': r[1], 'odds': r[2]} for r in cursor.fetchall()}
    if len(best) < 2:
        # Need prices on at least two selections to cover the market.
        return None
    implied = sum(1.0 / v['odds'] for v in best.values())
    if implied >= 1.0:
        return None  # book margin present — no arbitrage
    # BUG FIX: with stakes proportional to implied probability, a 100-unit
    # outlay pays out 100/implied on every outcome, so the profit is
    # (1/implied - 1) * 100. The original (1 - implied) * 100 understated
    # the edge (e.g. implied 0.909 -> 10.0%, not 9.09%).
    profit = (1.0 / implied - 1.0) * 100
    stakes = {}
    for sel, info in best.items():
        stakes[sel] = {
            'bookmaker': info['bookmaker'],
            'odds': info['odds'],
            # Stake proportional to implied probability equalizes the payout
            # across all outcomes.
            'stake': round(100 * (1.0 / info['odds']) / implied, 2),
        }
    return {'profit_pct': round(profit, 2), 'stakes': stakes}
Continuous Tracking
def run_tracker(sources, interval=300, api_key=None):
    """Poll every source forever, printing significant line moves.

    sources:  iterable of dicts with 'url', 'sport', and 'parser' keys
              (parser as expected by OddsTracker.scrape_odds).
    interval: seconds to sleep between polling rounds (default 300).
    api_key:  optional ScraperAPI key forwarded to OddsTracker. FIX: the
              original hard-coded the 'YOUR_KEY' placeholder inside the
              function; None now means "fetch directly, no proxy".

    Runs until interrupted; never returns.
    """
    tracker = OddsTracker(api_key=api_key)
    analyzer = LineAnalyzer(tracker.db)
    while True:
        for src in sources:
            try:
                tracker.scrape_odds(src['url'], src['sport'], src['parser'])
            except Exception as e:
                # One broken source must not kill the polling loop.
                print(f"Error: {e}")
        for m in analyzer.significant_moves():
            print(f"LINE MOVE: {m['event']} | {m['selection']} | {m['move_pct']}%")
        time.sleep(interval)
Scaling with Proxies
Bookmaker sites use aggressive anti-bot measures. ScraperAPI handles JavaScript rendering and CAPTCHAs. ThorData provides residential proxies that avoid IP blocks. Monitor scraper health with ScrapeOps.
Legal Note
Check local regulations regarding odds scraping and betting data. Many jurisdictions allow personal use of publicly displayed odds but restrict commercial redistribution.
Follow for more Python web scraping tutorials.
Top comments (0)