Price differences between Amazon US, UK, DE, and JP can exceed 50% for the same product. Build a scraper that finds these arbitrage opportunities automatically.
The Arbitrage Opportunity
Cross-border e-commerce arbitrage exploits price differences for identical products across regional marketplaces. Currency fluctuations and regional pricing create persistent gaps.
Scraping Multi-Region Product Prices
import re
from datetime import datetime

import requests
import pandas as pd
from bs4 import BeautifulSoup
# ScraperAPI key: every request is proxied through ScraperAPI, which handles
# Amazon's bot detection and per-region geo-targeting.
API_KEY = "YOUR_SCRAPERAPI_KEY" # Get one at https://www.scraperapi.com?fp_ref=the52

# Regional marketplaces to compare. 'rate_to_usd' is a hard-coded snapshot of
# the local-currency -> USD conversion multiplier; get_exchange_rates() below
# refreshes these values at runtime.
REGIONS = {
    'US': {'domain': 'amazon.com', 'currency': 'USD', 'rate_to_usd': 1.0},
    'UK': {'domain': 'amazon.co.uk', 'currency': 'GBP', 'rate_to_usd': 1.27},
    'DE': {'domain': 'amazon.de', 'currency': 'EUR', 'rate_to_usd': 1.09},
    'JP': {'domain': 'amazon.co.jp', 'currency': 'JPY', 'rate_to_usd': 0.0067},
}
def _parse_local_price(price_text, currency):
    """Extract a numeric price from a localized Amazon price string.

    Handles both decimal conventions seen across these marketplaces:
    EUR pages use '.' as the thousands separator and ',' as the decimal
    mark (e.g. '1.234,56 €'); USD/GBP/JPY pages use ',' for thousands
    and '.' for decimals (e.g. '$1,234.56'). Returns a float, or None
    if no number could be extracted.
    """
    match = re.search(r'[\d.,]+', price_text)
    if not match:
        return None
    raw = match.group(0)
    if currency == 'EUR':
        # European format: drop thousands dots, turn the decimal comma into a dot.
        raw = raw.replace('.', '').replace(',', '.')
    else:
        raw = raw.replace(',', '')
    try:
        return float(raw)
    except ValueError:
        return None


def scrape_amazon_price(asin, region_code):
    """Fetch the current price of one ASIN from one regional Amazon site.

    Routes the request through ScraperAPI with geo-targeting so the page
    renders with that region's pricing. Returns a dict with keys 'asin',
    'region', 'local_price', 'currency', 'usd_price', or None when the
    request fails or no price element is found.

    Fixes vs. the original: the old code stripped commas BEFORE the regex
    and then did a no-op ``replace(',', '.')``, so German prices like
    '1.234,56' parsed as 1.23456; it also crashed the whole batch on any
    network error.
    """
    region = REGIONS[region_code]
    url = f"https://www.{region['domain']}/dp/{asin}"
    proxy_url = (
        f"http://api.scraperapi.com?api_key={API_KEY}&url={url}"
        f"&render=true&country_code={region_code.lower()}"
    )
    try:
        response = requests.get(proxy_url, timeout=60)
    except requests.RequestException:
        # Network/proxy failure: skip this region rather than abort the batch.
        return None
    if response.status_code != 200:
        return None
    soup = BeautifulSoup(response.text, 'html.parser')
    # '.a-price .a-offscreen' is the modern price node; '#priceblock_ourprice'
    # covers Amazon's older page layout.
    price_elem = soup.select_one('.a-price .a-offscreen, #priceblock_ourprice')
    if price_elem is None:
        return None
    local_price = _parse_local_price(price_elem.text.strip(), region['currency'])
    if local_price is None:
        return None
    return {
        'asin': asin,
        'region': region_code,
        'local_price': local_price,
        'currency': region['currency'],
        'usd_price': round(local_price * region['rate_to_usd'], 2),
    }
# ASINs to monitor across all configured regions.
test_asins = ['B0BSHF7WHW', 'B09V3KXJPB', 'B0C8QJ5P1Y']

# Collect one price record per (ASIN, region) pair; regions that fail to
# return a price are simply skipped.
all_prices = []
for asin in test_asins:
    for region in REGIONS:
        result = scrape_amazon_price(asin, region)
        if result is None:
            continue
        all_prices.append(result)
        print(f"{asin} ({region}): ${result['usd_price']}")

df = pd.DataFrame(all_prices)
Finding Arbitrage Opportunities
def find_arbitrage(df, min_margin_pct=20):
    """Find cross-region price gaps worth arbitraging.

    Parameters
    ----------
    df : pandas.DataFrame
        One row per (asin, region) observation, with columns
        'asin', 'region', 'usd_price'.
    min_margin_pct : float, optional
        Minimum gross margin (percent) between the cheapest and the most
        expensive region for an ASIN to count as an opportunity.

    Returns
    -------
    pandas.DataFrame
        Columns 'asin', 'buy_region', 'buy_price', 'sell_region',
        'sell_price', 'margin_pct', sorted by margin descending. Empty
        (but with the same columns) when nothing qualifies.
    """
    columns = ['asin', 'buy_region', 'buy_price',
               'sell_region', 'sell_price', 'margin_pct']
    opportunities = []
    for asin in df['asin'].unique():
        prices = df[df['asin'] == asin]
        if len(prices) < 2:
            continue  # need at least two regions to compare
        cheapest = prices.loc[prices['usd_price'].idxmin()]
        expensive = prices.loc[prices['usd_price'].idxmax()]
        margin_pct = (expensive['usd_price'] - cheapest['usd_price']) / cheapest['usd_price'] * 100
        if margin_pct >= min_margin_pct:
            opportunities.append({
                'asin': asin,
                'buy_region': cheapest['region'],
                'buy_price': cheapest['usd_price'],
                'sell_region': expensive['region'],
                'sell_price': expensive['usd_price'],
                'margin_pct': round(margin_pct, 1),
            })
    # Passing explicit columns keeps sort_values from raising KeyError when
    # no opportunity was found (the original crashed on an empty result).
    return pd.DataFrame(opportunities, columns=columns).sort_values(
        'margin_pct', ascending=False)
# Rank the collected prices and print the opportunity table.
opps = find_arbitrage(df)
header = "\nArbitrage Opportunities (>20% margin):"
print(header)
print(opps.to_string(index=False))
Real-Time Exchange Rates
def get_exchange_rates():
    """Fetch live exchange rates and return 'rate_to_usd' multipliers per region.

    The API reports rates as units-of-local-currency per USD, so the
    multiplier that converts a local price to USD is the reciprocal.

    Returns a dict keyed by region code ('US', 'UK', 'DE', 'JP').
    Raises requests.HTTPError on a non-2xx response instead of silently
    handing an error payload to the JSON parser (original behavior).
    """
    resp = requests.get('https://api.exchangerate-api.com/v4/latest/USD', timeout=10)
    resp.raise_for_status()  # fail loudly rather than mis-price every opportunity
    rates = resp.json()['rates']
    return {
        'US': 1.0,
        'UK': 1.0 / rates['GBP'],
        'DE': 1.0 / rates['EUR'],
        'JP': 1.0 / rates['JPY'],
    }
# Overwrite the hard-coded snapshot rates with live ones before pricing.
rates = get_exchange_rates()
for region_code, usd_multiplier in rates.items():
    REGIONS[region_code]['rate_to_usd'] = usd_multiplier
Net Margin After Fees
Gross margin alone overstates profitability. (You can also use ThorData for geo-specific pricing and ScrapeOps to monitor your scraping jobs.) To estimate the real return, subtract shipping, import duties, and marketplace fees:
def calculate_net_margin(opp, shipping=15, duty_pct=5, platform_pct=15):
    """Return the net margin (percent, 1 decimal) for one opportunity row.

    Net margin = profit after shipping, import duty (charged on the buy
    price) and the selling platform's fee (charged on the sell price),
    expressed as a percentage of total landed cost.

    `opp` must expose 'buy_price' and 'sell_price' via item access
    (a dict or a pandas row).
    """
    buy = opp['buy_price']
    sell = opp['sell_price']
    landed_cost = buy + shipping + buy * duty_pct / 100
    marketplace_fee = sell * platform_pct / 100
    profit = sell - landed_cost - marketplace_fee
    return round(profit / landed_cost * 100, 1)
# Report the fee-adjusted margin for each opportunity found above.
for _, opp in opps.iterrows():
    print(f"ASIN {opp['asin']}: Net margin {calculate_net_margin(opp)}%")
Key Considerations
- Always factor in shipping, duties, and platform fees
- Currency rates fluctuate — recheck at purchase time
- Some products have regional restrictions
- ScraperAPI handles Amazon's bot detection across all regions
- Start with lightweight, high-margin products to minimize shipping costs
Top comments (0)