Full sync - all projects, memory, configs

This commit is contained in:
2026-03-21 20:27:59 -05:00
parent 2447677d4a
commit b33de10902
395 changed files with 1635300 additions and 459211 deletions

View File

@ -0,0 +1,19 @@
[Unit]
Description=Polymarket Sports Arbitrage Scanner
After=network.target

[Service]
# oneshot: each activation runs the scanner once to completion; scheduling
# (every 5 minutes) is provided by the matching .timer unit.
Type=oneshot
ExecStart=/usr/bin/python3 /home/wdjones/.openclaw/workspace/projects/crypto-signals/scripts/polymarket_sports_scanner.py
WorkingDirectory=/home/wdjones/.openclaw/workspace/projects/crypto-signals
Environment=PATH=/usr/local/bin:/usr/bin:/bin
# Leading '-' makes the credentials file optional: no failure when it is absent.
EnvironmentFile=-/home/wdjones/.openclaw/workspace/.credentials/telegram-bot.env
StandardOutput=journal
StandardError=journal
# No automatic restart on failure — the timer simply retries on its next tick.
# NOTE(review): RestartSec is inert while Restart=no; confirm before removing.
Restart=no
RestartSec=30

[Install]
WantedBy=default.target

View File

@ -0,0 +1,11 @@
[Unit]
Description=Run Polymarket Sports Arbitrage Scanner every 5 minutes
# Tie the timer to the service so it fails if the service unit is missing.
Requires=polymarket-sports-scanner.service

[Timer]
# Fire at minutes 0, 5, 10, ... of every hour.
OnCalendar=*:0/5
# Catch up on activations missed during downtime or suspend.
Persistent=true
# Jitter each activation by up to 30s to spread load on the Polymarket API.
RandomizedDelaySec=30

[Install]
WantedBy=timers.target

View File

@ -0,0 +1,525 @@
#!/usr/bin/env python3
"""
Polymarket Sports Arbitrage Scanner
Scans active sports markets for arbitrage opportunities.
Alerts via Telegram when potential arb opportunities exist.
Zero AI tokens — runs as pure Python via systemd timer.
"""
import json
import os
import sqlite3
import sys
import time
import urllib.request
from datetime import datetime, timezone, timedelta
from pathlib import Path
# Config
# NOTE: DATA_DIR is created at import time (module-level side effect) so
# later file writes in this script never fail on a missing directory.
PROJECT_DIR = Path(__file__).parent.parent
DATA_DIR = PROJECT_DIR / "data" / "sports-arb"
DATA_DIR.mkdir(parents=True, exist_ok=True)
DB_FILE = DATA_DIR / "markets.db"  # SQLite store: markets, prices, opportunities
LOG_FILE = DATA_DIR / "scan_log.json"  # rolling log of the last 100 scan summaries
OPPORTUNITIES_FILE = DATA_DIR / "opportunities.json"  # opportunities from the latest scan
# Load telegram credentials from environment or credentials file
CREDENTIALS_FILE = Path.home() / ".openclaw/workspace/.credentials/telegram-bot.env"
def load_telegram_credentials():
    """Load Telegram bot credentials from the environment or credentials file.

    Environment variables take precedence (TELEGRAM_BOT_TOKEN / BOT_TOKEN and
    TELEGRAM_CHAT_ID / CHAT_ID). Any value still missing is then read from
    CREDENTIALS_FILE, a simple KEY=VALUE file where '#' starts a comment.

    Returns:
        (bot_token, chat_id) tuple of strings; empty strings when not found.
    """
    bot_token = os.environ.get("TELEGRAM_BOT_TOKEN", os.environ.get("BOT_TOKEN", ""))
    chat_id = os.environ.get("TELEGRAM_CHAT_ID", os.environ.get("CHAT_ID", ""))
    # Fall back to the credentials file when EITHER value is missing. (The
    # previous condition only read the file when the token was missing, so a
    # chat id that lived only in the file was silently dropped whenever the
    # token came from the environment.)
    if (not bot_token or not chat_id) and CREDENTIALS_FILE.exists():
        with open(CREDENTIALS_FILE) as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith('#') or '=' not in line:
                    continue
                key, value = line.split('=', 1)
                # Accept both the bare and TELEGRAM_-prefixed key spellings.
                if key in ("BOT_TOKEN", "TELEGRAM_BOT_TOKEN") and not bot_token:
                    bot_token = value
                elif key in ("CHAT_ID", "TELEGRAM_CHAT_ID") and not chat_id:
                    chat_id = value
    return bot_token, chat_id
# Credentials are resolved once at import time.
TELEGRAM_BOT_TOKEN, TELEGRAM_CHAT_ID = load_telegram_credentials()

# Sports-related keywords for filtering; matched case-insensitively as
# substrings of tag labels, question text, and description text.
SPORTS_KEYWORDS = {
    "nfl", "nba", "nhl", "mlb", "soccer", "football", "basketball", "hockey",
    "baseball", "tennis", "golf", "mma", "ufc", "boxing", "olympics", "premier league",
    "champions league", "world cup", "super bowl", "playoffs", "finals"
}
# Polymarket fee calculation
def calc_taker_fee(shares, price):
    """Return the taker fee in USDC for buying `shares` at `price`.

    Prices at or outside the open interval (0, 1) carry no fee.
    """
    if 0 < price < 1:
        return shares * price * 0.01  # 1% fee for most markets
    return 0
def calc_implied_probability(price):
    """Convert a market price into an implied probability, clamped to [0, 1]."""
    if price >= 1:
        return 1
    if price <= 0:
        return 0
    return price
def init_database():
    """Create the SQLite schema (markets, prices, opportunities) if missing.

    Safe to call on every run: all DDL uses IF NOT EXISTS.
    """
    conn = sqlite3.connect(DB_FILE)
    cursor = conn.cursor()
    # One row per sports market, keyed by the Gamma API market id; list-valued
    # fields (tags, outcomes, token_ids) are stored as JSON strings.
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS sports_markets (
            id TEXT PRIMARY KEY,
            condition_id TEXT,
            question TEXT,
            description TEXT,
            tags TEXT,
            category TEXT,
            active INTEGER,
            closed INTEGER,
            created_at TEXT,
            end_date TEXT,
            outcomes TEXT,
            token_ids TEXT,
            volume REAL,
            liquidity REAL,
            last_scan_time TEXT
        )
    """)
    # Create indexes separately
    cursor.execute("CREATE INDEX IF NOT EXISTS idx_sports_markets_active ON sports_markets(active)")
    cursor.execute("CREATE INDEX IF NOT EXISTS idx_sports_markets_closed ON sports_markets(closed)")
    cursor.execute("CREATE INDEX IF NOT EXISTS idx_sports_markets_end_date ON sports_markets(end_date)")
    # Append-only top-of-book snapshots, one row per outcome per scan.
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS market_prices (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            market_id TEXT,
            token_id TEXT,
            outcome TEXT,
            best_bid REAL,
            best_ask REAL,
            bid_size REAL,
            ask_size REAL,
            spread REAL,
            implied_prob REAL,
            timestamp TEXT,
            FOREIGN KEY (market_id) REFERENCES sports_markets (id)
        )
    """)
    # Detected arbitrage candidates; `alerted` tracks Telegram notification state.
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS arbitrage_opportunities (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            market_id TEXT,
            question TEXT,
            outcomes TEXT,
            prices TEXT,
            combined_prob REAL,
            max_profit_pct REAL,
            min_liquidity REAL,
            confidence_score REAL,
            timestamp TEXT,
            alerted INTEGER DEFAULT 0,
            FOREIGN KEY (market_id) REFERENCES sports_markets (id)
        )
    """)
    conn.commit()
    conn.close()
def get_active_sports_markets():
    """Fetch active, not-yet-closed sports markets from the Gamma API.

    Pages through up to 2000 markets ordered by volume (descending), keeps
    those that look sports-related, then drops any ending within the next
    hour. Each returned market gains a "_hours_until_end" float field.

    Returns:
        List of market dicts deemed tradeable.
    """
    markets = []
    # Search for sports markets with pagination (200 per page).
    for offset in range(0, 2000, 200):
        url = (
            f"https://gamma-api.polymarket.com/markets?"
            f"active=true&closed=false&limit=200&offset={offset}"
            f"&order=volume&ascending=false"
        )
        req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0 (compatible; PolymarketSportsScanner/1.0)"})
        try:
            # Context manager closes the HTTP response (the bare urlopen call
            # previously leaked the connection on every page).
            with urllib.request.urlopen(req, timeout=15) as resp:
                batch = json.loads(resp.read())
            for market in batch:
                if is_sports_market(market):
                    markets.append(market)
            if len(batch) < 200:
                break  # A short page means we've reached the end.
        except Exception as e:
            print(f"Error fetching markets (offset={offset}): {e}")
            break
        time.sleep(0.1)  # Rate limiting between pages
    # Filter out markets ending too soon (less than 1 hour)
    now = datetime.now(timezone.utc)
    tradeable = []
    for market in markets:
        end_str = market.get("endDate", "")
        if not end_str:
            continue
        try:
            # Normalize trailing 'Z' for datetime.fromisoformat compatibility.
            end_dt = datetime.fromisoformat(end_str.replace("Z", "+00:00"))
            hours_until = (end_dt - now).total_seconds() / 3600
            if hours_until > 1:  # At least 1 hour left
                market["_hours_until_end"] = round(hours_until, 2)
                tradeable.append(market)
        except Exception as e:
            print(f"Error parsing end date for market {market.get('id', 'unknown')}: {e}")
            continue
    return tradeable
def is_sports_market(market):
    """Return True when a market's tags, question, or description mention sports."""
    def _mentions_sports(text):
        # Case-insensitive substring match against the keyword set.
        return any(keyword in text for keyword in SPORTS_KEYWORDS)

    # Tags win first: a single sporty tag label is enough.
    for tag in market.get("tags", []):
        label = tag.get("label", "") if isinstance(tag, dict) else str(tag)
        if _mentions_sports(label.lower()):
            return True
    # Otherwise look at the free text of the question and description.
    combined = "{} {}".format(
        market.get("question", ""), market.get("description", "")
    ).lower()
    return _mentions_sports(combined)
def get_orderbook_prices(token_id):
    """Fetch the top-of-book for one outcome token from the CLOB API.

    Args:
        token_id: CLOB token id string for a single market outcome.

    Returns:
        Dict with best_bid, best_ask, bid_size, ask_size, spread, or None on
        any request/parse failure. An empty book side defaults to bid=0/ask=1.
    """
    url = f"https://clob.polymarket.com/book?token_id={token_id}"
    req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0 (compatible; PolymarketSportsScanner/1.0)"})
    try:
        # Context manager ensures the HTTP response is always closed (the
        # bare urlopen call previously leaked the connection).
        with urllib.request.urlopen(req, timeout=10) as resp:
            book = json.loads(resp.read())
        bids = book.get("bids", [])
        asks = book.get("asks", [])
        best_bid = float(bids[0]["price"]) if bids else 0
        best_ask = float(asks[0]["price"]) if asks else 1
        bid_size = float(bids[0].get("size", 0)) if bids else 0
        ask_size = float(asks[0].get("size", 0)) if asks else 0
        return {
            "best_bid": best_bid,
            "best_ask": best_ask,
            "bid_size": bid_size,
            "ask_size": ask_size,
            "spread": best_ask - best_bid
        }
    except Exception as e:
        print(f"Error fetching orderbook for token {token_id}: {e}")
        return None
def store_market_data(market, price_data):
    """Persist one market row plus its per-outcome top-of-book prices.

    Args:
        market: Market dict from the Gamma API.
        price_data: List of orderbook dicts (or None for failed fetches),
            positionally aligned with the market's clobTokenIds.
    """
    conn = sqlite3.connect(DB_FILE)
    try:
        cursor = conn.cursor()
        # Upsert the market snapshot.
        cursor.execute("""
            INSERT OR REPLACE INTO sports_markets
            (id, condition_id, question, description, tags, category, active, closed,
             created_at, end_date, outcomes, token_ids, volume, liquidity, last_scan_time)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """, (
            market.get("id", ""),
            market.get("conditionId", ""),
            market.get("question", ""),
            market.get("description", ""),
            json.dumps(market.get("tags", [])),
            market.get("category", ""),
            market.get("active", 0),
            market.get("closed", 0),
            market.get("createdAt", ""),
            market.get("endDate", ""),
            json.dumps(market.get("outcomes", [])),
            json.dumps(market.get("clobTokenIds", [])),
            market.get("volume", 0),
            market.get("liquidity", 0),
            datetime.now(timezone.utc).isoformat()
        ))
        # Append one price row per outcome that was successfully fetched.
        if price_data:
            outcomes = market.get("outcomes", [])
            token_ids = market.get("clobTokenIds", [])
            # The API sometimes returns token ids as a JSON-encoded string.
            if isinstance(token_ids, str):
                try:
                    token_ids = json.loads(token_ids) if token_ids.startswith("[") else token_ids.split(",")
                except ValueError:
                    # json.JSONDecodeError subclasses ValueError; the previous
                    # bare `except:` also swallowed KeyboardInterrupt etc.
                    token_ids = []
            for i, (token_id, outcome_data) in enumerate(zip(token_ids, price_data)):
                if not outcome_data:
                    continue  # Orderbook fetch failed for this outcome.
                outcome_name = outcomes[i] if i < len(outcomes) else f"Outcome {i+1}"
                cursor.execute("""
                    INSERT INTO market_prices
                    (market_id, token_id, outcome, best_bid, best_ask, bid_size, ask_size,
                     spread, implied_prob, timestamp)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    market.get("id", ""),
                    token_id,
                    outcome_name,
                    outcome_data["best_bid"],
                    outcome_data["best_ask"],
                    outcome_data["bid_size"],
                    outcome_data["ask_size"],
                    outcome_data["spread"],
                    calc_implied_probability(outcome_data["best_ask"]),
                    datetime.now(timezone.utc).isoformat()
                ))
        conn.commit()
    finally:
        # Always release the connection, even when an insert raises (the
        # previous version leaked it on any exception).
        conn.close()
def analyze_arbitrage_opportunity(market, price_data):
    """Analyze whether a market presents an arbitrage opportunity.

    Args:
        market: Market dict from the Gamma API ("id", "question", "outcomes";
            "_hours_until_end" is attached by get_active_sports_markets).
        price_data: List of orderbook dicts (or None for failed fetches),
            positionally aligned with market["outcomes"].

    Returns:
        Opportunity dict when the combined ask-side probability indicates an
        arb (or a near-arb with deep books and tight spreads), else None.
    """
    if not price_data or len(price_data) < 2:
        return None
    # Keep each valid price paired with its ORIGINAL outcome index. The
    # previous version filtered out Nones first and then indexed outcomes by
    # position in the filtered list, mislabeling outcomes whenever any
    # orderbook fetch had failed.
    indexed_prices = [(i, p) for i, p in enumerate(price_data) if p is not None]
    if len(indexed_prices) < 2:
        return None
    valid_prices = [p for _, p in indexed_prices]
    outcomes = market.get("outcomes", [])

    def _outcome_name(i):
        # Generic label when the API outcome list is shorter than expected.
        return outcomes[i] if i < len(outcomes) else f"Outcome {i+1}"

    # Combined implied probabilities use ask prices — what we'd pay to buy.
    combined_prob = sum(calc_implied_probability(p["best_ask"]) for p in valid_prices)
    # Executable size is bounded by the thinnest ask across outcomes.
    min_liquidity = min(p["ask_size"] for p in valid_prices)
    # Simplified profit estimate; taker fees are deliberately ignored here.
    max_profit_pct = max(0, (1 - combined_prob) * 100)
    # Confidence grows with liquidity and shrinks with average spread.
    avg_spread = sum(p["spread"] for p in valid_prices) / len(valid_prices)
    confidence_score = min_liquidity * (1 - avg_spread) * 100
    # Flag as opportunity if:
    # 1. Combined probability < 0.99 (pure arb potential)
    # 2. OR combined probability < 1.02 and good liquidity + low spreads
    #    (market inefficiency worth watching)
    is_opportunity = (
        combined_prob < 0.99 or
        (combined_prob < 1.02 and min_liquidity > 100 and confidence_score > 50)
    )
    if not is_opportunity:
        return None
    return {
        "market_id": market.get("id", ""),
        "question": market.get("question", ""),
        "outcomes": [_outcome_name(i) for i, _ in indexed_prices],
        "prices": [{"outcome": _outcome_name(i),
                    "best_ask": p["best_ask"], "ask_size": p["ask_size"]}
                   for i, p in indexed_prices],
        "combined_prob": round(combined_prob, 4),
        "max_profit_pct": round(max_profit_pct, 2),
        "min_liquidity": min_liquidity,
        "confidence_score": round(confidence_score, 2),
        "hours_left": market.get("_hours_until_end", 0),
        "timestamp": datetime.now(timezone.utc).isoformat()
    }
def store_opportunity(opportunity):
    """Append one arbitrage opportunity row to the database.

    Args:
        opportunity: Dict produced by analyze_arbitrage_opportunity; list
            fields are serialized to JSON for storage.
    """
    conn = sqlite3.connect(DB_FILE)
    cursor = conn.cursor()
    # `alerted` is left at its DEFAULT 0; the alerting pass runs separately.
    cursor.execute("""
        INSERT INTO arbitrage_opportunities
        (market_id, question, outcomes, prices, combined_prob, max_profit_pct,
         min_liquidity, confidence_score, timestamp)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
    """, (
        opportunity["market_id"],
        opportunity["question"],
        json.dumps(opportunity["outcomes"]),
        json.dumps(opportunity["prices"]),
        opportunity["combined_prob"],
        opportunity["max_profit_pct"],
        opportunity["min_liquidity"],
        opportunity["confidence_score"],
        opportunity["timestamp"]
    ))
    conn.commit()
    conn.close()
def send_telegram_alert(message):
    """Send an alert via the Telegram bot API.

    Falls back to printing the message when no bot token is configured.

    Args:
        message: HTML-formatted alert text.

    Returns:
        True when Telegram answers HTTP 200, False otherwise.
    """
    if not TELEGRAM_BOT_TOKEN:
        print(f"[ALERT] {message}")
        return False
    url = f"https://api.telegram.org/bot{TELEGRAM_BOT_TOKEN}/sendMessage"
    data = json.dumps({
        "chat_id": TELEGRAM_CHAT_ID,
        "text": message,
        "parse_mode": "HTML"
    }).encode()
    req = urllib.request.Request(url, data=data, headers={
        "Content-Type": "application/json",
        "User-Agent": "Mozilla/5.0"
    })
    try:
        # Context manager closes the response promptly instead of leaking
        # the connection until garbage collection.
        with urllib.request.urlopen(req, timeout=10) as resp:
            return resp.getcode() == 200
    except Exception as e:
        print(f"Telegram alert failed: {e}")
        return False
def scan_for_sports_arbitrage():
    """Main scanning function for sports arbitrage opportunities.

    Pipeline: init DB -> fetch sports markets -> fetch per-outcome orderbooks
    -> store snapshots -> analyze for arbitrage -> alert + persist results.

    Returns:
        List of opportunity dicts found during this scan.
    """
    print(f"=== Polymarket Sports Arbitrage Scanner ===")
    print(f"Time: {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M UTC')}")
    print()
    # Initialize database (idempotent DDL)
    init_database()
    # Fetch active sports markets
    markets = get_active_sports_markets()
    print(f"Found {len(markets)} active sports markets")
    opportunities = []
    scanned_count = 0
    for market in markets:
        question = market.get("question", "")[:80]
        market_id = market.get("id", "")  # NOTE(review): unused local; kept for parity
        print(f"Scanning: {question}")
        # Get token IDs (the API may return them as a JSON-encoded string)
        token_ids = market.get("clobTokenIds", [])
        if isinstance(token_ids, str):
            try:
                token_ids = json.loads(token_ids) if token_ids.startswith("[") else token_ids.split(",")
            except:  # NOTE(review): bare except — consider narrowing to ValueError
                token_ids = []
        if len(token_ids) < 2:
            print(f" Skipping - insufficient outcomes ({len(token_ids)})")
            continue
        # Fetch orderbook data for each outcome (None on fetch failure)
        price_data = []
        for token_id in token_ids:
            price_info = get_orderbook_prices(token_id)
            price_data.append(price_info)
            time.sleep(0.1)  # Rate limiting
        # Store market data
        store_market_data(market, price_data)
        # Analyze for arbitrage
        opportunity = analyze_arbitrage_opportunity(market, price_data)
        if opportunity:
            print(f" ✅ OPPORTUNITY: {opportunity['max_profit_pct']}% profit potential")
            opportunities.append(opportunity)
            store_opportunity(opportunity)
        else:
            print(f" ❌ No arbitrage opportunity")
        scanned_count += 1
    print(f"\n📊 Scan Results:")
    print(f" Markets scanned: {scanned_count}")
    print(f" Opportunities found: {len(opportunities)}")
    # Send Telegram alerts only for high-confidence opportunities
    # (>2% estimated profit AND confidence score above 30).
    alert_count = 0
    for opp in opportunities:
        if opp["max_profit_pct"] > 2.0 and opp["confidence_score"] > 30:
            message = (
                f"🚨 <b>Sports Arb Opportunity</b>\n\n"
                f"<b>{opp['question']}</b>\n\n"
                f"💰 Max Profit: {opp['max_profit_pct']}%\n"
                f"📊 Combined Prob: {opp['combined_prob']:.3f}\n"
                f"💧 Min Liquidity: {opp['min_liquidity']:.0f}\n"
                f"🎯 Confidence: {opp['confidence_score']:.1f}\n"
                f"⏰ Hours Left: {opp['hours_left']:.1f}h\n\n"
                f"<b>Prices:</b>\n"
            )
            for price in opp["prices"]:
                message += f"{price['outcome']}: ${price['best_ask']:.3f} ({price['ask_size']:.0f} shares)\n"
            if send_telegram_alert(message):
                alert_count += 1
    print(f" Alerts sent: {alert_count}")
    # Save opportunities to file (only overwritten when something was found)
    if opportunities:
        OPPORTUNITIES_FILE.write_text(json.dumps(opportunities, indent=2))
    # Update scan log
    log_entry = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "markets_scanned": scanned_count,
        "opportunities_found": len(opportunities),
        "alerts_sent": alert_count
    }
    log = []
    if LOG_FILE.exists():
        try:
            log = json.loads(LOG_FILE.read_text())
        except:  # NOTE(review): a corrupt log file is silently reset by design
            pass
    log.append(log_entry)
    log = log[-100:]  # Keep last 100 scans
    LOG_FILE.write_text(json.dumps(log, indent=2))
    return opportunities
def main():
    """Script entry point: run one scan and return a process exit code.

    Returns:
        0 on success, 1 when the scan raised any exception.
    """
    try:
        found = scan_for_sports_arbitrage()
        print(f"\n✅ Scan complete. Found {len(found)} opportunities.")
        return 0
    except Exception as exc:
        print(f"❌ Error during scan: {exc}")
        return 1


if __name__ == "__main__":
    sys.exit(main())

View File

@ -0,0 +1,302 @@
[
{
"timestamp": "2026-02-26T17:19:56.871764+00:00",
"markets_scanned": 369,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T17:24:02.551068+00:00",
"markets_scanned": 366,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T17:28:59.505205+00:00",
"markets_scanned": 366,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T17:34:03.331135+00:00",
"markets_scanned": 371,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T17:38:59.915553+00:00",
"markets_scanned": 369,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T17:44:00.696391+00:00",
"markets_scanned": 372,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T17:49:05.753436+00:00",
"markets_scanned": 375,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T17:54:02.890149+00:00",
"markets_scanned": 376,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T17:59:01.155689+00:00",
"markets_scanned": 374,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:04:04.486384+00:00",
"markets_scanned": 379,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:09:04.604052+00:00",
"markets_scanned": 382,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:14:04.636426+00:00",
"markets_scanned": 383,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:19:05.410663+00:00",
"markets_scanned": 383,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:24:01.723967+00:00",
"markets_scanned": 378,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:29:06.183090+00:00",
"markets_scanned": 385,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:34:09.048980+00:00",
"markets_scanned": 388,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:39:05.895026+00:00",
"markets_scanned": 385,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:44:07.381094+00:00",
"markets_scanned": 385,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:49:06.487580+00:00",
"markets_scanned": 385,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:54:04.954036+00:00",
"markets_scanned": 382,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T18:59:06.347393+00:00",
"markets_scanned": 383,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:04:09.985372+00:00",
"markets_scanned": 387,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:09:06.781489+00:00",
"markets_scanned": 387,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:14:10.388276+00:00",
"markets_scanned": 388,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:19:12.606504+00:00",
"markets_scanned": 392,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:24:09.600430+00:00",
"markets_scanned": 391,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:29:11.267547+00:00",
"markets_scanned": 390,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:34:09.185579+00:00",
"markets_scanned": 389,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:39:13.584607+00:00",
"markets_scanned": 389,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:44:09.874804+00:00",
"markets_scanned": 390,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:49:11.898132+00:00",
"markets_scanned": 390,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:54:09.633469+00:00",
"markets_scanned": 390,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T19:59:09.504569+00:00",
"markets_scanned": 382,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:04:28.639777+00:00",
"markets_scanned": 388,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:09:10.661969+00:00",
"markets_scanned": 388,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:14:09.008917+00:00",
"markets_scanned": 387,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:19:12.900489+00:00",
"markets_scanned": 386,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:24:11.491667+00:00",
"markets_scanned": 387,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:29:06.464873+00:00",
"markets_scanned": 385,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:34:09.954920+00:00",
"markets_scanned": 389,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:39:10.968931+00:00",
"markets_scanned": 388,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:44:09.034296+00:00",
"markets_scanned": 390,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:49:07.342449+00:00",
"markets_scanned": 386,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:54:08.831969+00:00",
"markets_scanned": 387,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T20:59:05.962615+00:00",
"markets_scanned": 384,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T21:04:07.576130+00:00",
"markets_scanned": 382,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T21:09:06.466807+00:00",
"markets_scanned": 385,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T21:14:09.570814+00:00",
"markets_scanned": 387,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T21:19:07.435786+00:00",
"markets_scanned": 379,
"opportunities_found": 0,
"alerts_sent": 0
},
{
"timestamp": "2026-02-26T21:24:07.779992+00:00",
"markets_scanned": 378,
"opportunities_found": 0,
"alerts_sent": 0
}
]

View File

@ -0,0 +1,260 @@
#!/usr/bin/env python3
"""
Unit tests for Polymarket Sports Arbitrage Scanner
"""
import unittest
import json
import sqlite3
import tempfile
import os
from pathlib import Path
from unittest.mock import patch, MagicMock
# Import the scanner module
import sys
sys.path.insert(0, str(Path(__file__).parent))
import polymarket_sports_scanner as scanner
class TestPolymarketSportsScanner(unittest.TestCase):
    """Unit tests for the scanner's pure logic and database helpers."""

    def setUp(self):
        """Create an isolated temp database and point the scanner at it."""
        # TemporaryDirectory (instead of bare mkdtemp) so tearDown can remove
        # the whole directory — the previous tearDown only unlinked the db
        # file and leaked one temp directory per test.
        self._tmp = tempfile.TemporaryDirectory()
        self.temp_dir = self._tmp.name
        self.test_db = Path(self.temp_dir) / "test_markets.db"
        # Redirect the scanner's module-level DB path to the temp database.
        scanner.DB_FILE = self.test_db

    def tearDown(self):
        """Remove the temp directory and everything inside it."""
        self._tmp.cleanup()

    def test_init_database(self):
        """Test database initialization."""
        scanner.init_database()
        # Check if database file was created
        self.assertTrue(self.test_db.exists())
        # Check if tables were created
        conn = sqlite3.connect(self.test_db)
        cursor = conn.cursor()
        # Check sports_markets table
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='sports_markets'")
        self.assertIsNotNone(cursor.fetchone())
        # Check market_prices table
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='market_prices'")
        self.assertIsNotNone(cursor.fetchone())
        # Check arbitrage_opportunities table
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='arbitrage_opportunities'")
        self.assertIsNotNone(cursor.fetchone())
        conn.close()

    def test_calc_implied_probability(self):
        """Test implied probability calculation."""
        # Test normal cases
        self.assertEqual(scanner.calc_implied_probability(0.5), 0.5)
        self.assertEqual(scanner.calc_implied_probability(0.75), 0.75)
        # Test edge cases: values are clamped into [0, 1]
        self.assertEqual(scanner.calc_implied_probability(0), 0)
        self.assertEqual(scanner.calc_implied_probability(1), 1)
        self.assertEqual(scanner.calc_implied_probability(-0.1), 0)
        self.assertEqual(scanner.calc_implied_probability(1.1), 1)

    def test_calc_taker_fee(self):
        """Test taker fee calculation."""
        # Test normal case: 1% of notional
        fee = scanner.calc_taker_fee(100, 0.5)
        self.assertEqual(fee, 0.5)  # 100 * 0.5 * 0.01
        # Test edge cases: no fee at/outside the (0, 1) price band
        self.assertEqual(scanner.calc_taker_fee(100, 0), 0)
        self.assertEqual(scanner.calc_taker_fee(100, 1), 0)

    def test_is_sports_market(self):
        """Test sports market identification."""
        # Test with sports tags
        market_with_sports_tag = {
            "question": "Who will win?",
            "tags": [{"label": "NFL"}, {"label": "Football"}]
        }
        self.assertTrue(scanner.is_sports_market(market_with_sports_tag))
        # Test with sports keywords in question
        market_with_sports_question = {
            "question": "Will the Lakers win the NBA championship?",
            "tags": []
        }
        self.assertTrue(scanner.is_sports_market(market_with_sports_question))
        # Test with sports keywords in description
        market_with_sports_description = {
            "question": "Who will win?",
            "description": "Super Bowl betting market",
            "tags": []
        }
        self.assertTrue(scanner.is_sports_market(market_with_sports_description))
        # Test non-sports market
        non_sports_market = {
            "question": "Will Bitcoin reach $100k?",
            "description": "Cryptocurrency prediction",
            "tags": [{"label": "Crypto"}]
        }
        self.assertFalse(scanner.is_sports_market(non_sports_market))

    def test_analyze_arbitrage_opportunity(self):
        """Test arbitrage opportunity analysis."""
        # Mock market data
        market = {
            "id": "test-market-123",
            "question": "Will Team A win vs Team B?",
            "outcomes": ["Team A wins", "Team B wins"],
            "_hours_until_end": 24
        }
        # Test arbitrage opportunity (combined prob < 1)
        price_data_arb = [
            {"best_bid": 0.45, "best_ask": 0.48, "bid_size": 1000, "ask_size": 1000, "spread": 0.03},
            {"best_bid": 0.47, "best_ask": 0.50, "bid_size": 1000, "ask_size": 1000, "spread": 0.03}
        ]
        opportunity = scanner.analyze_arbitrage_opportunity(market, price_data_arb)
        self.assertIsNotNone(opportunity)
        self.assertEqual(opportunity["market_id"], "test-market-123")
        self.assertLess(opportunity["combined_prob"], 0.99)
        # Test no arbitrage (combined prob >= 1)
        price_data_no_arb = [
            {"best_bid": 0.48, "best_ask": 0.52, "bid_size": 1000, "ask_size": 1000, "spread": 0.04},
            {"best_bid": 0.46, "best_ask": 0.50, "bid_size": 1000, "ask_size": 1000, "spread": 0.04}
        ]
        opportunity = scanner.analyze_arbitrage_opportunity(market, price_data_no_arb)
        # This should still return None or a low-confidence opportunity
        if opportunity:
            self.assertGreaterEqual(opportunity["combined_prob"], 1.00)
        # Test insufficient data (fewer than two priced outcomes)
        insufficient_data = [
            {"best_bid": 0.45, "best_ask": 0.48, "bid_size": 1000, "ask_size": 1000, "spread": 0.03}
        ]
        opportunity = scanner.analyze_arbitrage_opportunity(market, insufficient_data)
        self.assertIsNone(opportunity)

    def test_store_market_data(self):
        """Test storing market data in database."""
        scanner.init_database()
        market = {
            "id": "test-market-456",
            "conditionId": "test-condition",
            "question": "Test sports market?",
            "description": "Test description",
            "tags": [{"label": "NFL"}],
            "category": "Sports",
            "active": True,
            "closed": False,
            "createdAt": "2024-01-01T00:00:00Z",
            "endDate": "2024-01-02T00:00:00Z",
            "outcomes": ["Yes", "No"],
            "clobTokenIds": ["token1", "token2"],
            "volume": 10000,
            "liquidity": 5000
        }
        price_data = [
            {"best_bid": 0.45, "best_ask": 0.48, "bid_size": 1000, "ask_size": 1000, "spread": 0.03},
            {"best_bid": 0.47, "best_ask": 0.50, "bid_size": 1000, "ask_size": 1000, "spread": 0.03}
        ]
        scanner.store_market_data(market, price_data)
        # Verify data was stored
        conn = sqlite3.connect(self.test_db)
        cursor = conn.cursor()
        # Check market was stored
        cursor.execute("SELECT * FROM sports_markets WHERE id = ?", ("test-market-456",))
        market_row = cursor.fetchone()
        self.assertIsNotNone(market_row)
        # Check prices were stored (one row per outcome)
        cursor.execute("SELECT COUNT(*) FROM market_prices WHERE market_id = ?", ("test-market-456",))
        price_count = cursor.fetchone()[0]
        self.assertEqual(price_count, 2)
        conn.close()

    @patch('polymarket_sports_scanner.urllib.request.urlopen')
    def test_get_orderbook_prices(self, mock_urlopen):
        """Test orderbook price fetching."""
        # Mock successful API response
        mock_response = MagicMock()
        mock_response.read.return_value = json.dumps({
            "bids": [{"price": "0.45", "size": "1000"}],
            "asks": [{"price": "0.48", "size": "1200"}]
        }).encode()
        # Support both `resp = urlopen(...)` and `with urlopen(...) as resp:`
        # by making the mock act as its own context manager.
        mock_response.__enter__.return_value = mock_response
        mock_urlopen.return_value = mock_response
        result = scanner.get_orderbook_prices("test-token-id")
        self.assertIsNotNone(result)
        self.assertEqual(result["best_bid"], 0.45)
        self.assertEqual(result["best_ask"], 0.48)
        self.assertEqual(result["bid_size"], 1000)
        self.assertEqual(result["ask_size"], 1200)
        self.assertAlmostEqual(result["spread"], 0.03, places=3)

    def test_load_telegram_credentials(self):
        """Test telegram credential loading."""
        # Test with environment variables
        with patch.dict(os.environ, {'TELEGRAM_BOT_TOKEN': 'test-token', 'TELEGRAM_CHAT_ID': '12345'}):
            token, chat_id = scanner.load_telegram_credentials()
            self.assertEqual(token, 'test-token')
            self.assertEqual(chat_id, '12345')

    @patch('polymarket_sports_scanner.urllib.request.urlopen')
    def test_send_telegram_alert(self, mock_urlopen):
        """Test sending telegram alerts."""
        # Mock successful response; also usable as a context manager (see
        # test_get_orderbook_prices for rationale).
        mock_response = MagicMock()
        mock_response.getcode.return_value = 200
        mock_response.__enter__.return_value = mock_response
        mock_urlopen.return_value = mock_response
        # Set up telegram credentials for test
        scanner.TELEGRAM_BOT_TOKEN = "test-token"
        scanner.TELEGRAM_CHAT_ID = "12345"
        result = scanner.send_telegram_alert("Test alert message")
        self.assertTrue(result)
class TestIntegration(unittest.TestCase):
    """Integration tests that require network access (run separately)."""

    @unittest.skip("Requires network access - run manually")
    def test_get_active_sports_markets(self):
        """Test fetching actual sports markets from API."""
        markets = scanner.get_active_sports_markets()
        self.assertIsInstance(markets, list)
        # Spot-check the shape of the first market when any came back.
        if markets:
            market = markets[0]
            self.assertIn("question", market)
            self.assertIn("id", market)
def run_tests():
    """Run all tests via unittest's CLI entry point."""
    unittest.main()


if __name__ == "__main__":
    run_tests()