Add crypto signals pipeline + Polymarket arb scanner
- Signal parser for Telegram JSON exports - Price fetcher using Binance US API - Backtester with fee-aware simulation - Polymarket 15-min arb scanner with orderbook checking - Systemd timer every 2 min for arb alerts - Paper trade tracking - Investigation: polymarket-15min-arb.md
This commit is contained in:
36
data/investigations/polymarket-15min-arb.md
Normal file
36
data/investigations/polymarket-15min-arb.md
Normal file
@ -0,0 +1,36 @@
|
||||
# Polymarket 15-Min Crypto Arbitrage
|
||||
|
||||
**Source:** https://x.com/noisyb0y1/status/2020942208858456206
|
||||
**Date investigated:** 2026-02-09
|
||||
**Verdict:** Legitimate edge, inflated claims
|
||||
|
||||
## Strategy
|
||||
- Buy BOTH sides (Up + Down) on 15-minute BTC/ETH/SOL/XRP markets
|
||||
- When combined cost < $1.00, guaranteed profit regardless of outcome
|
||||
- Edge exists because these markets are low liquidity / inefficient pricing
|
||||
|
||||
## Reference Wallet
|
||||
- `0xE594336603F4fB5d3ba4125a67021ab3B4347052`
|
||||
- Real PnL on 2026-02-09: ~$9K on $82K deployed (11% daily)
|
||||
- Combined costs ranged from $0.70 (great arb) to $1.10 (not arb)
|
||||
- Best arbs: ETH markets at $0.70-0.73 combined cost
|
||||
|
||||
## Why It Works
|
||||
- 15-min markets have thin books — prices diverge from fair value
|
||||
- Binary outcome means Up + Down must sum to $1.00 at resolution
|
||||
- If you buy both for < $1.00 total, guaranteed profit
|
||||
|
||||
## Challenges
|
||||
- Needs significant capital ($50K+) to make meaningful returns
|
||||
- Fill quality degrades at scale — slippage kills the edge
|
||||
- Competition from other bots narrows the window
|
||||
- Not all markets have arb — some combined costs > $1.00
|
||||
|
||||
## Revisit When
|
||||
- [ ] We have capital to deploy
|
||||
- [ ] Built a bot to scan for combined < $1.00 opportunities in real-time
|
||||
- [ ] Polymarket adds more 15-min markets (more opportunities)
|
||||
|
||||
## Related
|
||||
- Tweet author promoting "Clawdbots" — bot product shill
|
||||
- "$99K in a day" / "$340K total" claims are inflated (real: $9K profit)
|
||||
20
projects/crypto-signals/data/arb-scanner/scan_log.json
Normal file
20
projects/crypto-signals/data/arb-scanner/scan_log.json
Normal file
@ -0,0 +1,20 @@
|
||||
[
|
||||
{
|
||||
"timestamp": "2026-02-09T20:27:10.137684+00:00",
|
||||
"markets_scanned": 0,
|
||||
"arbs_found": 0,
|
||||
"opportunities": []
|
||||
},
|
||||
{
|
||||
"timestamp": "2026-02-09T20:29:24.646570+00:00",
|
||||
"markets_scanned": 0,
|
||||
"arbs_found": 0,
|
||||
"opportunities": []
|
||||
},
|
||||
{
|
||||
"timestamp": "2026-02-09T20:30:04.391737+00:00",
|
||||
"markets_scanned": 0,
|
||||
"arbs_found": 0,
|
||||
"opportunities": []
|
||||
}
|
||||
]
|
||||
259
projects/crypto-signals/scripts/backtester.py
Normal file
259
projects/crypto-signals/scripts/backtester.py
Normal file
@ -0,0 +1,259 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Crypto Signal Backtester
|
||||
Simulates each signal against historical price data to determine outcomes.
|
||||
"""
|
||||
|
||||
import json
|
||||
import sys
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from price_fetcher import get_all_klines, get_current_price, normalize_symbol, datetime_to_ms
|
||||
|
||||
|
||||
def simulate_signal(signal, klines):
    """
    Simulate one signal against historical candle data.

    Walks the candles in order and reports whether the stop-loss or the
    primary target was touched first. Returns an outcome dict whose
    'result' is one of 'target_hit', 'stop_loss', 'open', 'no_data' or
    'incomplete', plus P&L and time-to-resolution fields.

    Note: mutates `signal` by adding 'entry_resolved' (the fill price used).
    """
    direction = signal['direction']
    entry = signal.get('entry')
    stop_loss = signal.get('stop_loss')
    targets = signal.get('targets', [])
    leverage = signal.get('leverage', 1)

    if not targets or not stop_loss:
        return {'result': 'incomplete', 'reason': 'missing SL or targets'}

    target = targets[0]  # only the primary target is simulated

    # Market orders (or a missing entry) fill at the first candle's open.
    if entry == 'market' or entry is None:
        if not klines:
            return {'result': 'no_data'}
        entry = klines[0]['open']

    signal['entry_resolved'] = entry

    # Risk/reward distances are symmetric in absolute terms — direction only
    # decides which side of entry the SL/TP sit on, not the magnitudes.
    risk = abs(entry - stop_loss)
    reward = abs(entry - target)

    outcome = {
        'entry_price': entry,
        'stop_loss': stop_loss,
        'target': target,
        'direction': direction,
        'leverage': leverage,
        'risk_pct': round(risk / entry * 100, 2),
        'reward_pct': round(reward / entry * 100, 2),
        'rr_ratio': round(reward / risk, 2) if risk > 0 else 0,
    }

    # Walk the candles until either level is touched.
    exited = False
    for idx, candle in enumerate(klines, start=1):
        if direction == 'short':
            sl_hit = candle['high'] >= stop_loss
            tp_hit = candle['low'] <= target
        else:  # long
            sl_hit = candle['low'] <= stop_loss
            tp_hit = candle['high'] >= target

        if not (sl_hit or tp_hit):
            continue

        # If both levels were touched in the same candle we score it as a
        # loss (conservative assumption: the SL filled first).
        if sl_hit:
            outcome['result'] = 'stop_loss'
            outcome['exit_price'] = stop_loss
        else:
            outcome['result'] = 'target_hit'
            outcome['exit_price'] = target
        outcome['candles_to_exit'] = idx
        outcome['exit_time'] = candle['open_time']
        exited = True
        break

    if not exited:
        # Never resolved — report unrealized P&L from the last close.
        if klines:
            last_close = klines[-1]['close']
            move = entry - last_close if direction == 'short' else last_close - entry
            unrealized = move / entry * 100
            outcome['result'] = 'open'
            outcome['last_price'] = last_close
            outcome['unrealized_pct'] = round(unrealized, 2)
            outcome['unrealized_pct_leveraged'] = round(unrealized * leverage, 2)
        else:
            outcome['result'] = 'no_data'

    # Realized P&L for resolved trades.
    if outcome['result'] in ('target_hit', 'stop_loss'):
        exit_price = outcome['exit_price']
        move = entry - exit_price if direction == 'short' else exit_price - entry
        pnl = move / entry * 100
        outcome['pnl_pct'] = round(pnl, 2)
        outcome['pnl_pct_leveraged'] = round(pnl * leverage, 2)

    return outcome
|
||||
|
||||
|
||||
def backtest_signals(signals, interval='5m', lookforward_hours=72):
    """Backtest a list of parsed signals against Binance candle data.

    Args:
        signals: signal dicts from signal_parser (need 'ticker' and
            'direction'; 'timestamp', 'entry', 'stop_loss', 'targets',
            'leverage' are used when present).
        interval: kline interval to simulate on.
        lookforward_hours: how far past the signal time to search for a
            resolution before declaring the trade still open.

    Returns:
        The input signals, each copied with a 'backtest' outcome attached.
    """
    results = []

    for i, signal in enumerate(signals):
        ticker = signal['ticker']
        symbol = normalize_symbol(ticker)
        timestamp = signal.get('timestamp', '')

        print(f"[{i+1}/{len(signals)}] {ticker} {signal['direction']} ...", end=' ', flush=True)

        now_ms = int(time.time() * 1000)

        # Get start time. datetime_to_ms returns None for unparseable
        # strings — fall back to "now" instead of crashing on the addition
        # below (bug fix: previously a bad timestamp raised TypeError).
        start_ms = datetime_to_ms(timestamp) if timestamp else None
        if start_ms is None:
            start_ms = now_ms

        # Cap the lookforward window at the current time.
        end_ms = min(start_ms + lookforward_hours * 60 * 60 * 1000, now_ms)

        # Fetch candles
        klines = get_all_klines(symbol, interval, start_ms, end_ms)

        if not klines:
            print("NO DATA")
            results.append({**signal, 'backtest': {'result': 'no_data', 'reason': f'no klines for {symbol}'}})
            continue

        # Simulate
        outcome = simulate_signal(signal, klines)
        print(f"{outcome['result']} | PnL: {outcome.get('pnl_pct_leveraged', outcome.get('unrealized_pct_leveraged', '?'))}%")

        results.append({**signal, 'backtest': outcome})
        time.sleep(0.2)  # Stay under Binance's public rate limit

    return results
|
||||
|
||||
|
||||
def generate_report(results):
    """Aggregate backtest outcomes into a summary + trades report dict."""

    def with_result(*names):
        # Bucket trades by their backtest result label.
        return [r for r in results if r['backtest'].get('result') in names]

    wins = with_result('target_hit')
    losses = with_result('stop_loss')
    open_trades = with_result('open')
    no_data = with_result('no_data', 'incomplete')

    resolved = wins + losses

    def avg_of(rows, key):
        return sum(r['backtest'][key] for r in rows) / len(rows) if rows else 0

    win_rate = len(wins) / len(resolved) * 100 if resolved else 0
    avg_win = avg_of(wins, 'pnl_pct_leveraged')
    avg_loss = avg_of(losses, 'pnl_pct_leveraged')
    total_pnl = sum(r['backtest'].get('pnl_pct_leveraged', 0) for r in resolved)

    # Profit factor: gross profit over gross loss (inf when no losses).
    gross_profit = sum(r['backtest']['pnl_pct_leveraged'] for r in wins)
    gross_loss = abs(sum(r['backtest']['pnl_pct_leveraged'] for r in losses))
    profit_factor = gross_profit / gross_loss if gross_loss > 0 else float('inf')

    # Average risk/reward across resolved trades only.
    avg_rr = (sum(r['backtest'].get('rr_ratio', 0) for r in resolved) / len(resolved)
              if resolved else 0)

    return {
        'summary': {
            'total_signals': len(results),
            'wins': len(wins),
            'losses': len(losses),
            'open': len(open_trades),
            'no_data': len(no_data),
            'win_rate': round(win_rate, 1),
            'avg_win_pct': round(avg_win, 2),
            'avg_loss_pct': round(avg_loss, 2),
            'total_pnl_pct': round(total_pnl, 2),
            'profit_factor': round(profit_factor, 2),
            'avg_risk_reward': round(avg_rr, 2),
        },
        'trades': results,
    }
|
||||
|
||||
|
||||
def print_report(report):
    """Print a human-readable summary of a backtest report to stdout."""
    s = report['summary']
    bar = "=" * 60
    lines = [
        "",  # leading blank line, as before
        bar,
        "CRYPTO SIGNAL BACKTEST REPORT",
        bar,
        f"Total Signals: {s['total_signals']}",
        f"Wins: {s['wins']}",
        f"Losses: {s['losses']}",
        f"Open: {s['open']}",
        f"No Data: {s['no_data']}",
        f"Win Rate: {s['win_rate']}%",
        f"Avg Win: +{s['avg_win_pct']}% (leveraged)",
        f"Avg Loss: {s['avg_loss_pct']}% (leveraged)",
        f"Total P&L: {s['total_pnl_pct']}% (sum of resolved)",
        f"Profit Factor: {s['profit_factor']}",
        f"Avg R:R: {s['avg_risk_reward']}",
        bar,
    ]
    print("\n".join(lines))
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # CLI entry point: backtest a signals file produced by signal_parser.py.
    if len(sys.argv) < 2:
        print("Usage: python3 backtester.py <signals.json> [--interval 5m] [--hours 72]")
        print("\nRun signal_parser.py first to generate signals.json")
        sys.exit(1)

    signals_path = sys.argv[1]

    # Minimal hand-rolled flag parsing (deliberately no argparse dependency).
    interval = '5m'
    hours = 72
    for i, arg in enumerate(sys.argv):
        if arg == '--interval' and i + 1 < len(sys.argv):
            interval = sys.argv[i + 1]
        if arg == '--hours' and i + 1 < len(sys.argv):
            hours = int(sys.argv[i + 1])

    with open(signals_path) as f:
        signals = json.load(f)

    print(f"Backtesting {len(signals)} signals (interval={interval}, lookforward={hours}h)\n")

    results = backtest_signals(signals, interval=interval, lookforward_hours=hours)
    report = generate_report(results)
    print_report(report)

    # Save full report next to the input, e.g. signals.json -> signals_backtest.json
    out_path = signals_path.replace('.json', '_backtest.json')
    with open(out_path, 'w') as f:
        json.dump(report, f, indent=2)
    print(f"\nFull report saved to {out_path}")
|
||||
311
projects/crypto-signals/scripts/polymarket_arb_scanner.py
Normal file
311
projects/crypto-signals/scripts/polymarket_arb_scanner.py
Normal file
@ -0,0 +1,311 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Polymarket 15-Min Crypto Arbitrage Scanner
|
||||
Scans active 15-minute crypto markets for arbitrage opportunities.
|
||||
Alerts via Telegram when combined Up+Down cost < $1.00 (after fees).
|
||||
|
||||
Zero AI tokens — runs as pure Python via systemd timer.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import urllib.request
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
# Config
# Data lives next to the project, not the script: <project>/data/arb-scanner
DATA_DIR = Path(__file__).parent.parent / "data" / "arb-scanner"
DATA_DIR.mkdir(parents=True, exist_ok=True)  # import-time side effect: ensure dir exists
LOG_FILE = DATA_DIR / "scan_log.json"          # rolling scan history (last 1000 scans)
PAPER_TRADES_FILE = DATA_DIR / "paper_trades.json"  # accumulated paper trades

# Telegram credentials come from the environment; alerts fall back to stdout
# when no bot token is set. NOTE(review): chat id default is hard-coded —
# confirm it should ship in source.
TELEGRAM_BOT_TOKEN = os.environ.get("TELEGRAM_BOT_TOKEN", "")
TELEGRAM_CHAT_ID = os.environ.get("TELEGRAM_CHAT_ID", "6443752046")
|
||||
|
||||
# Polymarket fee formula for 15-min markets
|
||||
def calc_taker_fee(shares, price):
    """Taker fee in USDC for buying `shares` at `price` on a 15-min market.

    The rate scales with (price * (1 - price))**2, so it peaks at mid
    prices and vanishes toward 0 and 1; prices outside the open interval
    (0, 1) are fee-free by definition.
    """
    if 0 < price < 1:
        return shares * price * 0.25 * (price * (1 - price)) ** 2
    return 0
|
||||
|
||||
def calc_fee_rate(price):
    """Effective taker-fee rate at a given share price (0 outside (0, 1))."""
    if price <= 0 or price >= 1:
        return 0
    spread_term = price * (1 - price)
    return 0.25 * spread_term ** 2
|
||||
|
||||
|
||||
def get_active_15min_markets():
    """Fetch active 15-minute crypto "Up or Down" markets from Polymarket.

    Pages through the gamma API (ordered by volume), keeps markets whose
    question mentions "up or down", filters to those ending more than
    15 minutes and at most 24 hours from now, and deduplicates by
    condition id. Returns the list of market dicts, each annotated with
    '_hours_until_end'.
    """
    markets = []

    # 15-min markets are scattered across pagination — scan broadly
    for offset in range(0, 3000, 200):
        url = (
            f"https://gamma-api.polymarket.com/markets?"
            f"active=true&closed=false&limit=200&offset={offset}"
            f"&order=volume&ascending=false"
        )
        req = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0"})
        try:
            resp = urllib.request.urlopen(req, timeout=15)
            batch = json.loads(resp.read())
            for m in batch:
                q = m.get("question", "").lower()
                if "up or down" in q:
                    markets.append(m)
            if len(batch) < 200:
                break  # short page means we've exhausted the listing
        except Exception as e:
            # Network/parse failure: keep whatever we have so far.
            print(f"Error fetching markets (offset={offset}): {e}")
            break
        time.sleep(0.1)

    # Only keep markets ending between 15 minutes and 24 hours from now
    # (under 15 min the market is effectively already resolving).
    now = datetime.now(timezone.utc)
    tradeable = []
    for m in markets:
        end_str = m.get("endDate", "")
        if not end_str:
            continue
        try:
            end_dt = datetime.fromisoformat(end_str.replace("Z", "+00:00"))
        except ValueError:
            # Bug fix: was a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit. Skip malformed endDates only.
            continue
        hours_until = (end_dt - now).total_seconds() / 3600
        if 0.25 < hours_until <= 24:
            m["_hours_until_end"] = round(hours_until, 2)
            tradeable.append(m)

    # Deduplicate by condition id (the same market can appear on two pages).
    seen = set()
    unique = []
    for m in tradeable:
        cid = m.get("conditionId", m.get("id", ""))
        if cid not in seen:
            seen.add(cid)
            unique.append(m)

    return unique
|
||||
|
||||
|
||||
def get_orderbook_prices(token_id):
    """Return top-of-book bid/ask (price + size) for a CLOB token.

    Defaults: bid 0 / ask 1 when the respective side of the book is empty.
    Returns None on any fetch or parse failure.
    """
    req = urllib.request.Request(
        f"https://clob.polymarket.com/book?token_id={token_id}",
        headers={"User-Agent": "Mozilla/5.0"},
    )
    try:
        resp = urllib.request.urlopen(req, timeout=10)
        book = json.loads(resp.read())
        bids = book.get("bids", [])
        asks = book.get("asks", [])
        bid_px = float(bids[0]["price"]) if bids else 0
        ask_px = float(asks[0]["price"]) if asks else 1
        return {
            "best_bid": bid_px,
            "best_ask": ask_px,
            "bid_size": float(bids[0].get("size", 0)) if bids else 0,
            "ask_size": float(asks[0].get("size", 0)) if asks else 0,
            "spread": ask_px - bid_px,
        }
    except Exception:
        return None
|
||||
|
||||
|
||||
def scan_for_arbs():
    """Scan all active 15-min markets for arbitrage opportunities.

    For each market with both orderbooks available, prices a "buy both
    sides at the ask" basket: 100 shares of Up + Down pay out exactly $100
    at resolution, so if the combined cost (incl. taker fees) is below
    $100 the market is flagged as an arb. Returns one opportunity dict per
    priced market, whether or not it is an arb.
    """
    markets = get_active_15min_markets()
    print(f"Found {len(markets)} active 15-min crypto markets")

    opportunities = []

    for market in markets:
        question = market.get("question", market.get("title", ""))
        hours_left = market.get("_hours_until_end", "?")

        # Get token IDs for both outcomes.
        # clobTokenIds is sometimes a JSON-encoded list, sometimes a
        # comma-separated string — handle both.
        tokens = market.get("clobTokenIds", "")
        if isinstance(tokens, str):
            try:
                tokens = json.loads(tokens) if tokens.startswith("[") else tokens.split(",")
            except:
                tokens = []

        if len(tokens) < 2:
            continue

        # Get orderbook for both tokens (ask = price to buy)
        book_up = get_orderbook_prices(tokens[0])
        book_down = get_orderbook_prices(tokens[1])
        time.sleep(0.15)  # be polite to the CLOB API

        if not book_up or not book_down:
            continue

        # For arb: we BUY both sides at the ASK price
        up_ask = book_up["best_ask"]
        down_ask = book_down["best_ask"]
        combined = up_ask + down_ask

        # Calculate fees on 100 shares; winning side pays $1/share, so the
        # guaranteed payout on 100 shares of each side is $100.
        fee_up = calc_taker_fee(100, up_ask)
        fee_down = calc_taker_fee(100, down_ask)
        total_cost_100 = (up_ask + down_ask) * 100 + fee_up + fee_down
        net_profit_100 = 100 - total_cost_100
        net_profit_pct = net_profit_100 / total_cost_100 * 100 if total_cost_100 > 0 else 0

        # Fillable size (limited by smaller side) — top-of-book only; deeper
        # levels are not considered.
        fillable_size = min(book_up["ask_size"], book_down["ask_size"])
        if fillable_size > 0:
            fill_fee_up = calc_taker_fee(fillable_size, up_ask)
            fill_fee_down = calc_taker_fee(fillable_size, down_ask)
            fill_cost = (up_ask + down_ask) * fillable_size + fill_fee_up + fill_fee_down
            fill_profit = fillable_size - fill_cost
        else:
            fill_profit = 0

        opp = {
            "question": question,
            "hours_left": hours_left,
            "up_ask": up_ask,
            "down_ask": down_ask,
            "up_ask_size": book_up["ask_size"],
            "down_ask_size": book_down["ask_size"],
            "combined": round(combined, 4),          # pre-fee ask sum
            "fee_up_per_100": round(fee_up, 4),
            "fee_down_per_100": round(fee_down, 4),
            "total_fees_per_100": round(fee_up + fee_down, 4),
            "net_profit_per_100": round(net_profit_100, 2),
            "net_profit_pct": round(net_profit_pct, 2),
            "fillable_shares": fillable_size,
            "fillable_profit": round(fill_profit, 2),
            "is_arb": net_profit_100 > 0,            # profitable after fees
            "timestamp": datetime.now(timezone.utc).isoformat(),
        }
        opportunities.append(opp)

    return opportunities
|
||||
|
||||
|
||||
def paper_trade(opp):
    """Record a $50 paper trade for an arb opportunity and persist it.

    Appends to PAPER_TRADES_FILE (a JSON list) and returns the new trade
    record, including the expected profit on the $50 paper size.
    """
    trades = []
    if PAPER_TRADES_FILE.exists():
        try:
            trades = json.loads(PAPER_TRADES_FILE.read_text())
        except (json.JSONDecodeError, OSError):
            # Bug fix: a corrupt/unreadable history previously crashed the
            # whole scan. Start fresh instead (same best-effort policy as
            # the scan-log loading in main()).
            trades = []

    trade = {
        "id": len(trades) + 1,
        "timestamp": opp["timestamp"],
        "question": opp["question"],
        "up_price": opp.get("up_ask", opp.get("up_price", 0)),
        "down_price": opp.get("down_ask", opp.get("down_price", 0)),
        "combined": opp["combined"],
        "fees_per_100": opp["total_fees_per_100"],
        "net_profit_per_100": opp["net_profit_per_100"],
        "net_profit_pct": opp["net_profit_pct"],
        "status": "open",  # Will be "won" when market resolves (always wins if real arb)
        "paper_size_usd": 50,  # Paper trade $50 per arb
    }

    # Expected profit on the fixed $50 paper size.
    expected_profit = 50 * opp["net_profit_pct"] / 100
    trade["expected_profit_usd"] = round(expected_profit, 2)

    trades.append(trade)
    PAPER_TRADES_FILE.write_text(json.dumps(trades, indent=2))
    return trade
|
||||
|
||||
|
||||
def send_telegram_alert(message):
    """Push `message` to Telegram via the bot HTTP API (zero AI tokens).

    Falls back to printing when no bot token is configured. Delivery
    failures are logged and swallowed so a flaky network never kills a scan.
    """
    if not TELEGRAM_BOT_TOKEN:
        print(f"[ALERT] {message}")
        return

    payload = {
        "chat_id": TELEGRAM_CHAT_ID,
        "text": message,
        "parse_mode": "HTML",
    }
    req = urllib.request.Request(
        f"https://api.telegram.org/bot{TELEGRAM_BOT_TOKEN}/sendMessage",
        data=json.dumps(payload).encode(),
        headers={
            "Content-Type": "application/json",
            "User-Agent": "Mozilla/5.0",
        },
    )
    try:
        urllib.request.urlopen(req, timeout=10)
    except Exception as e:
        print(f"Telegram alert failed: {e}")
|
||||
|
||||
|
||||
def main():
    """Run one scan cycle: scan markets, print results, paper-trade and
    alert on any arbs found, then persist the rolling scan log."""
    print(f"=== Polymarket 15-Min Arb Scanner ===")
    print(f"Time: {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M UTC')}")
    print()

    opps = scan_for_arbs()

    arbs = [o for o in opps if o["is_arb"]]
    # NOTE(review): non_arbs is currently unused — kept for future reporting?
    non_arbs = [o for o in opps if not o["is_arb"]]

    print(f"\nResults: {len(opps)} markets scanned, {len(arbs)} arb opportunities\n")

    # Print every priced market, best opportunity first.
    for o in sorted(opps, key=lambda x: x.get("net_profit_pct", 0), reverse=True):
        emoji = "✅" if o["is_arb"] else "❌"
        print(f"{emoji} {o['question'][:65]}")
        up = o.get('up_ask', o.get('up_price', '?'))
        down = o.get('down_ask', o.get('down_price', '?'))
        print(f" Up: ${up} | Down: ${down} | Combined: ${o['combined']}")
        print(f" Fees/100: ${o['total_fees_per_100']} | Net profit/100: ${o['net_profit_per_100']} ({o['net_profit_pct']}%)")
        if o.get('fillable_shares'):
            print(f" Fillable: {o['fillable_shares']:.0f} shares | Fillable profit: ${o.get('fillable_profit', '?')}")
        print()

    # Paper trade any arbs found
    for arb in arbs:
        trade = paper_trade(arb)
        print(f"📝 Paper trade #{trade['id']}: {trade['question'][:50]} | Expected: +${trade['expected_profit_usd']}")

        # Send Telegram alert
        # NOTE(review): 'combined' is the pre-fee ask sum, but the message
        # labels it "(after fees)" — confirm intended wording.
        msg = (
            f"🔔 <b>Arb Found!</b>\n\n"
            f"<b>{arb['question']}</b>\n"
            f"Up: ${arb.get('up_ask', arb.get('up_price', '?'))} | "
            f"Down: ${arb.get('down_ask', arb.get('down_price', '?'))}\n"
            f"Combined: ${arb['combined']} (after fees)\n"
            f"Net profit: {arb['net_profit_pct']}%\n\n"
            f"📝 Paper traded $50 → expected +${trade['expected_profit_usd']}"
        )
        send_telegram_alert(msg)

    # Save scan log (best-effort load: a corrupt log is silently reset)
    log = []
    if LOG_FILE.exists():
        try:
            log = json.loads(LOG_FILE.read_text())
        except:
            pass

    log.append({
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "markets_scanned": len(opps),
        "arbs_found": len(arbs),
        "opportunities": opps,
    })

    # Keep last 1000 scans
    log = log[-1000:]
    LOG_FILE.write_text(json.dumps(log, indent=2))

    # Summary of paper trades
    if PAPER_TRADES_FILE.exists():
        trades = json.loads(PAPER_TRADES_FILE.read_text())
        total_expected = sum(t.get("expected_profit_usd", 0) for t in trades)
        print(f"\n📊 Paper trade total: {len(trades)} trades, expected profit: ${total_expected:.2f}")
||||
|
||||
|
||||
if __name__ == "__main__":
    # Entry point — intended to be invoked by a systemd timer every 2 min.
    main()
|
||||
131
projects/crypto-signals/scripts/price_fetcher.py
Normal file
131
projects/crypto-signals/scripts/price_fetcher.py
Normal file
@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Crypto Price Fetcher
|
||||
Pulls historical OHLCV data from Binance public API (no key needed).
|
||||
"""
|
||||
|
||||
import json
|
||||
import time
|
||||
import urllib.request
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
# Binance intl is geo-blocked from US; use Binance US
BINANCE_KLINES = "https://api.binance.us/api/v3/klines"        # OHLCV candle endpoint
BINANCE_TICKER = "https://api.binance.us/api/v3/ticker/price"  # latest spot price endpoint
|
||||
|
||||
|
||||
def get_price_at_time(symbol, timestamp_ms, interval='1m'):
    """Return the single candle starting at `timestamp_ms`, or None.

    None means either a fetch/parse error (logged) or no candle at that time.
    """
    url = f"{BINANCE_KLINES}?symbol={symbol}&interval={interval}&startTime={timestamp_ms}&limit=1"
    req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    try:
        resp = urllib.request.urlopen(req, timeout=10)
        data = json.loads(resp.read())
        if data:
            k = data[0]
            return {
                'open_time': k[0],
                'open': float(k[1]),
                'high': float(k[2]),
                'low': float(k[3]),
                'close': float(k[4]),
                'volume': float(k[5]),
            }
    except Exception as e:
        print(f"Error fetching {symbol}: {e}")
    return None
|
||||
|
||||
|
||||
def get_klines(symbol, interval='1h', start_time_ms=None, end_time_ms=None, limit=1000):
    """Fetch up to `limit` OHLCV candles from Binance US as a list of dicts.

    Returns [] on any fetch/parse error (logged to stdout).
    """
    query = [f"symbol={symbol}", f"interval={interval}", f"limit={limit}"]
    if start_time_ms:
        query.append(f"startTime={start_time_ms}")
    if end_time_ms:
        query.append(f"endTime={end_time_ms}")

    req = urllib.request.Request(
        f"{BINANCE_KLINES}?{'&'.join(query)}",
        headers={'User-Agent': 'Mozilla/5.0'},
    )

    try:
        resp = urllib.request.urlopen(req, timeout=15)
        raw = json.loads(resp.read())
        # Binance returns positional arrays; map them to named fields.
        return [
            {
                'open_time': k[0],
                'open': float(k[1]),
                'high': float(k[2]),
                'low': float(k[3]),
                'close': float(k[4]),
                'volume': float(k[5]),
                'close_time': k[6],
            }
            for k in raw
        ]
    except Exception as e:
        print(f"Error fetching klines for {symbol}: {e}")
        return []
|
||||
|
||||
|
||||
def get_all_klines(symbol, interval, start_time_ms, end_time_ms):
    """Paginate through all klines between two ms timestamps.

    Binance caps each response at 1000 candles, so we keep requesting from
    just past the last candle's close_time until the window is exhausted.
    Returns the concatenated candle list ([] if nothing could be fetched).
    """
    all_klines = []
    current_start = start_time_ms

    while current_start < end_time_ms:
        batch = get_klines(symbol, interval, current_start, end_time_ms)
        if not batch:
            break
        all_klines.extend(batch)
        # Perf fix: a short batch means Binance had no more candles in the
        # window — stop here instead of issuing one extra empty request.
        if len(batch) < 1000:
            break
        current_start = batch[-1]['close_time'] + 1
        time.sleep(0.1)  # Rate limiting
    return all_klines
|
||||
|
||||
|
||||
def get_current_price(symbol):
    """Return the latest traded price for `symbol`, or None on error."""
    req = urllib.request.Request(
        f"{BINANCE_TICKER}?symbol={symbol}",
        headers={'User-Agent': 'Mozilla/5.0'},
    )
    try:
        resp = urllib.request.urlopen(req, timeout=10)
        payload = json.loads(resp.read())
        return float(payload['price'])
    except Exception as e:
        print(f"Error fetching current price for {symbol}: {e}")
        return None
|
||||
|
||||
|
||||
def normalize_symbol(ticker):
    """Map a signal ticker (e.g. 'btc/usdt', 'ETH') to a Binance symbol ('BTCUSDT').

    Strips any existing USDT suffix and slashes, then re-appends USDT so
    the result is always a spot pair against USDT.
    """
    base = ticker.upper().replace('USDT', '').replace('/', '')
    return base + 'USDT'
|
||||
|
||||
|
||||
def datetime_to_ms(dt_str):
    """Convert a datetime string to a Unix epoch in milliseconds (UTC).

    Accepts the explicit formats seen in Telegram exports
    ('%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S', '%Y-%m-%d') and, as a
    generalized fallback, any ISO-8601 string `datetime.fromisoformat`
    understands (UTC offsets, fractional seconds). Naive timestamps are
    interpreted as UTC. Returns None when nothing parses.
    """
    for fmt in ['%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S', '%Y-%m-%d']:
        try:
            dt = datetime.strptime(dt_str, fmt).replace(tzinfo=timezone.utc)
            return int(dt.timestamp() * 1000)
        except ValueError:
            continue

    # Fallback for offset-aware / sub-second ISO-8601 strings.
    try:
        dt = datetime.fromisoformat(dt_str)
    except ValueError:
        return None
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return int(dt.timestamp() * 1000)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Smoke test: quote a couple of live symbols and show their 24h range.
    # Test with current signals
    for ticker in ['ASTERUSDT', 'HYPEUSDT']:
        symbol = normalize_symbol(ticker)
        price = get_current_price(symbol)
        print(f"{symbol}: ${price}")

        # Get last 24h of 1h candles
        now_ms = int(time.time() * 1000)
        day_ago = now_ms - (24 * 60 * 60 * 1000)
        klines = get_klines(symbol, '1h', day_ago, now_ms)
        if klines:
            highs = [k['high'] for k in klines]
            lows = [k['low'] for k in klines]
            print(f" 24h range: ${min(lows):.4f} - ${max(highs):.4f}")
            print(f" Candles: {len(klines)}")
        print()
|
||||
166
projects/crypto-signals/scripts/signal_parser.py
Normal file
166
projects/crypto-signals/scripts/signal_parser.py
Normal file
@ -0,0 +1,166 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Telegram Crypto Signal Parser
|
||||
Parses exported Telegram JSON chat history and extracts structured trading signals.
|
||||
"""
|
||||
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# Signal patterns - adapt as we see more formats
# NOTE(review): PATTERNS is not referenced by the parsing functions below,
# which build their own per-field regexes — confirm whether this table is
# still needed or is kept for future whole-message matching.
PATTERNS = {
    # #TICKER direction entry SL target leverage balance%
    'standard': re.compile(
        r'#(\w+)\s+'  # ticker
        r'(Long|Short)\s+'  # direction
        r'(?:market\s+entry!?|entry[:\s]+([0-9.]+))\s*'  # entry type/price
        r'SL[;:\s]+([0-9.]+)\s*'  # stop loss
        r'(?:Targets?|TP)[;:\s]+([0-9.,\s]+)\s*'  # targets (can be multiple)
        r'(?:Lev(?:erage)?[:\s]*x?([0-9.]+))?\s*'  # leverage (optional)
        r'(?:([0-9.]+)%?\s*balance)?',  # balance % (optional)
        re.IGNORECASE
    ),
    # Simpler: #TICKER Short/Long entry SL targets
    'simple': re.compile(
        r'#(\w+)\s+(Long|Short)',
        re.IGNORECASE
    ),
}
|
||||
|
||||
|
||||
def parse_signal_text(text):
    """Extract structured signal dict(s) from raw message text.

    The text is split into per-ticker blocks at each '#XXXUSDT' marker;
    a block must contain at least a ticker and a Long/Short direction to
    count as a signal. Entry defaults to 'market' when no numeric entry
    price is present; stop loss, targets, leverage and balance% are added
    only when found.
    """
    blocks = [b for b in re.split(r'(?=#\w+USDT)', text) if b.strip()]
    parsed = []

    for block in blocks:
        ticker_m = re.search(r'#(\w+)', block)
        dir_m = re.search(r'\b(Long|Short)\b', block, re.IGNORECASE)
        if not ticker_m or not dir_m:
            continue

        sig = {
            'ticker': ticker_m.group(1).upper(),
            'direction': dir_m.group(1).lower(),
        }

        # Entry price, or "market" when the message has no numeric entry.
        entry_m = re.search(r'(?:entry|enter)[:\s]*([0-9.]+)', block, re.IGNORECASE)
        sig['entry'] = float(entry_m.group(1)) if entry_m else 'market'

        sl_m = re.search(r'SL[;:\s]+([0-9.]+)', block, re.IGNORECASE)
        if sl_m:
            sig['stop_loss'] = float(sl_m.group(1))

        # Targets may be multiple, comma or space separated.
        tp_m = re.search(r'(?:Targets?|TP)[;:\s]+([0-9.,\s]+)', block, re.IGNORECASE)
        if tp_m:
            sig['targets'] = [float(t.strip()) for t in re.findall(r'[0-9.]+', tp_m.group(1))]

        lev_m = re.search(r'Lev(?:erage)?[:\s]*x?([0-9.]+)', block, re.IGNORECASE)
        if lev_m:
            sig['leverage'] = float(lev_m.group(1))

        bal_m = re.search(r'([0-9.]+)%?\s*balance', block, re.IGNORECASE)
        if bal_m:
            sig['balance_pct'] = float(bal_m.group(1))

        parsed.append(sig)

    return parsed
|
||||
|
||||
|
||||
def parse_telegram_export(json_path):
    """Parse a Telegram JSON chat export and return every signal found.

    Each returned signal is annotated with the source message's 'date',
    'id' and the first 500 chars of its text.
    """
    with open(json_path, 'r') as f:
        export = json.load(f)

    all_signals = []
    for msg in export.get('messages', []):
        if msg.get('type') != 'message':
            continue

        # Telegram stores text either as a plain string or as a list of
        # entities (strings mixed with {'type': ..., 'text': ...} dicts).
        raw = msg.get('text', '')
        if isinstance(raw, list):
            text = ''.join(
                part if isinstance(part, str) else part.get('text', '')
                for part in raw
            )
        else:
            text = raw

        # Cheap pre-filters before running the full parser.
        if not text or '#' not in text:
            continue
        if not re.search(r'(Long|Short)', text, re.IGNORECASE):
            continue

        for signal in parse_signal_text(text):
            signal['timestamp'] = msg.get('date', '')
            signal['message_id'] = msg.get('id', '')
            signal['raw_text'] = text[:500]
            all_signals.append(signal)

    return all_signals
|
||||
|
||||
|
||||
def parse_forwarded_messages(messages_text):
    """Parse signals out of text forwarded or copy-pasted directly to the bot."""
    return parse_signal_text(messages_text)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    if len(sys.argv) < 2:
        # No export file given: run the built-in demo with the test signals
        test_text = """#ASTERUSDT Short market entry! SL: 0.6385 Targets: 0.51 Lev x15 1.3% balance
#HYPEUSDT Short market entry! SL; 33.5 Target 25 Lev x12 1.4% balance"""

        signals = parse_signal_text(test_text)
        print(f"Parsed {len(signals)} signals:\n")
        for s in signals:
            print(json.dumps(s, indent=2))
    else:
        # Parse a real Telegram JSON export passed on the command line.
        json_path = sys.argv[1]
        signals = parse_telegram_export(json_path)
        print(f"Parsed {len(signals)} signals from export\n")

        # Save to output (export.json -> export_signals.json)
        out_path = json_path.replace('.json', '_signals.json')
        with open(out_path, 'w') as f:
            json.dump(signals, f, indent=2)
        print(f"Saved to {out_path}")

        # Quick summary
        longs = sum(1 for s in signals if s['direction'] == 'long')
        shorts = sum(1 for s in signals if s['direction'] == 'short')
        print(f"Longs: {longs}, Shorts: {shorts}")
        tickers = set(s['ticker'] for s in signals)
        print(f"Unique tickers: {len(tickers)}")
|
||||
File diff suppressed because one or more lines are too long
@ -1,5 +1,5 @@
|
||||
{
|
||||
"last_check": "2026-02-09T16:58:59.975254+00:00",
|
||||
"last_check": "2026-02-09T20:27:59.790122+00:00",
|
||||
"total_tracked": 3100,
|
||||
"new_this_check": 0
|
||||
}
|
||||
Reference in New Issue
Block a user