#!/usr/bin/env python3
"""
Fetch complete trade history for kch123 on Polymarket.

Paginates through the Polymarket data API 100 records at a time, saves the
raw activity list as JSON, and prints quick summary statistics.
"""
import json
import os
import time
import requests
from typing import List, Dict

# Wallet address behind the "kch123" account on Polymarket.
USER_ADDRESS = "0x6a72f61820b26b1fe4d956e17b6dc2a1ea3033ee"
# API page size; also used to detect the final (partial) page and to advance
# the offset — keeping it in one place prevents the three uses from drifting.
PAGE_SIZE = 100
# Seconds before a request is abandoned; without this a stalled connection
# would hang the crawl indefinitely.
REQUEST_TIMEOUT = 30
OUTPUT_FILE = "/home/wdjones/.openclaw/workspace/projects/feed-hunter/data/investigations/kch123-trades.json"


def fetch_page(offset: int, limit: int = PAGE_SIZE, user: str = USER_ADDRESS) -> List[Dict]:
    """Fetch a single page of trade data.

    Args:
        offset: Record offset to start the page at.
        limit: Maximum records per page (API caps at 100).
        user: Wallet address to query (defaults to kch123's address).

    Returns:
        A list of activity dicts; empty list on error or when the API
        returns a non-list payload (treated as end-of-data by callers).
    """
    url = f"https://data-api.polymarket.com/activity?user={user}&limit={limit}&offset={offset}"
    try:
        response = requests.get(url, timeout=REQUEST_TIMEOUT)
        response.raise_for_status()
        data = response.json()
        return data if isinstance(data, list) else []
    # RequestException covers connection/HTTP/timeout errors; ValueError
    # covers malformed JSON. Anything else is a bug and should propagate.
    except (requests.RequestException, ValueError) as e:
        print(f"Error fetching offset {offset}: {e}")
        return []


def fetch_all_trades() -> List[Dict]:
    """Fetch all trades by paginating through the API.

    Stops on the first empty page or the first partial page (fewer than
    PAGE_SIZE records), which the API uses to signal end-of-data.

    Returns:
        All activity records fetched, in API order.
    """
    all_trades: List[Dict] = []
    offset = 0

    print("Fetching trade history...")
    while True:
        print(f"Fetching offset {offset}...")
        page_data = fetch_page(offset)

        if not page_data:
            print(f"No more data at offset {offset}, stopping.")
            break

        all_trades.extend(page_data)
        print(f"Got {len(page_data)} trades. Total so far: {len(all_trades)}")

        # A partial page means we've reached the end of the data.
        if len(page_data) < PAGE_SIZE:
            print("Reached end of data (partial page).")
            break

        offset += PAGE_SIZE
        time.sleep(0.1)  # Be nice to the API

    return all_trades


def main():
    """Fetch all trades, save them to OUTPUT_FILE, and print quick stats."""
    trades = fetch_all_trades()
    print(f"\nTotal trades fetched: {len(trades)}")

    # Ensure the destination directory exists before writing.
    os.makedirs(os.path.dirname(OUTPUT_FILE), exist_ok=True)
    with open(OUTPUT_FILE, 'w') as f:
        json.dump(trades, f, indent=2)
    print(f"Saved to {OUTPUT_FILE}")

    # Quick stats
    buy_trades = [t for t in trades if t.get('type') == 'TRADE' and t.get('side') == 'BUY']
    redeem_trades = [t for t in trades if t.get('type') == 'REDEEM']
    print(f"BUY trades: {len(buy_trades)}")
    print(f"REDEEM trades: {len(redeem_trades)}")

    if trades:
        # int() coercion so both numeric and string-typed timestamps work
        # with time.ctime(), which requires a number.
        earliest = min(int(t['timestamp']) for t in trades)
        latest = max(int(t['timestamp']) for t in trades)
        print(f"Date range: {time.ctime(earliest)} to {time.ctime(latest)}")


if __name__ == "__main__":
    main()