Files
workspace/projects/feed-hunter/data/investigations/fetch-all-trades.py

52 lines
1.6 KiB
Python

#!/usr/bin/env python3
"""
Manual script to help coordinate fetching all kch123 trades
We'll use this to track progress and combine results
"""
import json
import os
def load_partial_data(filename):
    """Return the parsed JSON contents of *filename*.

    Falls back to an empty list when the file does not exist, so callers
    can resume from scratch without special-casing the first run.
    """
    if not os.path.exists(filename):
        return []
    with open(filename, 'r') as f:
        return json.load(f)
def save_partial_data(data, filename):
    """Serialize *data* to *filename* as 2-space-indented JSON."""
    serialized = json.dumps(data, indent=2)
    with open(filename, 'w') as f:
        f.write(serialized)
def combine_trade_files(
    base_dir="/home/wdjones/.openclaw/workspace/projects/feed-hunter/data/investigations/",
    page_size=100,
):
    """Merge sequential trades_<offset>.json pages into kch123-trades.json.

    Pages are expected at offsets 0, page_size, 2*page_size, ...; scanning
    stops at the first missing file, so a gap in the sequence truncates the
    result (same behavior as the original hard-coded loop).

    Args:
        base_dir: Directory containing the trades_<offset>.json page files.
            Defaults to the original hard-coded investigations directory.
        page_size: Offset increment between consecutive page files.

    Returns:
        list: All trades concatenated in offset order (also written to
        <base_dir>/kch123-trades.json).
    """
    all_trades = []
    offset = 0
    while True:
        # os.path.join tolerates base_dir with or without a trailing slash.
        filename = os.path.join(base_dir, f"trades_{offset}.json")
        if not os.path.exists(filename):
            break
        with open(filename, 'r') as f:
            page_data = json.load(f)
        all_trades.extend(page_data)
        print(f"Loaded {len(page_data)} trades from offset {offset}")
        offset += page_size
    # Save combined data
    output_file = os.path.join(base_dir, "kch123-trades.json")
    with open(output_file, 'w') as f:
        json.dump(all_trades, f, indent=2)
    print(f"Combined {len(all_trades)} total trades into {output_file}")
    return all_trades
if __name__ == "__main__":
print("Run this after manually fetching all trade pages")
print("Usage: fetch pages manually with web_fetch, save as trades_0.json, trades_100.json, etc.")
print("Then run combine_trade_files() to merge them all")