#!/bin/bash
#
# X Feed Scraper — Automated screenshot pipeline.
#
# Scrolls through an X.com feed in a Chrome window on DISPLAY :0,
# capturing one full-screen screenshot per "page" with ImageMagick's
# `import`, then writes a JSON manifest describing the run.
#
# Usage: ./x-feed-scraper.sh [pages] [output_dir]
#   pages       number of screenshots to capture (default: 5)
#   output_dir  base directory for run folders (default below)
#
# Requires: xdotool, ImageMagick (import), google-chrome-stable, an X
# server on :0.

set -euo pipefail

PAGES=${1:-5}
OUTPUT_DIR=${2:-/home/wdjones/.openclaw/workspace/data/x-feed}
readonly DELAY=3  # seconds to wait after each scroll so the feed can render

# Validate PAGES up front: a non-numeric value would corrupt the loop
# bounds and the "pages" field of the generated JSON manifest.
case "$PAGES" in
  '' | *[!0-9]*)
    echo "error: pages must be a positive integer, got '$PAGES'" >&2
    exit 2
    ;;
esac

mkdir -p "$OUTPUT_DIR"

# Each run gets its own timestamped subdirectory so repeated runs never
# overwrite each other.
TIMESTAMP=$(date +%Y%m%d-%H%M%S)
RUN_DIR="$OUTPUT_DIR/$TIMESTAMP"
mkdir -p "$RUN_DIR"

echo "=== X Feed Scraper ==="
echo "Pages: $PAGES"
echo "Output: $RUN_DIR"

# Reuse an existing Chrome session showing x.com if one is running.
# `|| true` guards the pipeline under pipefail: pgrep exits 1 when there
# is no match, which is an expected (not fatal) outcome here.
CHROME_PID=$(pgrep -f "google-chrome.*x.com" | head -1 || true)
if [ -z "$CHROME_PID" ]; then
  echo "Launching Chrome with X..."
  DISPLAY=:0 nohup /usr/bin/google-chrome-stable --no-sandbox \
    --user-data-dir=/home/wdjones/.config/google-chrome \
    https://x.com/home > /dev/null 2>&1 &
  sleep 8  # give Chrome time to start and load the feed
  echo "Chrome launched"
else
  echo "Chrome already running (PID: $CHROME_PID)"
fi

# Click on the feed area to ensure keyboard focus lands on the page.
DISPLAY=:0 xdotool mousemove 960 540 click 1
sleep 1

# Scroll to the top so page 1 always starts at the same position.
DISPLAY=:0 xdotool key Home
sleep 2

echo "Starting capture..."
for (( i = 1; i <= PAGES; i++ )); do
  # `import -window root` grabs the whole screen, not just Chrome.
  DISPLAY=:0 import -window root "$RUN_DIR/page-$i.png"
  echo "Captured page $i/$PAGES"
  if (( i < PAGES )); then
    # Scroll down one viewport, then let the feed lazy-load.
    DISPLAY=:0 xdotool key Page_Down
    sleep "$DELAY"
  fi
done

# Generate the manifest in a single redirected group rather than nine
# separate appends: one open/close of the file, and the whole document
# either lands atomically-ish or not at all under set -e.
{
  echo "{"
  echo "  \"timestamp\": \"$TIMESTAMP\","
  echo "  \"pages\": $PAGES,"
  echo "  \"files\": ["
  for (( i = 1; i <= PAGES; i++ )); do
    if (( i < PAGES )); then
      COMMA=","
    else
      COMMA=""  # no trailing comma on the last array element (valid JSON)
    fi
    echo "    \"page-$i.png\"$COMMA"
  done
  echo "  ]"
  echo "}"
} > "$RUN_DIR/manifest.json"

echo ""
echo "=== Done ==="
echo "Captured $PAGES pages to $RUN_DIR"
echo "Run analysis with: ws x-analyze $RUN_DIR"