Night shift: tweet analyzer, data connectors, feed monitor, market watch portal
This commit is contained in:
389
tools/analyze_tweet.py
Executable file
389
tools/analyze_tweet.py
Executable file
@ -0,0 +1,389 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Tweet Analysis Tool - Scrapes and analyzes tweets via Chrome CDP."""
|
||||
|
||||
import argparse
|
||||
import asyncio
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
try:
|
||||
from playwright.async_api import async_playwright
|
||||
except ImportError:
|
||||
print("ERROR: playwright not installed. Run: pip install playwright", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
import yfinance as yf
|
||||
except ImportError:
|
||||
yf = None
|
||||
|
||||
|
||||
def extract_tickers(text: str) -> list[str]:
    """Extract unique $TICKER symbols (e.g. $AAPL, $BRK.B) from *text*.

    Matching is case-insensitive (text is upper-cased first). Returns the
    symbols sorted alphabetically: the previous ``list(set(...))`` form
    produced a different order on every run (string hash randomization),
    which made output and downstream reports nondeterministic.
    """
    # 1-5 letters, optionally a 1-2 letter class/exchange suffix (.B, .TO).
    return sorted(set(re.findall(r'\$([A-Z]{1,5}(?:\.[A-Z]{1,2})?)', text.upper())))
|
||||
|
||||
|
||||
def lookup_tickers(tickers: list[str]) -> dict:
    """Fetch basic quote data for each ticker via yfinance.

    Returns a mapping of ticker -> quote fields. Yields {} when yfinance
    is unavailable or no tickers were given; a failed lookup maps to an
    error marker instead of raising.
    """
    if not tickers or not yf:
        return {}
    quotes: dict = {}
    # Cap network lookups per call to keep the tool responsive.
    for symbol in tickers[:5]:
        try:
            info = yf.Ticker(symbol).info
            quotes[symbol] = {
                "price": info.get("currentPrice") or info.get("regularMarketPrice"),
                "market_cap": info.get("marketCap"),
                "name": info.get("shortName"),
                "volume": info.get("volume"),
                "day_change_pct": info.get("regularMarketChangePercent"),
                "52w_high": info.get("fiftyTwoWeekHigh"),
                "52w_low": info.get("fiftyTwoWeekLow"),
            }
        except Exception:
            # Best-effort: a bad symbol should not abort the whole report.
            quotes[symbol] = {"error": "lookup failed"}
    return quotes
|
||||
|
||||
|
||||
async def scrape_tweet(url: str) -> dict:
    """Connect to Chrome CDP and scrape tweet data.

    Attaches to a locally running Chrome (remote debugging port 9222) via
    Playwright's CDP bridge, loads the tweet permalink, and extracts the
    author, text, timestamp, engagement metrics, images, and a small
    sample of replies. Never raises: any failure is recorded in the
    returned dict's "scrape_error" field instead.
    """
    # Normalize URL: x.com is the canonical host; allow bare domains.
    url = url.replace("twitter.com", "x.com")
    if not url.startswith("http"):
        url = "https://" + url

    # Result skeleton — every field is present even when scraping fails.
    data = {
        "url": url,
        "author": None,
        "handle": None,
        "text": None,
        "timestamp": None,
        "metrics": {},
        "images": [],
        "bio": None,
        "followers": None,
        "following": None,
        "reply_to": None,
        "replies_sample": [],
        "scrape_error": None,
    }

    async with async_playwright() as p:
        try:
            # Attach to Chrome started with --remote-debugging-port=9222.
            browser = await p.chromium.connect_over_cdp("http://localhost:9222")
        except Exception as e:
            data["scrape_error"] = f"CDP connection failed: {e}"
            return data

        try:
            # Reuse the browser's existing context (keeps login cookies).
            ctx = browser.contexts[0] if browser.contexts else await browser.new_context()
            page = await ctx.new_page()
            await page.goto(url, wait_until="domcontentloaded", timeout=30000)
            # Fixed settle delay: X renders tweet content client-side after load.
            await page.wait_for_timeout(4000)

            # Get the main tweet article
            # Try to find the focal tweet
            tweet_sel = 'article[data-testid="tweet"]'
            articles = await page.query_selector_all(tweet_sel)

            if not articles:
                data["scrape_error"] = "No tweet articles found on page"
                await page.close()
                return data

            # The focal tweet is typically the one with the largest text or specific structure
            # On a tweet permalink, it's usually the first or second article
            focal = None
            for art in articles:
                # The focal tweet has a different time display (absolute vs relative)
                time_el = await art.query_selector('time')
                if time_el:
                    dt = await time_el.get_attribute('datetime')
                    if dt:
                        focal = art
                        data["timestamp"] = dt
                        break
            if not focal:
                # Fall back to the first article when no datetime was found.
                focal = articles[0]

            # Author info: profile links look like "/<handle>" (one path segment).
            user_links = await focal.query_selector_all('a[role="link"]')
            for link in user_links:
                href = await link.get_attribute("href") or ""
                if href.startswith("/") and href.count("/") == 1 and len(href) > 1:
                    spans = await link.query_selector_all("span")
                    for span in spans:
                        txt = (await span.inner_text()).strip()
                        if txt.startswith("@"):
                            data["handle"] = txt
                        elif txt and not data["author"] and not txt.startswith("@"):
                            # First non-@ span is taken as the display name.
                            data["author"] = txt
                    break

            # Tweet text
            text_el = await focal.query_selector('div[data-testid="tweetText"]')
            if text_el:
                data["text"] = await text_el.inner_text()

            # Metrics (replies, retweets, likes, views)
            group = await focal.query_selector('div[role="group"]')
            if group:
                buttons = await group.query_selector_all('button')
                # NOTE(review): assumes X keeps this button order stable —
                # verify if metric values ever look swapped.
                metric_names = ["replies", "retweets", "likes", "bookmarks"]
                for i, btn in enumerate(buttons):
                    aria = await btn.get_attribute("aria-label") or ""
                    # Parse numbers from aria labels like "123 replies"
                    nums = re.findall(r'[\d,]+', aria)
                    if nums and i < len(metric_names):
                        data["metrics"][metric_names[i]] = nums[0].replace(",", "")

            # Views - often in a separate span
            view_spans = await focal.query_selector_all('a[role="link"] span')
            for vs in view_spans:
                txt = (await vs.inner_text()).strip()
                if "views" in txt.lower() or "Views" in txt:
                    # Keep the abbreviated form as-is (e.g. "1.2M").
                    nums = re.findall(r'[\d,.KkMm]+', txt)
                    if nums:
                        data["metrics"]["views"] = nums[0]

            # Images
            imgs = await focal.query_selector_all('img[alt="Image"]')
            for img in imgs:
                src = await img.get_attribute("src")
                if src:
                    data["images"].append(src)

            # Check if it's a reply
            # NOTE(review): collected but never used — reply detection is not
            # implemented, and data["reply_to"] is never populated.
            reply_indicators = await page.query_selector_all('div[data-testid="tweet"] a[role="link"]')

            # Try to get author profile info by hovering or checking
            # We'll grab it from the page if visible
            if data["handle"]:
                # NOTE(review): handle_clean is computed but unused below —
                # the regexes scan the whole body text instead.
                handle_clean = data["handle"].lstrip("@")
                # Check for bio/follower info in any hover cards or visible elements
                all_text = await page.inner_text("body")
                # Look for follower patterns
                follower_match = re.search(r'([\d,.]+[KkMm]?)\s+Followers', all_text)
                following_match = re.search(r'([\d,.]+[KkMm]?)\s+Following', all_text)
                if follower_match:
                    data["followers"] = follower_match.group(1)
                if following_match:
                    data["following"] = following_match.group(1)

            # Sample some replies (articles after the focal tweet)
            if len(articles) > 1:
                for art in articles[1:4]:
                    reply_text_el = await art.query_selector('div[data-testid="tweetText"]')
                    if reply_text_el:
                        rt = await reply_text_el.inner_text()
                        if rt:
                            # Truncate each sampled reply to 200 chars.
                            data["replies_sample"].append(rt[:200])

            await page.close()

        except Exception as e:
            data["scrape_error"] = str(e)
            try:
                # page may be unbound if new_page() itself failed; the bare
                # except below deliberately absorbs that NameError too.
                await page.close()
            except:
                pass

    return data
|
||||
|
||||
|
||||
def analyze(data: dict) -> dict:
    """Build a structured risk analysis from scraped tweet data.

    Combines ticker extraction, market lookups, and a battery of
    heuristic red-flag checks into a report dict with a human-readable
    verdict.
    """
    text = data.get("text") or ""
    tickers = extract_tickers(text)
    ticker_data = lookup_tickers(tickers)

    lowered = text.lower()
    flags: list[str] = []

    # Heuristic 1: pump-style promotional vocabulary.
    promo_words = ["100x", "1000x", "moon", "gem", "rocket", "guaranteed", "easy money",
                   "don't miss", "last chance", "about to explode", "next big", "sleeping giant",
                   "never stops printing", "true freedom", "beat the institutions", "revolution",
                   "empire", "vault", "get rich", "financial freedom", "life changing",
                   "without a degree", "from a bedroom", "join this"]
    flags.extend(f"Promotional language: '{word}'" for word in promo_words if word in lowered)

    # Heuristic 2: shotgun ticker spam.
    if len(tickers) > 3:
        flags.append(f"Multiple tickers mentioned ({len(tickers)})")

    # Heuristic 3: thread-length and funnel signals.
    if len(text) > 2000:
        flags.append("Extremely long promotional thread")
    if "github" in lowered and ("star" in lowered or "repo" in lowered):
        flags.append("Pushing GitHub repo (potential funnel to paid product)")
    if any(word in lowered for word in ["course", "discord", "premium", "paid group", "subscribe"]):
        flags.append("Funneling to paid product/community")

    # Heuristic 4: coordinated hype in the sampled replies.
    sampled = data.get("replies_sample", [])
    if sampled:
        hype_count = sum(1 for reply in sampled if any(marker in reply for marker in ["🚀", "💎", "🔥", "LFG"]))
        if hype_count >= 2:
            flags.append("Replies show coordinated hype patterns")

    # Heuristic 5: penny-stock / micro-cap characteristics from market data.
    for symbol, quote in ticker_data.items():
        if isinstance(quote, dict) and not quote.get("error"):
            last_price = quote.get("price")
            cap = quote.get("market_cap")
            if last_price and last_price < 1:
                flags.append(f"${symbol} is a penny stock (${last_price})")
            if cap and cap < 50_000_000:
                flags.append(f"${symbol} micro-cap (<$50M market cap)")

    # Verdict tiers keyed on flag count, then on ticker presence.
    if len(flags) >= 3:
        verdict = "High risk - multiple red flags detected, exercise extreme caution"
    elif len(flags) >= 1:
        verdict = "Some concerns - verify claims independently before acting"
    elif tickers:
        verdict = "Worth investigating - do your own due diligence"
    else:
        verdict = "Informational tweet - no immediate financial claims detected"

    return {
        "tweet_data": data,
        "tickers_found": tickers,
        "ticker_data": ticker_data,
        "red_flags": flags,
        "verdict": verdict,
    }
|
||||
|
||||
|
||||
def format_markdown(analysis: dict) -> str:
    """Format an analysis dict as a markdown report.

    Sections: WHO (author), WHAT (content and metrics), VERIFY (ticker
    market data), RED FLAGS, MONEY, VERDICT, plus a trailing warning when
    the scraper reported an error. Returns the full report as one string.
    """
    d = analysis["tweet_data"]
    lines = ["# Tweet Analysis", ""]
    lines.append(f"**URL:** {d['url']}")
    lines.append(f"**Analyzed:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    lines.append("")

    # WHO
    lines.append("## 👤 WHO")
    lines.append(f"- **Author:** {d.get('author') or 'Unknown'}")
    lines.append(f"- **Handle:** {d.get('handle') or 'Unknown'}")
    if d.get("followers"):
        lines.append(f"- **Followers:** {d['followers']}")
    if d.get("following"):
        lines.append(f"- **Following:** {d['following']}")
    if d.get("bio"):
        lines.append(f"- **Bio:** {d['bio']}")
    lines.append("")

    # WHAT
    lines.append("## 📝 WHAT")
    lines.append(f"> {d.get('text') or 'Could not extract tweet text'}")
    lines.append("")
    if d.get("timestamp"):
        lines.append(f"**Posted:** {d['timestamp']}")
    metrics = d.get("metrics", {})
    if metrics:
        m_parts = [f"{v} {k}" for k, v in metrics.items()]
        lines.append(f"**Metrics:** {' | '.join(m_parts)}")
    if d.get("images"):
        lines.append(f"**Images:** {len(d['images'])} attached")
    lines.append("")

    # VERIFY
    lines.append("## ✅ VERIFY")
    tickers = analysis.get("tickers_found", [])
    td = analysis.get("ticker_data", {})
    if tickers:
        lines.append(f"**Tickers mentioned:** {', '.join('$' + t for t in tickers)}")
        lines.append("")
        for t, info in td.items():
            if isinstance(info, dict) and not info.get("error"):
                lines.append(f"### ${t}" + (f" - {info.get('name', '')}" if info.get('name') else ""))
                if info.get("price"):
                    lines.append(f"- **Price:** ${info['price']}")
                if info.get("market_cap"):
                    mc = info["market_cap"]
                    if mc > 1e9:
                        lines.append(f"- **Market Cap:** ${mc/1e9:.2f}B")
                    else:
                        lines.append(f"- **Market Cap:** ${mc/1e6:.1f}M")
                if info.get("volume"):
                    lines.append(f"- **Volume:** {info['volume']:,}")
                # BUG FIX: a 0.00% day change is valid data; the old truthy
                # check silently dropped it. Only skip when the field is absent.
                if info.get("day_change_pct") is not None:
                    lines.append(f"- **Day Change:** {info['day_change_pct']:.2f}%")
                if info.get("52w_high") and info.get("52w_low"):
                    lines.append(f"- **52W Range:** ${info['52w_low']} - ${info['52w_high']}")
                lines.append("")
            elif isinstance(info, dict) and info.get("error"):
                lines.append(f"- ${t}: lookup failed")
    else:
        lines.append("No tickers mentioned in tweet.")
    lines.append("")

    # RED FLAGS
    lines.append("## 🚩 RED FLAGS")
    flags = analysis.get("red_flags", [])
    if flags:
        for f in flags:
            lines.append(f"- ⚠️ {f}")
    else:
        lines.append("- None detected")
    lines.append("")

    # MONEY
    lines.append("## 💰 MONEY")
    if tickers and not flags:
        lines.append("Potential opportunity identified. Research further before any position.")
    elif tickers and flags:
        lines.append("Tickers mentioned but red flags present. High risk of promoted/manipulated asset.")
    else:
        lines.append("No direct financial opportunity identified in this tweet.")
    lines.append("")

    # VERDICT
    lines.append("## 🎯 VERDICT")
    lines.append(f"**{analysis['verdict']}**")
    lines.append("")

    # Scrape issues
    if d.get("scrape_error"):
        lines.append(f"---\n⚠️ *Scrape warning: {d['scrape_error']}*")

    return "\n".join(lines)
|
||||
|
||||
|
||||
async def main():
    """CLI entry point: scrape the given tweet URL and print the analysis."""
    parser = argparse.ArgumentParser(description="Analyze a tweet")
    parser.add_argument("url", help="Tweet URL (x.com or twitter.com)")
    parser.add_argument("--json", action="store_true", dest="json_output", help="Output JSON")
    parser.add_argument("-o", "--output", help="Write output to file")
    args = parser.parse_args()

    # Reject anything that is not a status permalink before touching the network.
    if not re.search(r'(x\.com|twitter\.com)/.+/status/\d+', args.url):
        print("ERROR: Invalid tweet URL", file=sys.stderr)
        sys.exit(1)

    print("Scraping tweet...", file=sys.stderr)
    scraped = await scrape_tweet(args.url)

    print("Analyzing...", file=sys.stderr)
    report = analyze(scraped)

    # Render once, then route to file or stdout.
    if args.json_output:
        rendered = json.dumps(report, indent=2, default=str)
    else:
        rendered = format_markdown(report)

    if args.output:
        with open(args.output, "w") as fh:
            fh.write(rendered)
        print(f"Written to {args.output}", file=sys.stderr)
    else:
        print(rendered)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: drive the async CLI on a fresh event loop.
    asyncio.run(main())
|
||||
51
tools/data_sources/README.md
Normal file
51
tools/data_sources/README.md
Normal file
@ -0,0 +1,51 @@
|
||||
# Data Source Connectors
|
||||
|
||||
Standalone Python scripts for fetching crypto/market data. Each has a CLI with `--pretty` (JSON formatting) and `--summary` (human-readable output).
|
||||
|
||||
## defillama.py ✅ (no auth needed)
|
||||
|
||||
DefiLlama API — DeFi protocol data, token prices, yield farming opportunities.
|
||||
|
||||
```bash
|
||||
./defillama.py protocols --limit 10 --summary # Top protocols by TVL
|
||||
./defillama.py tvl aave --pretty # TVL for specific protocol
|
||||
./defillama.py prices coingecko:bitcoin coingecko:ethereum --summary
|
||||
./defillama.py yields --limit 20 --stablecoins --summary # Top stablecoin yields
|
||||
```
|
||||
|
||||
**Endpoints used:** api.llama.fi/protocols, api.llama.fi/tvl/{name}, coins.llama.fi/prices, yields.llama.fi/pools
|
||||
|
||||
## coinglass.py 🔑 (API key recommended)
|
||||
|
||||
Coinglass — funding rates, open interest, long/short ratios.
|
||||
|
||||
```bash
|
||||
export COINGLASS_API_KEY=your_key # Get at coinglass.com/pricing
|
||||
./coinglass.py funding --summary
|
||||
./coinglass.py oi --summary
|
||||
./coinglass.py long-short --summary
|
||||
```
|
||||
|
||||
**Note:** Free internal API endpoints often return empty data. API key required for reliable access.
|
||||
|
||||
## arkham.py 🔑 (API key required)
|
||||
|
||||
Arkham Intelligence — whale wallet tracking, token transfers, entity search.
|
||||
|
||||
```bash
|
||||
export ARKHAM_API_KEY=your_key # Sign up at platform.arkhamintelligence.com
|
||||
./arkham.py notable --summary # List known whale addresses
|
||||
./arkham.py address vitalik --summary # Address intelligence (supports name shortcuts)
|
||||
./arkham.py transfers 0x1234... --limit 10 --pretty
|
||||
./arkham.py search "binance" --pretty
|
||||
```
|
||||
|
||||
**Built-in shortcuts:** vitalik, justin-sun, binance-hot, coinbase-prime, aave-treasury, uniswap-deployer
|
||||
|
||||
## Programmatic Usage
|
||||
|
||||
```python
|
||||
from tools.data_sources.defillama import get_protocols, get_prices, get_yield_pools
|
||||
from tools.data_sources.coinglass import get_funding_rates
|
||||
from tools.data_sources.arkham import get_address_info, NOTABLE_ADDRESSES
|
||||
```
|
||||
4
tools/data_sources/__init__.py
Executable file
4
tools/data_sources/__init__.py
Executable file
@ -0,0 +1,4 @@
|
||||
"""Crypto & market data source connectors."""
|
||||
from pathlib import Path
|
||||
|
||||
DATA_SOURCES_DIR = Path(__file__).parent
|
||||
167
tools/data_sources/arkham.py
Executable file
167
tools/data_sources/arkham.py
Executable file
@ -0,0 +1,167 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Arkham Intelligence connector — whale tracking, token flows, address intelligence.
|
||||
|
||||
Requires API key for most endpoints. Set ARKHAM_API_KEY env var.
|
||||
Sign up at https://platform.arkhamintelligence.com
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import Any
|
||||
|
||||
import requests
|
||||
|
||||
# API endpoint root and request timeout (seconds) for all Arkham calls.
BASE = "https://api.arkhamintelligence.com"
TIMEOUT = 30

# Shortcut-name -> address map for well-known wallets; the CLI accepts
# these names anywhere an address is expected (see resolve_address).
# NOTE(review): addresses are hard-coded — verify against current public labels.
NOTABLE_ADDRESSES = {
    "vitalik": "0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045",
    "justin-sun": "0x3DdfA8eC3052539b6C9549F12cEA2C295cfF5296",
    "binance-hot": "0x28C6c06298d514Db089934071355E5743bf21d60",
    "coinbase-prime": "0xA9D1e08C7793af67e9d92fe308d5697FB81d3E43",
    "aave-treasury": "0x464C71f6c2F760DdA6093dCB91C24c39e5d6e18c",
    "uniswap-deployer": "0x41653c7d61609D856f29355E404F310Ec4142Cfb",
}
|
||||
|
||||
|
||||
def _get(path: str, params: dict | None = None) -> Any:
    """GET an Arkham API path and return the parsed JSON body.

    Sends the API-Key header when ARKHAM_API_KEY is set. Raises
    EnvironmentError when the API rejects the request for lack of a key,
    and requests.HTTPError for other HTTP failures.
    """
    headers = {"User-Agent": "Mozilla/5.0"}
    api_key = os.environ.get("ARKHAM_API_KEY")
    if api_key:
        headers["API-Key"] = api_key
    response = requests.get(f"{BASE}/{path}", params=params, headers=headers, timeout=TIMEOUT)
    # Auth failures surface as 401/403 or as a body mentioning the key.
    rejected = response.status_code in (401, 403) or "api key" in response.text.lower()
    if rejected:
        raise EnvironmentError(
            "Arkham API key required. Set ARKHAM_API_KEY env var.\n"
            "Sign up at https://platform.arkhamintelligence.com"
        )
    response.raise_for_status()
    return response.json()
|
||||
|
||||
|
||||
def resolve_address(name_or_addr: str) -> str:
    """Map a known shortcut name to its address; pass other input through."""
    shortcut = name_or_addr.lower()
    if shortcut in NOTABLE_ADDRESSES:
        return NOTABLE_ADDRESSES[shortcut]
    return name_or_addr
|
||||
|
||||
|
||||
# ── Data fetchers ───────────────────────────────────────────────────────────
|
||||
|
||||
def get_address_info(address: str) -> dict:
    """Address intelligence for an address or notable-name shortcut."""
    target = resolve_address(address)
    return _get(f"intelligence/address/{target}")


def get_transfers(address: str, limit: int = 20) -> dict:
    """Most recent token transfers touching *address* (up to *limit*)."""
    query = {"address": resolve_address(address), "limit": limit}
    return _get("token/transfers", query)


def search_entity(query: str) -> dict:
    """Free-text entity search across Arkham's intelligence index."""
    return _get("intelligence/search", {"query": query})
|
||||
|
||||
|
||||
# ── Summary helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
def summary_address(data: dict) -> str:
    """Render address-intelligence JSON as a short human-readable report."""
    out = ["═══ Address Intelligence ═══", ""]
    # Non-dict payloads (error strings etc.) are echoed verbatim.
    if not isinstance(data, dict):
        out.append(f" {data}")
        return "\n".join(out)
    entity = data.get("entity", {}) or {}
    if entity:
        out.append(f" Entity: {entity.get('name', 'Unknown')}")
        out.append(f" Type: {entity.get('type', 'Unknown')}")
    out.append(f" Address: {data.get('address', '?')}")
    labels = data.get("labels", [])
    if labels:
        out.append(f" Labels: {', '.join(str(l) for l in labels)}")
    return "\n".join(out)
|
||||
|
||||
|
||||
def summary_transfers(data) -> str:
    """Render a transfers payload (bare list or wrapped dict) as aligned rows."""
    out = ["═══ Recent Transfers ═══", ""]
    # Accept a bare list, or a dict wrapping it under "transfers"/"data".
    if isinstance(data, list):
        transfers = data
    elif isinstance(data, dict):
        transfers = data.get("transfers", data.get("data", []))
    else:
        transfers = []
    if not transfers:
        out.append(" No transfers found.")
        return "\n".join(out)
    for entry in transfers[:15]:
        token_field = entry.get("token")
        token = token_field.get("symbol", "?") if isinstance(token_field, dict) else "?"
        amount = entry.get("amount", entry.get("value", "?"))
        sender = entry.get("from", {})
        receiver = entry.get("to", {})
        # Prefer a human label; otherwise show a truncated address.
        sender_txt = (sender.get("label") or sender.get("address", "?")[:12]) if isinstance(sender, dict) else str(sender)[:12]
        receiver_txt = (receiver.get("label") or receiver.get("address", "?")[:12]) if isinstance(receiver, dict) else str(receiver)[:12]
        out.append(f" {token:<8} {str(amount):>15} {sender_txt} → {receiver_txt}")
    return "\n".join(out)
|
||||
|
||||
|
||||
def summary_notable() -> str:
    """List the built-in shortcut names and their on-chain addresses."""
    rows = ["═══ Notable/Whale Addresses ═══", ""]
    rows.extend(f" {name:<20} {addr}" for name, addr in NOTABLE_ADDRESSES.items())
    rows.append("")
    rows.append(" Use these as shortcuts: arkham.py address vitalik")
    return "\n".join(rows)
|
||||
|
||||
|
||||
# ── CLI ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
def main():
    """CLI dispatcher for the Arkham connector.

    Subcommands: address, transfers, search, notable. Output is JSON by
    default; --pretty indents it and --summary prints a plain-text report.
    Exits non-zero with an error payload on stderr for any failure.
    """
    # Shared flags live on a parent parser attached to the top-level parser
    # AND each subparser, so they work both before and after the subcommand.
    common = argparse.ArgumentParser(add_help=False)
    common.add_argument("--pretty", action="store_true", help="Pretty-print JSON output")
    common.add_argument("--summary", action="store_true", help="Human-readable summary")

    parser = argparse.ArgumentParser(description="Arkham Intelligence connector", parents=[common])
    sub = parser.add_subparsers(dest="command", required=True)

    p_addr = sub.add_parser("address", help="Address intelligence", parents=[common])
    p_addr.add_argument("address", help="Ethereum address or notable name")

    p_tx = sub.add_parser("transfers", help="Recent token transfers", parents=[common])
    p_tx.add_argument("address")
    p_tx.add_argument("--limit", type=int, default=20)

    p_search = sub.add_parser("search", help="Search entities", parents=[common])
    p_search.add_argument("query")

    sub.add_parser("notable", help="List notable/whale addresses", parents=[common])

    args = parser.parse_args()

    try:
        # "notable" needs no network call — handle it first and return early.
        if args.command == "notable":
            if args.summary:
                print(summary_notable())
            else:
                json.dump(NOTABLE_ADDRESSES, sys.stdout, indent=2 if args.pretty else None)
                print()
            return

        # Network-backed commands: fetch, then either summarize or fall
        # through to the shared JSON dump below.
        if args.command == "address":
            data = get_address_info(args.address)
            if args.summary:
                print(summary_address(data)); return
            result = data
        elif args.command == "transfers":
            data = get_transfers(args.address, args.limit)
            if args.summary:
                print(summary_transfers(data)); return
            result = data
        elif args.command == "search":
            result = search_entity(args.query)
        else:
            parser.print_help(); return

        json.dump(result, sys.stdout, indent=2 if args.pretty else None)
        print()

    except EnvironmentError as e:
        # Missing/rejected API key: human-readable message, exit 1.
        print(str(e), file=sys.stderr); sys.exit(1)
    except requests.HTTPError as e:
        # Include a slice of the response body to aid debugging.
        detail = e.response.text[:200] if e.response is not None else ""
        print(json.dumps({"error": str(e), "detail": detail}), file=sys.stderr); sys.exit(1)
    except Exception as e:
        # Catch-all boundary: report as JSON rather than a raw traceback.
        print(json.dumps({"error": f"{type(e).__name__}: {e}"}), file=sys.stderr); sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point for CLI usage.
    main()
|
||||
181
tools/data_sources/coinglass.py
Executable file
181
tools/data_sources/coinglass.py
Executable file
@ -0,0 +1,181 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Coinglass data connector — funding rates, open interest, long/short ratios.
|
||||
|
||||
Uses the free fapi.coinglass.com internal API where available.
|
||||
Some endpoints may return empty data without authentication.
|
||||
Set COINGLASS_API_KEY env var for authenticated access to open-api.coinglass.com.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from typing import Any
|
||||
|
||||
import requests
|
||||
|
||||
# Unauthenticated endpoint used by the coinglass.com frontend.
FREE_BASE = "https://fapi.coinglass.com/api"
# Official public API; requires the COINGLASS_API_KEY env var.
AUTH_BASE = "https://open-api.coinglass.com/public/v2"
# Request timeout in seconds for all HTTP calls.
TIMEOUT = 30
|
||||
|
||||
|
||||
def _free_get(path: str, params: dict | None = None) -> Any:
    """GET from the free (unauthenticated) Coinglass endpoint.

    Sends browser-like headers since this endpoint serves the coinglass.com
    frontend. Raises ValueError when the API reports an error code.
    """
    browser_headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
        "Referer": "https://www.coinglass.com/",
    }
    resp = requests.get(f"{FREE_BASE}/{path}", params=params, headers=browser_headers, timeout=TIMEOUT)
    resp.raise_for_status()
    payload = resp.json()
    # Success is signalled by code "0" or an explicit success flag.
    if not (payload.get("code") == "0" or payload.get("success")):
        raise ValueError(f"API error: {payload.get('msg', 'unknown')}")
    return payload.get("data", [])
|
||||
|
||||
|
||||
def _auth_get(path: str, params: dict | None = None) -> Any:
    """GET from the authenticated Coinglass API.

    Raises EnvironmentError when COINGLASS_API_KEY is unset, and
    ValueError when the API reports an error code.
    """
    api_key = os.environ.get("COINGLASS_API_KEY")
    if not api_key:
        raise EnvironmentError("COINGLASS_API_KEY not set. Get one at https://www.coinglass.com/pricing")
    resp = requests.get(f"{AUTH_BASE}/{path}", params=params, headers={"coinglassSecret": api_key}, timeout=TIMEOUT)
    resp.raise_for_status()
    payload = resp.json()
    if not (payload.get("success") or payload.get("code") == "0"):
        raise ValueError(f"API error: {payload.get('msg', 'unknown')}")
    return payload.get("data", [])
|
||||
|
||||
|
||||
# ── Data fetchers ───────────────────────────────────────────────────────────
|
||||
|
||||
def _free_then_auth(free_path: str, auth_path: str) -> list[dict]:
    """Try the free endpoint first; fall back to the authenticated API
    when the free call fails or yields an empty payload."""
    try:
        payload = _free_get(free_path)
        if payload:
            return payload
    except Exception:
        pass
    return _auth_get(auth_path)


def get_funding_rates() -> list[dict]:
    """Funding rates across exchanges."""
    return _free_then_auth("fundingRate/v2/home", "funding")


def get_open_interest() -> list[dict]:
    """Aggregated open interest data."""
    return _free_then_auth("openInterest/v3/home", "open_interest")


def get_long_short_ratio() -> list[dict]:
    """Global long/short account ratios."""
    return _free_then_auth("futures/longShort/v2/home", "long_short")
|
||||
|
||||
|
||||
# ── Summary helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
def _no_data_msg(name: str) -> str:
|
||||
return (f"No {name} data available (free API may be restricted).\n"
|
||||
"Set COINGLASS_API_KEY for full access: https://www.coinglass.com/pricing")
|
||||
|
||||
|
||||
def summary_funding(data: list[dict]) -> str:
    """Render funding-rate rows; falls back to a help message when empty."""
    if not data:
        return _no_data_msg("funding rate")
    out = ["═══ Funding Rates ═══", ""]
    for row in data[:20]:
        name = row.get("symbol", row.get("coin", "?"))
        # Aggregated payloads nest per-exchange USDT-margined rates under
        # "uMarginList"; take the first exchange that reports one.
        rate = None
        if "uMarginList" in row:
            for margin in row["uMarginList"]:
                rate = margin.get("rate")
                if rate is not None:
                    break
        else:
            rate = row.get("rate")
        if rate is None:
            out.append(f" {name:<10} (rate unavailable)")
        else:
            out.append(f" {name:<10} {float(rate)*100:>8.4f}%")
    return "\n".join(out)
|
||||
|
||||
|
||||
def summary_oi(data: list[dict]) -> str:
    """Render open-interest rows; help message when data is empty."""
    if not data:
        return _no_data_msg("open interest")
    out = ["═══ Open Interest ═══", ""]
    for row in data[:20]:
        name = row.get("symbol", row.get("coin", "?"))
        oi_usd = row.get("openInterest", row.get("oi", 0))
        out.append(f" {name:<10} OI: ${float(oi_usd):>15,.0f}")
    return "\n".join(out)


def summary_ls(data: list[dict]) -> str:
    """Render long/short ratio rows; help message when data is empty."""
    if not data:
        return _no_data_msg("long/short")
    out = ["═══ Long/Short Ratios ═══", ""]
    for row in data[:20]:
        name = row.get("symbol", row.get("coin", "?"))
        longs = row.get("longRate", row.get("longRatio", "?"))
        shorts = row.get("shortRate", row.get("shortRatio", "?"))
        out.append(f" {name:<10} Long: {longs} Short: {shorts}")
    return "\n".join(out)
|
||||
|
||||
|
||||
# ── CLI ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
def main():
    """CLI dispatcher for the Coinglass connector.

    Subcommands: funding, oi, long-short. Output is JSON by default;
    --pretty indents it and --summary prints a plain-text report. Exits
    non-zero with an error payload on stderr for any failure.
    """
    # Shared flags on a parent parser, attached to the top-level parser AND
    # each subparser, so they work both before and after the subcommand.
    common = argparse.ArgumentParser(add_help=False)
    common.add_argument("--pretty", action="store_true", help="Pretty-print JSON output")
    common.add_argument("--summary", action="store_true", help="Human-readable summary")

    parser = argparse.ArgumentParser(description="Coinglass data connector", parents=[common])
    sub = parser.add_subparsers(dest="command", required=True)

    sub.add_parser("funding", help="Funding rates across exchanges", parents=[common])
    sub.add_parser("oi", help="Open interest overview", parents=[common])
    sub.add_parser("long-short", help="Long/short ratios", parents=[common])

    args = parser.parse_args()

    try:
        # Each command: fetch, then either summarize (and return) or fall
        # through to the shared JSON dump below.
        if args.command == "funding":
            data = get_funding_rates()
            if args.summary:
                print(summary_funding(data)); return
            result = data
        elif args.command == "oi":
            data = get_open_interest()
            if args.summary:
                print(summary_oi(data)); return
            result = data
        elif args.command == "long-short":
            data = get_long_short_ratio()
            if args.summary:
                print(summary_ls(data)); return
            result = data
        else:
            parser.print_help(); return

        json.dump(result, sys.stdout, indent=2 if args.pretty else None)
        print()

    except EnvironmentError as e:
        # Missing API key: human-readable message, exit 1.
        print(str(e), file=sys.stderr); sys.exit(1)
    except requests.HTTPError as e:
        print(json.dumps({"error": str(e)}), file=sys.stderr); sys.exit(1)
    except Exception as e:
        # Catch-all boundary: report as JSON rather than a raw traceback.
        print(json.dumps({"error": f"{type(e).__name__}: {e}"}), file=sys.stderr); sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point for CLI usage.
    main()
|
||||
176
tools/data_sources/defillama.py
Executable file
176
tools/data_sources/defillama.py
Executable file
@ -0,0 +1,176 @@
|
||||
#!/usr/bin/env python3
|
||||
"""DefiLlama API connector — TVL, token prices, yield/APY data.
|
||||
|
||||
No authentication required. All endpoints are free.
|
||||
API base: https://api.llama.fi | Prices: https://coins.llama.fi | Yields: https://yields.llama.fi
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from typing import Any
|
||||
|
||||
import requests
|
||||
|
||||
# Core TVL/protocol API.
BASE = "https://api.llama.fi"
# Token price service.
COINS_BASE = "https://coins.llama.fi"
# Yield/APY service.
YIELDS_BASE = "https://yields.llama.fi"

# Request timeout in seconds for all HTTP calls.
TIMEOUT = 30
|
||||
|
||||
|
||||
def _get(url: str, params: dict | None = None) -> Any:
    """GET *url* and return the decoded JSON body, raising on HTTP errors."""
    response = requests.get(url, params=params, timeout=TIMEOUT)
    response.raise_for_status()
    return response.json()
|
||||
|
||||
|
||||
# ── Protocol / TVL ──────────────────────────────────────────────────────────
|
||||
|
||||
def get_protocols(limit: int = 20) -> list[dict]:
    """Top protocols by TVL.

    Drops CEX listings and entries without a TVL figure, then returns the
    *limit* largest protocols in descending TVL order.
    """
    raw = _get(f"{BASE}/protocols")
    ranked = sorted(
        (p for p in raw if p.get("category") != "CEX" and p.get("tvl")),
        key=lambda p: p.get("tvl", 0),
        reverse=True,
    )
    return ranked[:limit]
|
||||
|
||||
|
||||
def get_tvl(protocol: str) -> dict:
|
||||
"""Get current TVL for a specific protocol (slug name)."""
|
||||
val = _get(f"{BASE}/tvl/{protocol}")
|
||||
return {"protocol": protocol, "tvl": val}
|
||||
|
||||
|
||||
def get_protocol_detail(protocol: str) -> dict:
    """Fetch the full protocol record, including per-chain TVL breakdowns."""
    endpoint = f"{BASE}/protocol/{protocol}"
    return _get(endpoint)
|
||||
|
||||
|
||||
# ── Token Prices ────────────────────────────────────────────────────────────
|
||||
|
||||
def get_prices(coins: list[str]) -> dict:
    """Fetch current prices for the given coin identifiers.

    Identifier formats accepted by the API: 'coingecko:ethereum',
    'ethereum:0x...', etc.  Returns the response's 'coins' mapping
    (empty dict when the key is absent).
    """
    endpoint = f"{COINS_BASE}/prices/current/{','.join(coins)}"
    response = _get(endpoint)
    return response.get("coins", {})
|
||||
|
||||
|
||||
# ── Yields / APY ────────────────────────────────────────────────────────────
|
||||
|
||||
def get_yield_pools(limit: int = 30, min_tvl: float = 1_000_000, stablecoin_only: bool = False) -> list[dict]:
    """Top yield pools ranked by APY, highest first.

    Pools below *min_tvl* USD or with non-positive APY are excluded; with
    *stablecoin_only* set, only pools flagged as stablecoin pools survive.
    Missing tvlUsd/apy fields are treated as 0 (the `or 0` guards below).
    """
    payload = _get(f"{YIELDS_BASE}/pools")
    candidates = []
    for pool in payload.get("data", []):
        if (pool.get("tvlUsd") or 0) < min_tvl or (pool.get("apy") or 0) <= 0:
            continue
        if stablecoin_only and not pool.get("stablecoin"):
            continue
        candidates.append(pool)
    candidates.sort(key=lambda pool: pool.get("apy", 0), reverse=True)
    return candidates[:limit]
|
||||
|
||||
|
||||
# ── Summary helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
def _fmt_usd(v: float) -> str:
|
||||
if v >= 1e9:
|
||||
return f"${v/1e9:.2f}B"
|
||||
if v >= 1e6:
|
||||
return f"${v/1e6:.1f}M"
|
||||
return f"${v:,.0f}"
|
||||
|
||||
|
||||
def summary_protocols(protos: list[dict]) -> str:
    """Render protocols as a ranked, fixed-width text table with formatted TVL."""
    header = ["═══ Top Protocols by TVL ═══", ""]
    rows = [
        f" {rank:>2}. {proto['name']:<25} TVL: {_fmt_usd(proto.get('tvl', 0)):>12} chain: {proto.get('chain', '?')}"
        for rank, proto in enumerate(protos, 1)
    ]
    return "\n".join(header + rows)
|
||||
|
||||
|
||||
def summary_prices(prices: dict) -> str:
    """Render the coins→info mapping from get_prices() as a readable table.

    Each info dict is expected to carry 'symbol', 'price' and 'confidence';
    any missing field degrades gracefully instead of raising.  (The original
    indexed info['price'] directly and raised KeyError on partial API rows,
    inconsistent with the defensive .get() style used for the other fields.)
    """
    lines = ["═══ Token Prices ═══", ""]
    for coin, info in prices.items():
        price = info.get("price")  # fix: partial rows may lack a price
        if isinstance(price, (int, float)):
            price_col = f"${price:>12,.2f}"
        else:
            # Pad to the same 13-char width ("$" + 12) so columns stay aligned.
            price_col = f"{'n/a':>13}"
        lines.append(f" {info.get('symbol', coin):<10} {price_col} (confidence: {info.get('confidence', '?')})")
    return "\n".join(lines)
|
||||
|
||||
|
||||
def summary_yields(pools: list[dict]) -> str:
    """Render yield pools as a ranked table: symbol, APY, TVL, chain/project."""

    def _row(rank: int, pool: dict) -> str:
        # Fixed-width columns; '?' / 0 placeholders for any missing field.
        return (
            f" {rank:>2}. {pool.get('symbol','?'):<25} APY: {pool.get('apy',0):>8.2f}% "
            f"TVL: {_fmt_usd(pool.get('tvlUsd',0)):>10} {pool.get('chain','?')}/{pool.get('project','?')}"
        )

    body = [_row(rank, pool) for rank, pool in enumerate(pools, 1)]
    return "\n".join(["═══ Top Yield Pools ═══", "", *body])
|
||||
|
||||
|
||||
# ── CLI ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
def main():
    """CLI entry point: dispatch one subcommand and print JSON or a summary.

    Subcommands map 1:1 onto the module's fetch helpers.  With --summary a
    human-readable table is printed and the function returns early; otherwise
    a JSON result is dumped to stdout (--pretty selects 2-space indentation).
    Exits with status 1 on HTTP or unexpected errors, reporting JSON on stderr.
    """
    # Shared flags attached via parents= to the root parser AND each subparser,
    # so they may appear before or after the subcommand.
    # NOTE(review): with a store_true flag declared on both the root parser and
    # a subparser, argparse lets the subparser's False default overwrite a flag
    # given before the subcommand (e.g. `--pretty protocols`) — confirm intended.
    common = argparse.ArgumentParser(add_help=False)
    common.add_argument("--pretty", action="store_true", help="Pretty-print JSON output")
    common.add_argument("--summary", action="store_true", help="Human-readable summary")
    parser = argparse.ArgumentParser(description="DefiLlama data connector", parents=[common])
    sub = parser.add_subparsers(dest="command", required=True)

    # protocols: top protocols by TVL
    p_proto = sub.add_parser("protocols", help="Top protocols by TVL", parents=[common])
    p_proto.add_argument("--limit", type=int, default=20)

    # tvl: single-protocol TVL lookup by slug
    p_tvl = sub.add_parser("tvl", help="TVL for a specific protocol", parents=[common])
    p_tvl.add_argument("protocol", help="Protocol slug (e.g. aave, lido)")

    # prices: spot prices for one or more coin identifiers
    p_price = sub.add_parser("prices", help="Token prices", parents=[common])
    p_price.add_argument("coins", nargs="+", help="Coin IDs: coingecko:ethereum, ethereum:0x...")

    # yields: top yield pools with TVL/stablecoin filters
    p_yield = sub.add_parser("yields", help="Top yield pools", parents=[common])
    p_yield.add_argument("--limit", type=int, default=30)
    p_yield.add_argument("--min-tvl", type=float, default=1_000_000)
    p_yield.add_argument("--stablecoins", action="store_true")

    args = parser.parse_args()

    try:
        if args.command == "protocols":
            data = get_protocols(args.limit)
            if args.summary:
                print(summary_protocols(data))
                return
            # Project each protocol record down to the fields worth emitting.
            result = [{"name": p["name"], "tvl": p.get("tvl"), "chain": p.get("chain"), "category": p.get("category"), "symbol": p.get("symbol")} for p in data]
        elif args.command == "tvl":
            result = get_tvl(args.protocol)
            if args.summary:
                print(f"{args.protocol}: {_fmt_usd(result['tvl'])}")
                return
        elif args.command == "prices":
            result = get_prices(args.coins)
            if args.summary:
                print(summary_prices(result))
                return
        elif args.command == "yields":
            # argparse converts --min-tvl to args.min_tvl, --stablecoins to args.stablecoins.
            data = get_yield_pools(args.limit, args.min_tvl, args.stablecoins)
            if args.summary:
                print(summary_yields(data))
                return
            result = [{"symbol": p.get("symbol"), "apy": p.get("apy"), "tvlUsd": p.get("tvlUsd"), "chain": p.get("chain"), "project": p.get("project"), "pool": p.get("pool")} for p in data]
        else:
            # Unreachable with required=True subparsers; kept as a safety net.
            parser.print_help()
            return

        # JSON path: --summary branches above have already returned.
        indent = 2 if args.pretty else None
        json.dump(result, sys.stdout, indent=indent)
        print()

    except requests.HTTPError as e:
        # API-level failure (non-2xx from raise_for_status in _get).
        print(json.dumps({"error": str(e)}), file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        # Last-resort boundary handler: report and exit non-zero.
        print(json.dumps({"error": f"Unexpected: {e}"}), file=sys.stderr)
        sys.exit(1)
|
||||
|
||||
|
||||
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
||||
15
tools/tweet_analyzer_wrapper.sh
Executable file
15
tools/tweet_analyzer_wrapper.sh
Executable file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/env bash
# Tweet Analyzer Wrapper - for OpenClaw agent use
# Usage: ./tweet_analyzer_wrapper.sh <tweet_url> [output_file]
set -euo pipefail

# Resolve the directory holding this script so analyze_tweet.py is found
# regardless of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
URL="${1:?Usage: $0 <tweet_url> [output_file]}"
OUTPUT="${2:-}"

# Build the analyzer invocation; -o is added only when an output file was given.
cmd=(python3 "$SCRIPT_DIR/analyze_tweet.py" "$URL")
if [ -n "$OUTPUT" ]; then
    cmd+=(-o "$OUTPUT")
fi

"${cmd[@]}"

if [ -n "$OUTPUT" ]; then
    echo "Analysis written to $OUTPUT"
fi
|
||||
Reference in New Issue
Block a user