Weekly Auto-Update #9
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: Weekly Auto-Update

on:
  schedule:
    - cron: '0 8 * * 1'  # Every Monday at 8:00 AM UTC
  workflow_dispatch:     # allow manual runs from the Actions tab

# The job commits generated JSON back to the repository, so it needs
# write access to repository contents.
permissions:
  contents: write

jobs:
  update:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Fetch network data and generate briefing
        env:
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
        run: |
          python3 << 'PYEOF'
import json
import os
import re
import urllib.error
import urllib.request
from datetime import datetime, timedelta, timezone

# ── 1. Load Instagram posts (updated daily by fetch-instagram.yml) ──
# Best-effort read: a missing or unparsable file simply leaves `posts` empty,
# and the briefing step degrades to "quiet" mode downstream.
posts = []
try:
    with open("assets/data/instagram.json") as fh:
        posts = json.load(fh).get("posts", [])
    print(f"Loaded {len(posts)} posts from instagram.json")
except Exception as e:
    print(f"Failed to load instagram.json: {e}")
# Timestamp shared by every JSON artifact written during this run.
now_iso = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

# ── 2. Fetch network node count ──
NODES_URL = "https://cluster.1.pools.functionyard.fula.network/nodes"
total_nodes = 900  # fallback when the cluster endpoint is unreachable
try:
    req = urllib.request.Request(NODES_URL, headers={"User-Agent": "Mozilla/5.0"})
    with urllib.request.urlopen(req, timeout=15) as resp:
        parsed = json.loads(resp.read())
    # The endpoint might return a JSON array or object.
    if isinstance(parsed, list):
        total_nodes = len(parsed)
    elif isinstance(parsed, dict) and "count" in parsed:
        # Coerce: the API may serialize the count as a string; a bad value
        # raises here and falls through to the fallback below.
        total_nodes = int(parsed["count"])
    elif isinstance(parsed, dict):
        total_nodes = len(parsed)
    print(f"Network nodes: {total_nodes}")
except Exception as e:
    print(f"Node count fetch failed, using fallback: {e}")

# Persist the node count; create the data directory so a fresh checkout
# (or a wiped workspace) cannot crash the unguarded open() below.
os.makedirs("assets/data", exist_ok=True)
network_data = {"updated": now_iso, "total_nodes": total_nodes}
with open("assets/data/network.json", "w") as f:
    json.dump(network_data, f, indent=2)
print("Wrote network.json")
# ── 3. Determine content seed for Claude briefing ──
# Posts newer than this cutoff count as "this week's" content.
now_utc = datetime.now(timezone.utc)
seven_days_ago = now_utc - timedelta(weeks=1)
def parse_date(d):
    """Parse an Instagram/RSS timestamp string into an aware UTC datetime.

    Fractional seconds are stripped first (e.g. the ".000" in
    "2026-02-23T22:13:00.000Z"), then each known format is tried in turn.
    Naive results (the "Z"-suffixed ISO form) are stamped as UTC.
    Returns None when no format matches.
    """
    cleaned = re.sub(r'\.\d+', '', d)
    known_formats = (
        "%Y-%m-%dT%H:%M:%SZ",
        "%Y-%m-%dT%H:%M:%S%z",
        "%a, %d %b %Y %H:%M:%S %z",
    )
    for fmt in known_formats:
        try:
            parsed = datetime.strptime(cleaned, fmt)
        except ValueError:
            continue
        if parsed.tzinfo is None:
            parsed = parsed.replace(tzinfo=timezone.utc)
        return parsed
    return None
# Prefer posts from the last seven days; when the week was quiet, fall back
# to the three most recent posts so Claude still has material to work with.
recent_posts = []
for post in posts:
    posted_at = parse_date(post.get("timestamp", ""))
    if posted_at is not None and posted_at >= seven_days_ago:
        recent_posts.append(post)

if recent_posts:
    seed_posts, prompt_mode = recent_posts, "recent"
else:
    seed_posts, prompt_mode = posts[:3], "quiet"
print(f"Content seed: {len(seed_posts)} posts ({prompt_mode} mode)")
# ── 4. Load previous pulse for continuity ──
# The prior briefing is fed back to Claude so consecutive weeks don't repeat
# themselves. Best-effort and deliberately silent: any read/parse failure
# (including a missing file) just means no continuity context this week.
# EAFP instead of an os.path.exists() pre-check — one filesystem touch, no
# check-then-use race.
prev_summary = ""
try:
    with open("assets/data/pulse.json") as f:
        prev_summary = json.load(f).get("briefing", "")
except Exception:
    pass
# ── 5. Call Claude API with web search ──
# Monday of the current week; keys the pulse and the archive filename.
week_of = (now_utc - timedelta(days=now_utc.weekday())).strftime("%Y-%m-%d")


def _fallback_pulse():
    """Return the placeholder pulse payload used whenever no briefing exists."""
    return {
        "updated": now_iso,
        "week_of": week_of,
        "headline": "Weekly Update",
        "subtitle": "Check back for this week's briefing.",
        "briefing": "",
        "sources": []
    }


api_key = os.environ.get("ANTHROPIC_API_KEY", "")
if not api_key:
    print("No ANTHROPIC_API_KEY set, skipping briefing generation")
    pulse_data = _fallback_pulse()
else:
    # One bullet per seed post: "- [timestamp] caption".
    seed_text = "\n".join([f"- [{p.get('timestamp','')}] {p.get('caption','')}" for p in seed_posts])
    if prompt_mode == "recent":
        user_context = f"""## This Week's Posts from @functionland
{seed_text}
## Network Stats
Active nodes: {total_nodes}
## Previous Briefing (for continuity, do not repeat)
{prev_summary[:500]}"""
    else:
        user_context = f"""## Recent Posts from @functionland (no new posts this week)
{seed_text}
## Network Stats
Active nodes: {total_nodes}
## Previous Briefing (for continuity, do not repeat)
{prev_summary[:500]}
Note: There were no new posts this week. Based on Functionland's recent activity and current industry developments, write a briefing on where the project stands in the broader DePIN landscape."""
    system_prompt = """You are a senior technology journalist writing a weekly briefing for Functionland's website. Your style is authoritative, clear, and concise — like a Wall Street Journal technology correspondent.
Your task:
1. Review the provided posts and network data from Functionland this week
2. Use web search to research relevant context: DePIN industry trends, competing projects, regulatory developments, technology breakthroughs related to the topics in the posts
3. Write a 150-250 word weekly briefing that:
- Leads with Functionland's most newsworthy update from the posts
- Provides industry context from your research (why this matters in the broader DePIN/Web3 landscape)
- Includes specific facts, numbers, or comparisons from your research
- Ends with a forward-looking sentence about what to watch next
- If there are recent announcements or news or developments, focus on those instead of generic update
4. Also generate a short headline (max 10 words) and a one-sentence subtitle
Rules:
- Never fabricate statistics or quotes
- If you cannot verify something, don't include it
- Do not use exclamation marks or hype language
- Write for an informed but not expert audience
- Keep the tone professional and credible
- Every claim should be grounded in the post data or your web research
- It should have positive tone and be encouraging for Functionland focused on positive news and not with a negative tone
Respond with ONLY valid JSON in this exact format:
{"headline": "...", "subtitle": "...", "briefing": "...", "sources": ["url1", "url2"]}"""
    request_body = json.dumps({
        "model": "claude-sonnet-4-6",
        "max_tokens": 1024,
        "system": system_prompt,
        "tools": [{"type": "web_search_20250305", "name": "web_search", "max_uses": 5}],
        "messages": [{"role": "user", "content": user_context}]
    }).encode()
    headers = {
        "Content-Type": "application/json",
        "X-Api-Key": api_key,
        "Anthropic-Version": "2023-06-01"
    }
    req = urllib.request.Request("https://api.anthropic.com/v1/messages", data=request_body, headers=headers, method="POST")
    print("Calling Claude API with web search...")
    try:
        with urllib.request.urlopen(req, timeout=120) as resp:
            result = json.loads(resp.read())
        # Concatenate the text blocks of the response; tool-use blocks from
        # the web-search rounds are skipped.
        text_content = ""
        for block in result.get("content", []):
            if block.get("type") == "text":
                text_content += block["text"]
        # Pull the JSON object out of any surrounding prose Claude produced.
        json_match = re.search(r'\{[^{}]*"headline"[^{}]*\}', text_content, re.DOTALL)
        if json_match:
            briefing_data = json.loads(json_match.group())
        else:
            # Try parsing the entire text as JSON
            briefing_data = json.loads(text_content.strip())
        pulse_data = {
            "updated": now_iso,
            "week_of": week_of,
            "headline": briefing_data.get("headline", "Weekly Update"),
            "subtitle": briefing_data.get("subtitle", ""),
            "briefing": briefing_data.get("briefing", ""),
            "sources": briefing_data.get("sources", [])
        }
        print("Claude briefing generated successfully")
    except urllib.error.HTTPError as e:
        # Surface the API's error body — the status line alone rarely
        # explains the failure.
        error_body = e.read().decode() if e.fp else ""
        print(f"Claude API call failed: {e} — {error_body}")
        pulse_data = _fallback_pulse()
    except Exception as e:
        print(f"Claude API call failed: {e}")
        pulse_data = _fallback_pulse()
# Archive the outgoing pulse (only if it carried a real briefing) before it
# is replaced, so past weeks stay available under assets/data/history/.
# Failures here are non-fatal: this week's pulse is still written below.
try:
    with open("assets/data/pulse.json", "r") as src:
        old_pulse = json.load(src)
    if old_pulse.get("briefing", "").strip():
        archive_path = f"assets/data/history/pulse-before-{pulse_data['week_of']}.json"
        os.makedirs("assets/data/history", exist_ok=True)
        with open(archive_path, "w") as dst:
            json.dump(old_pulse, dst, indent=2)
        print(f"Archived previous pulse to {archive_path}")
except FileNotFoundError:
    print("No existing pulse.json to archive")
except Exception as e:
    print(f"Pulse archive failed (non-fatal): {e}")

# Publish this week's pulse regardless of whether archiving succeeded.
with open("assets/data/pulse.json", "w") as out:
    json.dump(pulse_data, out, indent=2)
print("Wrote pulse.json")
# ── 6. Update sitemap.xml homepage lastmod ──
# Keeps the homepage's <lastmod> current so crawlers re-index the new pulse.
# Uses the module-level `re` import; the previous `import re as re2` alias
# was redundant.
today = now_utc.strftime("%Y-%m-%d")
try:
    with open("sitemap.xml", "r") as f:
        sitemap = f.read()
    # \g<1> (rather than \1) so the digit that follows cannot be misread
    # as part of the group number.
    sitemap = re.sub(
        r'(<loc>https://fx\.land/</loc>\s*<lastmod>)\d{4}-\d{2}-\d{2}(</lastmod>)',
        rf'\g<1>{today}\2',
        sitemap
    )
    with open("sitemap.xml", "w") as f:
        f.write(sitemap)
    print(f"Updated sitemap.xml homepage lastmod to {today}")
except Exception as e:
    print(f"Sitemap update failed: {e}")
print("Done!")
| PYEOF | |
| - name: Commit and push changes | |
| run: | | |
| git config user.name "github-actions[bot]" | |
| git config user.email "github-actions[bot]@users.noreply.github.com" | |
| git add assets/data/network.json assets/data/pulse.json assets/data/history/ sitemap.xml | |
| if git diff --staged --quiet; then | |
| echo "No changes to commit" | |
| else | |
| git commit -m "chore: weekly auto-update ($(date -u +%Y-%m-%d))" | |
| git push | |
| fi |