From e7ca3481a6bd83a104dae82340131de54f44cb59 Mon Sep 17 00:00:00 2001 From: louisdevzz Date: Thu, 5 Mar 2026 00:27:50 +0700 Subject: [PATCH 1/2] chore(github): add .githooks and .github configuration from zeroclaw - Add pre-commit and pre-push git hooks - Add GitHub workflows, issue templates, and CI/CD configs - Add CODEOWNERS with louisdevzz as owner and reviewer - Add security, release, and connectivity policies --- .githooks/pre-commit | 8 + .githooks/pre-push | 53 ++ .github/CODEOWNERS | 32 + .github/ISSUE_TEMPLATE/bug_report.yml | 148 ++++ .github/ISSUE_TEMPLATE/config.yml | 17 + .github/ISSUE_TEMPLATE/feature_request.yml | 107 +++ .github/actionlint.yaml | 7 + .github/codeql/codeql-config.yml | 8 + .github/connectivity/probe-contract.json | 70 ++ .github/connectivity/providers.json | 77 ++ .github/dependabot.yml | 52 ++ .github/label-policy.json | 21 + .github/labeler.yml | 147 ++++ .github/pull_request_template.md | 117 +++ .github/release/canary-policy.json | 39 + .github/release/docs-deploy-policy.json | 10 + .github/release/ghcr-tag-policy.json | 18 + .../release/ghcr-vulnerability-policy.json | 17 + .github/release/nightly-owner-routing.json | 9 + .github/release/prerelease-stage-gates.json | 33 + .../release/release-artifact-contract.json | 30 + .github/security/deny-ignore-governance.json | 26 + .../gitleaks-allowlist-governance.json | 56 ++ .github/security/unsafe-audit-governance.json | 5 + .github/workflows/README.md | 36 + .github/workflows/ci-build-fast.yml | 63 ++ .github/workflows/ci-canary-gate.yml | 329 +++++++ .github/workflows/ci-change-audit.yml | 154 ++++ .../workflows/ci-provider-connectivity.yml | 112 +++ .github/workflows/ci-reproducible-build.yml | 121 +++ .github/workflows/ci-rollback.yml | 257 ++++++ .github/workflows/ci-run.yml | 446 ++++++++++ .../workflows/ci-supply-chain-provenance.yml | 110 +++ .github/workflows/deploy-web.yml | 56 ++ .github/workflows/docs-deploy.yml | 291 +++++++ .github/workflows/feature-matrix.yml | 382 
+++++++++ .github/workflows/main-branch-flow.md | 266 ++++++ .github/workflows/nightly-all-features.yml | 187 ++++ .github/workflows/pages-deploy.yml | 64 ++ .github/workflows/pr-auto-response.yml | 89 ++ .github/workflows/pr-check-stale.yml | 49 ++ .github/workflows/pr-check-status.yml | 36 + .github/workflows/pr-intake-checks.yml | 37 + .github/workflows/pr-label-policy-check.yml | 80 ++ .github/workflows/pr-labeler.yml | 56 ++ .github/workflows/pub-docker-img.yml | 352 ++++++++ .github/workflows/pub-prerelease.yml | 259 ++++++ .github/workflows/pub-release.yml | 645 ++++++++++++++ .../scripts/ci_human_review_guard.js | 61 ++ .../scripts/ci_license_file_owner_guard.js | 54 ++ .../scripts/ci_workflow_owner_approval.js | 83 ++ .github/workflows/scripts/lint_feedback.js | 90 ++ .../pr_auto_response_contributor_tier.js | 132 +++ .../pr_auto_response_labeled_routes.js | 94 ++ .../scripts/pr_check_status_nudge.js | 161 ++++ .github/workflows/scripts/pr_intake_checks.js | 202 +++++ .github/workflows/scripts/pr_labeler.js | 805 ++++++++++++++++++ .../scripts/test_benchmarks_pr_comment.js | 57 ++ .github/workflows/sec-audit.yml | 597 +++++++++++++ .github/workflows/sec-codeql.yml | 72 ++ .github/workflows/sec-vorpal-reviewdog.yml | 191 +++++ .github/workflows/sync-contributors.yml | 116 +++ .github/workflows/test-benchmarks.yml | 53 ++ .github/workflows/test-e2e.yml | 33 + .github/workflows/test-fuzz.yml | 75 ++ .github/workflows/workflow-sanity.yml | 106 +++ 66 files changed, 8566 insertions(+) create mode 100755 .githooks/pre-commit create mode 100755 .githooks/pre-push create mode 100644 .github/CODEOWNERS create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml create mode 100644 .github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml create mode 100644 .github/actionlint.yaml create mode 100644 .github/codeql/codeql-config.yml create mode 100644 .github/connectivity/probe-contract.json create mode 100644 
.github/connectivity/providers.json create mode 100644 .github/dependabot.yml create mode 100644 .github/label-policy.json create mode 100644 .github/labeler.yml create mode 100644 .github/pull_request_template.md create mode 100644 .github/release/canary-policy.json create mode 100644 .github/release/docs-deploy-policy.json create mode 100644 .github/release/ghcr-tag-policy.json create mode 100644 .github/release/ghcr-vulnerability-policy.json create mode 100644 .github/release/nightly-owner-routing.json create mode 100644 .github/release/prerelease-stage-gates.json create mode 100644 .github/release/release-artifact-contract.json create mode 100644 .github/security/deny-ignore-governance.json create mode 100644 .github/security/gitleaks-allowlist-governance.json create mode 100644 .github/security/unsafe-audit-governance.json create mode 100644 .github/workflows/README.md create mode 100644 .github/workflows/ci-build-fast.yml create mode 100644 .github/workflows/ci-canary-gate.yml create mode 100644 .github/workflows/ci-change-audit.yml create mode 100644 .github/workflows/ci-provider-connectivity.yml create mode 100644 .github/workflows/ci-reproducible-build.yml create mode 100644 .github/workflows/ci-rollback.yml create mode 100644 .github/workflows/ci-run.yml create mode 100644 .github/workflows/ci-supply-chain-provenance.yml create mode 100644 .github/workflows/deploy-web.yml create mode 100644 .github/workflows/docs-deploy.yml create mode 100644 .github/workflows/feature-matrix.yml create mode 100644 .github/workflows/main-branch-flow.md create mode 100644 .github/workflows/nightly-all-features.yml create mode 100644 .github/workflows/pages-deploy.yml create mode 100644 .github/workflows/pr-auto-response.yml create mode 100644 .github/workflows/pr-check-stale.yml create mode 100644 .github/workflows/pr-check-status.yml create mode 100644 .github/workflows/pr-intake-checks.yml create mode 100644 .github/workflows/pr-label-policy-check.yml create mode 100644 
.github/workflows/pr-labeler.yml create mode 100644 .github/workflows/pub-docker-img.yml create mode 100644 .github/workflows/pub-prerelease.yml create mode 100644 .github/workflows/pub-release.yml create mode 100644 .github/workflows/scripts/ci_human_review_guard.js create mode 100644 .github/workflows/scripts/ci_license_file_owner_guard.js create mode 100644 .github/workflows/scripts/ci_workflow_owner_approval.js create mode 100644 .github/workflows/scripts/lint_feedback.js create mode 100644 .github/workflows/scripts/pr_auto_response_contributor_tier.js create mode 100644 .github/workflows/scripts/pr_auto_response_labeled_routes.js create mode 100644 .github/workflows/scripts/pr_check_status_nudge.js create mode 100644 .github/workflows/scripts/pr_intake_checks.js create mode 100644 .github/workflows/scripts/pr_labeler.js create mode 100644 .github/workflows/scripts/test_benchmarks_pr_comment.js create mode 100644 .github/workflows/sec-audit.yml create mode 100644 .github/workflows/sec-codeql.yml create mode 100644 .github/workflows/sec-vorpal-reviewdog.yml create mode 100644 .github/workflows/sync-contributors.yml create mode 100644 .github/workflows/test-benchmarks.yml create mode 100644 .github/workflows/test-e2e.yml create mode 100644 .github/workflows/test-fuzz.yml create mode 100644 .github/workflows/workflow-sanity.yml diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100755 index 0000000..d162ba3 --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,8 @@ +#!/usr/bin/env bash +set -euo pipefail + +if command -v gitleaks >/dev/null 2>&1; then + gitleaks protect --staged --redact +else + echo "warning: gitleaks not found; skipping staged secret scan" >&2 +fi diff --git a/.githooks/pre-push b/.githooks/pre-push new file mode 100755 index 0000000..f69e1cb --- /dev/null +++ b/.githooks/pre-push @@ -0,0 +1,53 @@ +#!/usr/bin/env bash +# +# pre-push hook — runs fmt, clippy, and tests before every push. 
+# Install: git config core.hooksPath .githooks +# Skip: git push --no-verify + +set -euo pipefail + +echo "==> pre-push: running rust quality gate..." +./scripts/ci/rust_quality_gate.sh || { + echo "FAIL: rust quality gate failed." + exit 1 +} + +if [ "${ZEROCLAW_STRICT_LINT:-0}" = "1" ]; then + echo "==> pre-push: running strict clippy warnings gate (ZEROCLAW_STRICT_LINT=1)..." + ./scripts/ci/rust_quality_gate.sh --strict || { + echo "FAIL: strict clippy warnings gate reported issues." + exit 1 + } +fi + +if [ "${ZEROCLAW_STRICT_DELTA_LINT:-0}" = "1" ]; then + echo "==> pre-push: running strict delta lint gate (ZEROCLAW_STRICT_DELTA_LINT=1)..." + ./scripts/ci/rust_strict_delta_gate.sh || { + echo "FAIL: strict delta lint gate reported issues." + exit 1 + } +fi + +if [ "${ZEROCLAW_DOCS_LINT:-0}" = "1" ]; then + echo "==> pre-push: running docs quality gate (ZEROCLAW_DOCS_LINT=1)..." + ./scripts/ci/docs_quality_gate.sh || { + echo "FAIL: docs quality gate reported issues." + exit 1 + } +fi + +if [ "${ZEROCLAW_DOCS_LINKS:-0}" = "1" ]; then + echo "==> pre-push: running docs links gate (ZEROCLAW_DOCS_LINKS=1)..." + ./scripts/ci/docs_links_gate.sh || { + echo "FAIL: docs links gate reported issues." + exit 1 + } +fi + +echo "==> pre-push: running tests..." +cargo test --locked || { + echo "FAIL: some tests did not pass." + exit 1 +} + +echo "==> pre-push: all checks passed." 
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..cdb9b92 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,32 @@ +# Default owner for all files +* @louisdevzz + +# Important functional modules +/src/agent/** @louisdevzz +/src/providers/** @louisdevzz +/src/channels/** @louisdevzz +/src/tools/** @louisdevzz +/src/gateway/** @louisdevzz +/src/runtime/** @louisdevzz +/src/memory/** @louisdevzz +/Cargo.toml @louisdevzz +/Cargo.lock @louisdevzz + +# Security / tests / CI-CD ownership +/src/security/** @louisdevzz +/tests/** @louisdevzz +/.github/** @louisdevzz +/.github/workflows/** @louisdevzz +/.github/codeql/** @louisdevzz +/.github/dependabot.yml @louisdevzz +/SECURITY.md @louisdevzz +/docs/actions-source-policy.md @louisdevzz +/docs/ci-map.md @louisdevzz + +# Docs & governance +/docs/** @louisdevzz +/AGENTS.md @louisdevzz +/CLAUDE.md @louisdevzz +/CONTRIBUTING.md @louisdevzz +/docs/pr-workflow.md @louisdevzz +/docs/reviewer-playbook.md @louisdevzz diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 0000000..8ac7419 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,148 @@ +name: Bug Report +description: Report a reproducible defect in ZeroClaw +title: "[Bug]: " +labels: + - bug +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to report a bug. + Please provide a minimal reproducible case so maintainers can triage quickly. + Do not include personal/sensitive data; redact and anonymize all logs/payloads. + + - type: input + id: summary + attributes: + label: Summary + description: One-line description of the problem. + placeholder: zeroclaw daemon exits immediately when ... 
+ validations: + required: true + + - type: dropdown + id: component + attributes: + label: Affected component + options: + - runtime/daemon + - provider + - channel + - memory + - security/sandbox + - tooling/ci + - docs + - unknown + validations: + required: true + + - type: dropdown + id: severity + attributes: + label: Severity + options: + - S0 - data loss / security risk + - S1 - workflow blocked + - S2 - degraded behavior + - S3 - minor issue + validations: + required: true + + - type: textarea + id: current + attributes: + label: Current behavior + description: What is happening now? + placeholder: The process exits with ... + validations: + required: true + + - type: textarea + id: expected + attributes: + label: Expected behavior + description: What should happen instead? + placeholder: The daemon should stay alive and ... + validations: + required: true + + - type: textarea + id: reproduce + attributes: + label: Steps to reproduce + description: Please provide exact commands/config. + placeholder: | + 1. zeroclaw onboard --interactive + 2. zeroclaw daemon + 3. Observe crash in logs + render: bash + validations: + required: true + + - type: textarea + id: impact + attributes: + label: Impact + description: Who is affected, how often, and practical consequences. + placeholder: | + Affected users: ... + Frequency: always/intermittent + Consequence: ... + validations: + required: true + + - type: textarea + id: logs + attributes: + label: Logs / stack traces + description: Paste relevant logs (redact secrets, personal identifiers, and sensitive data). 
+ render: text + validations: + required: false + + - type: input + id: version + attributes: + label: ZeroClaw version + placeholder: v0.1.0 / commit SHA + validations: + required: true + + - type: input + id: rust + attributes: + label: Rust version + placeholder: rustc 1.xx.x + validations: + required: true + + - type: input + id: os + attributes: + label: Operating system + placeholder: Ubuntu 24.04 / macOS 15 / Windows 11 + validations: + required: true + + - type: dropdown + id: regression + attributes: + label: Regression? + options: + - Unknown + - Yes, it worked before + - No, first-time setup + validations: + required: true + + - type: checkboxes + id: checks + attributes: + label: Pre-flight checks + options: + - label: I reproduced this on the latest main branch or latest release. + required: true + - label: I redacted secrets/tokens from logs. + required: true + - label: I removed personal identifiers and replaced identity-specific data with neutral placeholders. + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..4de85aa --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,17 @@ +blank_issues_enabled: false +contact_links: + - name: Security vulnerability report + url: https://github.com/zeroclaw-labs/zeroclaw/security/policy + about: Please report security vulnerabilities privately via SECURITY.md policy. + - name: Private vulnerability report template + url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/docs/security/private-vulnerability-report-template.md + about: Use this template when filing a private vulnerability report in Security Advisories. 
+ - name: 私密漏洞报告模板(中文) + url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/docs/security/private-vulnerability-report-template.zh-CN.md + about: 使用该中文模板通过 Security Advisories 进行私密漏洞提交。 + - name: Contribution guide + url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/CONTRIBUTING.md + about: Please read contribution and PR requirements before opening an issue. + - name: PR workflow & reviewer expectations + url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/docs/pr-workflow.md + about: Read risk-based PR tracks, CI gates, and merge criteria before filing feature requests. diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 0000000..25fa32b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,107 @@ +name: Feature Request +description: Propose an improvement or new capability +title: "[Feature]: " +labels: + - enhancement +body: + - type: markdown + attributes: + value: | + Thanks for sharing your idea. + Please focus on user value, constraints, and rollout safety. + Do not include personal/sensitive data; use neutral project-scoped placeholders. + + - type: input + id: summary + attributes: + label: Summary + description: One-line statement of the requested capability. + placeholder: Add a provider-level retry budget override for long-running channels. + validations: + required: true + + - type: textarea + id: problem + attributes: + label: Problem statement + description: What user pain does this solve and why is current behavior insufficient? + placeholder: Teams operating in unstable networks cannot tune retries per provider... + validations: + required: true + + - type: textarea + id: proposal + attributes: + label: Proposed solution + description: Describe preferred behavior and interfaces. + placeholder: Add `[provider.retry]` config and enforce bounds in config validation. 
+ validations: + required: true + + - type: textarea + id: non_goals + attributes: + label: Non-goals / out of scope + description: Clarify what should not be included in the first iteration. + placeholder: No UI changes, no cross-provider dynamic adaptation in v1. + validations: + required: true + + - type: textarea + id: alternatives + attributes: + label: Alternatives considered + description: What alternatives did you evaluate? + placeholder: Keep current behavior, use wrapper scripts, etc. + validations: + required: false + + - type: textarea + id: acceptance + attributes: + label: Acceptance criteria + description: What outcomes would make this request complete? + placeholder: | + - Config key is documented and validated + - Runtime path uses configured retry budget + - Regression tests cover fallback and invalid config + validations: + required: true + + - type: textarea + id: architecture + attributes: + label: Architecture impact + description: Which subsystem(s) are affected? + placeholder: providers/, channels/, memory/, runtime/, security/, docs/ ... + validations: + required: true + + - type: textarea + id: risk + attributes: + label: Risk and rollback + description: Main risk + how to disable/revert quickly. + placeholder: Risk is ... rollback is ... + validations: + required: true + + - type: dropdown + id: breaking + attributes: + label: Breaking change? + options: + - "No" + - "Yes" + validations: + required: true + + - type: checkboxes + id: hygiene + attributes: + label: Data hygiene checks + options: + - label: I removed personal/sensitive data from examples, payloads, and logs. + required: true + - label: I used neutral, project-focused wording and placeholders. 
+ required: true diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml new file mode 100644 index 0000000..3c46a6f --- /dev/null +++ b/.github/actionlint.yaml @@ -0,0 +1,7 @@ +self-hosted-runner: + labels: + - blacksmith-2vcpu-ubuntu-2404 + - aws-india + - hetzner + - Linux + - X64 diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml new file mode 100644 index 0000000..5c82c1b --- /dev/null +++ b/.github/codeql/codeql-config.yml @@ -0,0 +1,8 @@ +# CodeQL configuration for ZeroClaw +# +# We intentionally ignore integration tests under `tests/` because they often +# contain security-focused fixtures (example secrets, malformed payloads, etc.) +# that can trigger false positives in security queries. + +paths-ignore: + - tests/** diff --git a/.github/connectivity/probe-contract.json b/.github/connectivity/probe-contract.json new file mode 100644 index 0000000..4c6b3a2 --- /dev/null +++ b/.github/connectivity/probe-contract.json @@ -0,0 +1,70 @@ +{ + "version": 1, + "description": "Provider/model connectivity probe contract for scheduled CI checks.", + "consecutive_transient_failures_to_escalate": 2, + "providers": [ + { + "name": "OpenAI", + "provider": "openai", + "required": true, + "secret_env": "OPENAI_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Primary reference provider; validates baseline OpenAI-compatible path." + }, + { + "name": "Anthropic", + "provider": "anthropic", + "required": true, + "secret_env": "ANTHROPIC_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Checks non-OpenAI provider fetch path and account health." + }, + { + "name": "Gemini", + "provider": "gemini", + "required": true, + "secret_env": "GEMINI_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Validates Google model discovery endpoint availability." 
+ }, + { + "name": "OpenRouter", + "provider": "openrouter", + "required": true, + "secret_env": "OPENROUTER_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Routes across many providers; signal for aggregator-side health." + }, + { + "name": "Qwen", + "provider": "qwen", + "required": false, + "secret_env": "DASHSCOPE_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Regional provider check; optional for global deployments." + }, + { + "name": "NVIDIA NIM", + "provider": "nvidia", + "required": false, + "secret_env": "NVIDIA_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Optional ecosystem endpoint check." + }, + { + "name": "OpenAI Codex", + "provider": "openai-codex", + "required": false, + "secret_env": "OPENAI_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Uses OpenAI-compatible models endpoint to verify Codex-profile discovery path." + } + ] +} diff --git a/.github/connectivity/providers.json b/.github/connectivity/providers.json new file mode 100644 index 0000000..559a064 --- /dev/null +++ b/.github/connectivity/providers.json @@ -0,0 +1,77 @@ +{ + "global_timeout_seconds": 8, + "providers": [ + { + "id": "openrouter", + "url": "https://openrouter.ai/api/v1/models", + "method": "GET", + "critical": true + }, + { + "id": "openai", + "url": "https://api.openai.com/v1/models", + "method": "GET", + "critical": true + }, + { + "id": "anthropic", + "url": "https://api.anthropic.com/v1/messages", + "method": "POST", + "critical": true + }, + { + "id": "groq", + "url": "https://api.groq.com/openai/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "deepseek", + "url": "https://api.deepseek.com/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "moonshot", + "url": "https://api.moonshot.ai/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "qwen", + "url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1/models", + "method": "GET", + "critical": false + }, + { + "id": 
"zai", + "url": "https://api.z.ai/api/paas/v4/models", + "method": "GET", + "critical": false + }, + { + "id": "glm", + "url": "https://open.bigmodel.cn/api/paas/v4/models", + "method": "GET", + "critical": false + }, + { + "id": "together", + "url": "https://api.together.xyz/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "fireworks", + "url": "https://api.fireworks.ai/inference/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "cohere", + "url": "https://api.cohere.com/v1/models", + "method": "GET", + "critical": false + } + ] +} diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..eb81c96 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,52 @@ +version: 2 + +updates: + - package-ecosystem: cargo + directory: "/" + schedule: + interval: daily + target-branch: main + open-pull-requests-limit: 3 + labels: + - "dependencies" + groups: + rust-all: + patterns: + - "*" + update-types: + - minor + - patch + + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + target-branch: main + open-pull-requests-limit: 1 + labels: + - "ci" + - "dependencies" + groups: + actions-all: + patterns: + - "*" + update-types: + - minor + - patch + + - package-ecosystem: docker + directory: "/" + schedule: + interval: daily + target-branch: main + open-pull-requests-limit: 1 + labels: + - "ci" + - "dependencies" + groups: + docker-all: + patterns: + - "*" + update-types: + - minor + - patch diff --git a/.github/label-policy.json b/.github/label-policy.json new file mode 100644 index 0000000..e8b254f --- /dev/null +++ b/.github/label-policy.json @@ -0,0 +1,21 @@ +{ + "contributor_tier_color": "2ED9FF", + "contributor_tiers": [ + { + "label": "distinguished contributor", + "min_merged_prs": 50 + }, + { + "label": "principal contributor", + "min_merged_prs": 20 + }, + { + "label": "experienced contributor", + "min_merged_prs": 10 + }, + { + "label": "trusted contributor", + 
"min_merged_prs": 5 + } + ] +} diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000..21e851f --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,147 @@ +"docs": + - changed-files: + - any-glob-to-any-file: + - "docs/**" + - "**/*.md" + - "**/*.mdx" + - "LICENSE" + - ".markdownlint-cli2.yaml" + +"dependencies": + - changed-files: + - any-glob-to-any-file: + - "Cargo.toml" + - "Cargo.lock" + - "deny.toml" + - ".github/dependabot.yml" + +"ci": + - changed-files: + - any-glob-to-any-file: + - ".github/**" + - ".githooks/**" + +"core": + - changed-files: + - any-glob-to-any-file: + - "src/*.rs" + +"agent": + - changed-files: + - any-glob-to-any-file: + - "src/agent/**" + +"channel": + - changed-files: + - any-glob-to-any-file: + - "src/channels/**" + +"gateway": + - changed-files: + - any-glob-to-any-file: + - "src/gateway/**" + +"config": + - changed-files: + - any-glob-to-any-file: + - "src/config/**" + +"cron": + - changed-files: + - any-glob-to-any-file: + - "src/cron/**" + +"daemon": + - changed-files: + - any-glob-to-any-file: + - "src/daemon/**" + +"doctor": + - changed-files: + - any-glob-to-any-file: + - "src/doctor/**" + +"health": + - changed-files: + - any-glob-to-any-file: + - "src/health/**" + +"heartbeat": + - changed-files: + - any-glob-to-any-file: + - "src/heartbeat/**" + +"integration": + - changed-files: + - any-glob-to-any-file: + - "src/integrations/**" + +"memory": + - changed-files: + - any-glob-to-any-file: + - "src/memory/**" + +"security": + - changed-files: + - any-glob-to-any-file: + - "src/security/**" + +"runtime": + - changed-files: + - any-glob-to-any-file: + - "src/runtime/**" + +"onboard": + - changed-files: + - any-glob-to-any-file: + - "src/onboard/**" + +"provider": + - changed-files: + - any-glob-to-any-file: + - "src/providers/**" + +"service": + - changed-files: + - any-glob-to-any-file: + - "src/service/**" + +"skillforge": + - changed-files: + - any-glob-to-any-file: + - "src/skillforge/**" + 
+"skills": + - changed-files: + - any-glob-to-any-file: + - "src/skills/**" + +"tool": + - changed-files: + - any-glob-to-any-file: + - "src/tools/**" + +"tunnel": + - changed-files: + - any-glob-to-any-file: + - "src/tunnel/**" + +"observability": + - changed-files: + - any-glob-to-any-file: + - "src/observability/**" + +"tests": + - changed-files: + - any-glob-to-any-file: + - "tests/**" + +"scripts": + - changed-files: + - any-glob-to-any-file: + - "scripts/**" + +"dev": + - changed-files: + - any-glob-to-any-file: + - "dev/**" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..fe3cd6f --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,117 @@ +## Summary + +Describe this PR in 2-5 bullets: + +- Base branch target (`main` by default; use `dev` only when maintainers explicitly request integration batching): +- Problem: +- Why it matters: +- What changed: +- What did **not** change (scope boundary): + +## Label Snapshot (required) + +- Risk label (`risk: low|medium|high`): +- Size label (`size: XS|S|M|L|XL`, auto-managed/read-only): +- Scope labels (`core|agent|channel|config|cron|daemon|doctor|gateway|health|heartbeat|integration|memory|observability|onboard|provider|runtime|security|service|skillforge|skills|tool|tunnel|docs|dependencies|ci|tests|scripts|dev`, comma-separated): +- Module labels (`: `, for example `channel: telegram`, `provider: kimi`, `tool: shell`): +- Contributor tier label (`trusted contributor|experienced contributor|principal contributor|distinguished contributor`, auto-managed/read-only; author merged PRs >=5/10/20/50): +- If any auto-label is incorrect, note requested correction: + +## Change Metadata + +- Change type (`bug|feature|refactor|docs|security|chore`): +- Primary scope (`runtime|provider|channel|memory|security|ci|docs|multi`): + +## Linked Issue + +- Closes # +- Related # +- Depends on # (if stacked) +- Existing overlapping PR(s) reviewed for this 
issue (list `# by @` or `N/A`): +- Supersedes # (if replacing older PR) +- Linear issue key(s) (required, e.g. `RMN-123`): +- Linear issue URL(s): + +## Supersede Attribution (required when `Supersedes #` is used) + +- Superseded PRs + authors (`# by @`, one per line): +- Integrated scope by source PR (what was materially carried forward): +- `Co-authored-by` trailers added for materially incorporated contributors? (`Yes/No`) +- If `No`, explain why (for example: inspiration-only, no direct code/design carry-over): +- Trailer format check (separate lines, no escaped `\n`): (`Pass/Fail`) + +## Validation Evidence (required) + +Commands and result summary: + +```bash +cargo fmt --all -- --check +cargo clippy --all-targets -- -D warnings +cargo test +``` + +- Evidence provided (test/log/trace/screenshot/perf): +- If any command is intentionally skipped, explain why: + +## Security Impact (required) + +- New permissions/capabilities? (`Yes/No`) +- New external network calls? (`Yes/No`) +- Secrets/tokens handling changed? (`Yes/No`) +- File system access scope changed? (`Yes/No`) +- If any `Yes`, describe risk and mitigation: + +## Privacy and Data Hygiene (required) + +- Data-hygiene status (`pass|needs-follow-up`): +- Redaction/anonymization notes: +- Neutral wording confirmation (use ZeroClaw/project-native labels if identity-like wording is needed): + +## Compatibility / Migration + +- Backward compatible? (`Yes/No`) +- Config/env changes? (`Yes/No`) +- Migration needed? (`Yes/No`) +- If yes, exact upgrade steps: + +## i18n Follow-Through (required when docs or user-facing wording changes) + +- i18n follow-through triggered? (`Yes/No`) +- If `Yes`, locale navigation parity updated in `README*`, `docs/README*`, and `docs/SUMMARY.md` for supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`)? (`Yes/No`) +- If `Yes`, localized runtime-contract docs updated where equivalents exist (minimum for `fr`/`vi`: `commands-reference`, `config-reference`, `troubleshooting`)? 
(`Yes/No/N.A.`) +- If `Yes`, Vietnamese canonical docs under `docs/i18n/vi/**` synced and compatibility shims under `docs/*.vi.md` validated? (`Yes/No/N.A.`) +- If any `No`/`N.A.`, link follow-up issue/PR and explain scope decision: + +## Human Verification (required) + +What was personally validated beyond CI: + +- Verified scenarios: +- Edge cases checked: +- What was not verified: + +## Side Effects / Blast Radius (required) + +- Affected subsystems/workflows: +- Potential unintended effects: +- Guardrails/monitoring for early detection: + +## Agent Collaboration Notes (recommended) + +- Agent tools used (if any): +- Workflow/plan summary (if any): +- Verification focus: +- Confirmation: naming + architecture boundaries followed (`AGENTS.md` + `CONTRIBUTING.md`): + +## Rollback Plan (required) + +- Fast rollback command/path: +- Feature flags or config toggles (if any): +- Observable failure symptoms: + +## Risks and Mitigations + +List real risks in this PR (or write `None`). + +- Risk: + - Mitigation: diff --git a/.github/release/canary-policy.json b/.github/release/canary-policy.json new file mode 100644 index 0000000..e032311 --- /dev/null +++ b/.github/release/canary-policy.json @@ -0,0 +1,39 @@ +{ + "schema_version": "zeroclaw.canary-policy.v1", + "release_channel": "stable", + "observation_window_minutes": 60, + "minimum_sample_size": 500, + "cohorts": [ + { + "name": "canary-5pct", + "traffic_percent": 5, + "duration_minutes": 20 + }, + { + "name": "canary-20pct", + "traffic_percent": 20, + "duration_minutes": 20 + }, + { + "name": "canary-50pct", + "traffic_percent": 50, + "duration_minutes": 20 + }, + { + "name": "canary-100pct", + "traffic_percent": 100, + "duration_minutes": 60 + } + ], + "observability_signals": [ + "error_rate", + "crash_rate", + "p95_latency_ms", + "sample_size" + ], + "thresholds": { + "max_error_rate": 0.02, + "max_crash_rate": 0.01, + "max_p95_latency_ms": 1200 + } +} diff --git a/.github/release/docs-deploy-policy.json 
b/.github/release/docs-deploy-policy.json new file mode 100644 index 0000000..ba8db88 --- /dev/null +++ b/.github/release/docs-deploy-policy.json @@ -0,0 +1,10 @@ +{ + "schema_version": "zeroclaw.docs-deploy-policy.v1", + "production_branch": "main", + "allow_manual_production_dispatch": true, + "require_preview_evidence_on_manual_production": true, + "allow_manual_rollback_dispatch": true, + "rollback_ref_must_be_ancestor_of_production_branch": true, + "docs_preview_retention_days": 14, + "docs_guard_artifact_retention_days": 21 +} diff --git a/.github/release/ghcr-tag-policy.json b/.github/release/ghcr-tag-policy.json new file mode 100644 index 0000000..bbac3ff --- /dev/null +++ b/.github/release/ghcr-tag-policy.json @@ -0,0 +1,18 @@ +{ + "schema_version": "zeroclaw.ghcr-tag-policy.v1", + "release_tag_regex": "^v[0-9]+\\.[0-9]+\\.[0-9]+$", + "sha_tag_prefix": "sha-", + "sha_tag_length": 12, + "latest_tag": "latest", + "require_latest_on_release": true, + "immutable_tag_classes": [ + "release", + "sha" + ], + "rollback_priority": [ + "sha", + "release" + ], + "contract_artifact_retention_days": 21, + "scan_artifact_retention_days": 14 +} diff --git a/.github/release/ghcr-vulnerability-policy.json b/.github/release/ghcr-vulnerability-policy.json new file mode 100644 index 0000000..64209b0 --- /dev/null +++ b/.github/release/ghcr-vulnerability-policy.json @@ -0,0 +1,17 @@ +{ + "schema_version": "zeroclaw.ghcr-vulnerability-policy.v1", + "required_tag_classes": [ + "release", + "sha", + "latest" + ], + "blocking_severities": [ + "HIGH", + "CRITICAL" + ], + "max_blocking_findings_per_tag": 0, + "require_blocking_count_parity": true, + "require_artifact_id_parity": true, + "scan_artifact_retention_days": 14, + "audit_artifact_retention_days": 21 +} diff --git a/.github/release/nightly-owner-routing.json b/.github/release/nightly-owner-routing.json new file mode 100644 index 0000000..236f74c --- /dev/null +++ b/.github/release/nightly-owner-routing.json @@ -0,0 +1,9 @@ 
+{ + "schema_version": "zeroclaw.nightly-owner-routing.v1", + "owners": { + "default": "@chumyin", + "whatsapp-web": "@chumyin", + "browser-native": "@chumyin", + "nightly-all-features": "@chumyin" + } +} diff --git a/.github/release/prerelease-stage-gates.json b/.github/release/prerelease-stage-gates.json new file mode 100644 index 0000000..e2614ae --- /dev/null +++ b/.github/release/prerelease-stage-gates.json @@ -0,0 +1,33 @@ +{ + "schema_version": "zeroclaw.prerelease-stage-gates.v1", + "stage_order": ["alpha", "beta", "rc", "stable"], + "required_previous_stage": { + "beta": "alpha", + "rc": "beta", + "stable": "rc" + }, + "required_checks": { + "alpha": [ + "CI Required Gate", + "Security Audit" + ], + "beta": [ + "CI Required Gate", + "Security Audit", + "Feature Matrix Summary" + ], + "rc": [ + "CI Required Gate", + "Security Audit", + "Feature Matrix Summary", + "Nightly Summary & Routing" + ], + "stable": [ + "CI Required Gate", + "Security Audit", + "Feature Matrix Summary", + "Verify Artifact Set", + "Nightly Summary & Routing" + ] + } +} diff --git a/.github/release/release-artifact-contract.json b/.github/release/release-artifact-contract.json new file mode 100644 index 0000000..1459588 --- /dev/null +++ b/.github/release/release-artifact-contract.json @@ -0,0 +1,30 @@ +{ + "schema_version": "zeroclaw.release-artifact-contract.v1", + "release_archive_patterns": [ + "zeroclaw-x86_64-unknown-linux-gnu.tar.gz", + "zeroclaw-x86_64-unknown-linux-musl.tar.gz", + "zeroclaw-aarch64-unknown-linux-gnu.tar.gz", + "zeroclaw-aarch64-unknown-linux-musl.tar.gz", + "zeroclaw-armv7-unknown-linux-gnueabihf.tar.gz", + "zeroclaw-armv7-linux-androideabi.tar.gz", + "zeroclaw-aarch64-linux-android.tar.gz", + "zeroclaw-x86_64-unknown-freebsd.tar.gz", + "zeroclaw-x86_64-apple-darwin.tar.gz", + "zeroclaw-aarch64-apple-darwin.tar.gz", + "zeroclaw-x86_64-pc-windows-msvc.zip" + ], + "required_manifest_files": [ + "release-manifest.json", + "release-manifest.md", + "SHA256SUMS" + 
], + "required_sbom_files": [ + "zeroclaw.cdx.json", + "zeroclaw.spdx.json" + ], + "required_notice_files": [ + "LICENSE-APACHE", + "LICENSE-MIT", + "NOTICE" + ] +} diff --git a/.github/security/deny-ignore-governance.json b/.github/security/deny-ignore-governance.json new file mode 100644 index 0000000..d959274 --- /dev/null +++ b/.github/security/deny-ignore-governance.json @@ -0,0 +1,26 @@ +{ + "schema_version": "zeroclaw.deny-governance.v1", + "advisories": [ + { + "id": "RUSTSEC-2025-0141", + "owner": "repo-maintainers", + "reason": "Transitive via probe-rs in current release path; tracked for replacement when probe-rs updates.", + "ticket": "RMN-21", + "expires_on": "2026-12-31" + }, + { + "id": "RUSTSEC-2024-0384", + "owner": "repo-maintainers", + "reason": "Upstream rust-nostr advisory mitigation is still in progress; monitor until released fix lands.", + "ticket": "RMN-21", + "expires_on": "2026-12-31" + }, + { + "id": "RUSTSEC-2024-0388", + "owner": "repo-maintainers", + "reason": "Transitive via matrix-sdk indexeddb dependency chain in current matrix release line; track removal when upstream drops derivative.", + "ticket": "RMN-21", + "expires_on": "2026-12-31" + } + ] +} diff --git a/.github/security/gitleaks-allowlist-governance.json b/.github/security/gitleaks-allowlist-governance.json new file mode 100644 index 0000000..4ec7714 --- /dev/null +++ b/.github/security/gitleaks-allowlist-governance.json @@ -0,0 +1,56 @@ +{ + "schema_version": "zeroclaw.secrets-governance.v1", + "paths": [ + { + "pattern": "src/security/leak_detector\\.rs", + "owner": "repo-maintainers", + "reason": "Fixture patterns are intentionally embedded for regression tests in leak detector logic.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + { + "pattern": "src/agent/loop_\\.rs", + "owner": "repo-maintainers", + "reason": "Contains escaped template snippets used for command orchestration and parser coverage.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + 
{ + "pattern": "src/security/secrets\\.rs", + "owner": "repo-maintainers", + "reason": "Contains detector test vectors and redaction examples required for secret scanning tests.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + { + "pattern": "docs/(i18n/vi/|vi/)?zai-glm-setup\\.md", + "owner": "repo-maintainers", + "reason": "Documentation contains literal environment variable placeholders for onboarding commands.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + { + "pattern": "\\.github/workflows/pub-release\\.yml", + "owner": "repo-maintainers", + "reason": "Release workflow emits masked authorization header examples during registry smoke checks.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + } + ], + "regexes": [ + { + "pattern": "Authorization: Bearer \\$\\{[^}]+\\}", + "owner": "repo-maintainers", + "reason": "Intentional placeholder used in docs/workflow snippets for safe header examples.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + { + "pattern": "curl -sS -o /tmp/ghcr-release-manifest\\.json -w \"%\\{http_code\\}\"", + "owner": "repo-maintainers", + "reason": "Release smoke command string is non-secret telemetry and should not be flagged as credential leakage.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + } + ] +} diff --git a/.github/security/unsafe-audit-governance.json b/.github/security/unsafe-audit-governance.json new file mode 100644 index 0000000..e8edb6c --- /dev/null +++ b/.github/security/unsafe-audit-governance.json @@ -0,0 +1,5 @@ +{ + "schema_version": "zeroclaw.unsafe-audit-governance.v1", + "ignore_paths": [], + "ignore_pattern_ids": [] +} diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 0000000..fe3b3d8 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,36 @@ +# Workflow Directory Layout + +GitHub Actions only loads workflow entry files from: + +- `.github/workflows/*.yml` +- `.github/workflows/*.yaml` + +Subdirectories are not 
valid locations for workflow entry files. + +Repository convention: + +1. Keep runnable workflow entry files at `.github/workflows/` root. +2. Keep workflow-only helper scripts under `.github/workflows/scripts/`. +3. Keep cross-tooling/local CI scripts under `scripts/ci/` when they are used outside Actions. + +Workflow behavior documentation in this directory: + +- `.github/workflows/main-branch-flow.md` + +Current workflow helper scripts: + +- `.github/workflows/scripts/ci_workflow_owner_approval.js` +- `.github/workflows/scripts/ci_license_file_owner_guard.js` +- `.github/workflows/scripts/lint_feedback.js` +- `.github/workflows/scripts/pr_auto_response_contributor_tier.js` +- `.github/workflows/scripts/pr_auto_response_labeled_routes.js` +- `.github/workflows/scripts/pr_check_status_nudge.js` +- `.github/workflows/scripts/pr_intake_checks.js` +- `.github/workflows/scripts/pr_labeler.js` +- `.github/workflows/scripts/test_benchmarks_pr_comment.js` + +Release/CI policy assets introduced for advanced delivery lanes: + +- `.github/release/nightly-owner-routing.json` +- `.github/release/canary-policy.json` +- `.github/release/prerelease-stage-gates.json` diff --git a/.github/workflows/ci-build-fast.yml b/.github/workflows/ci-build-fast.yml new file mode 100644 index 0000000..a9cab2b --- /dev/null +++ b/.github/workflows/ci-build-fast.yml @@ -0,0 +1,63 @@ +name: CI Build (Fast) + +# Optional fast release build that runs alongside the normal Build (Smoke) job. +# This workflow is informational and does not gate merges. 
+ +on: + push: + branches: [dev, main] + pull_request: + branches: [dev, main] + +concurrency: + group: ci-fast-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + changes: + name: Detect Change Scope + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + outputs: + rust_changed: ${{ steps.scope.outputs.rust_changed }} + docs_only: ${{ steps.scope.outputs.docs_only }} + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + - name: Detect docs-only changes + id: scope + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + BASE_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }} + run: ./scripts/ci/detect_change_scope.sh + + build-fast: + name: Build (Fast) + needs: [changes] + if: needs.changes.outputs.rust_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 25 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: fast-build + cache-targets: true + + - name: Build release binary + run: cargo build --release --locked --verbose diff --git a/.github/workflows/ci-canary-gate.yml b/.github/workflows/ci-canary-gate.yml new file mode 100644 index 0000000..9fcfdff --- /dev/null +++ b/.github/workflows/ci-canary-gate.yml @@ -0,0 +1,329 @@ +name: CI Canary Gate + +on: + workflow_dispatch: + inputs: + mode: + description: "dry-run computes decision only; execute enables canary dispatch" + required: true + default: dry-run 
+ type: choice + options: + - dry-run + - execute + candidate_tag: + description: "Candidate release tag (e.g. v0.1.8-rc.1 or v0.1.8)" + required: false + default: "" + type: string + candidate_sha: + description: "Optional explicit candidate SHA" + required: false + default: "" + type: string + error_rate: + description: "Observed canary error rate (0.0-1.0)" + required: true + default: "0.0" + type: string + crash_rate: + description: "Observed canary crash rate (0.0-1.0)" + required: true + default: "0.0" + type: string + p95_latency_ms: + description: "Observed canary p95 latency in milliseconds" + required: true + default: "0" + type: string + sample_size: + description: "Observed canary sample size" + required: true + default: "0" + type: string + emit_repository_dispatch: + description: "Emit canary decision repository_dispatch event" + required: true + default: false + type: boolean + trigger_rollback_on_abort: + description: "Automatically dispatch CI Rollback Guard when canary decision is abort" + required: true + default: true + type: boolean + rollback_branch: + description: "Rollback integration branch used by CI Rollback Guard dispatch" + required: true + default: dev + type: choice + options: + - dev + - main + rollback_target_ref: + description: "Optional explicit rollback target ref passed to CI Rollback Guard" + required: false + default: "" + type: string + fail_on_violation: + description: "Fail on policy violations" + required: true + default: true + type: boolean + schedule: + - cron: "45 7 * * 1" # Weekly Monday 07:45 UTC + +concurrency: + group: canary-gate-${{ github.event.inputs.candidate_tag || github.ref || github.run_id }} + cancel-in-progress: false + +permissions: + contents: read + actions: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + canary-plan: + name: Canary Plan + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + 
timeout-minutes: 20 + outputs: + mode: ${{ steps.inputs.outputs.mode }} + candidate_tag: ${{ steps.inputs.outputs.candidate_tag }} + candidate_sha: ${{ steps.inputs.outputs.candidate_sha }} + trigger_rollback_on_abort: ${{ steps.inputs.outputs.trigger_rollback_on_abort }} + rollback_branch: ${{ steps.inputs.outputs.rollback_branch }} + rollback_target_ref: ${{ steps.inputs.outputs.rollback_target_ref }} + decision: ${{ steps.extract.outputs.decision }} + ready_to_execute: ${{ steps.extract.outputs.ready_to_execute }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Resolve canary inputs + id: inputs + shell: bash + run: | + set -euo pipefail + + mode="dry-run" + candidate_tag="" + candidate_sha="" + error_rate="0.0" + crash_rate="0.0" + p95_latency_ms="0" + sample_size="0" + trigger_rollback_on_abort="true" + rollback_branch="dev" + rollback_target_ref="" + fail_on_violation="true" + + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + mode="${{ github.event.inputs.mode || 'dry-run' }}" + candidate_tag="${{ github.event.inputs.candidate_tag || '' }}" + candidate_sha="${{ github.event.inputs.candidate_sha || '' }}" + error_rate="${{ github.event.inputs.error_rate || '0.0' }}" + crash_rate="${{ github.event.inputs.crash_rate || '0.0' }}" + p95_latency_ms="${{ github.event.inputs.p95_latency_ms || '0' }}" + sample_size="${{ github.event.inputs.sample_size || '0' }}" + trigger_rollback_on_abort="${{ github.event.inputs.trigger_rollback_on_abort || 'true' }}" + rollback_branch="${{ github.event.inputs.rollback_branch || 'dev' }}" + rollback_target_ref="${{ github.event.inputs.rollback_target_ref || '' }}" + fail_on_violation="${{ github.event.inputs.fail_on_violation || 'true' }}" + else + git fetch --tags --force origin + candidate_tag="$(git tag --list 'v*' --sort=-version:refname | head -n1)" + if [ -n "$candidate_tag" ]; then + candidate_sha="$(git rev-parse 
"${candidate_tag}^{commit}")" + fi + fi + + { + echo "mode=${mode}" + echo "candidate_tag=${candidate_tag}" + echo "candidate_sha=${candidate_sha}" + echo "error_rate=${error_rate}" + echo "crash_rate=${crash_rate}" + echo "p95_latency_ms=${p95_latency_ms}" + echo "sample_size=${sample_size}" + echo "trigger_rollback_on_abort=${trigger_rollback_on_abort}" + echo "rollback_branch=${rollback_branch}" + echo "rollback_target_ref=${rollback_target_ref}" + echo "fail_on_violation=${fail_on_violation}" + } >> "$GITHUB_OUTPUT" + + - name: Run canary guard + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + + args=() + if [ "${{ steps.inputs.outputs.fail_on_violation }}" = "true" ]; then + args+=(--fail-on-violation) + fi + + python3 scripts/ci/canary_guard.py \ + --policy-file .github/release/canary-policy.json \ + --candidate-tag "${{ steps.inputs.outputs.candidate_tag }}" \ + --candidate-sha "${{ steps.inputs.outputs.candidate_sha }}" \ + --mode "${{ steps.inputs.outputs.mode }}" \ + --error-rate "${{ steps.inputs.outputs.error_rate }}" \ + --crash-rate "${{ steps.inputs.outputs.crash_rate }}" \ + --p95-latency-ms "${{ steps.inputs.outputs.p95_latency_ms }}" \ + --sample-size "${{ steps.inputs.outputs.sample_size }}" \ + --output-json artifacts/canary-guard.json \ + --output-md artifacts/canary-guard.md \ + "${args[@]}" + + - name: Extract canary decision outputs + id: extract + shell: bash + run: | + set -euo pipefail + decision="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/canary-guard.json', encoding='utf-8')) + print(data.get('decision', 'hold')) + PY + )" + ready_to_execute="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/canary-guard.json', encoding='utf-8')) + print(str(bool(data.get('ready_to_execute', False))).lower()) + PY + )" + echo "decision=${decision}" >> "$GITHUB_OUTPUT" + echo "ready_to_execute=${ready_to_execute}" >> "$GITHUB_OUTPUT" + + - name: Emit canary audit event + if: always() + shell: 
bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type canary_guard \ + --input-json artifacts/canary-guard.json \ + --output-json artifacts/audit-event-canary-guard.json \ + --artifact-name canary-guard \ + --retention-days 21 + + - name: Publish canary summary + if: always() + shell: bash + run: | + set -euo pipefail + cat artifacts/canary-guard.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload canary artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: canary-guard + path: | + artifacts/canary-guard.json + artifacts/canary-guard.md + artifacts/audit-event-canary-guard.json + if-no-files-found: error + retention-days: 21 + + canary-execute: + name: Canary Execute + needs: [canary-plan] + if: github.event_name == 'workflow_dispatch' && needs.canary-plan.outputs.mode == 'execute' && needs.canary-plan.outputs.ready_to_execute == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + permissions: + contents: write + actions: write + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Create canary marker tag + shell: bash + run: | + set -euo pipefail + marker_tag="canary-${{ needs.canary-plan.outputs.candidate_tag }}-${{ github.run_id }}" + git fetch --tags --force origin + git tag -a "$marker_tag" "${{ needs.canary-plan.outputs.candidate_sha }}" -m "Canary decision marker from run ${{ github.run_id }}" + git push origin "$marker_tag" + echo "Created marker tag: $marker_tag" >> "$GITHUB_STEP_SUMMARY" + + - name: Emit canary repository dispatch + if: github.event.inputs.emit_repository_dispatch == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + await github.rest.repos.createDispatchEvent({ + owner: context.repo.owner, + repo: context.repo.repo, + event_type: `canary_${{ 
needs.canary-plan.outputs.decision }}`, + client_payload: { + candidate_tag: "${{ needs.canary-plan.outputs.candidate_tag }}", + candidate_sha: "${{ needs.canary-plan.outputs.candidate_sha }}", + decision: "${{ needs.canary-plan.outputs.decision }}", + run_id: context.runId, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + source_sha: context.sha + } + }); + + - name: Trigger rollback guard workflow on abort + if: needs.canary-plan.outputs.decision == 'abort' && needs.canary-plan.outputs.trigger_rollback_on_abort == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const rollbackBranch = "${{ needs.canary-plan.outputs.rollback_branch }}" || "dev"; + const rollbackTargetRef = `${{ needs.canary-plan.outputs.rollback_target_ref }}`.trim(); + const workflowRef = process.env.GITHUB_REF_NAME || "dev"; + + const inputs = { + branch: rollbackBranch, + mode: "execute", + allow_non_ancestor: "false", + fail_on_violation: "true", + create_marker_tag: "true", + emit_repository_dispatch: "true", + }; + + if (rollbackTargetRef.length > 0) { + inputs.target_ref = rollbackTargetRef; + } + + await github.rest.actions.createWorkflowDispatch({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: "ci-rollback.yml", + ref: workflowRef, + inputs, + }); + + - name: Publish rollback trigger summary + if: needs.canary-plan.outputs.decision == 'abort' + shell: bash + run: | + set -euo pipefail + if [ "${{ needs.canary-plan.outputs.trigger_rollback_on_abort }}" = "true" ]; then + { + echo "### Canary Abort Rollback Trigger" + echo "- CI Rollback Guard dispatch: triggered" + echo "- Rollback branch: \`${{ needs.canary-plan.outputs.rollback_branch }}\`" + if [ -n "${{ needs.canary-plan.outputs.rollback_target_ref }}" ]; then + echo "- Rollback target ref: \`${{ needs.canary-plan.outputs.rollback_target_ref }}\`" + else + echo "- Rollback target ref: _auto (latest release tag strategy)_" + fi + } >> "$GITHUB_STEP_SUMMARY" 
+ else + { + echo "### Canary Abort Rollback Trigger" + echo "- CI Rollback Guard dispatch: skipped (trigger_rollback_on_abort=false)" + } >> "$GITHUB_STEP_SUMMARY" + fi diff --git a/.github/workflows/ci-change-audit.yml b/.github/workflows/ci-change-audit.yml new file mode 100644 index 0000000..1fa4970 --- /dev/null +++ b/.github/workflows/ci-change-audit.yml @@ -0,0 +1,154 @@ +name: CI/CD Change Audit + +on: + pull_request: + branches: [dev, main] + paths: + - ".github/workflows/**" + - ".github/release/**" + - ".github/codeql/**" + - "scripts/ci/**" + - ".github/dependabot.yml" + - "deny.toml" + - ".gitleaks.toml" + push: + branches: [dev, main] + paths: + - ".github/workflows/**" + - ".github/release/**" + - ".github/codeql/**" + - "scripts/ci/**" + - ".github/dependabot.yml" + - "deny.toml" + - ".gitleaks.toml" + workflow_dispatch: + inputs: + base_sha: + description: "Optional base SHA (default: HEAD~1)" + required: false + default: "" + type: string + fail_on_policy: + description: "Fail when audit policy violations are found" + required: true + default: true + type: boolean + +concurrency: + group: ci-change-audit-${{ github.event.pull_request.number || github.sha || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + audit: + name: CI Change Audit + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Resolve base/head commits + id: refs + shell: bash + run: | + set -euo pipefail + head_sha="$(git rev-parse HEAD)" + if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then + # For pull_request events, checkout uses refs/pull/*/merge; HEAD^1 is the + # effective base commit for this synthesized merge and avoids stale base.sha. 
+ if git rev-parse --verify HEAD^1 >/dev/null 2>&1; then + base_sha="$(git rev-parse HEAD^1)" + else + base_sha="${{ github.event.pull_request.base.sha }}" + fi + elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then + base_sha="${{ github.event.before }}" + else + base_sha="${{ github.event.inputs.base_sha || '' }}" + if [ -z "$base_sha" ]; then + base_sha="$(git rev-parse HEAD~1)" + fi + fi + echo "base_sha=$base_sha" >> "$GITHUB_OUTPUT" + echo "head_sha=$head_sha" >> "$GITHUB_OUTPUT" + + - name: Run CI helper script unit tests + shell: bash + run: | + set -euo pipefail + python3 -m unittest discover -s scripts/ci/tests -p 'test_*.py' -v + + - name: Generate CI change audit + shell: bash + env: + BASE_SHA: ${{ steps.refs.outputs.base_sha }} + HEAD_SHA: ${{ steps.refs.outputs.head_sha }} + run: | + set -euo pipefail + mkdir -p artifacts + fail_on_policy="true" + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + fail_on_policy="${{ github.event.inputs.fail_on_policy || 'true' }}" + fi + cmd=(python3 scripts/ci/ci_change_audit.py + --base-sha "$BASE_SHA" + --head-sha "$HEAD_SHA" + --output-json artifacts/ci-change-audit.json + --output-md artifacts/ci-change-audit.md) + if [ "$fail_on_policy" = "true" ]; then + cmd+=(--fail-on-violations) + fi + "${cmd[@]}" + + - name: Emit normalized audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ci-change-audit.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type ci_change_audit \ + --input-json artifacts/ci-change-audit.json \ + --output-json artifacts/audit-event-ci-change-audit.json \ + --artifact-name ci-change-audit-event \ + --retention-days 14 + fi + + - name: Upload audit artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: ci-change-audit + path: artifacts/ci-change-audit.* + retention-days: 14 + + - name: Publish audit summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ 
-f artifacts/ci-change-audit.md ]; then + cat artifacts/ci-change-audit.md >> "$GITHUB_STEP_SUMMARY" + else + echo "CI change audit report was not generated." >> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload audit event artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: ci-change-audit-event + path: artifacts/audit-event-ci-change-audit.json + if-no-files-found: ignore + retention-days: 14 diff --git a/.github/workflows/ci-provider-connectivity.yml b/.github/workflows/ci-provider-connectivity.yml new file mode 100644 index 0000000..701f923 --- /dev/null +++ b/.github/workflows/ci-provider-connectivity.yml @@ -0,0 +1,112 @@ +name: CI Provider Connectivity + +on: + schedule: + - cron: "30 */6 * * *" # Every 6 hours + workflow_dispatch: + inputs: + fail_on_critical: + description: "Fail run when critical endpoints are unreachable" + required: true + default: false + type: boolean + pull_request: + branches: [dev, main] + paths: + - ".github/workflows/ci-provider-connectivity.yml" + - ".github/connectivity/providers.json" + - "scripts/ci/provider_connectivity_matrix.py" + push: + branches: [dev, main] + paths: + - ".github/workflows/ci-provider-connectivity.yml" + - ".github/connectivity/providers.json" + - "scripts/ci/provider_connectivity_matrix.py" + +concurrency: + group: provider-connectivity-${{ github.event.pull_request.number || github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + probe: + name: Provider Connectivity Probe + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Run connectivity matrix probe + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + 
fail_on_critical="false" + case "${GITHUB_EVENT_NAME}" in + schedule) + fail_on_critical="true" + ;; + workflow_dispatch) + fail_on_critical="${{ github.event.inputs.fail_on_critical || 'false' }}" + ;; + esac + + cmd=(python3 scripts/ci/provider_connectivity_matrix.py + --config .github/connectivity/providers.json + --output-json artifacts/provider-connectivity-matrix.json + --output-md artifacts/provider-connectivity-matrix.md) + if [ "$fail_on_critical" = "true" ]; then + cmd+=(--fail-on-critical) + fi + "${cmd[@]}" + + - name: Emit normalized audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/provider-connectivity-matrix.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type provider_connectivity \ + --input-json artifacts/provider-connectivity-matrix.json \ + --output-json artifacts/audit-event-provider-connectivity.json \ + --artifact-name provider-connectivity-audit-event \ + --retention-days 14 + fi + + - name: Upload connectivity artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: provider-connectivity-matrix + path: artifacts/provider-connectivity-matrix.* + retention-days: 14 + + - name: Publish summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/provider-connectivity-matrix.md ]; then + cat artifacts/provider-connectivity-matrix.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Provider connectivity report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload audit event artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: provider-connectivity-audit-event + path: artifacts/audit-event-provider-connectivity.json + if-no-files-found: ignore + retention-days: 14 diff --git a/.github/workflows/ci-reproducible-build.yml b/.github/workflows/ci-reproducible-build.yml new file mode 100644 index 0000000..9deb0d6 --- /dev/null +++ b/.github/workflows/ci-reproducible-build.yml @@ -0,0 +1,121 @@ +name: CI Reproducible Build + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "scripts/ci/reproducible_build_check.sh" + - ".github/workflows/ci-reproducible-build.yml" + pull_request: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "scripts/ci/reproducible_build_check.sh" + - ".github/workflows/ci-reproducible-build.yml" + schedule: + - cron: "45 5 * * 1" # Weekly Monday 05:45 UTC + workflow_dispatch: + inputs: + fail_on_drift: + description: "Fail workflow if deterministic hash drift is detected" + required: true + default: true + type: boolean + allow_build_id_drift: + description: "Treat GNU build-id-only drift as non-blocking" + required: true + default: true + type: boolean + +concurrency: + group: repro-build-${{ github.event.pull_request.number || github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + reproducibility: + name: Reproducible Build Probe + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 45 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Setup Rust + uses: 
dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - name: Run reproducible build check + shell: bash + run: | + set -euo pipefail + fail_on_drift="false" + allow_build_id_drift="true" + if [ "${GITHUB_EVENT_NAME}" = "schedule" ]; then + fail_on_drift="true" + elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + fail_on_drift="${{ github.event.inputs.fail_on_drift || 'true' }}" + allow_build_id_drift="${{ github.event.inputs.allow_build_id_drift || 'true' }}" + fi + FAIL_ON_DRIFT="$fail_on_drift" \ + ALLOW_BUILD_ID_DRIFT="$allow_build_id_drift" \ + OUTPUT_DIR="artifacts" \ + ./scripts/ci/reproducible_build_check.sh + + - name: Emit normalized audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/reproducible-build.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type reproducible_build \ + --input-json artifacts/reproducible-build.json \ + --output-json artifacts/audit-event-reproducible-build.json \ + --artifact-name reproducible-build-audit-event \ + --retention-days 14 + fi + + - name: Upload reproducibility artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: reproducible-build + path: artifacts/reproducible-build* + retention-days: 14 + + - name: Upload audit event artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: reproducible-build-audit-event + path: artifacts/audit-event-reproducible-build.json + if-no-files-found: ignore + retention-days: 14 + + - name: Publish summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/reproducible-build.md ]; then + cat artifacts/reproducible-build.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Reproducible build report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi diff --git a/.github/workflows/ci-rollback.yml b/.github/workflows/ci-rollback.yml new file mode 100644 index 0000000..cc6cde2 --- /dev/null +++ b/.github/workflows/ci-rollback.yml @@ -0,0 +1,257 @@ +name: CI Rollback Guard + +on: + workflow_dispatch: + inputs: + branch: + description: "Integration branch this rollback targets" + required: true + default: dev + type: choice + options: + - dev + - main + mode: + description: "dry-run only plans; execute enables rollback marker/dispatch actions" + required: true + default: dry-run + type: choice + options: + - dry-run + - execute + target_ref: + description: "Optional explicit rollback target (tag/sha/ref). Empty = latest matching tag." + required: false + default: "" + type: string + allow_non_ancestor: + description: "Allow target not being ancestor of current head (warning-only)" + required: true + default: false + type: boolean + fail_on_violation: + description: "Fail workflow when guard violations are detected" + required: true + default: true + type: boolean + create_marker_tag: + description: "In execute mode, create and push rollback marker tag" + required: true + default: false + type: boolean + emit_repository_dispatch: + description: "In execute mode, emit repository_dispatch event `rollback_execute`" + required: true + default: false + type: boolean + schedule: + - cron: "15 7 * * 1" # Weekly Monday 07:15 UTC + +concurrency: + group: ci-rollback-${{ github.event.inputs.branch || 'dev' }} + cancel-in-progress: false + +permissions: + contents: read + actions: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + rollback-plan: + name: Rollback Guard Plan + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + outputs: + branch: ${{ steps.plan.outputs.branch }} + mode: ${{ steps.plan.outputs.mode }} + target_sha: ${{ steps.plan.outputs.target_sha }} + 
target_ref: ${{ steps.plan.outputs.target_ref }} + ready_to_execute: ${{ steps.plan.outputs.ready_to_execute }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + ref: ${{ github.event.inputs.branch || 'dev' }} + + - name: Build rollback plan + id: plan + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + + branch_input="dev" + mode_input="dry-run" + target_ref_input="" + allow_non_ancestor="false" + fail_on_violation="true" + + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + branch_input="${{ github.event.inputs.branch || 'dev' }}" + mode_input="${{ github.event.inputs.mode || 'dry-run' }}" + target_ref_input="${{ github.event.inputs.target_ref || '' }}" + allow_non_ancestor="${{ github.event.inputs.allow_non_ancestor || 'false' }}" + fail_on_violation="${{ github.event.inputs.fail_on_violation || 'true' }}" + fi + + cmd=(python3 scripts/ci/rollback_guard.py + --repo-root . + --branch "$branch_input" + --mode "$mode_input" + --strategy latest-release-tag + --tag-pattern "v*" + --output-json artifacts/rollback-plan.json + --output-md artifacts/rollback-plan.md) + + if [ -n "$target_ref_input" ]; then + cmd+=(--target-ref "$target_ref_input") + fi + if [ "$allow_non_ancestor" = "true" ]; then + cmd+=(--allow-non-ancestor) + fi + if [ "$fail_on_violation" = "true" ]; then + cmd+=(--fail-on-violation) + fi + + "${cmd[@]}" + + target_sha="$(python3 - <<'PY' + import json + d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8")) + print(d.get("target_sha", "")) + PY + )" + target_ref="$(python3 - <<'PY' + import json + d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8")) + print(d.get("target_ref", "")) + PY + )" + ready_to_execute="$(python3 - <<'PY' + import json + d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8")) + print(str(d.get("ready_to_execute", False)).lower()) + PY + )" + + { + echo 
"branch=$branch_input" + echo "mode=$mode_input" + echo "target_sha=$target_sha" + echo "target_ref=$target_ref" + echo "ready_to_execute=$ready_to_execute" + } >> "$GITHUB_OUTPUT" + + - name: Emit rollback audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/rollback-plan.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type rollback_guard \ + --input-json artifacts/rollback-plan.json \ + --output-json artifacts/audit-event-rollback-guard.json \ + --artifact-name ci-rollback-plan \ + --retention-days 21 + fi + + - name: Upload rollback artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ci-rollback-plan + path: | + artifacts/rollback-plan.* + artifacts/audit-event-rollback-guard.json + if-no-files-found: ignore + retention-days: 21 + + - name: Publish rollback summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/rollback-plan.md ]; then + cat artifacts/rollback-plan.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Rollback plan markdown report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + rollback-execute: + name: Rollback Execute Actions + needs: [rollback-plan] + if: github.event_name == 'workflow_dispatch' && needs.rollback-plan.outputs.mode == 'execute' && needs.rollback-plan.outputs.ready_to_execute == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + permissions: + contents: write + actions: read + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + ref: ${{ needs.rollback-plan.outputs.branch }} + + - name: Fetch tags + shell: bash + run: | + set -euo pipefail + git fetch --tags --force origin + + - name: Create rollback marker tag + id: marker + if: github.event.inputs.create_marker_tag == 'true' + shell: bash + run: | + set -euo pipefail + target_sha="${{ needs.rollback-plan.outputs.target_sha }}" + if [ -z "$target_sha" ]; then + echo "Rollback guard did not resolve target_sha." + exit 1 + fi + marker_tag="rollback-${{ needs.rollback-plan.outputs.branch }}-${{ github.run_id }}" + git tag -a "$marker_tag" "$target_sha" -m "Rollback marker from run ${{ github.run_id }}" + git push origin "$marker_tag" + echo "marker_tag=$marker_tag" >> "$GITHUB_OUTPUT" + + - name: Emit rollback repository dispatch + if: github.event.inputs.emit_repository_dispatch == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + await github.rest.repos.createDispatchEvent({ + owner: context.repo.owner, + repo: context.repo.repo, + event_type: "rollback_execute", + client_payload: { + branch: "${{ needs.rollback-plan.outputs.branch }}", + target_ref: "${{ needs.rollback-plan.outputs.target_ref }}", + target_sha: "${{ needs.rollback-plan.outputs.target_sha }}", + run_id: context.runId, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + source_sha: context.sha + } + }); + + - name: Publish execute summary + if: always() + shell: bash + run: | + set 
-euo pipefail + { + echo "### Rollback Execute Actions" + echo "- Branch: \`${{ needs.rollback-plan.outputs.branch }}\`" + echo "- Target ref: \`${{ needs.rollback-plan.outputs.target_ref }}\`" + echo "- Target sha: \`${{ needs.rollback-plan.outputs.target_sha }}\`" + if [ -n "${{ steps.marker.outputs.marker_tag || '' }}" ]; then + echo "- Marker tag: \`${{ steps.marker.outputs.marker_tag }}\`" + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/ci-run.yml b/.github/workflows/ci-run.yml new file mode 100644 index 0000000..fd74bf4 --- /dev/null +++ b/.github/workflows/ci-run.yml @@ -0,0 +1,446 @@ +name: CI Run + +on: + push: + branches: [dev, main] + pull_request: + branches: [dev, main] + merge_group: + branches: [dev, main] + +concurrency: + group: ci-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + changes: + name: Detect Change Scope + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + outputs: + docs_only: ${{ steps.scope.outputs.docs_only }} + docs_changed: ${{ steps.scope.outputs.docs_changed }} + rust_changed: ${{ steps.scope.outputs.rust_changed }} + workflow_changed: ${{ steps.scope.outputs.workflow_changed }} + docs_files: ${{ steps.scope.outputs.docs_files }} + base_sha: ${{ steps.scope.outputs.base_sha }} + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Detect docs-only changes + id: scope + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + BASE_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event_name == 'merge_group' && github.event.merge_group.base_sha || github.event.before }} + run: ./scripts/ci/detect_change_scope.sh + + lint: + name: Lint Gate (Format + Clippy + Strict Delta) 
+ needs: [changes] + if: needs.changes.outputs.rust_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 25 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + components: rustfmt, clippy + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: ci-run-lint + - name: Run rust quality gate + run: ./scripts/ci/rust_quality_gate.sh + - name: Run strict lint delta gate + env: + BASE_SHA: ${{ needs.changes.outputs.base_sha }} + run: ./scripts/ci/rust_strict_delta_gate.sh + + test: + name: Test + needs: [changes, lint] + if: needs.changes.outputs.rust_changed == 'true' && needs.lint.result == 'success' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: ci-run-test + - name: Run tests + run: cargo test --locked --verbose + + build: + name: Build (Smoke) + needs: [changes] + if: needs.changes.outputs.rust_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: ci-run-build + cache-targets: true + - name: Build binary (smoke check) + run: cargo build --profile release-fast --locked 
--verbose + - name: Check binary size + run: bash scripts/ci/check_binary_size.sh target/release-fast/zeroclaw + + flake-probe: + name: Test Flake Retry Probe + needs: [changes, lint, test] + if: always() && needs.changes.outputs.rust_changed == 'true' && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci:full')) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 25 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: ci-run-flake-probe + - name: Probe flaky failure via single retry + shell: bash + env: + INITIAL_TEST_RESULT: ${{ needs.test.result }} + BLOCK_ON_FLAKE: ${{ vars.CI_BLOCK_ON_FLAKE_SUSPECTED || 'false' }} + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/flake_retry_probe.py \ + --initial-result "${INITIAL_TEST_RESULT}" \ + --retry-command "cargo test --locked --verbose" \ + --output-json artifacts/flake-probe.json \ + --output-md artifacts/flake-probe.md \ + --block-on-flake "${BLOCK_ON_FLAKE}" + - name: Publish flake probe summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/flake-probe.md ]; then + cat artifacts/flake-probe.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Flake probe report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + - name: Upload flake probe artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: test-flake-probe + path: artifacts/flake-probe.* + if-no-files-found: ignore + retention-days: 14 + + docs-only: + name: Docs-Only Fast Path + needs: [changes] + if: needs.changes.outputs.docs_only == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Skip heavy jobs for docs-only change + run: echo "Docs-only change detected. Rust lint/test/build skipped." + + non-rust: + name: Non-Rust Fast Path + needs: [changes] + if: needs.changes.outputs.docs_only != 'true' && needs.changes.outputs.rust_changed != 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Skip Rust jobs for non-Rust change scope + run: echo "No Rust-impacting files changed. Rust lint/test/build skipped." + + docs-quality: + name: Docs Quality + needs: [changes] + if: needs.changes.outputs.docs_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Markdown lint (changed lines only) + env: + BASE_SHA: ${{ needs.changes.outputs.base_sha }} + DOCS_FILES: ${{ needs.changes.outputs.docs_files }} + run: ./scripts/ci/docs_quality_gate.sh + + - name: Collect added links + id: collect_links + shell: bash + env: + BASE_SHA: ${{ needs.changes.outputs.base_sha }} + DOCS_FILES: ${{ needs.changes.outputs.docs_files }} + run: | + set -euo pipefail + python3 ./scripts/ci/collect_changed_links.py \ + --base "$BASE_SHA" \ + --docs-files "$DOCS_FILES" \ + --output .ci-added-links.txt + count=$(wc -l < .ci-added-links.txt | tr -d ' ') + echo "count=$count" >> "$GITHUB_OUTPUT" + if [ "$count" -gt 0 ]; then + echo "Added links queued 
for check:" + cat .ci-added-links.txt + else + echo "No added links found in changed docs lines." + fi + + - name: Link check (offline, added links only) + if: steps.collect_links.outputs.count != '0' + uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2 + with: + fail: true + args: >- + --offline + --no-progress + --format detailed + .ci-added-links.txt + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Skip link check (no added links) + if: steps.collect_links.outputs.count == '0' + run: echo "No added links in changed docs lines. Link check skipped." + + lint-feedback: + name: Lint Feedback + if: github.event_name == 'pull_request' + needs: [changes, lint, docs-quality] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: write + issues: write + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Post actionable lint failure summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + RUST_CHANGED: ${{ needs.changes.outputs.rust_changed }} + DOCS_CHANGED: ${{ needs.changes.outputs.docs_changed }} + LINT_RESULT: ${{ needs.lint.result }} + LINT_DELTA_RESULT: ${{ needs.lint.result }} + DOCS_RESULT: ${{ needs.docs-quality.result }} + with: + script: | + const script = require('./.github/workflows/scripts/lint_feedback.js'); + await script({github, context, core}); + + workflow-owner-approval: + name: Workflow Owner Approval + needs: [changes] + if: github.event_name == 'pull_request' && needs.changes.outputs.workflow_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: read + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Require owner approval for workflow file changes + uses: 
actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + WORKFLOW_OWNER_LOGINS: ${{ vars.WORKFLOW_OWNER_LOGINS }} + with: + script: | + const script = require('./.github/workflows/scripts/ci_workflow_owner_approval.js'); + await script({ github, context, core }); + + human-review-approval: + name: Human Review Approval + needs: [changes] + if: github.event_name == 'pull_request' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: read + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ github.event.pull_request.base.sha }} + + - name: Require at least one human approving review + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + HUMAN_REVIEW_BOT_LOGINS: ${{ vars.HUMAN_REVIEW_BOT_LOGINS }} + with: + script: | + const script = require('./.github/workflows/scripts/ci_human_review_guard.js'); + await script({ github, context, core }); + + license-file-owner-guard: + name: License File Owner Guard + needs: [changes] + if: github.event_name == 'pull_request' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: read + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Enforce owner-only edits for root license files + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/ci_license_file_owner_guard.js'); + await script({ github, context, core }); + ci-required: + name: CI Required Gate + if: always() + needs: [changes, lint, test, build, flake-probe, docs-only, non-rust, docs-quality, lint-feedback, workflow-owner-approval, human-review-approval, license-file-owner-guard] + runs-on: [self-hosted, Linux, X64, aws-india, 
blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Enforce required status + shell: bash + run: | + set -euo pipefail + + event_name="${{ github.event_name }}" + rust_changed="${{ needs.changes.outputs.rust_changed }}" + docs_changed="${{ needs.changes.outputs.docs_changed }}" + workflow_changed="${{ needs.changes.outputs.workflow_changed }}" + docs_result="${{ needs.docs-quality.result }}" + workflow_owner_result="${{ needs.workflow-owner-approval.result }}" + human_review_result="${{ needs.human-review-approval.result }}" + license_owner_result="${{ needs.license-file-owner-guard.result }}" + + if [ "${{ needs.changes.outputs.docs_only }}" = "true" ]; then + echo "workflow_owner_approval=${workflow_owner_result}" + echo "human_review_approval=${human_review_result}" + echo "license_file_owner_guard=${license_owner_result}" + if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then + echo "Workflow files changed but workflow owner approval gate did not pass." + exit 1 + fi + if [ "$event_name" = "pull_request" ] && [ "$human_review_result" != "success" ]; then + echo "Human review approval guard did not pass." + exit 1 + fi + if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then + echo "License file owner guard did not pass." + exit 1 + fi + if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then + echo "Docs-only change detected, but docs-quality did not pass." + exit 1 + fi + echo "Docs-only fast path passed." 
+ exit 0 + fi + + if [ "$rust_changed" != "true" ]; then + echo "rust_changed=false (non-rust fast path)" + echo "workflow_owner_approval=${workflow_owner_result}" + echo "human_review_approval=${human_review_result}" + echo "license_file_owner_guard=${license_owner_result}" + if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then + echo "Workflow files changed but workflow owner approval gate did not pass." + exit 1 + fi + if [ "$event_name" = "pull_request" ] && [ "$human_review_result" != "success" ]; then + echo "Human review approval guard did not pass." + exit 1 + fi + if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then + echo "License file owner guard did not pass." + exit 1 + fi + if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then + echo "Non-rust change touched docs, but docs-quality did not pass." + exit 1 + fi + echo "Non-rust fast path passed." + exit 0 + fi + + lint_result="${{ needs.lint.result }}" + lint_strict_delta_result="${{ needs.lint.result }}" + test_result="${{ needs.test.result }}" + build_result="${{ needs.build.result }}" + flake_result="${{ needs.flake-probe.result }}" + + echo "lint=${lint_result}" + echo "lint_strict_delta=${lint_strict_delta_result}" + echo "test=${test_result}" + echo "build=${build_result}" + echo "flake_probe=${flake_result}" + echo "docs=${docs_result}" + echo "workflow_owner_approval=${workflow_owner_result}" + echo "human_review_approval=${human_review_result}" + echo "license_file_owner_guard=${license_owner_result}" + + if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then + echo "Workflow files changed but workflow owner approval gate did not pass." + exit 1 + fi + + if [ "$event_name" = "pull_request" ] && [ "$human_review_result" != "success" ]; then + echo "Human review approval guard did not pass." 
+ exit 1 + fi + + if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then + echo "License file owner guard did not pass." + exit 1 + fi + + if [ "$event_name" = "pull_request" ]; then + if [ "$lint_result" != "success" ] || [ "$lint_strict_delta_result" != "success" ] || [ "$test_result" != "success" ] || [ "$build_result" != "success" ]; then + echo "Required PR CI jobs did not pass." + exit 1 + fi + if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then + echo "PR changed docs, but docs-quality did not pass." + exit 1 + fi + echo "PR required checks passed." + exit 0 + fi + + if [ "$lint_result" != "success" ] || [ "$lint_strict_delta_result" != "success" ] || [ "$test_result" != "success" ] || [ "$build_result" != "success" ]; then + echo "Required push CI jobs did not pass." + exit 1 + fi + + if [ "$flake_result" != "success" ]; then + echo "Flake probe did not pass under current blocking policy." + exit 1 + fi + + if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then + echo "Push changed docs, but docs-quality did not pass." + exit 1 + fi + + echo "Push required checks passed." 
diff --git a/.github/workflows/ci-supply-chain-provenance.yml b/.github/workflows/ci-supply-chain-provenance.yml new file mode 100644 index 0000000..55eb28c --- /dev/null +++ b/.github/workflows/ci-supply-chain-provenance.yml @@ -0,0 +1,110 @@ +name: CI Supply Chain Provenance + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "scripts/ci/generate_provenance.py" + - ".github/workflows/ci-supply-chain-provenance.yml" + workflow_dispatch: + schedule: + - cron: "20 6 * * 1" # Weekly Monday 06:20 UTC + +concurrency: + group: supply-chain-provenance-${{ github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + id-token: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + provenance: + name: Build + Provenance Bundle + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 35 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Setup Rust + uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - name: Build release-fast artifact + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + host_target="$(rustc -vV | sed -n 's/^host: //p')" + cargo build --profile release-fast --locked --target "$host_target" + cp "target/${host_target}/release-fast/zeroclaw" "artifacts/zeroclaw-${host_target}" + sha256sum "artifacts/zeroclaw-${host_target}" > "artifacts/zeroclaw-${host_target}.sha256" + + - name: Generate provenance statement + shell: bash + run: | + set -euo pipefail + host_target="$(rustc -vV | sed -n 's/^host: //p')" + python3 scripts/ci/generate_provenance.py \ + --artifact "artifacts/zeroclaw-${host_target}" \ + --subject-name "zeroclaw-${host_target}" \ + --output "artifacts/provenance-${host_target}.intoto.json" 
+ + - name: Install cosign + uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0 + + - name: Sign provenance bundle + shell: bash + run: | + set -euo pipefail + host_target="$(rustc -vV | sed -n 's/^host: //p')" + statement="artifacts/provenance-${host_target}.intoto.json" + cosign sign-blob --yes \ + --bundle="${statement}.sigstore.json" \ + --output-signature="${statement}.sig" \ + --output-certificate="${statement}.pem" \ + "${statement}" + + - name: Emit normalized audit event + shell: bash + run: | + set -euo pipefail + host_target="$(rustc -vV | sed -n 's/^host: //p')" + python3 scripts/ci/emit_audit_event.py \ + --event-type supply_chain_provenance \ + --input-json "artifacts/provenance-${host_target}.intoto.json" \ + --output-json "artifacts/audit-event-supply-chain-provenance.json" \ + --artifact-name supply-chain-provenance \ + --retention-days 30 + + - name: Upload provenance artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: supply-chain-provenance + path: artifacts/* + retention-days: 30 + + - name: Publish summary + shell: bash + run: | + set -euo pipefail + host_target="$(rustc -vV | sed -n 's/^host: //p')" + { + echo "### Supply Chain Provenance" + echo "- Target: \`${host_target}\`" + echo "- Artifact: \`artifacts/zeroclaw-${host_target}\`" + echo "- Statement: \`artifacts/provenance-${host_target}.intoto.json\`" + echo "- Signature: \`artifacts/provenance-${host_target}.intoto.json.sig\`" + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/deploy-web.yml b/.github/workflows/deploy-web.yml new file mode 100644 index 0000000..8ad35e6 --- /dev/null +++ b/.github/workflows/deploy-web.yml @@ -0,0 +1,56 @@ +name: Deploy Web to GitHub Pages + +on: + push: + branches: [main] + paths: + - 'web/**' + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + build: + 
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 + + - name: Setup Node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + + - name: Install dependencies + working-directory: ./web + run: npm ci + + - name: Build + working-directory: ./web + run: npm run build + + - name: Setup Pages + uses: actions/configure-pages@1f0c5cde4bc74cd7e1254d0cb4de8d49e9068c7d + + - name: Upload artifact + uses: actions/upload-pages-artifact@56afc609e74202658d3ffba0e8f6dda462b719fa + with: + path: ./web/dist + + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + needs: build + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e diff --git a/.github/workflows/docs-deploy.yml b/.github/workflows/docs-deploy.yml new file mode 100644 index 0000000..a344c7b --- /dev/null +++ b/.github/workflows/docs-deploy.yml @@ -0,0 +1,291 @@ +name: Docs Deploy + +on: + pull_request: + branches: [dev, main] + paths: + - "docs/**" + - "README*.md" + - ".github/workflows/docs-deploy.yml" + - "scripts/ci/docs_quality_gate.sh" + - "scripts/ci/collect_changed_links.py" + - ".github/release/docs-deploy-policy.json" + - "scripts/ci/docs_deploy_guard.py" + push: + branches: [dev, main] + paths: + - "docs/**" + - "README*.md" + - ".github/workflows/docs-deploy.yml" + - "scripts/ci/docs_quality_gate.sh" + - "scripts/ci/collect_changed_links.py" + - ".github/release/docs-deploy-policy.json" + - "scripts/ci/docs_deploy_guard.py" + workflow_dispatch: + inputs: + deploy_target: + description: "preview uploads artifact only; production deploys to Pages" + required: true + default: preview + type: choice + options: + - preview + - production + 
preview_evidence_run_url: + description: "Required for manual production deploys when policy enforces preview promotion evidence" + required: false + default: "" + rollback_ref: + description: "Optional rollback source ref (tag/sha/ref) for manual production dispatch" + required: false + default: "" + +concurrency: + group: docs-deploy-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + docs-quality: + name: Docs Quality Gate + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + outputs: + docs_files: ${{ steps.scope.outputs.docs_files }} + base_sha: ${{ steps.scope.outputs.base_sha }} + deploy_target: ${{ steps.deploy_guard.outputs.deploy_target }} + deploy_mode: ${{ steps.deploy_guard.outputs.deploy_mode }} + source_ref: ${{ steps.deploy_guard.outputs.source_ref }} + production_branch_ref: ${{ steps.deploy_guard.outputs.production_branch_ref }} + ready_to_deploy: ${{ steps.deploy_guard.outputs.ready_to_deploy }} + docs_preview_retention_days: ${{ steps.deploy_guard.outputs.docs_preview_retention_days }} + docs_guard_artifact_retention_days: ${{ steps.deploy_guard.outputs.docs_guard_artifact_retention_days }} + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Resolve docs diff scope + id: scope + shell: bash + run: | + set -euo pipefail + + base_sha="" + docs_files="" + + if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then + base_sha="${{ github.event.pull_request.base.sha }}" + docs_files="$(git diff --name-only "$base_sha" HEAD | awk '/\.md$|\.mdx$|^README/ {print}')" + elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then + base_sha="${{ github.event.before }}" + if [ -n "$base_sha" ] && [ "$base_sha" != "0000000000000000000000000000000000000000" ]; then + docs_files="$(git 
diff --name-only "$base_sha" HEAD | awk '/\.md$|\.mdx$|^README/ {print}')" + fi + else + docs_files="$(git ls-files 'docs/**/*.md' 'README*.md')" + fi + + { + echo "base_sha=${base_sha}" + echo "docs_files<<EOF" + echo "${docs_files}" + echo "EOF" + } >> "$GITHUB_OUTPUT" + + - name: Validate docs deploy contract + id: deploy_guard + shell: bash + env: + INPUT_DEPLOY_TARGET: ${{ github.event.inputs.deploy_target || '' }} + INPUT_PREVIEW_EVIDENCE_RUN_URL: ${{ github.event.inputs.preview_evidence_run_url || '' }} + INPUT_ROLLBACK_REF: ${{ github.event.inputs.rollback_ref || '' }} + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/docs_deploy_guard.py \ + --repo-root "$PWD" \ + --event-name "${GITHUB_EVENT_NAME}" \ + --git-ref "${GITHUB_REF}" \ + --git-sha "${GITHUB_SHA}" \ + --input-deploy-target "${INPUT_DEPLOY_TARGET}" \ + --input-preview-evidence-run-url "${INPUT_PREVIEW_EVIDENCE_RUN_URL}" \ + --input-rollback-ref "${INPUT_ROLLBACK_REF}" \ + --policy-file .github/release/docs-deploy-policy.json \ + --output-json artifacts/docs-deploy-guard.json \ + --output-md artifacts/docs-deploy-guard.md \ + --github-output-file "$GITHUB_OUTPUT" \ + --fail-on-violation + + - name: Emit docs deploy guard audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/docs-deploy-guard.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type docs_deploy_guard \ + --input-json artifacts/docs-deploy-guard.json \ + --output-json artifacts/audit-event-docs-deploy-guard.json \ + --artifact-name docs-deploy-guard \ + --retention-days 21 + fi + + - name: Publish docs deploy guard summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/docs-deploy-guard.md ]; then + cat artifacts/docs-deploy-guard.md >> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload docs deploy guard artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: docs-deploy-guard + path: | + 
artifacts/docs-deploy-guard.json + artifacts/docs-deploy-guard.md + artifacts/audit-event-docs-deploy-guard.json + if-no-files-found: ignore + retention-days: ${{ steps.deploy_guard.outputs.docs_guard_artifact_retention_days || 21 }} + + - name: Markdown quality gate + env: + BASE_SHA: ${{ steps.scope.outputs.base_sha }} + DOCS_FILES: ${{ steps.scope.outputs.docs_files }} + run: ./scripts/ci/docs_quality_gate.sh + + - name: Collect added links + id: links + if: github.event_name != 'workflow_dispatch' + shell: bash + env: + BASE_SHA: ${{ steps.scope.outputs.base_sha }} + DOCS_FILES: ${{ steps.scope.outputs.docs_files }} + run: | + set -euo pipefail + python3 ./scripts/ci/collect_changed_links.py \ + --base "$BASE_SHA" \ + --docs-files "$DOCS_FILES" \ + --output .ci-added-links.txt + count=$(wc -l < .ci-added-links.txt | tr -d ' ') + echo "count=$count" >> "$GITHUB_OUTPUT" + + - name: Link check (added links) + if: github.event_name != 'workflow_dispatch' && steps.links.outputs.count != '0' + uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2 + with: + fail: true + args: >- + --offline + --no-progress + --format detailed + .ci-added-links.txt + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Skip link check (none added) + if: github.event_name != 'workflow_dispatch' && steps.links.outputs.count == '0' + run: echo "No added links detected in changed docs lines." + + docs-preview: + name: Docs Preview Artifact + needs: [docs-quality] + if: github.event_name == 'pull_request' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_target == 'preview') + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Build preview bundle + shell: bash + run: | + set -euo pipefail + rm -rf site + mkdir -p site/docs + cp -R docs/. 
site/docs/ + cp README.md site/README.md + cat > site/index.md <<'EOF' + # ZeroClaw Docs Preview + + This preview bundle is produced by `.github/workflows/docs-deploy.yml`. + + - [Repository README](./README.md) + - [Docs Home](./docs/README.md) + EOF + + - name: Upload preview artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: docs-preview + path: site/** + if-no-files-found: error + retention-days: ${{ needs.docs-quality.outputs.docs_preview_retention_days || 14 }} + + docs-deploy: + name: Deploy Docs to GitHub Pages + needs: [docs-quality] + if: needs.docs-quality.outputs.deploy_target == 'production' && needs.docs-quality.outputs.ready_to_deploy == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + permissions: + contents: read + pages: write + id-token: write + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.docs-quality.outputs.source_ref }} + + - name: Build deploy bundle + shell: bash + run: | + set -euo pipefail + rm -rf site + mkdir -p site/docs + cp -R docs/. site/docs/ + cp README.md site/README.md + cat > site/index.md <<'EOF' + # ZeroClaw Documentation + + This site is deployed automatically from `main` by `.github/workflows/docs-deploy.yml`. 
+ + - [Repository README](./README.md) + - [Docs Home](./docs/README.md) + EOF + + - name: Publish deploy source summary + shell: bash + run: | + { + echo "## Docs Deploy Source" + echo "- Deploy mode: \`${{ needs.docs-quality.outputs.deploy_mode }}\`" + echo "- Source ref: \`${{ needs.docs-quality.outputs.source_ref }}\`" + echo "- Production branch ref: \`${{ needs.docs-quality.outputs.production_branch_ref }}\`" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Setup Pages + uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5 + + - name: Upload Pages artifact + uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4 + with: + path: site + + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4 diff --git a/.github/workflows/feature-matrix.yml b/.github/workflows/feature-matrix.yml new file mode 100644 index 0000000..8002724 --- /dev/null +++ b/.github/workflows/feature-matrix.yml @@ -0,0 +1,382 @@ +name: Feature Matrix + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "tests/**" + - "scripts/ci/nightly_matrix_report.py" + - ".github/release/nightly-owner-routing.json" + - ".github/workflows/feature-matrix.yml" + pull_request: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "tests/**" + - "scripts/ci/nightly_matrix_report.py" + - ".github/release/nightly-owner-routing.json" + - ".github/workflows/feature-matrix.yml" + merge_group: + branches: [dev, main] + schedule: + - cron: "30 4 * * 1" # Weekly Monday 04:30 UTC + - cron: "15 3 * * *" # Daily 03:15 UTC (nightly profile) + workflow_dispatch: + inputs: + profile: + description: "compile = merge-gate matrix, nightly = integration-oriented lane commands" + required: true + default: compile + type: choice + options: + - compile + - nightly + fail_on_failure: + description: "Fail summary job 
when any lane fails" + required: true + default: true + type: boolean + +concurrency: + group: feature-matrix-${{ github.event.pull_request.number || github.ref || github.run_id }}-${{ github.event.inputs.profile || 'auto' }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + resolve-profile: + name: Resolve Matrix Profile + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + outputs: + profile: ${{ steps.resolve.outputs.profile }} + lane_job_prefix: ${{ steps.resolve.outputs.lane_job_prefix }} + summary_job_name: ${{ steps.resolve.outputs.summary_job_name }} + lane_retention_days: ${{ steps.resolve.outputs.lane_retention_days }} + lane_timeout_minutes: ${{ steps.resolve.outputs.lane_timeout_minutes }} + max_attempts: ${{ steps.resolve.outputs.max_attempts }} + summary_artifact_name: ${{ steps.resolve.outputs.summary_artifact_name }} + summary_json_name: ${{ steps.resolve.outputs.summary_json_name }} + summary_md_name: ${{ steps.resolve.outputs.summary_md_name }} + lane_artifact_prefix: ${{ steps.resolve.outputs.lane_artifact_prefix }} + fail_on_failure: ${{ steps.resolve.outputs.fail_on_failure }} + collect_history: ${{ steps.resolve.outputs.collect_history }} + steps: + - name: Resolve effective profile + id: resolve + shell: bash + run: | + set -euo pipefail + + profile="compile" + fail_on_failure="true" + lane_job_prefix="Matrix Lane" + summary_job_name="Feature Matrix Summary" + lane_retention_days="21" + lane_timeout_minutes="55" + max_attempts="1" + summary_artifact_name="feature-matrix-summary" + summary_json_name="feature-matrix-summary.json" + summary_md_name="feature-matrix-summary.md" + lane_artifact_prefix="feature-matrix" + collect_history="false" + + if [ "${GITHUB_EVENT_NAME}" = "schedule" ] && [ "${{ github.event.schedule }}" = "15 3 * * *" ]; then + profile="nightly" + 
elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + profile="${{ github.event.inputs.profile || 'compile' }}" + fail_on_failure="${{ github.event.inputs.fail_on_failure || 'true' }}" + fi + + if [ "$profile" = "nightly" ]; then + lane_job_prefix="Nightly Lane" + summary_job_name="Nightly Summary & Routing" + lane_retention_days="30" + lane_timeout_minutes="70" + max_attempts="2" + summary_artifact_name="nightly-all-features-summary" + summary_json_name="nightly-summary.json" + summary_md_name="nightly-summary.md" + lane_artifact_prefix="nightly-lane" + collect_history="true" + fi + + { + echo "profile=${profile}" + echo "lane_job_prefix=${lane_job_prefix}" + echo "summary_job_name=${summary_job_name}" + echo "lane_retention_days=${lane_retention_days}" + echo "lane_timeout_minutes=${lane_timeout_minutes}" + echo "max_attempts=${max_attempts}" + echo "summary_artifact_name=${summary_artifact_name}" + echo "summary_json_name=${summary_json_name}" + echo "summary_md_name=${summary_md_name}" + echo "lane_artifact_prefix=${lane_artifact_prefix}" + echo "fail_on_failure=${fail_on_failure}" + echo "collect_history=${collect_history}" + } >> "$GITHUB_OUTPUT" + + feature-check: + name: ${{ needs.resolve-profile.outputs.lane_job_prefix }} (${{ matrix.name }}) + needs: [resolve-profile] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: ${{ fromJSON(needs.resolve-profile.outputs.lane_timeout_minutes) }} + strategy: + fail-fast: false + matrix: + include: + - name: default + compile_command: cargo check --locked + nightly_command: cargo test --locked --test agent_e2e --verbose + install_libudev: false + - name: whatsapp-web + compile_command: cargo check --locked --no-default-features --features whatsapp-web + nightly_command: cargo check --locked --no-default-features --features whatsapp-web --verbose + install_libudev: false + - name: browser-native + compile_command: cargo check --locked --no-default-features 
--features browser-native + nightly_command: cargo check --locked --no-default-features --features browser-native --verbose + install_libudev: false + - name: nightly-all-features + compile_command: cargo check --locked --all-features + nightly_command: cargo test --locked --all-features --test agent_e2e --verbose + install_libudev: true + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: feature-matrix-${{ matrix.name }} + + - name: Ensure Linux deps for all-features lane + if: matrix.install_libudev + shell: bash + run: | + set -euo pipefail + + if command -v pkg-config >/dev/null 2>&1 && pkg-config --exists libudev; then + echo "libudev development headers already available; skipping apt install." + exit 0 + fi + + echo "Installing missing libudev build dependencies..." + for attempt in 1 2 3; do + if sudo apt-get update -qq -o DPkg::Lock::Timeout=300 && \ + sudo apt-get install -y --no-install-recommends --no-upgrade -o DPkg::Lock::Timeout=300 libudev-dev pkg-config; then + echo "Dependency installation succeeded on attempt ${attempt}." + exit 0 + fi + if [ "$attempt" -eq 3 ]; then + echo "Failed to install libudev-dev/pkg-config after ${attempt} attempts." >&2 + exit 1 + fi + echo "Dependency installation failed on attempt ${attempt}; retrying in 10s..." 
+ sleep 10 + done + + - name: Run matrix lane command + id: lane + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + + profile="${{ needs.resolve-profile.outputs.profile }}" + lane_command="${{ matrix.compile_command }}" + if [ "$profile" = "nightly" ]; then + lane_command="${{ matrix.nightly_command }}" + fi + + max_attempts="${{ needs.resolve-profile.outputs.max_attempts }}" + attempt=1 + status=1 + + started_at="$(date +%s)" + while [ "$attempt" -le "$max_attempts" ]; do + echo "Running lane command (attempt ${attempt}/${max_attempts}): ${lane_command}" + set +e + bash -lc "${lane_command}" + status=$? + set -e + if [ "$status" -eq 0 ]; then + break + fi + if [ "$attempt" -lt "$max_attempts" ]; then + sleep 5 + fi + attempt="$((attempt + 1))" + done + finished_at="$(date +%s)" + duration="$((finished_at - started_at))" + + lane_status="success" + if [ "$status" -ne 0 ]; then + lane_status="failure" + fi + + cat > "artifacts/nightly-result-${{ matrix.name }}.json" <> "$GITHUB_STEP_SUMMARY" + + echo "lane_status=${lane_status}" >> "$GITHUB_OUTPUT" + echo "lane_exit_code=${status}" >> "$GITHUB_OUTPUT" + + - name: Upload lane report + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ${{ needs.resolve-profile.outputs.lane_artifact_prefix }}-${{ matrix.name }} + path: artifacts/nightly-result-${{ matrix.name }}.json + if-no-files-found: error + retention-days: ${{ fromJSON(needs.resolve-profile.outputs.lane_retention_days) }} + + - name: Enforce lane success + if: steps.lane.outputs.lane_status != 'success' + shell: bash + run: | + set -euo pipefail + code="${{ steps.lane.outputs.lane_exit_code }}" + if [[ "$code" =~ ^[0-9]+$ ]]; then + # shellcheck disable=SC2242 + exit "$code" + fi + echo "Invalid lane exit code: $code" >&2 + exit 1 + + summary: + name: ${{ needs.resolve-profile.outputs.summary_job_name }} + needs: [resolve-profile, feature-check] + if: always() + runs-on: [self-hosted, 
Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Download lane reports + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: artifacts + + - name: Collect recent nightly history + if: needs.resolve-profile.outputs.collect_history == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const fs = require("fs"); + const path = require("path"); + + const workflowId = "feature-matrix.yml"; + const owner = context.repo.owner; + const repo = context.repo.repo; + + const events = ["schedule", "workflow_dispatch"]; + let runs = []; + for (const event of events) { + const resp = await github.rest.actions.listWorkflowRuns({ + owner, + repo, + workflow_id: workflowId, + branch: "dev", + event, + per_page: 20, + }); + runs = runs.concat(resp.data.workflow_runs || []); + } + + const currentRunId = context.runId; + runs = runs + .filter((run) => run.id !== currentRunId && run.status === "completed") + .sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()) + .slice(0, 3) + .map((run) => ({ + run_id: run.id, + url: run.html_url, + event: run.event, + conclusion: run.conclusion || "unknown", + created_at: run.created_at, + head_sha: run.head_sha, + display_title: run.display_title || "", + })); + + fs.mkdirSync("artifacts", { recursive: true }); + fs.writeFileSync( + path.join("artifacts", "nightly-history.json"), + `${JSON.stringify(runs, null, 2)}\n`, + { encoding: "utf8" } + ); + + - name: Aggregate matrix summary + shell: bash + run: | + set -euo pipefail + args=( + --input-dir artifacts + --owners-file .github/release/nightly-owner-routing.json + --output-json "artifacts/${{ needs.resolve-profile.outputs.summary_json_name }}" + --output-md "artifacts/${{ needs.resolve-profile.outputs.summary_md_name }}" + ) + + if [ "${{ 
needs.resolve-profile.outputs.collect_history }}" = "true" ] && [ -f artifacts/nightly-history.json ]; then + args+=(--history-file artifacts/nightly-history.json) + fi + + if [ "${{ needs.resolve-profile.outputs.fail_on_failure }}" = "true" ]; then + args+=(--fail-on-failure) + fi + + python3 scripts/ci/nightly_matrix_report.py "${args[@]}" + + - name: Publish summary + shell: bash + run: | + set -euo pipefail + cat "artifacts/${{ needs.resolve-profile.outputs.summary_md_name }}" >> "$GITHUB_STEP_SUMMARY" + + - name: Upload summary artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ${{ needs.resolve-profile.outputs.summary_artifact_name }} + path: | + artifacts/${{ needs.resolve-profile.outputs.summary_json_name }} + artifacts/${{ needs.resolve-profile.outputs.summary_md_name }} + artifacts/nightly-history.json + if-no-files-found: error + retention-days: ${{ fromJSON(needs.resolve-profile.outputs.lane_retention_days) }} diff --git a/.github/workflows/main-branch-flow.md b/.github/workflows/main-branch-flow.md new file mode 100644 index 0000000..07cb147 --- /dev/null +++ b/.github/workflows/main-branch-flow.md @@ -0,0 +1,266 @@ +# Main Branch Delivery Flows + +This document explains what runs when code is proposed to `dev`/`main`, merged to `main`, and released. 
+ +Use this with: + +- [`docs/ci-map.md`](../../docs/ci-map.md) +- [`docs/pr-workflow.md`](../../docs/pr-workflow.md) +- [`docs/release-process.md`](../../docs/release-process.md) + +## Event Summary + +| Event | Main workflows | +| --- | --- | +| PR activity (`pull_request_target`) | `pr-intake-checks.yml`, `pr-labeler.yml`, `pr-auto-response.yml` | +| PR activity (`pull_request`) | `ci-run.yml`, `sec-audit.yml`, plus path-scoped workflows | +| Push to `dev`/`main` | `ci-run.yml`, `sec-audit.yml`, plus path-scoped workflows | +| Tag push (`v*`) | `pub-release.yml` publish mode, `pub-docker-img.yml` publish job | +| Scheduled/manual | `pub-release.yml` verification mode, `sec-codeql.yml`, `feature-matrix.yml`, `test-fuzz.yml`, `pr-check-stale.yml`, `pr-check-status.yml`, `sync-contributors.yml`, `test-benchmarks.yml`, `test-e2e.yml` | + +## Runtime and Docker Matrix + +Observed averages below are from recent completed runs (sampled from GitHub Actions on February 17, 2026). Values are directional, not SLA. + +| Workflow | Typical trigger in main flow | Avg runtime | Docker build? | Docker run? | Docker push? 
| +| --- | --- | ---:| --- | --- | --- | +| `pr-intake-checks.yml` | PR open/update (`pull_request_target`) | 14.5s | No | No | No | +| `pr-labeler.yml` | PR open/update (`pull_request_target`) | 53.7s | No | No | No | +| `pr-auto-response.yml` | PR/issue automation | 24.3s | No | No | No | +| `ci-run.yml` | PR + push to `dev`/`main` | 74.7s | No | No | No | +| `sec-audit.yml` | PR + push to `dev`/`main` | 127.2s | No | No | No | +| `workflow-sanity.yml` | Workflow-file changes | 34.2s | No | No | No | +| `pr-label-policy-check.yml` | Label policy/automation changes | 14.7s | No | No | No | +| `pub-docker-img.yml` (`pull_request`) | Docker build-input PR changes | 240.4s | Yes | Yes | No | +| `pub-docker-img.yml` (`push`) | tag push `v*` | 139.9s | Yes | No | Yes | +| `pub-release.yml` | Tag push `v*` (publish) + manual/scheduled verification (no publish) | N/A in recent sample | No | No | No | + +Notes: + +1. `pub-docker-img.yml` is the only workflow in the main PR/push path that builds Docker images. +2. Container runtime verification (`docker run`) occurs in PR smoke only. +3. Container registry push occurs on tag pushes (`v*`) only. +4. `ci-run.yml` "Build (Smoke)" builds Rust binaries, not Docker images. + +## Step-By-Step + +### 1) PR from branch in this repository -> `dev` + +1. Contributor opens or updates PR against `dev`. +2. `pull_request_target` automation runs (typical runtime): + - `pr-intake-checks.yml` posts intake warnings/errors. + - `pr-labeler.yml` sets size/risk/scope labels. + - `pr-auto-response.yml` runs first-interaction and label routes. +3. 
`pull_request` CI workflows start: + - `ci-run.yml` + - `feature-matrix.yml` (Rust/workflow path scope) + - `sec-audit.yml` + - `sec-codeql.yml` (if Rust/codeql paths changed) + - path-scoped workflows if matching files changed: + - `pub-docker-img.yml` (Docker build-input paths only) + - `docs-deploy.yml` (docs + README markdown paths; deploy contract guard enforces promotion + rollback ref policy) + - `workflow-sanity.yml` (workflow files only) + - `pr-label-policy-check.yml` (label-policy files only) + - `ci-change-audit.yml` (CI/security path changes) + - `ci-provider-connectivity.yml` (probe config/script/workflow changes) + - `ci-reproducible-build.yml` (Rust/build reproducibility paths) +4. In `ci-run.yml`, `changes` computes: + - `docs_only` + - `docs_changed` + - `rust_changed` + - `workflow_changed` +5. `build` runs for Rust-impacting changes. +6. On PRs, full lint/test/docs checks run when PR has label `ci:full`: + - `lint` + - `lint-strict-delta` + - `test` + - `flake-probe` (single-retry telemetry; optional block via `CI_BLOCK_ON_FLAKE_SUSPECTED`) + - `docs-quality` +7. If `.github/workflows/**` changed, `workflow-owner-approval` must pass. +8. If root license files (`LICENSE-APACHE`, `LICENSE-MIT`) changed, `license-file-owner-guard` allows only PR author `willsarg`. +9. `lint-feedback` posts actionable comment if lint/docs gates fail. +10. `CI Required Gate` aggregates results to final pass/fail. +11. Maintainer merges PR once checks and review policy are satisfied. +12. Merge emits a `push` event on `dev` (see scenario 4). + +### 2) PR from fork -> `dev` + +1. External contributor opens PR from `fork/` into `zeroclaw:dev`. +2. 
Immediately on `opened`: + - `pull_request_target` workflows start with base-repo context and base-repo token: + - `pr-intake-checks.yml` + - `pr-labeler.yml` + - `pr-auto-response.yml` + - `pull_request` workflows are queued for the fork head commit: + - `ci-run.yml` + - `sec-audit.yml` + - path-scoped workflows (`pub-docker-img.yml`, `workflow-sanity.yml`, `pr-label-policy-check.yml`) if changed files match. +3. Fork-specific permission behavior in `pull_request` workflows: + - token is restricted (read-focused), so jobs that try to write PR comments/status extras can be limited. + - secrets from the base repo are not exposed to fork PR `pull_request` jobs. +4. Approval gate possibility: + - if Actions settings require maintainer approval for fork workflows, the `pull_request` run stays in `action_required`/waiting state until approved. +5. Event fan-out after labeling: + - `pr-labeler.yml` and manual label changes emit `labeled`/`unlabeled` events. + - those events retrigger `pull_request_target` automation (`pr-labeler.yml` and `pr-auto-response.yml`), creating extra run volume/noise. +6. When contributor pushes new commits to fork branch (`synchronize`): + - reruns: `pr-intake-checks.yml`, `pr-labeler.yml`, `ci-run.yml`, `sec-audit.yml`, and matching path-scoped PR workflows. + - does not rerun `pr-auto-response.yml` unless label/open events occur. +7. `ci-run.yml` execution details for fork PR: + - `changes` computes `docs_only`, `docs_changed`, `rust_changed`, `workflow_changed`. + - `build` runs for Rust-impacting changes. + - `lint`/`lint-strict-delta`/`test`/`docs-quality` run on PR when `ci:full` label exists. + - `workflow-owner-approval` runs when `.github/workflows/**` changed. + - `CI Required Gate` emits final pass/fail for the PR head. +8. Fork PR merge blockers to check first when diagnosing stalls: + - run approval pending for fork workflows. + - `workflow-owner-approval` failing on workflow-file changes. 
+ - `license-file-owner-guard` failing when root license files are modified by non-owner PR author. + - `CI Required Gate` failure caused by upstream jobs. + - repeated `pull_request_target` reruns from label churn causing noisy signals. +9. After merge, normal `push` workflows on `dev` execute (scenario 4). + +### 3) PR to `main` (direct or from `dev`) + +1. Contributor or maintainer opens PR with base `main`. +2. `ci-run.yml` and `sec-audit.yml` run on the PR, plus any path-scoped workflows. +3. Maintainer merges PR once checks and review policy pass. +4. Merge emits a `push` event on `main`. + +### 4) Push/Merge Queue to `dev` or `main` (including after merge) + +1. Commit reaches `dev` or `main` (usually from a merged PR), or merge queue creates a `merge_group` validation commit. +2. `ci-run.yml` runs on `push` and `merge_group`. +3. `feature-matrix.yml` runs on `push` for Rust/workflow paths and on `merge_group`. +4. `sec-audit.yml` runs on `push` and `merge_group`. +5. `sec-codeql.yml` runs on `push`/`merge_group` when Rust/codeql paths change (path-scoped on push). +6. `ci-supply-chain-provenance.yml` runs on push when Rust/build provenance paths change. +7. Path-filtered workflows run only if touched files match their filters. +8. In `ci-run.yml`, push/merge-group behavior differs from PR behavior: + - Rust path: `lint`, `lint-strict-delta`, `test`, `build` are expected. + - Docs/non-rust paths: fast-path behavior applies. +9. `CI Required Gate` computes overall push/merge-group result. + +## Docker Publish Logic + +Workflow: `.github/workflows/pub-docker-img.yml` + +### PR behavior + +1. Triggered on `pull_request` to `dev` or `main` when Docker build-input paths change. +2. Runs `PR Docker Smoke` job: + - Builds local smoke image with Blacksmith builder. + - Verifies container with `docker run ... --version`. +3. Typical runtime in recent sample: ~240.4s. +4. No registry push happens on PR events. + +### Push behavior + +1. 
`publish` job runs on tag pushes `v*` only. +2. Workflow trigger includes semantic version tag pushes (`v*`) only. +3. Login to `ghcr.io` uses `${{ github.actor }}` and `${{ secrets.GITHUB_TOKEN }}`. +4. Tag computation includes semantic tag from pushed git tag (`vX.Y.Z`) + SHA tag (`sha-<12>`) + `latest`. +5. Multi-platform publish is used for tag pushes (`linux/amd64,linux/arm64`). +6. `scripts/ci/ghcr_publish_contract_guard.py` validates anonymous pullability and digest parity across `vX.Y.Z`, `sha-<12>`, and `latest`, then emits rollback candidate mapping evidence. +7. Trivy scans are emitted for version, SHA, and latest references. +8. `scripts/ci/ghcr_vulnerability_gate.py` validates Trivy JSON outputs against `.github/release/ghcr-vulnerability-policy.json` and emits audit-event evidence. +9. Typical runtime in recent sample: ~139.9s. +10. Result: pushed image tags under `ghcr.io//` with publish-contract + vulnerability-gate + scan artifacts. + +Important: Docker publish now requires a `v*` tag push; regular `dev`/`main` branch pushes do not publish images. + +## Release Logic + +Workflow: `.github/workflows/pub-release.yml` + +1. Trigger modes: + - Tag push `v*` -> publish mode. + - Manual dispatch -> verification-only or publish mode (input-driven). + - Weekly schedule -> verification-only mode. +2. `prepare` resolves release context (`release_ref`, `release_tag`, publish/draft mode) and runs `scripts/ci/release_trigger_guard.py`. + - publish mode enforces actor authorization, stable annotated tag policy, `origin/main` ancestry, and `release_tag` == `Cargo.toml` version at the tag commit. + - trigger provenance is emitted as `release-trigger-guard` artifacts. +3. `build-release` builds matrix artifacts across Linux/macOS/Windows targets. +4. 
`verify-artifacts` runs `scripts/ci/release_artifact_guard.py` against `.github/release/release-artifact-contract.json` in verify-stage mode (archive contract required; manifest/SBOM/notice checks intentionally skipped) and uploads `release-artifact-guard-verify` evidence. +5. In publish mode, workflow generates SBOM (`CycloneDX` + `SPDX`), `SHA256SUMS`, and a checksum provenance statement (`zeroclaw.sha256sums.intoto.json`) plus audit-event envelope. +6. In publish mode, after manifest generation, workflow reruns `release_artifact_guard.py` in full-contract mode and emits `release-artifact-guard.publish.json` plus `audit-event-release-artifact-guard-publish.json`. +7. In publish mode, workflow keyless-signs release artifacts and composes a supply-chain release-notes preface via `release_notes_with_supply_chain_refs.py`. +8. In publish mode, workflow verifies GHCR release-tag availability. +9. In publish mode, workflow creates/updates the GitHub Release for the resolved tag and commit-ish, combining generated supply-chain preface with GitHub auto-generated commit notes. + +Pre-release path: + +1. Pre-release tags (`vX.Y.Z-alpha.N`, `vX.Y.Z-beta.N`, `vX.Y.Z-rc.N`) trigger `.github/workflows/pub-prerelease.yml`. +2. `scripts/ci/prerelease_guard.py` enforces stage progression, `origin/main` ancestry, and Cargo version/tag alignment. +3. In publish mode, prerelease assets are attached to a GitHub prerelease for the stage tag. + +Canary policy lane: + +1. `.github/workflows/ci-canary-gate.yml` runs weekly or manually. +2. `scripts/ci/canary_guard.py` evaluates metrics against `.github/release/canary-policy.json`. +3. Decision output is explicit (`promote`, `hold`, `abort`) with auditable artifacts and optional dispatch signal. + +## Merge/Policy Notes + +1. Workflow-file changes (`.github/workflows/**`) activate owner-approval gate in `ci-run.yml`. +2. PR lint/test strictness is intentionally controlled by `ci:full` label. +3. 
`pr-intake-checks.yml` now blocks PRs missing a Linear issue key (`RMN-*`, `CDV-*`, `COM-*`) to keep execution mapped to Linear. +4. `sec-audit.yml` runs on PR/push/merge queue (`merge_group`), plus scheduled weekly. +5. `ci-change-audit.yml` enforces pinned `uses:` references for CI/security workflow changes. +6. `sec-audit.yml` includes deny policy hygiene checks (`deny_policy_guard.py`) before cargo-deny. +7. `sec-audit.yml` includes gitleaks allowlist governance checks (`secrets_governance_guard.py`) against `.github/security/gitleaks-allowlist-governance.json`. +8. `ci-reproducible-build.yml` and `ci-supply-chain-provenance.yml` provide scheduled supply-chain assurance signals outside release-only windows. +9. Some workflows are operational and non-merge-path (`pr-check-stale`, `pr-check-status`, `sync-contributors`, etc.). +10. Workflow-specific JavaScript helpers are organized under `.github/workflows/scripts/`. +11. `ci-run.yml` includes cache partitioning (`prefix-key`) across lint/test/build/flake-probe lanes to reduce cache contention. +12. `ci-rollback.yml` provides a guarded rollback planning lane (scheduled dry-run + manual execute controls) with audit artifacts. 
+ +## Mermaid Diagrams + +### PR to Dev + +```mermaid +flowchart TD + A["PR opened or updated -> dev"] --> B["pull_request_target lane"] + B --> B1["pr-intake-checks.yml"] + B --> B2["pr-labeler.yml"] + B --> B3["pr-auto-response.yml"] + A --> C["pull_request CI lane"] + C --> C1["ci-run.yml"] + C --> C2["sec-audit.yml"] + C --> C3["pub-docker-img.yml (if Docker paths changed)"] + C --> C4["workflow-sanity.yml (if workflow files changed)"] + C --> C5["pr-label-policy-check.yml (if policy files changed)"] + C1 --> D["CI Required Gate"] + D --> E{"Checks + review policy pass?"} + E -->|No| F["PR stays open"] + E -->|Yes| G["Merge PR"] + G --> H["push event on dev"] +``` + +### Main Delivery and Release + +```mermaid +flowchart TD + D0["Commit reaches dev"] --> B0["ci-run.yml"] + D0 --> C0["sec-audit.yml"] + PRM["PR to main"] --> QM["ci-run.yml + sec-audit.yml (+ path-scoped)"] + QM --> M["Merge to main"] + M --> A["Commit reaches main"] + A --> B["ci-run.yml"] + A --> C["sec-audit.yml"] + A --> D["path-scoped workflows (if matched)"] + T["Tag push v*"] --> R["pub-release.yml"] + W["Manual/Scheduled release verify"] --> R + T --> DP["pub-docker-img.yml publish job"] + R --> R1["Artifacts + SBOM + checksums + signatures + GitHub Release"] + W --> R2["Verification build only (no GitHub Release publish)"] + DP --> P1["Push ghcr image tags (version + sha + latest)"] +``` + +## Quick Troubleshooting + +1. Unexpected skipped jobs: inspect `scripts/ci/detect_change_scope.sh` outputs. +2. Workflow-change PR blocked: verify `WORKFLOW_OWNER_LOGINS` and approvals. +3. Fork PR appears stalled: check whether Actions run approval is pending. +4. Docker not published: confirm a `v*` tag was pushed to the intended commit. 
diff --git a/.github/workflows/nightly-all-features.yml b/.github/workflows/nightly-all-features.yml new file mode 100644 index 0000000..caee4a2 --- /dev/null +++ b/.github/workflows/nightly-all-features.yml @@ -0,0 +1,187 @@ +name: Nightly All-Features + +on: + schedule: + - cron: "15 3 * * *" # Daily 03:15 UTC + workflow_dispatch: + inputs: + fail_on_failure: + description: "Fail workflow when any nightly lane fails" + required: true + default: true + type: boolean + +concurrency: + group: nightly-all-features-${{ github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + nightly-lanes: + name: Nightly Lane (${{ matrix.name }}) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 70 + strategy: + fail-fast: false + matrix: + include: + - name: default + command: cargo test --locked --test agent_e2e --verbose + install_libudev: false + - name: whatsapp-web + command: cargo check --locked --no-default-features --features whatsapp-web --verbose + install_libudev: false + - name: browser-native + command: cargo check --locked --no-default-features --features browser-native --verbose + install_libudev: false + - name: nightly-all-features + command: cargo test --locked --all-features --test agent_e2e --verbose + install_libudev: true + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: nightly-all-features-${{ matrix.name }} + + - name: Ensure Linux deps for all-features lane + if: matrix.install_libudev + shell: bash + run: | + set -euo pipefail + + if command -v pkg-config 
>/dev/null 2>&1 && pkg-config --exists libudev; then + echo "libudev development headers already available; skipping apt install." + exit 0 + fi + + echo "Installing missing libudev build dependencies..." + for attempt in 1 2 3; do + if sudo apt-get update -qq -o DPkg::Lock::Timeout=300 && \ + sudo apt-get install -y --no-install-recommends --no-upgrade -o DPkg::Lock::Timeout=300 libudev-dev pkg-config; then + echo "Dependency installation succeeded on attempt ${attempt}." + exit 0 + fi + if [ "$attempt" -eq 3 ]; then + echo "Failed to install libudev-dev/pkg-config after ${attempt} attempts." >&2 + exit 1 + fi + echo "Dependency installation failed on attempt ${attempt}; retrying in 10s..." + sleep 10 + done + + - name: Run nightly lane command + id: lane + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + started_at="$(date +%s)" + set +e + bash -lc "${{ matrix.command }}" + status=$? + set -e + finished_at="$(date +%s)" + duration="$((finished_at - started_at))" + + lane_status="success" + if [ "$status" -ne 0 ]; then + lane_status="failure" + fi + + cat > "artifacts/nightly-result-${{ matrix.name }}.json" <> "$GITHUB_STEP_SUMMARY" + + echo "lane_status=${lane_status}" >> "$GITHUB_OUTPUT" + echo "lane_exit_code=${status}" >> "$GITHUB_OUTPUT" + + - name: Upload nightly lane artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: nightly-lane-${{ matrix.name }} + path: artifacts/nightly-result-${{ matrix.name }}.json + if-no-files-found: error + retention-days: 30 + + nightly-summary: + name: Nightly Summary & Routing + needs: [nightly-lanes] + if: always() + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Download nightly artifacts + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: artifacts + + - name: 
Aggregate nightly report + shell: bash + env: + FAIL_ON_FAILURE_INPUT: ${{ github.event.inputs.fail_on_failure || 'true' }} + run: | + set -euo pipefail + fail_on_failure="true" + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + fail_on_failure="${FAIL_ON_FAILURE_INPUT}" + fi + + args=() + if [ "$fail_on_failure" = "true" ]; then + args+=(--fail-on-failure) + fi + + python3 scripts/ci/nightly_matrix_report.py \ + --input-dir artifacts \ + --owners-file .github/release/nightly-owner-routing.json \ + --output-json artifacts/nightly-summary.json \ + --output-md artifacts/nightly-summary.md \ + "${args[@]}" + + - name: Publish nightly summary + shell: bash + run: | + set -euo pipefail + cat artifacts/nightly-summary.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload nightly summary artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: nightly-all-features-summary + path: | + artifacts/nightly-summary.json + artifacts/nightly-summary.md + if-no-files-found: error + retention-days: 30 diff --git a/.github/workflows/pages-deploy.yml b/.github/workflows/pages-deploy.yml new file mode 100644 index 0000000..34fca0b --- /dev/null +++ b/.github/workflows/pages-deploy.yml @@ -0,0 +1,64 @@ +name: Deploy GitHub Pages + +on: + push: + branches: + - main + paths: + - site/** + - docs/** + - README.md + - .github/workflows/pages-deploy.yml + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: github-pages + cancel-in-progress: true + +jobs: + build: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + cache: npm + cache-dependency-path: site/package-lock.json + + - name: Install Dependencies + working-directory: site + run: npm ci + + - name: Build Site + working-directory: 
site + run: npm run build + + - name: Configure Pages + uses: actions/configure-pages@v5 + + - name: Upload Artifact + uses: actions/upload-pages-artifact@v3 + with: + path: gh-pages + + deploy: + needs: build + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/pr-auto-response.yml b/.github/workflows/pr-auto-response.yml new file mode 100644 index 0000000..9cf1a7c --- /dev/null +++ b/.github/workflows/pr-auto-response.yml @@ -0,0 +1,89 @@ +name: PR Auto Responder + +on: + issues: + types: [opened, reopened, labeled, unlabeled] + pull_request_target: + branches: [dev, main] + types: [opened, labeled, unlabeled] + +permissions: {} + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + LABEL_POLICY_PATH: .github/label-policy.json + +jobs: + contributor-tier-issues: + if: >- + (github.event_name == 'issues' && + (github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'labeled' || github.event.action == 'unlabeled')) || + (github.event_name == 'pull_request_target' && + (github.event.action == 'labeled' || github.event.action == 'unlabeled')) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + issues: write + pull-requests: write + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Apply contributor tier label for issue author + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + LABEL_POLICY_PATH: .github/label-policy.json + with: + script: | + const script = require('./.github/workflows/scripts/pr_auto_response_contributor_tier.js'); + await script({ github, context, core }); + 
first-interaction: + if: github.event.action == 'opened' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + issues: write + pull-requests: write + steps: + - name: Greet first-time contributors + uses: actions/first-interaction@a1db7729b356323c7988c20ed6f0d33fe31297be # v1 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + issue_message: | + Thanks for opening this issue. + + Before maintainers triage it, please confirm: + - Repro steps are complete and run on latest `main` + - Environment details are included (OS, Rust version, ZeroClaw version) + - Sensitive values are redacted + + This helps us keep issue throughput high and response latency low. + pr_message: | + Thanks for contributing to ZeroClaw. + + For faster review, please ensure: + - PR template sections are fully completed + - `cargo fmt --all -- --check`, `cargo clippy --all-targets -- -D warnings`, and `cargo test` are included + - If automation/agents were used heavily, add brief workflow notes + - Scope is focused (prefer one concern per PR) + + See `CONTRIBUTING.md` and `docs/pr-workflow.md` for full collaboration rules. 
+ + labeled-routes: + if: github.event.action == 'labeled' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + issues: write + pull-requests: write + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Handle label-driven responses + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/pr_auto_response_labeled_routes.js'); + await script({ github, context, core }); diff --git a/.github/workflows/pr-check-stale.yml b/.github/workflows/pr-check-stale.yml new file mode 100644 index 0000000..7c29077 --- /dev/null +++ b/.github/workflows/pr-check-stale.yml @@ -0,0 +1,49 @@ +name: PR Check Stale + +on: + schedule: + - cron: "20 2 * * *" + workflow_dispatch: + +permissions: {} + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + +jobs: + stale: + permissions: + issues: write + pull-requests: write + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Mark stale issues and pull requests + uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + days-before-issue-stale: 21 + days-before-issue-close: 7 + days-before-pr-stale: 14 + days-before-pr-close: 7 + stale-issue-label: stale + stale-pr-label: stale + exempt-issue-labels: security,pinned,no-stale,no-pr-hygiene,maintainer + exempt-pr-labels: no-stale,no-pr-hygiene,maintainer + remove-stale-when-updated: true + exempt-all-assignees: true + operations-per-run: 300 + stale-issue-message: | + This issue was automatically marked as stale due to inactivity. + Please provide an update, reproduction details, or current status to keep it open. + close-issue-message: | + Closing this issue due to inactivity. 
+ If the problem still exists on the latest `main`, please open a new issue with fresh repro steps. + close-issue-reason: not_planned + stale-pr-message: | + This PR was automatically marked as stale due to inactivity. + Please rebase/update and post the latest validation results. + close-pr-message: | + Closing this PR due to inactivity. + Maintainers can reopen once the branch is updated and validation is provided. diff --git a/.github/workflows/pr-check-status.yml b/.github/workflows/pr-check-status.yml new file mode 100644 index 0000000..5fcdab2 --- /dev/null +++ b/.github/workflows/pr-check-status.yml @@ -0,0 +1,36 @@ +name: PR Check Status + +on: + schedule: + - cron: "15 8 * * *" # Once daily at 8:15am UTC + workflow_dispatch: + +permissions: {} + +concurrency: + group: pr-check-status + cancel-in-progress: true + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + +jobs: + nudge-stale-prs: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: write + issues: write + env: + STALE_HOURS: "48" + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - name: Nudge PRs that need rebase or CI refresh + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/pr_check_status_nudge.js'); + await script({ github, context, core }); diff --git a/.github/workflows/pr-intake-checks.yml b/.github/workflows/pr-intake-checks.yml new file mode 100644 index 0000000..1e84dcc --- /dev/null +++ b/.github/workflows/pr-intake-checks.yml @@ -0,0 +1,37 @@ +name: PR Intake Checks + +on: + pull_request_target: + branches: [dev, main] + types: [opened, reopened, synchronize, edited, ready_for_review] + +concurrency: + group: pr-intake-checks-${{ github.event.pull_request.number || github.run_id }} + 
cancel-in-progress: true + +permissions: + contents: read + pull-requests: write + issues: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + intake: + name: Intake Checks + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Run safe PR intake checks + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/pr_intake_checks.js'); + await script({ github, context, core }); diff --git a/.github/workflows/pr-label-policy-check.yml b/.github/workflows/pr-label-policy-check.yml new file mode 100644 index 0000000..613071f --- /dev/null +++ b/.github/workflows/pr-label-policy-check.yml @@ -0,0 +1,80 @@ +name: PR Label Policy Check + +on: + pull_request: + paths: + - ".github/label-policy.json" + - ".github/workflows/pr-labeler.yml" + - ".github/workflows/pr-auto-response.yml" + push: + paths: + - ".github/label-policy.json" + - ".github/workflows/pr-labeler.yml" + - ".github/workflows/pr-auto-response.yml" + +concurrency: + group: pr-label-policy-check-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + contributor-tier-consistency: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Verify shared label policy and workflow wiring + shell: bash + run: | + set -euo pipefail + python3 - <<'PY' + import json + import re + from pathlib import Path + + policy_path = Path('.github/label-policy.json') + policy = 
json.loads(policy_path.read_text(encoding='utf-8')) + color = str(policy.get('contributor_tier_color', '')).upper() + rules = policy.get('contributor_tiers', []) + if not re.fullmatch(r'[0-9A-F]{6}', color): + raise SystemExit('invalid contributor_tier_color in .github/label-policy.json') + if not rules: + raise SystemExit('contributor_tiers must not be empty in .github/label-policy.json') + + labels = set() + prev_min = None + for entry in rules: + label = str(entry.get('label', '')).strip().lower() + min_merged = int(entry.get('min_merged_prs', 0)) + if not label.endswith('contributor'): + raise SystemExit(f'invalid contributor tier label: {label}') + if label in labels: + raise SystemExit(f'duplicate contributor tier label: {label}') + if prev_min is not None and min_merged > prev_min: + raise SystemExit('contributor_tiers must be sorted descending by min_merged_prs') + labels.add(label) + prev_min = min_merged + + workflow_paths = [ + Path('.github/workflows/pr-labeler.yml'), + Path('.github/workflows/pr-auto-response.yml'), + ] + for workflow in workflow_paths: + text = workflow.read_text(encoding='utf-8') + if '.github/label-policy.json' not in text: + raise SystemExit(f'{workflow} must load .github/label-policy.json') + if re.search(r'contributorTierColor\s*=\s*"[0-9A-Fa-f]{6}"', text): + raise SystemExit(f'{workflow} contains hardcoded contributorTierColor') + + print('label policy file is valid and workflow consumers are wired to shared policy') + PY diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml new file mode 100644 index 0000000..2e60988 --- /dev/null +++ b/.github/workflows/pr-labeler.yml @@ -0,0 +1,56 @@ +name: PR Labeler + +on: + pull_request_target: + branches: [dev, main] + types: [opened, reopened, synchronize, edited, labeled, unlabeled] + workflow_dispatch: + inputs: + mode: + description: "Run mode for managed-label governance" + required: true + default: "audit" + type: choice + options: + - audit + - repair + 
+concurrency: + group: pr-labeler-${{ github.event.pull_request.number || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + pull-requests: write + issues: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + LABEL_POLICY_PATH: .github/label-policy.json + +jobs: + label: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Apply path labels + if: github.event_name == 'pull_request_target' + uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1 + continue-on-error: true + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + sync-labels: true + + - name: Apply size/risk/module labels + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + continue-on-error: true + env: + LABEL_POLICY_PATH: .github/label-policy.json + with: + script: | + const script = require('./.github/workflows/scripts/pr_labeler.js'); + await script({ github, context, core }); diff --git a/.github/workflows/pub-docker-img.yml b/.github/workflows/pub-docker-img.yml new file mode 100644 index 0000000..0942182 --- /dev/null +++ b/.github/workflows/pub-docker-img.yml @@ -0,0 +1,352 @@ +name: Pub Docker Img + +on: + push: + tags: ["v*"] + pull_request: + branches: [dev, main] + paths: + - "Dockerfile" + - ".dockerignore" + - "docker-compose.yml" + - "rust-toolchain.toml" + - "dev/config.template.toml" + - ".github/workflows/pub-docker-img.yml" + - ".github/release/ghcr-tag-policy.json" + - ".github/release/ghcr-vulnerability-policy.json" + - "scripts/ci/ghcr_publish_contract_guard.py" + - "scripts/ci/ghcr_vulnerability_gate.py" + workflow_dispatch: + +concurrency: + group: docker-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + 
GIT_CONFIG_VALUE_0: /dev/null + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + pr-smoke: + name: PR Docker Smoke + if: github.event_name == 'workflow_dispatch' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 25 + permissions: + contents: read + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Setup Buildx + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 + + - name: Extract metadata (tags, labels) + if: github.event_name == 'pull_request' + id: meta + uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=pr + + - name: Build smoke image + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6 + with: + context: . 
+ push: false + load: true + provenance: false + sbom: false + tags: zeroclaw-pr-smoke:latest + labels: ${{ steps.meta.outputs.labels || '' }} + platforms: linux/amd64 + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Verify image + run: docker run --rm zeroclaw-pr-smoke:latest --version + + publish: + name: Build and Push Docker Image + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'zeroclaw-labs/zeroclaw' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 45 + permissions: + contents: read + packages: write + security-events: write + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Setup Buildx + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 + + - name: Log in to Container Registry + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Compute tags + id: meta + shell: bash + run: | + set -euo pipefail + IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}" + SHA_SUFFIX="sha-${GITHUB_SHA::12}" + SHA_TAG="${IMAGE}:${SHA_SUFFIX}" + LATEST_SUFFIX="latest" + LATEST_TAG="${IMAGE}:${LATEST_SUFFIX}" + if [[ "${GITHUB_REF}" != refs/tags/v* ]]; then + echo "::error::Docker publish is restricted to v* tag pushes." + exit 1 + fi + + RELEASE_TAG="${GITHUB_REF#refs/tags/}" + VERSION_TAG="${IMAGE}:${RELEASE_TAG}" + TAGS="${VERSION_TAG},${SHA_TAG},${LATEST_TAG}" + + { + echo "tags=${TAGS}" + echo "release_tag=${RELEASE_TAG}" + echo "sha_tag=${SHA_SUFFIX}" + echo "latest_tag=${LATEST_SUFFIX}" + } >> "$GITHUB_OUTPUT" + + - name: Build and push Docker image + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6 + with: + context: . 
+ push: true + tags: ${{ steps.meta.outputs.tags }} + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Set GHCR package visibility to public + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + set -euo pipefail + owner="${GITHUB_REPOSITORY_OWNER,,}" + repo="${GITHUB_REPOSITORY#*/}" + + # Package path can vary depending on repository/package linkage. + candidates=( + "$repo" + "${owner}%2F${repo}" + ) + + for scope in orgs users; do + for pkg in "${candidates[@]}"; do + code="$(curl -sS -o /tmp/ghcr-visibility.json -w "%{http_code}" \ + -X PATCH \ + -H "Authorization: Bearer ${GH_TOKEN}" \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "https://api.github.com/${scope}/${owner}/packages/container/${pkg}/visibility" \ + -d '{"visibility":"public"}' || true)" + + if [ "$code" = "200" ] || [ "$code" = "204" ]; then + echo "GHCR package visibility is public (${scope}/${owner}/${pkg})." + exit 0 + fi + + echo "Visibility attempt ${scope}/${owner}/${pkg} returned HTTP ${code}." + done + done + + echo "::warning::Unable to update GHCR visibility via API in this run; proceeding to GHCR publish contract verification." 
+ + - name: Validate GHCR publish contract + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/ghcr_publish_contract_guard.py \ + --repository "${GITHUB_REPOSITORY,,}" \ + --release-tag "${{ steps.meta.outputs.release_tag }}" \ + --sha "${GITHUB_SHA}" \ + --policy-file .github/release/ghcr-tag-policy.json \ + --output-json artifacts/ghcr-publish-contract.json \ + --output-md artifacts/ghcr-publish-contract.md \ + --fail-on-violation + + - name: Emit GHCR publish contract audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ghcr-publish-contract.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type ghcr_publish_contract \ + --input-json artifacts/ghcr-publish-contract.json \ + --output-json artifacts/audit-event-ghcr-publish-contract.json \ + --artifact-name ghcr-publish-contract \ + --retention-days 21 + fi + + - name: Publish GHCR contract summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ghcr-publish-contract.md ]; then + cat artifacts/ghcr-publish-contract.md >> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload GHCR publish contract artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ghcr-publish-contract + path: | + artifacts/ghcr-publish-contract.json + artifacts/ghcr-publish-contract.md + artifacts/audit-event-ghcr-publish-contract.json + if-no-files-found: ignore + retention-days: 21 + + - name: Scan published image for vulnerabilities (Trivy) + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + + TAG_NAME="${{ steps.meta.outputs.release_tag }}" + SHA_TAG="${{ steps.meta.outputs.sha_tag }}" + LATEST_TAG="${{ steps.meta.outputs.latest_tag }}" + IMAGE_BASE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}" + VERSION_REF="${IMAGE_BASE}:${TAG_NAME}" + SHA_REF="${IMAGE_BASE}:${SHA_TAG}" + LATEST_REF="${IMAGE_BASE}:${LATEST_TAG}" + 
SARIF_OUT="artifacts/trivy-${TAG_NAME}.sarif" + TABLE_OUT="artifacts/trivy-${TAG_NAME}.txt" + JSON_OUT="artifacts/trivy-${TAG_NAME}.json" + SHA_TABLE_OUT="artifacts/trivy-${SHA_TAG}.txt" + SHA_JSON_OUT="artifacts/trivy-${SHA_TAG}.json" + LATEST_TABLE_OUT="artifacts/trivy-${LATEST_TAG}.txt" + LATEST_JSON_OUT="artifacts/trivy-${LATEST_TAG}.json" + + scan_trivy() { + local image_ref="$1" + local output_prefix="$2" + + docker run --rm \ + -v "$PWD/artifacts:/work" \ + aquasec/trivy:0.58.2 image \ + --quiet \ + --ignore-unfixed \ + --severity HIGH,CRITICAL \ + --format json \ + --output "/work/${output_prefix}.json" \ + "${image_ref}" + + docker run --rm \ + -v "$PWD/artifacts:/work" \ + aquasec/trivy:0.58.2 image \ + --quiet \ + --ignore-unfixed \ + --severity HIGH,CRITICAL \ + --format table \ + --output "/work/${output_prefix}.txt" \ + "${image_ref}" + } + + docker run --rm \ + -v "$PWD/artifacts:/work" \ + aquasec/trivy:0.58.2 image \ + --quiet \ + --ignore-unfixed \ + --severity HIGH,CRITICAL \ + --format sarif \ + --output "/work/trivy-${TAG_NAME}.sarif" \ + "${VERSION_REF}" + + scan_trivy "${VERSION_REF}" "trivy-${TAG_NAME}" + scan_trivy "${SHA_REF}" "trivy-${SHA_TAG}" + scan_trivy "${LATEST_REF}" "trivy-${LATEST_TAG}" + + echo "Generated Trivy reports:" + ls -1 "$SARIF_OUT" "$TABLE_OUT" "$JSON_OUT" "$SHA_TABLE_OUT" "$SHA_JSON_OUT" "$LATEST_TABLE_OUT" "$LATEST_JSON_OUT" + + - name: Validate GHCR vulnerability gate + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/ghcr_vulnerability_gate.py \ + --release-tag "${{ steps.meta.outputs.release_tag }}" \ + --sha-tag "${{ steps.meta.outputs.sha_tag }}" \ + --latest-tag "${{ steps.meta.outputs.latest_tag }}" \ + --release-report-json "artifacts/trivy-${{ steps.meta.outputs.release_tag }}.json" \ + --sha-report-json "artifacts/trivy-${{ steps.meta.outputs.sha_tag }}.json" \ + --latest-report-json "artifacts/trivy-${{ steps.meta.outputs.latest_tag }}.json" \ + --policy-file 
.github/release/ghcr-vulnerability-policy.json \ + --output-json artifacts/ghcr-vulnerability-gate.json \ + --output-md artifacts/ghcr-vulnerability-gate.md \ + --fail-on-violation + + - name: Emit GHCR vulnerability gate audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ghcr-vulnerability-gate.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type ghcr_vulnerability_gate \ + --input-json artifacts/ghcr-vulnerability-gate.json \ + --output-json artifacts/audit-event-ghcr-vulnerability-gate.json \ + --artifact-name ghcr-vulnerability-gate \ + --retention-days 21 + fi + + - name: Publish GHCR vulnerability summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ghcr-vulnerability-gate.md ]; then + cat artifacts/ghcr-vulnerability-gate.md >> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload GHCR vulnerability gate artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ghcr-vulnerability-gate + path: | + artifacts/ghcr-vulnerability-gate.json + artifacts/ghcr-vulnerability-gate.md + artifacts/audit-event-ghcr-vulnerability-gate.json + if-no-files-found: ignore + retention-days: 21 + + - name: Upload Trivy SARIF + if: always() + uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4 + with: + sarif_file: artifacts/trivy-${{ github.ref_name }}.sarif + category: ghcr-trivy + + - name: Upload Trivy report artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ghcr-trivy-report + path: | + artifacts/trivy-${{ github.ref_name }}.sarif + artifacts/trivy-${{ github.ref_name }}.txt + artifacts/trivy-${{ github.ref_name }}.json + artifacts/trivy-sha-*.txt + artifacts/trivy-sha-*.json + artifacts/trivy-latest.txt + artifacts/trivy-latest.json + if-no-files-found: ignore + retention-days: 14 diff --git 
a/.github/workflows/pub-prerelease.yml b/.github/workflows/pub-prerelease.yml new file mode 100644 index 0000000..01c0830 --- /dev/null +++ b/.github/workflows/pub-prerelease.yml @@ -0,0 +1,259 @@ +name: Pub Pre-release + +on: + push: + tags: + - "v*-alpha.*" + - "v*-beta.*" + - "v*-rc.*" + workflow_dispatch: + inputs: + tag: + description: "Existing pre-release tag (e.g. v0.1.8-rc.1)" + required: true + default: "" + type: string + mode: + description: "dry-run validates/builds only; publish creates prerelease" + required: true + default: dry-run + type: choice + options: + - dry-run + - publish + draft: + description: "Create prerelease as draft" + required: true + default: true + type: boolean + +concurrency: + group: prerelease-${{ github.ref || github.run_id }} + cancel-in-progress: false + +permissions: + contents: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + prerelease-guard: + name: Pre-release Guard + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + outputs: + release_tag: ${{ steps.vars.outputs.release_tag }} + mode: ${{ steps.vars.outputs.mode }} + draft: ${{ steps.vars.outputs.draft }} + ready_to_publish: ${{ steps.extract.outputs.ready_to_publish }} + stage: ${{ steps.extract.outputs.stage }} + transition_outcome: ${{ steps.extract.outputs.transition_outcome }} + latest_stage: ${{ steps.extract.outputs.latest_stage }} + latest_stage_tag: ${{ steps.extract.outputs.latest_stage_tag }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Resolve prerelease inputs + id: vars + shell: bash + run: | + set -euo pipefail + if [ "${GITHUB_EVENT_NAME}" = "push" ]; then + release_tag="${GITHUB_REF_NAME}" + mode="publish" + draft="false" + else + release_tag="${{ inputs.tag }}" + mode="${{ inputs.mode }}" + draft="${{ 
inputs.draft }}" + fi + + { + echo "release_tag=${release_tag}" + echo "mode=${mode}" + echo "draft=${draft}" + } >> "$GITHUB_OUTPUT" + + - name: Validate prerelease stage gate + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/prerelease_guard.py \ + --repo-root . \ + --tag "${{ steps.vars.outputs.release_tag }}" \ + --stage-config-file .github/release/prerelease-stage-gates.json \ + --mode "${{ steps.vars.outputs.mode }}" \ + --output-json artifacts/prerelease-guard.json \ + --output-md artifacts/prerelease-guard.md \ + --fail-on-violation + + - name: Extract prerelease outputs + id: extract + shell: bash + run: | + set -euo pipefail + ready_to_publish="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + print(str(bool(data.get('ready_to_publish', False))).lower()) + PY + )" + stage="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + print(data.get('stage', 'unknown')) + PY + )" + transition_outcome="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + transition = data.get('transition') or {} + print(transition.get('outcome', 'unknown')) + PY + )" + latest_stage="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + history = data.get('stage_history') or {} + print(history.get('latest_stage', 'unknown')) + PY + )" + latest_stage_tag="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + history = data.get('stage_history') or {} + print(history.get('latest_tag', 'unknown')) + PY + )" + { + echo "ready_to_publish=${ready_to_publish}" + echo "stage=${stage}" + echo "transition_outcome=${transition_outcome}" + echo "latest_stage=${latest_stage}" + echo "latest_stage_tag=${latest_stage_tag}" + } >> "$GITHUB_OUTPUT" + + - name: Emit 
prerelease audit event + if: always() + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type prerelease_guard \ + --input-json artifacts/prerelease-guard.json \ + --output-json artifacts/audit-event-prerelease-guard.json \ + --artifact-name prerelease-guard \ + --retention-days 21 + + - name: Publish prerelease summary + if: always() + shell: bash + run: | + set -euo pipefail + cat artifacts/prerelease-guard.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload prerelease guard artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: prerelease-guard + path: | + artifacts/prerelease-guard.json + artifacts/prerelease-guard.md + artifacts/audit-event-prerelease-guard.json + if-no-files-found: error + retention-days: 21 + + build-prerelease: + name: Build Pre-release Artifact + needs: [prerelease-guard] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 45 + steps: + - name: Checkout tag + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.prerelease-guard.outputs.release_tag }} + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: prerelease-${{ needs.prerelease-guard.outputs.release_tag }} + cache-targets: true + + - name: Build release-fast binary + shell: bash + run: | + set -euo pipefail + cargo build --profile release-fast --locked --target x86_64-unknown-linux-gnu + + - name: Package prerelease artifact + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + cp target/x86_64-unknown-linux-gnu/release-fast/zeroclaw artifacts/zeroclaw + tar czf artifacts/zeroclaw-x86_64-unknown-linux-gnu.tar.gz -C artifacts zeroclaw + rm artifacts/zeroclaw + + - name: Generate manifest + checksums + shell: 
bash + run: | + set -euo pipefail + python3 scripts/ci/release_manifest.py \ + --artifacts-dir artifacts \ + --release-tag "${{ needs.prerelease-guard.outputs.release_tag }}" \ + --output-json artifacts/prerelease-manifest.json \ + --output-md artifacts/prerelease-manifest.md \ + --checksums-path artifacts/SHA256SUMS \ + --fail-empty + + - name: Publish prerelease build summary + shell: bash + run: | + set -euo pipefail + cat artifacts/prerelease-manifest.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload prerelease build artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: prerelease-artifacts + path: artifacts/* + if-no-files-found: error + retention-days: 14 + + publish-prerelease: + name: Publish GitHub Pre-release + needs: [prerelease-guard, build-prerelease] + if: needs.prerelease-guard.outputs.ready_to_publish == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + steps: + - name: Download prerelease artifacts + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: prerelease-artifacts + path: artifacts + + - name: Create or update GitHub pre-release + uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2 + with: + tag_name: ${{ needs.prerelease-guard.outputs.release_tag }} + prerelease: true + draft: ${{ needs.prerelease-guard.outputs.draft == 'true' }} + generate_release_notes: true + files: | + artifacts/**/* + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pub-release.yml b/.github/workflows/pub-release.yml new file mode 100644 index 0000000..db0ec27 --- /dev/null +++ b/.github/workflows/pub-release.yml @@ -0,0 +1,645 @@ +name: Pub Release + +on: + push: + tags: ["v*"] + workflow_dispatch: + inputs: + release_ref: + description: "Git ref (branch, tag, or SHA) to build" + required: false + default: "main" + type: string + publish_release: 
+ description: "Publish a GitHub release (false = verification build only)" + required: false + default: false + type: boolean + release_tag: + description: "Existing release tag (required when publish_release=true), e.g. v0.1.1" + required: false + default: "" + type: string + draft: + description: "Create release as draft (manual publish only)" + required: false + default: true + type: boolean + schedule: + # Weekly release-readiness verification on default branch (no publish) + - cron: "17 8 * * 1" + +concurrency: + group: release-${{ github.ref || github.run_id }} + cancel-in-progress: false + +permissions: + contents: write + packages: read + id-token: write # Required for cosign keyless signing via OIDC + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + prepare: + name: Prepare Release Context + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + outputs: + release_ref: ${{ steps.vars.outputs.release_ref }} + release_tag: ${{ steps.vars.outputs.release_tag }} + publish_release: ${{ steps.vars.outputs.publish_release }} + draft_release: ${{ steps.vars.outputs.draft_release }} + steps: + - name: Resolve release inputs + id: vars + shell: bash + run: | + set -euo pipefail + + event_name="${GITHUB_EVENT_NAME}" + publish_release="false" + draft_release="false" + + if [[ "$event_name" == "push" ]]; then + release_ref="${GITHUB_REF_NAME}" + release_tag="${GITHUB_REF_NAME}" + publish_release="true" + elif [[ "$event_name" == "workflow_dispatch" ]]; then + release_ref="${{ inputs.release_ref }}" + publish_release="${{ inputs.publish_release }}" + draft_release="${{ inputs.draft }}" + + if [[ "$publish_release" == "true" ]]; then + release_tag="${{ inputs.release_tag }}" + if [[ -z "$release_tag" ]]; then + echo "::error::release_tag is required when publish_release=true" + exit 1 + fi + release_ref="$release_tag" + else + 
release_tag="verify-${GITHUB_SHA::12}" + fi + else + # schedule + release_ref="main" + release_tag="verify-${GITHUB_SHA::12}" + fi + + { + echo "release_ref=${release_ref}" + echo "release_tag=${release_tag}" + echo "publish_release=${publish_release}" + echo "draft_release=${draft_release}" + } >> "$GITHUB_OUTPUT" + + { + echo "### Release Context" + echo "- event: ${event_name}" + echo "- release_ref: ${release_ref}" + echo "- release_tag: ${release_tag}" + echo "- publish_release: ${publish_release}" + echo "- draft_release: ${draft_release}" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Validate release trigger and authorization guard + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/release_trigger_guard.py \ + --repo-root . \ + --repository "${GITHUB_REPOSITORY}" \ + --event-name "${GITHUB_EVENT_NAME}" \ + --actor "${GITHUB_ACTOR}" \ + --release-ref "${{ steps.vars.outputs.release_ref }}" \ + --release-tag "${{ steps.vars.outputs.release_tag }}" \ + --publish-release "${{ steps.vars.outputs.publish_release }}" \ + --authorized-actors "${{ vars.RELEASE_AUTHORIZED_ACTORS || 'willsarg,theonlyhennygod,chumyin' }}" \ + --authorized-tagger-emails "${{ vars.RELEASE_AUTHORIZED_TAGGER_EMAILS || '' }}" \ + --require-annotated-tag true \ + --output-json artifacts/release-trigger-guard.json \ + --output-md artifacts/release-trigger-guard.md \ + --fail-on-violation + + - name: Emit release trigger audit event + if: always() + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type release_trigger_guard \ + --input-json artifacts/release-trigger-guard.json \ + --output-json artifacts/audit-event-release-trigger-guard.json \ + --artifact-name release-trigger-guard \ + --retention-days 30 + + - name: Publish release trigger guard summary + if: always() + shell: bash + run: | + set -euo pipefail + cat 
artifacts/release-trigger-guard.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload release trigger guard artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: release-trigger-guard + path: | + artifacts/release-trigger-guard.json + artifacts/release-trigger-guard.md + artifacts/audit-event-release-trigger-guard.json + if-no-files-found: error + retention-days: 30 + + build-release: + name: Build ${{ matrix.target }} + needs: [prepare] + runs-on: ${{ matrix.os }} + timeout-minutes: 40 + strategy: + fail-fast: false + matrix: + include: + # Keep GNU Linux release artifacts on Ubuntu 22.04 to preserve + # a broadly compatible GLIBC baseline for user distributions. + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-unknown-linux-gnu + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-unknown-linux-musl + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + use_cross: true + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: aarch64-unknown-linux-gnu + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: gcc-aarch64-linux-gnu + linker_env: CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER + linker: aarch64-linux-gnu-gcc + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: aarch64-unknown-linux-musl + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + use_cross: true + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: armv7-unknown-linux-gnueabihf + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: gcc-arm-linux-gnueabihf + linker_env: CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER + linker: 
arm-linux-gnueabihf-gcc + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: armv7-linux-androideabi + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + android_ndk: true + android_api: 21 + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: aarch64-linux-android + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + android_ndk: true + android_api: 21 + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-unknown-freebsd + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + use_cross: true + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-apple-darwin + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: aarch64-apple-darwin + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-pc-windows-msvc + artifact: zeroclaw.exe + archive_ext: zip + cross_compiler: "" + linker_env: "" + linker: "" + + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.prepare.outputs.release_ref }} + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + targets: ${{ matrix.target }} + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + if: runner.os != 'Windows' + + - name: Install cross for cross-built targets + if: matrix.use_cross + run: | + cargo install cross --git https://github.com/cross-rs/cross + + - name: Install cross-compilation toolchain (Linux) + if: runner.os == 
'Linux' && matrix.cross_compiler != '' + run: | + sudo apt-get update -qq + sudo apt-get install -y "${{ matrix.cross_compiler }}" + + - name: Setup Android NDK + if: matrix.android_ndk + shell: bash + run: | + set -euo pipefail + NDK_VERSION="r26d" + NDK_ZIP="android-ndk-${NDK_VERSION}-linux.zip" + NDK_URL="https://dl.google.com/android/repository/${NDK_ZIP}" + NDK_ROOT="${RUNNER_TEMP}/android-ndk" + NDK_HOME="${NDK_ROOT}/android-ndk-${NDK_VERSION}" + + sudo apt-get update -qq + sudo apt-get install -y unzip + + mkdir -p "${NDK_ROOT}" + curl -fsSL "${NDK_URL}" -o "${RUNNER_TEMP}/${NDK_ZIP}" + unzip -q "${RUNNER_TEMP}/${NDK_ZIP}" -d "${NDK_ROOT}" + + echo "ANDROID_NDK_HOME=${NDK_HOME}" >> "$GITHUB_ENV" + echo "${NDK_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin" >> "$GITHUB_PATH" + + - name: Configure Android toolchain + if: matrix.android_ndk + shell: bash + run: | + echo "Setting up Android NDK toolchain for ${{ matrix.target }}" + NDK_HOME="${ANDROID_NDK_HOME:-}" + if [[ -z "$NDK_HOME" ]]; then + echo "::error::ANDROID_NDK_HOME was not configured." + exit 1 + fi + TOOLCHAIN="$NDK_HOME/toolchains/llvm/prebuilt/linux-x86_64/bin" + + # Add to path for linker resolution + echo "$TOOLCHAIN" >> "$GITHUB_PATH" + + # Set linker environment variables + if [[ "${{ matrix.target }}" == "armv7-linux-androideabi" ]]; then + ARMV7_CC="${TOOLCHAIN}/armv7a-linux-androideabi${{ matrix.android_api }}-clang" + ARMV7_CXX="${TOOLCHAIN}/armv7a-linux-androideabi${{ matrix.android_api }}-clang++" + + # Some crates still probe legacy compiler names (arm-linux-androideabi-clang). 
+ ln -sf "$ARMV7_CC" "${TOOLCHAIN}/arm-linux-androideabi-clang" + ln -sf "$ARMV7_CXX" "${TOOLCHAIN}/arm-linux-androideabi-clang++" + + { + echo "CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_LINKER=${ARMV7_CC}" + echo "CC_armv7_linux_androideabi=${ARMV7_CC}" + echo "CXX_armv7_linux_androideabi=${ARMV7_CXX}" + echo "AR_armv7_linux_androideabi=${TOOLCHAIN}/llvm-ar" + } >> "$GITHUB_ENV" + elif [[ "${{ matrix.target }}" == "aarch64-linux-android" ]]; then + AARCH64_CC="${TOOLCHAIN}/aarch64-linux-android${{ matrix.android_api }}-clang" + AARCH64_CXX="${TOOLCHAIN}/aarch64-linux-android${{ matrix.android_api }}-clang++" + + { + echo "CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER=${AARCH64_CC}" + echo "CC_aarch64_linux_android=${AARCH64_CC}" + echo "CXX_aarch64_linux_android=${AARCH64_CXX}" + echo "AR_aarch64_linux_android=${TOOLCHAIN}/llvm-ar" + } >> "$GITHUB_ENV" + fi + + - name: Build release + shell: bash + env: + LINKER_ENV: ${{ matrix.linker_env }} + LINKER: ${{ matrix.linker }} + USE_CROSS: ${{ matrix.use_cross }} + run: | + if [ -n "$LINKER_ENV" ] && [ -n "$LINKER" ]; then + echo "Using linker override: $LINKER_ENV=$LINKER" + export "$LINKER_ENV=$LINKER" + fi + if [ "$USE_CROSS" = "true" ]; then + echo "Using cross for MUSL target" + cross build --profile release-fast --locked --target ${{ matrix.target }} + else + cargo build --profile release-fast --locked --target ${{ matrix.target }} + fi + + - name: Check binary size (Unix) + if: runner.os != 'Windows' + run: bash scripts/ci/check_binary_size.sh "target/${{ matrix.target }}/release-fast/${{ matrix.artifact }}" "${{ matrix.target }}" + + - name: Package (Unix) + if: runner.os != 'Windows' + run: | + cd target/${{ matrix.target }}/release-fast + tar czf ../../../zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }} ${{ matrix.artifact }} + + - name: Package (Windows) + if: runner.os == 'Windows' + run: | + cd target/${{ matrix.target }}/release-fast + 7z a ../../../zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }} 
${{ matrix.artifact }} + + - name: Upload artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: zeroclaw-${{ matrix.target }} + path: zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }} + retention-days: 7 + + verify-artifacts: + name: Verify Artifact Set + needs: [prepare, build-release] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.prepare.outputs.release_ref }} + + - name: Download all artifacts + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: artifacts + + - name: Validate release archive contract (verify stage) + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/release_artifact_guard.py \ + --artifacts-dir artifacts \ + --contract-file .github/release/release-artifact-contract.json \ + --output-json artifacts/release-artifact-guard.verify.json \ + --output-md artifacts/release-artifact-guard.verify.md \ + --allow-extra-archives \ + --skip-manifest-files \ + --skip-sbom-files \ + --skip-notice-files \ + --fail-on-violation + + - name: Emit verify-stage artifact guard audit event + if: always() + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type release_artifact_guard_verify \ + --input-json artifacts/release-artifact-guard.verify.json \ + --output-json artifacts/audit-event-release-artifact-guard-verify.json \ + --artifact-name release-artifact-guard-verify \ + --retention-days 21 + + - name: Publish verify-stage artifact guard summary + if: always() + shell: bash + run: | + set -euo pipefail + cat artifacts/release-artifact-guard.verify.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload verify-stage artifact guard reports + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: 
release-artifact-guard-verify + path: | + artifacts/release-artifact-guard.verify.json + artifacts/release-artifact-guard.verify.md + artifacts/audit-event-release-artifact-guard-verify.json + if-no-files-found: error + retention-days: 21 + + publish: + name: Publish Release + if: needs.prepare.outputs.publish_release == 'true' + needs: [prepare, verify-artifacts] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 45 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.prepare.outputs.release_ref }} + + - name: Download all artifacts + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: artifacts + + - name: Install syft + shell: bash + run: | + set -euo pipefail + mkdir -p "${RUNNER_TEMP}/bin" + ./scripts/ci/install_syft.sh "${RUNNER_TEMP}/bin" + echo "${RUNNER_TEMP}/bin" >> "$GITHUB_PATH" + + - name: Generate SBOM (CycloneDX) + run: | + syft dir:. 
--source-name zeroclaw -o cyclonedx-json=artifacts/zeroclaw.cdx.json -o spdx-json=artifacts/zeroclaw.spdx.json + { + echo "### SBOM Generated" + echo "- CycloneDX: zeroclaw.cdx.json" + echo "- SPDX: zeroclaw.spdx.json" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Attach license and notice files + run: | + cp LICENSE-APACHE artifacts/LICENSE-APACHE + cp LICENSE-MIT artifacts/LICENSE-MIT + cp NOTICE artifacts/NOTICE + + - name: Generate release manifest + checksums + shell: bash + env: + RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }} + run: | + set -euo pipefail + python3 scripts/ci/release_manifest.py \ + --artifacts-dir artifacts \ + --release-tag "${RELEASE_TAG}" \ + --output-json artifacts/release-manifest.json \ + --output-md artifacts/release-manifest.md \ + --checksums-path artifacts/SHA256SUMS \ + --fail-empty + + - name: Generate SHA256SUMS provenance statement + shell: bash + env: + RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }} + run: | + set -euo pipefail + python3 scripts/ci/generate_provenance.py \ + --artifact artifacts/SHA256SUMS \ + --subject-name "zeroclaw-${RELEASE_TAG}-sha256sums" \ + --output artifacts/zeroclaw.sha256sums.intoto.json + + - name: Emit SHA256SUMS provenance audit event + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type release_sha256sums_provenance \ + --input-json artifacts/zeroclaw.sha256sums.intoto.json \ + --output-json artifacts/audit-event-release-sha256sums-provenance.json \ + --artifact-name release-sha256sums-provenance \ + --retention-days 30 + + - name: Validate release artifact contract (publish stage) + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/release_artifact_guard.py \ + --artifacts-dir artifacts \ + --contract-file .github/release/release-artifact-contract.json \ + --output-json artifacts/release-artifact-guard.publish.json \ + --output-md artifacts/release-artifact-guard.publish.md \ + --allow-extra-archives \ + 
--allow-extra-manifest-files \ + --allow-extra-sbom-files \ + --allow-extra-notice-files \ + --fail-on-violation + + - name: Emit publish-stage artifact guard audit event + if: always() + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type release_artifact_guard_publish \ + --input-json artifacts/release-artifact-guard.publish.json \ + --output-json artifacts/audit-event-release-artifact-guard-publish.json \ + --artifact-name release-artifact-guard-publish \ + --retention-days 30 + + - name: Publish artifact guard summary + shell: bash + run: | + set -euo pipefail + cat artifacts/release-artifact-guard.publish.md >> "$GITHUB_STEP_SUMMARY" + + - name: Publish release manifest summary + shell: bash + run: | + set -euo pipefail + cat artifacts/release-manifest.md >> "$GITHUB_STEP_SUMMARY" + + - name: Install cosign + uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0 + + - name: Sign artifacts with cosign (keyless) + shell: bash + run: | + set -euo pipefail + while IFS= read -r -d '' file; do + cosign sign-blob --yes \ + --bundle="${file}.sigstore.json" \ + --output-signature="${file}.sig" \ + --output-certificate="${file}.pem" \ + "$file" + done < <(find artifacts -type f ! -name '*.sig' ! -name '*.pem' ! 
-name '*.sigstore.json' -print0) + + - name: Compose release-notes supply-chain references + shell: bash + env: + RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }} + run: | + set -euo pipefail + python3 scripts/ci/release_notes_with_supply_chain_refs.py \ + --artifacts-dir artifacts \ + --repository "${GITHUB_REPOSITORY}" \ + --release-tag "${RELEASE_TAG}" \ + --output-json artifacts/release-notes-supply-chain.json \ + --output-md artifacts/release-notes-supply-chain.md \ + --fail-on-missing + + - name: Publish release-notes supply-chain summary + shell: bash + run: | + set -euo pipefail + cat artifacts/release-notes-supply-chain.md >> "$GITHUB_STEP_SUMMARY" + + - name: Verify GHCR release tag availability + shell: bash + env: + RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }} + run: | + set -euo pipefail + repo="${GITHUB_REPOSITORY,,}" + manifest_url="https://ghcr.io/v2/${repo}/manifests/${RELEASE_TAG}" + accept_header="application/vnd.oci.image.index.v1+json, application/vnd.docker.distribution.manifest.v2+json" + max_attempts=75 + sleep_seconds=20 + + for attempt in $(seq 1 "$max_attempts"); do + token_resp="$(curl -sS "https://ghcr.io/token?scope=repository:${repo}:pull" || true)" + token="$(echo "$token_resp" | sed -n 's/.*"token":"\([^"]*\)".*/\1/p')" + + if [ -z "$token" ]; then + code="000" + else + code="$(curl -sS -o /tmp/ghcr-release-manifest.json -w "%{http_code}" \ + -H "Authorization: Bearer ${token}" \ + -H "Accept: ${accept_header}" \ + "${manifest_url}" || true)" + fi + + if [ "$code" = "200" ]; then + echo "GHCR release tag is available: ${repo}:${RELEASE_TAG}" + exit 0 + fi + + if [ "$attempt" -lt "$max_attempts" ]; then + echo "Waiting for GHCR tag ${repo}:${RELEASE_TAG} (attempt ${attempt}/${max_attempts}, HTTP ${code})..." + sleep "$sleep_seconds" + fi + done + + echo "::error::GHCR tag ${repo}:${RELEASE_TAG} was not available before release publish timeout." 
+ cat /tmp/ghcr-release-manifest.json || true + exit 1 + + - name: Create GitHub Release + uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2 + with: + tag_name: ${{ needs.prepare.outputs.release_tag }} + draft: ${{ needs.prepare.outputs.draft_release == 'true' }} + body_path: artifacts/release-notes-supply-chain.md + generate_release_notes: true + files: | + artifacts/**/* + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/scripts/ci_human_review_guard.js b/.github/workflows/scripts/ci_human_review_guard.js new file mode 100644 index 0000000..b13923b --- /dev/null +++ b/.github/workflows/scripts/ci_human_review_guard.js @@ -0,0 +1,61 @@ +// Enforce at least one human approval on pull requests. +// Used by .github/workflows/ci-run.yml via actions/github-script. + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const prNumber = context.payload.pull_request?.number; + if (!prNumber) { + core.setFailed("Missing pull_request context."); + return; + } + + const botAllowlist = new Set( + (process.env.HUMAN_REVIEW_BOT_LOGINS || "github-actions[bot],dependabot[bot],coderabbitai[bot]") + .split(",") + .map((value) => value.trim().toLowerCase()) + .filter(Boolean), + ); + + const isBotAccount = (login, accountType) => { + if (!login) return false; + if ((accountType || "").toLowerCase() === "bot") return true; + if (login.endsWith("[bot]")) return true; + return botAllowlist.has(login); + }; + + const reviews = await github.paginate(github.rest.pulls.listReviews, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + + const latestReviewByUser = new Map(); + const decisiveStates = new Set(["APPROVED", "CHANGES_REQUESTED", "DISMISSED"]); + for (const review of reviews) { + const login = review.user?.login?.toLowerCase(); + if (!login) continue; + if (!decisiveStates.has(review.state)) continue; + 
latestReviewByUser.set(login, { + state: review.state, + type: review.user?.type || "", + }); + } + + const humanApprovers = []; + for (const [login, review] of latestReviewByUser.entries()) { + if (review.state !== "APPROVED") continue; + if (isBotAccount(login, review.type)) continue; + humanApprovers.push(login); + } + + if (humanApprovers.length === 0) { + core.setFailed( + "No human approving review found. At least one non-bot approval is required before merge.", + ); + return; + } + + core.info(`Human approval check passed. Approver(s): ${humanApprovers.join(", ")}`); +}; diff --git a/.github/workflows/scripts/ci_license_file_owner_guard.js b/.github/workflows/scripts/ci_license_file_owner_guard.js new file mode 100644 index 0000000..ee0befa --- /dev/null +++ b/.github/workflows/scripts/ci_license_file_owner_guard.js @@ -0,0 +1,54 @@ +// Enforce ownership rules for root license files in PRs. + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const prNumber = context.payload.pull_request?.number; + const prAuthor = context.payload.pull_request?.user?.login?.toLowerCase() || ""; + + if (!prNumber) { + core.setFailed("Missing pull_request context."); + return; + } + + const ownerAllowlist = ["willsarg"]; + + if (ownerAllowlist.length === 0) { + core.setFailed("License owner allowlist is empty."); + return; + } + + const protectedFiles = new Set(["LICENSE-APACHE", "LICENSE-MIT"]); + const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + + const changedProtectedFiles = files + .map((file) => file.filename) + .filter((name) => protectedFiles.has(name)); + + if (changedProtectedFiles.length === 0) { + core.info("No protected root license files changed in this PR."); + return; + } + + core.info(`Protected license files changed:\n- ${changedProtectedFiles.join("\n- ")}`); + core.info(`Allowed license file 
editors: ${ownerAllowlist.join(", ")}`); + + if (!prAuthor) { + core.setFailed("Unable to resolve PR author login."); + return; + } + + if (!ownerAllowlist.includes(prAuthor)) { + core.setFailed( + `Root license files (${changedProtectedFiles.join(", ")}) can only be changed by ${ownerAllowlist.join(", ")}. PR author is @${prAuthor}.`, + ); + return; + } + + core.info(`License file edit authorized for PR author: @${prAuthor}`); +}; diff --git a/.github/workflows/scripts/ci_workflow_owner_approval.js b/.github/workflows/scripts/ci_workflow_owner_approval.js new file mode 100644 index 0000000..2f3bf29 --- /dev/null +++ b/.github/workflows/scripts/ci_workflow_owner_approval.js @@ -0,0 +1,83 @@ +// Extracted from ci-run.yml step: Require owner approval for workflow file changes + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const prNumber = context.payload.pull_request?.number; + const prAuthor = context.payload.pull_request?.user?.login?.toLowerCase() || ""; + if (!prNumber) { + core.setFailed("Missing pull_request context."); + return; + } + + const baseOwners = ["theonlyhennygod", "willsarg", "chumyin"]; + const configuredOwners = (process.env.WORKFLOW_OWNER_LOGINS || "") + .split(",") + .map((login) => login.trim().toLowerCase()) + .filter(Boolean); + const ownerAllowlist = [...new Set([...baseOwners, ...configuredOwners])]; + + if (ownerAllowlist.length === 0) { + core.setFailed("Workflow owner allowlist is empty."); + return; + } + + core.info(`Workflow owner allowlist: ${ownerAllowlist.join(", ")}`); + + const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + + const workflowFiles = files + .map((file) => file.filename) + .filter((name) => name.startsWith(".github/workflows/")); + + if (workflowFiles.length === 0) { + core.info("No workflow files changed in this PR."); + return; + } + + 
core.info(`Workflow files changed:\n- ${workflowFiles.join("\n- ")}`); + + if (prAuthor && ownerAllowlist.includes(prAuthor)) { + core.info(`Workflow PR authored by allowlisted owner: @${prAuthor}`); + return; + } + + const reviews = await github.paginate(github.rest.pulls.listReviews, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + + const latestReviewByUser = new Map(); + for (const review of reviews) { + const login = review.user?.login; + if (!login) continue; + latestReviewByUser.set(login.toLowerCase(), review.state); + } + + const approvedUsers = [...latestReviewByUser.entries()] + .filter(([, state]) => state === "APPROVED") + .map(([login]) => login); + + if (approvedUsers.length === 0) { + core.setFailed("Workflow files changed but no approving review is present."); + return; + } + + const ownerApprover = approvedUsers.find((login) => ownerAllowlist.includes(login)); + if (!ownerApprover) { + core.setFailed( + `Workflow files changed. Approvals found (${approvedUsers.join(", ")}), but none match workflow owner allowlist.`, + ); + return; + } + + core.info(`Workflow owner approval present: @${ownerApprover}`); + +}; diff --git a/.github/workflows/scripts/lint_feedback.js b/.github/workflows/scripts/lint_feedback.js new file mode 100644 index 0000000..8b90161 --- /dev/null +++ b/.github/workflows/scripts/lint_feedback.js @@ -0,0 +1,90 @@ +// Post actionable lint failure summary as a PR comment. +// Used by the lint-feedback CI job via actions/github-script. 
+// +// Required environment variables: +// RUST_CHANGED — "true" if Rust files changed +// DOCS_CHANGED — "true" if docs files changed +// LINT_RESULT — result of the lint job +// LINT_DELTA_RESULT — result of the strict delta lint job +// DOCS_RESULT — result of the docs-quality job + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const issueNumber = context.payload.pull_request?.number; + if (!issueNumber) return; + + const marker = ""; + const rustChanged = process.env.RUST_CHANGED === "true"; + const docsChanged = process.env.DOCS_CHANGED === "true"; + const lintResult = process.env.LINT_RESULT || "skipped"; + const lintDeltaResult = process.env.LINT_DELTA_RESULT || "skipped"; + const docsResult = process.env.DOCS_RESULT || "skipped"; + + const failures = []; + if (rustChanged && !["success", "skipped"].includes(lintResult)) { + failures.push("`Lint Gate (Format + Clippy)` failed."); + } + if (rustChanged && !["success", "skipped"].includes(lintDeltaResult)) { + failures.push("`Lint Gate (Strict Delta)` failed."); + } + if (docsChanged && !["success", "skipped"].includes(docsResult)) { + failures.push("`Docs Quality` failed."); + } + + const comments = await github.paginate(github.rest.issues.listComments, { + owner, + repo, + issue_number: issueNumber, + per_page: 100, + }); + const existing = comments.find((comment) => (comment.body || "").includes(marker)); + + if (failures.length === 0) { + if (existing) { + await github.rest.issues.deleteComment({ + owner, + repo, + comment_id: existing.id, + }); + } + core.info("No lint/docs gate failures. 
No feedback comment required."); + return; + } + + const runUrl = `${context.serverUrl}/${owner}/${repo}/actions/runs/${context.runId}`; + const body = [ + marker, + "### CI lint feedback", + "", + "This PR failed one or more fast lint/documentation gates:", + "", + ...failures.map((item) => `- ${item}`), + "", + "Open the failing logs in this run:", + `- ${runUrl}`, + "", + "Local fix commands:", + "- `./scripts/ci/rust_quality_gate.sh`", + "- `./scripts/ci/rust_strict_delta_gate.sh`", + "- `./scripts/ci/docs_quality_gate.sh`", + "", + "After fixes, push a new commit and CI will re-run automatically.", + ].join("\n"); + + if (existing) { + await github.rest.issues.updateComment({ + owner, + repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body, + }); + } +}; diff --git a/.github/workflows/scripts/pr_auto_response_contributor_tier.js b/.github/workflows/scripts/pr_auto_response_contributor_tier.js new file mode 100644 index 0000000..76dc0fb --- /dev/null +++ b/.github/workflows/scripts/pr_auto_response_contributor_tier.js @@ -0,0 +1,132 @@ +// Extracted from pr-auto-response.yml step: Apply contributor tier label for issue author + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const issue = context.payload.issue; + const pullRequest = context.payload.pull_request; + const target = issue ?? 
pullRequest; + async function loadContributorTierPolicy() { + const policyPath = process.env.LABEL_POLICY_PATH || ".github/label-policy.json"; + const fallback = { + contributorTierColor: "2ED9FF", + contributorTierRules: [ + { label: "distinguished contributor", minMergedPRs: 50 }, + { label: "principal contributor", minMergedPRs: 20 }, + { label: "experienced contributor", minMergedPRs: 10 }, + { label: "trusted contributor", minMergedPRs: 5 }, + ], + }; + try { + const { data } = await github.rest.repos.getContent({ + owner, + repo, + path: policyPath, + ref: context.payload.repository?.default_branch || "main", + }); + const json = JSON.parse(Buffer.from(data.content, "base64").toString("utf8")); + const contributorTierRules = (json.contributor_tiers || []).map((entry) => ({ + label: String(entry.label || "").trim(), + minMergedPRs: Number(entry.min_merged_prs || 0), + })); + const contributorTierColor = String(json.contributor_tier_color || "").toUpperCase(); + if (!contributorTierColor || contributorTierRules.length === 0) { + return fallback; + } + return { contributorTierColor, contributorTierRules }; + } catch (error) { + core.warning(`failed to load ${policyPath}, using fallback policy: ${error.message}`); + return fallback; + } + } + + const { contributorTierColor, contributorTierRules } = await loadContributorTierPolicy(); + const contributorTierLabels = contributorTierRules.map((rule) => rule.label); + const managedContributorLabels = new Set(contributorTierLabels); + const action = context.payload.action; + const changedLabel = context.payload.label?.name; + + if (!target) return; + if ((action === "labeled" || action === "unlabeled") && !managedContributorLabels.has(changedLabel)) { + return; + } + + const author = target.user; + if (!author || author.type === "Bot") return; + + function contributorTierDescription(rule) { + return `Contributor with ${rule.minMergedPRs}+ merged PRs.`; + } + + async function ensureContributorTierLabels() { + for (const 
rule of contributorTierRules) { + const label = rule.label; + const expectedDescription = contributorTierDescription(rule); + try { + const { data: existing } = await github.rest.issues.getLabel({ owner, repo, name: label }); + const currentColor = (existing.color || "").toUpperCase(); + const currentDescription = (existing.description || "").trim(); + if (currentColor !== contributorTierColor || currentDescription !== expectedDescription) { + await github.rest.issues.updateLabel({ + owner, + repo, + name: label, + new_name: label, + color: contributorTierColor, + description: expectedDescription, + }); + } + } catch (error) { + if (error.status !== 404) throw error; + await github.rest.issues.createLabel({ + owner, + repo, + name: label, + color: contributorTierColor, + description: expectedDescription, + }); + } + } + } + + function selectContributorTier(mergedCount) { + const matchedTier = contributorTierRules.find((rule) => mergedCount >= rule.minMergedPRs); + return matchedTier ? matchedTier.label : null; + } + + let contributorTierLabel = null; + try { + const { data: mergedSearch } = await github.rest.search.issuesAndPullRequests({ + q: `repo:${owner}/${repo} is:pr is:merged author:${author.login}`, + per_page: 1, + }); + const mergedCount = mergedSearch.total_count || 0; + contributorTierLabel = selectContributorTier(mergedCount); + } catch (error) { + core.warning(`failed to evaluate contributor tier status: ${error.message}`); + return; + } + + await ensureContributorTierLabels(); + + const { data: currentLabels } = await github.rest.issues.listLabelsOnIssue({ + owner, + repo, + issue_number: target.number, + }); + const keepLabels = currentLabels + .map((label) => label.name) + .filter((label) => !contributorTierLabels.includes(label)); + + if (contributorTierLabel) { + keepLabels.push(contributorTierLabel); + } + + await github.rest.issues.setLabels({ + owner, + repo, + issue_number: target.number, + labels: [...new Set(keepLabels)], + }); + +}; diff 
--git a/.github/workflows/scripts/pr_auto_response_labeled_routes.js b/.github/workflows/scripts/pr_auto_response_labeled_routes.js new file mode 100644 index 0000000..eb5410e --- /dev/null +++ b/.github/workflows/scripts/pr_auto_response_labeled_routes.js @@ -0,0 +1,94 @@ +// Extracted from pr-auto-response.yml step: Handle label-driven responses + +module.exports = async ({ github, context, core }) => { + const label = context.payload.label?.name; + if (!label) return; + + const issue = context.payload.issue; + const pullRequest = context.payload.pull_request; + const target = issue ?? pullRequest; + if (!target) return; + + const isIssue = Boolean(issue); + const issueNumber = target.number; + const owner = context.repo.owner; + const repo = context.repo.repo; + + const rules = [ + { + label: "r:support", + close: true, + closeIssuesOnly: true, + closeReason: "not_planned", + message: + "This looks like a usage/support request. Please use README + docs first, then open a focused bug with repro details if behavior is incorrect.", + }, + { + label: "r:needs-repro", + close: false, + message: + "Thanks for the report. Please add deterministic repro steps, exact environment, and redacted logs so maintainers can triage quickly.", + }, + { + label: "invalid", + close: true, + closeIssuesOnly: true, + closeReason: "not_planned", + message: + "Closing as invalid based on current information. If this is still relevant, open a new issue with updated evidence and reproducible steps.", + }, + { + label: "duplicate", + close: true, + closeIssuesOnly: true, + closeReason: "not_planned", + message: + "Closing as duplicate. 
Please continue discussion in the canonical linked issue/PR.", + }, + ]; + + const rule = rules.find((entry) => entry.label === label); + if (!rule) return; + + const marker = ``; + const comments = await github.paginate(github.rest.issues.listComments, { + owner, + repo, + issue_number: issueNumber, + per_page: 100, + }); + + const alreadyCommented = comments.some((comment) => + (comment.body || "").includes(marker) + ); + + if (!alreadyCommented) { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body: `${rule.message}\n\n${marker}`, + }); + } + + if (!rule.close) return; + if (rule.closeIssuesOnly && !isIssue) return; + if (target.state === "closed") return; + + if (isIssue) { + await github.rest.issues.update({ + owner, + repo, + issue_number: issueNumber, + state: "closed", + state_reason: rule.closeReason || "not_planned", + }); + } else { + await github.rest.issues.update({ + owner, + repo, + issue_number: issueNumber, + state: "closed", + }); + } +}; diff --git a/.github/workflows/scripts/pr_check_status_nudge.js b/.github/workflows/scripts/pr_check_status_nudge.js new file mode 100644 index 0000000..1d81215 --- /dev/null +++ b/.github/workflows/scripts/pr_check_status_nudge.js @@ -0,0 +1,161 @@ +// Extracted from pr-check-status.yml step: Nudge PRs that need rebase or CI refresh + +module.exports = async ({ github, context, core }) => { + const staleHours = Number(process.env.STALE_HOURS || "48"); + const ignoreLabels = new Set(["no-stale", "stale", "maintainer", "no-pr-hygiene"]); + const marker = ""; + const owner = context.repo.owner; + const repo = context.repo.repo; + + const openPrs = await github.paginate(github.rest.pulls.list, { + owner, + repo, + state: "open", + per_page: 100, + }); + + const activePrs = openPrs.filter((pr) => { + if (pr.draft) { + return false; + } + + const labels = new Set((pr.labels || []).map((label) => label.name)); + return ![...ignoreLabels].some((label) => labels.has(label)); + 
}); + + core.info(`Scanning ${activePrs.length} open PR(s) for hygiene nudges.`); + + let nudged = 0; + let skipped = 0; + + for (const pr of activePrs) { + const { data: headCommit } = await github.rest.repos.getCommit({ + owner, + repo, + ref: pr.head.sha, + }); + + const headCommitAt = + headCommit.commit?.committer?.date || headCommit.commit?.author?.date; + if (!headCommitAt) { + skipped += 1; + core.info(`#${pr.number}: missing head commit timestamp, skipping.`); + continue; + } + + const ageHours = (Date.now() - new Date(headCommitAt).getTime()) / 3600000; + if (ageHours < staleHours) { + skipped += 1; + continue; + } + + const { data: prDetail } = await github.rest.pulls.get({ + owner, + repo, + pull_number: pr.number, + }); + + const isBehindBase = prDetail.mergeable_state === "behind"; + + const { data: checkRunsData } = await github.rest.checks.listForRef({ + owner, + repo, + ref: pr.head.sha, + per_page: 100, + }); + + const ciGateRuns = (checkRunsData.check_runs || []) + .filter((run) => run.name === "CI Required Gate") + .sort((a, b) => { + const aTime = new Date(a.started_at || a.completed_at || a.created_at).getTime(); + const bTime = new Date(b.started_at || b.completed_at || b.created_at).getTime(); + return bTime - aTime; + }); + + let ciState = "missing"; + if (ciGateRuns.length > 0) { + const latest = ciGateRuns[0]; + if (latest.status !== "completed") { + ciState = "in_progress"; + } else if (["success", "neutral", "skipped"].includes(latest.conclusion || "")) { + ciState = "success"; + } else { + ciState = String(latest.conclusion || "failure"); + } + } + + const ciMissing = ciState === "missing"; + const ciFailing = !["success", "in_progress", "missing"].includes(ciState); + + if (!isBehindBase && !ciMissing && !ciFailing) { + skipped += 1; + continue; + } + + const reasons = []; + if (isBehindBase) { + reasons.push("- Branch is behind `main` (please rebase or merge the latest base branch)."); + } + if (ciMissing) { + reasons.push("- No `CI 
Required Gate` run was found for the current head commit."); + } + if (ciFailing) { + reasons.push(`- Latest \`CI Required Gate\` result is \`${ciState}\`.`); + } + + const shortSha = pr.head.sha.slice(0, 12); + const body = [ + marker, + `Hi @${pr.user.login}, friendly automation nudge from PR hygiene.`, + "", + `This PR has had no new commits for **${Math.floor(ageHours)}h** and still needs an update before merge:`, + "", + ...reasons, + "", + "### Recommended next steps", + "1. Rebase your branch on `main`.", + "2. Push the updated branch and re-run checks (or use **Re-run failed jobs**).", + "3. Post fresh validation output in this PR thread.", + "", + "Maintainers: apply `no-stale` to opt out for accepted-but-blocked work.", + `Head SHA: \`${shortSha}\``, + ].join("\n"); + + const { data: comments } = await github.rest.issues.listComments({ + owner, + repo, + issue_number: pr.number, + per_page: 100, + }); + + const existing = comments.find( + (comment) => comment.user?.type === "Bot" && comment.body?.includes(marker), + ); + + if (existing) { + if (existing.body === body) { + skipped += 1; + continue; + } + + await github.rest.issues.updateComment({ + owner, + repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: pr.number, + body, + }); + } + + nudged += 1; + core.info(`#${pr.number}: hygiene nudge posted/updated.`); + } + + core.info(`Done. Nudged=${nudged}, skipped=${skipped}`); +}; diff --git a/.github/workflows/scripts/pr_intake_checks.js b/.github/workflows/scripts/pr_intake_checks.js new file mode 100644 index 0000000..33d188f --- /dev/null +++ b/.github/workflows/scripts/pr_intake_checks.js @@ -0,0 +1,202 @@ +// Run safe intake checks for PR events and maintain a single sticky comment. +// Used by .github/workflows/pr-intake-checks.yml via actions/github-script. 
+ +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const pr = context.payload.pull_request; + if (!pr) return; + + const marker = ""; + const legacyMarker = ""; + const requiredSections = [ + "## Summary", + "## Validation Evidence (required)", + "## Security Impact (required)", + "## Privacy and Data Hygiene (required)", + "## Rollback Plan (required)", + ]; + const body = pr.body || ""; + const linearKeyRegex = /\b(?:RMN|CDV|COM)-\d+\b/g; + const linearKeys = Array.from( + new Set([...(pr.title.match(linearKeyRegex) || []), ...(body.match(linearKeyRegex) || [])]), + ); + + const missingSections = requiredSections.filter((section) => !body.includes(section)); + const missingFields = []; + const requiredFieldChecks = [ + ["summary problem", /- Problem:\s*\S+/m], + ["summary why it matters", /- Why it matters:\s*\S+/m], + ["summary what changed", /- What changed:\s*\S+/m], + ["validation commands", /Commands and result summary:\s*[\s\S]*```/m], + ["security risk/mitigation", /- New permissions\/capabilities\?\s*\(`Yes\/No`\):\s*\S+/m], + ["privacy status", /- Data-hygiene status\s*\(`pass\|needs-follow-up`\):\s*\S+/m], + ["rollback plan", /- Fast rollback command\/path:\s*\S+/m], + ]; + for (const [name, pattern] of requiredFieldChecks) { + if (!pattern.test(body)) { + missingFields.push(name); + } + } + + const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: pr.number, + per_page: 100, + }); + + const formatWarnings = []; + const dangerousProblems = []; + for (const file of files) { + const patch = file.patch || ""; + if (!patch) continue; + const lines = patch.split("\n"); + for (let idx = 0; idx < lines.length; idx += 1) { + const line = lines[idx]; + if (!line.startsWith("+") || line.startsWith("+++")) continue; + const added = line.slice(1); + const lineNo = idx + 1; + if (/\t/.test(added)) { + 
formatWarnings.push(`${file.filename}:patch#${lineNo} contains tab characters`); + } + if (/[ \t]+$/.test(added)) { + formatWarnings.push(`${file.filename}:patch#${lineNo} contains trailing whitespace`); + } + if (/^(<<<<<<<|=======|>>>>>>>)/.test(added)) { + dangerousProblems.push(`${file.filename}:patch#${lineNo} contains merge conflict markers`); + } + } + } + + const workflowFilesChanged = files + .map((file) => file.filename) + .filter((name) => name.startsWith(".github/workflows/")); + + const advisoryFindings = []; + const blockingFindings = []; + if (missingSections.length > 0) { + advisoryFindings.push(`Missing required PR template sections: ${missingSections.join(", ")}`); + } + if (missingFields.length > 0) { + advisoryFindings.push(`Incomplete required PR template fields: ${missingFields.join(", ")}`); + } + if (formatWarnings.length > 0) { + advisoryFindings.push(`Formatting issues in added lines (${formatWarnings.length})`); + } + if (dangerousProblems.length > 0) { + blockingFindings.push(`Dangerous patch markers found (${dangerousProblems.length})`); + } + if (linearKeys.length === 0) { + advisoryFindings.push( + "Missing Linear issue key reference (`RMN-`, `CDV-`, or `COM-`) in PR title/body (recommended for traceability, non-blocking).", + ); + } + + const comments = await github.paginate(github.rest.issues.listComments, { + owner, + repo, + issue_number: pr.number, + per_page: 100, + }); + const existing = comments.find((comment) => { + const body = comment.body || ""; + return body.includes(marker) || body.includes(legacyMarker); + }); + + if (advisoryFindings.length === 0 && blockingFindings.length === 0) { + if (existing) { + await github.rest.issues.deleteComment({ + owner, + repo, + comment_id: existing.id, + }); + } + core.info("PR intake sanity checks passed."); + return; + } + + const runUrl = `${context.serverUrl}/${owner}/${repo}/actions/runs/${context.runId}`; + const advisoryDetails = []; + if (formatWarnings.length > 0) { + 
advisoryDetails.push(...formatWarnings.slice(0, 20).map((entry) => `- ${entry}`)); + if (formatWarnings.length > 20) { + advisoryDetails.push(`- ...and ${formatWarnings.length - 20} more issue(s)`); + } + } + const blockingDetails = []; + if (dangerousProblems.length > 0) { + blockingDetails.push(...dangerousProblems.slice(0, 20).map((entry) => `- ${entry}`)); + if (dangerousProblems.length > 20) { + blockingDetails.push(`- ...and ${dangerousProblems.length - 20} more issue(s)`); + } + } + + const isBlocking = blockingFindings.length > 0; + + const ownerApprovalNote = workflowFilesChanged.length > 0 + ? [ + "", + "Workflow files changed in this PR:", + ...workflowFilesChanged.map((name) => `- \`${name}\``), + "", + "Reminder: workflow changes require owner approval via `CI Required Gate`.", + ].join("\n") + : ""; + + const commentBody = [ + marker, + isBlocking + ? "### PR intake checks failed (blocking)" + : "### PR intake checks found warnings (non-blocking)", + "", + isBlocking + ? "Fast safe checks found blocking safety issues:" + : "Fast safe checks found advisory issues. CI lint/test/build gates still enforce merge quality.", + ...(blockingFindings.length > 0 ? blockingFindings.map((entry) => `- ${entry}`) : []), + ...(advisoryFindings.length > 0 ? advisoryFindings.map((entry) => `- ${entry}`) : []), + "", + "Action items:", + "1. Complete required PR template sections/fields.", + "2. (Recommended) Link this PR to one active Linear issue key (`RMN-xxx`/`CDV-xxx`/`COM-xxx`) for traceability.", + "3. Remove tabs, trailing whitespace, and merge conflict markers from added lines.", + "4. Re-run local checks before pushing:", + " - `./scripts/ci/rust_quality_gate.sh`", + " - `./scripts/ci/rust_strict_delta_gate.sh`", + " - `./scripts/ci/docs_quality_gate.sh`", + "", + `Detected Linear keys: ${linearKeys.length > 0 ? linearKeys.join(", ") : "none"}`, + "", + `Run logs: ${runUrl}`, + "", + "Detected blocking line issues (sample):", + ...(blockingDetails.length > 0 ? 
blockingDetails : ["- none"]), + "", + "Detected advisory line issues (sample):", + ...(advisoryDetails.length > 0 ? advisoryDetails : ["- none"]), + ownerApprovalNote, + ].join("\n"); + + if (existing) { + await github.rest.issues.updateComment({ + owner, + repo, + comment_id: existing.id, + body: commentBody, + }); + } else { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: pr.number, + body: commentBody, + }); + } + + if (isBlocking) { + core.setFailed("PR intake sanity checks found blocking issues. See sticky comment for details."); + return; + } + + core.info("PR intake sanity checks found advisory issues only."); +}; diff --git a/.github/workflows/scripts/pr_labeler.js b/.github/workflows/scripts/pr_labeler.js new file mode 100644 index 0000000..7232606 --- /dev/null +++ b/.github/workflows/scripts/pr_labeler.js @@ -0,0 +1,805 @@ +// Apply managed PR labels (size/risk/path/module/contributor tiers). +// Extracted from pr-labeler workflow inline github-script for maintainability. 
+ +module.exports = async ({ github, context, core }) => { +const pr = context.payload.pull_request; +const owner = context.repo.owner; +const repo = context.repo.repo; +const action = context.payload.action; +const changedLabel = context.payload.label?.name; + +const sizeLabels = ["size: XS", "size: S", "size: M", "size: L", "size: XL"]; +const computedRiskLabels = ["risk: low", "risk: medium", "risk: high"]; +const manualRiskOverrideLabel = "risk: manual"; +const managedEnforcedLabels = new Set([ + ...sizeLabels, + manualRiskOverrideLabel, + ...computedRiskLabels, +]); +if ((action === "labeled" || action === "unlabeled") && !managedEnforcedLabels.has(changedLabel)) { + core.info(`skip non-size/risk label event: ${changedLabel || "unknown"}`); + return; +} + +async function loadContributorTierPolicy() { + const policyPath = process.env.LABEL_POLICY_PATH || ".github/label-policy.json"; + const fallback = { + contributorTierColor: "2ED9FF", + contributorTierRules: [ + { label: "distinguished contributor", minMergedPRs: 50 }, + { label: "principal contributor", minMergedPRs: 20 }, + { label: "experienced contributor", minMergedPRs: 10 }, + { label: "trusted contributor", minMergedPRs: 5 }, + ], + }; + try { + const { data } = await github.rest.repos.getContent({ + owner, + repo, + path: policyPath, + ref: context.payload.repository?.default_branch || "main", + }); + const json = JSON.parse(Buffer.from(data.content, "base64").toString("utf8")); + const contributorTierRules = (json.contributor_tiers || []).map((entry) => ({ + label: String(entry.label || "").trim(), + minMergedPRs: Number(entry.min_merged_prs || 0), + })); + const contributorTierColor = String(json.contributor_tier_color || "").toUpperCase(); + if (!contributorTierColor || contributorTierRules.length === 0) { + return fallback; + } + return { contributorTierColor, contributorTierRules }; + } catch (error) { + core.warning(`failed to load ${policyPath}, using fallback policy: ${error.message}`); + 
return fallback; + } +} + +const { contributorTierColor, contributorTierRules } = await loadContributorTierPolicy(); +const contributorTierLabels = contributorTierRules.map((rule) => rule.label); + +const managedPathLabels = [ + "docs", + "dependencies", + "ci", + "core", + "agent", + "channel", + "config", + "cron", + "daemon", + "doctor", + "gateway", + "health", + "heartbeat", + "integration", + "memory", + "observability", + "onboard", + "provider", + "runtime", + "security", + "service", + "skillforge", + "skills", + "tool", + "tunnel", + "tests", + "scripts", + "dev", +]; +const managedPathLabelSet = new Set(managedPathLabels); + +const moduleNamespaceRules = [ + { root: "src/agent/", prefix: "agent", coreEntries: new Set(["mod.rs"]) }, + { root: "src/channels/", prefix: "channel", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/config/", prefix: "config", coreEntries: new Set(["mod.rs", "schema.rs"]) }, + { root: "src/cron/", prefix: "cron", coreEntries: new Set(["mod.rs"]) }, + { root: "src/daemon/", prefix: "daemon", coreEntries: new Set(["mod.rs"]) }, + { root: "src/doctor/", prefix: "doctor", coreEntries: new Set(["mod.rs"]) }, + { root: "src/gateway/", prefix: "gateway", coreEntries: new Set(["mod.rs"]) }, + { root: "src/health/", prefix: "health", coreEntries: new Set(["mod.rs"]) }, + { root: "src/heartbeat/", prefix: "heartbeat", coreEntries: new Set(["mod.rs"]) }, + { root: "src/integrations/", prefix: "integration", coreEntries: new Set(["mod.rs", "registry.rs"]) }, + { root: "src/memory/", prefix: "memory", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/observability/", prefix: "observability", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/onboard/", prefix: "onboard", coreEntries: new Set(["mod.rs"]) }, + { root: "src/providers/", prefix: "provider", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/runtime/", prefix: "runtime", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { 
root: "src/security/", prefix: "security", coreEntries: new Set(["mod.rs"]) }, + { root: "src/service/", prefix: "service", coreEntries: new Set(["mod.rs"]) }, + { root: "src/skillforge/", prefix: "skillforge", coreEntries: new Set(["mod.rs"]) }, + { root: "src/skills/", prefix: "skills", coreEntries: new Set(["mod.rs"]) }, + { root: "src/tools/", prefix: "tool", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/tunnel/", prefix: "tunnel", coreEntries: new Set(["mod.rs"]) }, +]; +const managedModulePrefixes = [...new Set(moduleNamespaceRules.map((rule) => `${rule.prefix}:`))]; +const orderedOtherLabelStyles = [ + { label: "health", color: "8EC9B8" }, + { label: "tool", color: "7FC4B6" }, + { label: "agent", color: "86C4A2" }, + { label: "memory", color: "8FCB99" }, + { label: "channel", color: "7EB6F2" }, + { label: "service", color: "95C7B6" }, + { label: "integration", color: "8DC9AE" }, + { label: "tunnel", color: "9FC8B3" }, + { label: "config", color: "AABCD0" }, + { label: "observability", color: "84C9D0" }, + { label: "docs", color: "8FBBE0" }, + { label: "dev", color: "B9C1CC" }, + { label: "tests", color: "9DC8C7" }, + { label: "skills", color: "BFC89B" }, + { label: "skillforge", color: "C9C39B" }, + { label: "provider", color: "958DF0" }, + { label: "runtime", color: "A3ADD8" }, + { label: "heartbeat", color: "C0C88D" }, + { label: "daemon", color: "C8C498" }, + { label: "doctor", color: "C1CF9D" }, + { label: "onboard", color: "D2BF86" }, + { label: "cron", color: "D2B490" }, + { label: "ci", color: "AEB4CE" }, + { label: "dependencies", color: "9FB1DE" }, + { label: "gateway", color: "B5A8E5" }, + { label: "security", color: "E58D85" }, + { label: "core", color: "C8A99B" }, + { label: "scripts", color: "C9B49F" }, +]; +const otherLabelDisplayOrder = orderedOtherLabelStyles.map((entry) => entry.label); +const modulePrefixSet = new Set(moduleNamespaceRules.map((rule) => rule.prefix)); +const modulePrefixPriority = 
otherLabelDisplayOrder.filter((label) => modulePrefixSet.has(label)); +const pathLabelPriority = [...otherLabelDisplayOrder]; +const riskDisplayOrder = ["risk: high", "risk: medium", "risk: low", "risk: manual"]; +const sizeDisplayOrder = ["size: XS", "size: S", "size: M", "size: L", "size: XL"]; +const contributorDisplayOrder = [ + "distinguished contributor", + "principal contributor", + "experienced contributor", + "trusted contributor", +]; +const modulePrefixPriorityIndex = new Map( + modulePrefixPriority.map((prefix, index) => [prefix, index]) +); +const pathLabelPriorityIndex = new Map( + pathLabelPriority.map((label, index) => [label, index]) +); +const riskPriorityIndex = new Map( + riskDisplayOrder.map((label, index) => [label, index]) +); +const sizePriorityIndex = new Map( + sizeDisplayOrder.map((label, index) => [label, index]) +); +const contributorPriorityIndex = new Map( + contributorDisplayOrder.map((label, index) => [label, index]) +); + +const otherLabelColors = Object.fromEntries( + orderedOtherLabelStyles.map((entry) => [entry.label, entry.color]) +); +const staticLabelColors = { + "size: XS": "E7CDD3", + "size: S": "E1BEC7", + "size: M": "DBB0BB", + "size: L": "D4A2AF", + "size: XL": "CE94A4", + "risk: low": "97D3A6", + "risk: medium": "E4C47B", + "risk: high": "E98E88", + "risk: manual": "B7A4E0", + ...otherLabelColors, +}; +const staticLabelDescriptions = { + "size: XS": "Auto size: <=80 non-doc changed lines.", + "size: S": "Auto size: 81-250 non-doc changed lines.", + "size: M": "Auto size: 251-500 non-doc changed lines.", + "size: L": "Auto size: 501-1000 non-doc changed lines.", + "size: XL": "Auto size: >1000 non-doc changed lines.", + "risk: low": "Auto risk: docs/chore-only paths.", + "risk: medium": "Auto risk: src/** or dependency/config changes.", + "risk: high": "Auto risk: security/runtime/gateway/tools/workflows.", + "risk: manual": "Maintainer override: keep selected risk label.", + docs: "Auto scope: docs/markdown/template 
files changed.", + dependencies: "Auto scope: dependency manifest/lock/policy changed.", + ci: "Auto scope: CI/workflow/hook files changed.", + core: "Auto scope: root src/*.rs files changed.", + agent: "Auto scope: src/agent/** changed.", + channel: "Auto scope: src/channels/** changed.", + config: "Auto scope: src/config/** changed.", + cron: "Auto scope: src/cron/** changed.", + daemon: "Auto scope: src/daemon/** changed.", + doctor: "Auto scope: src/doctor/** changed.", + gateway: "Auto scope: src/gateway/** changed.", + health: "Auto scope: src/health/** changed.", + heartbeat: "Auto scope: src/heartbeat/** changed.", + integration: "Auto scope: src/integrations/** changed.", + memory: "Auto scope: src/memory/** changed.", + observability: "Auto scope: src/observability/** changed.", + onboard: "Auto scope: src/onboard/** changed.", + provider: "Auto scope: src/providers/** changed.", + runtime: "Auto scope: src/runtime/** changed.", + security: "Auto scope: src/security/** changed.", + service: "Auto scope: src/service/** changed.", + skillforge: "Auto scope: src/skillforge/** changed.", + skills: "Auto scope: src/skills/** changed.", + tool: "Auto scope: src/tools/** changed.", + tunnel: "Auto scope: src/tunnel/** changed.", + tests: "Auto scope: tests/** changed.", + scripts: "Auto scope: scripts/** changed.", + dev: "Auto scope: dev/** changed.", +}; +for (const label of contributorTierLabels) { + staticLabelColors[label] = contributorTierColor; + const rule = contributorTierRules.find((entry) => entry.label === label); + if (rule) { + staticLabelDescriptions[label] = `Contributor with ${rule.minMergedPRs}+ merged PRs.`; + } +} + +const modulePrefixColors = Object.fromEntries( + modulePrefixPriority.map((prefix) => [ + `${prefix}:`, + otherLabelColors[prefix] || "BFDADC", + ]) +); + +const providerKeywordHints = [ + "deepseek", + "moonshot", + "kimi", + "qwen", + "mistral", + "doubao", + "baichuan", + "yi", + "siliconflow", + "vertex", + "azure", + 
"perplexity", + "venice", + "vercel", + "cloudflare", + "synthetic", + "opencode", + "zai", + "glm", + "minimax", + "bedrock", + "qianfan", + "groq", + "together", + "fireworks", + "novita", + "cohere", + "openai", + "openrouter", + "anthropic", + "gemini", + "ollama", +]; + +const channelKeywordHints = [ + "telegram", + "discord", + "slack", + "whatsapp", + "matrix", + "irc", + "imessage", + "email", + "cli", +]; + +function isDocsLike(path) { + return ( + path.startsWith("docs/") || + path.endsWith(".md") || + path.endsWith(".mdx") || + path === "LICENSE" || + path === ".markdownlint-cli2.yaml" || + path === ".github/pull_request_template.md" || + path.startsWith(".github/ISSUE_TEMPLATE/") + ); +} + +function normalizeLabelSegment(segment) { + return (segment || "") + .toLowerCase() + .replace(/\.rs$/g, "") + .replace(/[^a-z0-9_-]+/g, "-") + .replace(/^[-_]+|[-_]+$/g, "") + .slice(0, 40); +} + +function containsKeyword(text, keyword) { + const escaped = keyword.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + const pattern = new RegExp(`(^|[^a-z0-9_])${escaped}([^a-z0-9_]|$)`, "i"); + return pattern.test(text); +} + +function formatModuleLabel(prefix, segment) { + return `${prefix}: ${segment}`; +} + +function parseModuleLabel(label) { + if (typeof label !== "string") return null; + const match = label.match(/^([^:]+):\s*(.+)$/); + if (!match) return null; + const prefix = match[1].trim().toLowerCase(); + const segment = (match[2] || "").trim().toLowerCase(); + if (!prefix || !segment) return null; + return { prefix, segment }; +} + +function sortByPriority(labels, priorityIndex) { + return [...new Set(labels)].sort((left, right) => { + const leftPriority = priorityIndex.has(left) ? priorityIndex.get(left) : Number.MAX_SAFE_INTEGER; + const rightPriority = priorityIndex.has(right) + ? 
priorityIndex.get(right) + : Number.MAX_SAFE_INTEGER; + if (leftPriority !== rightPriority) return leftPriority - rightPriority; + return left.localeCompare(right); + }); +} + +function sortModuleLabels(labels) { + return [...new Set(labels)].sort((left, right) => { + const leftParsed = parseModuleLabel(left); + const rightParsed = parseModuleLabel(right); + if (!leftParsed || !rightParsed) return left.localeCompare(right); + + const leftPrefixPriority = modulePrefixPriorityIndex.has(leftParsed.prefix) + ? modulePrefixPriorityIndex.get(leftParsed.prefix) + : Number.MAX_SAFE_INTEGER; + const rightPrefixPriority = modulePrefixPriorityIndex.has(rightParsed.prefix) + ? modulePrefixPriorityIndex.get(rightParsed.prefix) + : Number.MAX_SAFE_INTEGER; + + if (leftPrefixPriority !== rightPrefixPriority) { + return leftPrefixPriority - rightPrefixPriority; + } + if (leftParsed.prefix !== rightParsed.prefix) { + return leftParsed.prefix.localeCompare(rightParsed.prefix); + } + + const leftIsCore = leftParsed.segment === "core"; + const rightIsCore = rightParsed.segment === "core"; + if (leftIsCore !== rightIsCore) return leftIsCore ? 
1 : -1; + + return leftParsed.segment.localeCompare(rightParsed.segment); + }); +} + +function refineModuleLabels(rawLabels) { + const refined = new Set(rawLabels); + const segmentsByPrefix = new Map(); + + for (const label of rawLabels) { + const parsed = parseModuleLabel(label); + if (!parsed) continue; + if (!segmentsByPrefix.has(parsed.prefix)) { + segmentsByPrefix.set(parsed.prefix, new Set()); + } + segmentsByPrefix.get(parsed.prefix).add(parsed.segment); + } + + for (const [prefix, segments] of segmentsByPrefix) { + const hasSpecificSegment = [...segments].some((segment) => segment !== "core"); + if (hasSpecificSegment) { + refined.delete(formatModuleLabel(prefix, "core")); + } + } + + return refined; +} + +function compactModuleLabels(labels) { + const groupedSegments = new Map(); + const compactedModuleLabels = new Set(); + const forcePathPrefixes = new Set(); + + for (const label of labels) { + const parsed = parseModuleLabel(label); + if (!parsed) { + compactedModuleLabels.add(label); + continue; + } + if (!groupedSegments.has(parsed.prefix)) { + groupedSegments.set(parsed.prefix, new Set()); + } + groupedSegments.get(parsed.prefix).add(parsed.segment); + } + + for (const [prefix, segments] of groupedSegments) { + const uniqueSegments = [...new Set([...segments].filter(Boolean))]; + if (uniqueSegments.length === 0) continue; + + if (uniqueSegments.length === 1) { + compactedModuleLabels.add(formatModuleLabel(prefix, uniqueSegments[0])); + } else { + forcePathPrefixes.add(prefix); + } + } + + return { + moduleLabels: compactedModuleLabels, + forcePathPrefixes, + }; +} + +function colorForLabel(label) { + if (staticLabelColors[label]) return staticLabelColors[label]; + const matchedPrefix = Object.keys(modulePrefixColors).find((prefix) => label.startsWith(prefix)); + if (matchedPrefix) return modulePrefixColors[matchedPrefix]; + return "BFDADC"; +} + +function descriptionForLabel(label) { + if (staticLabelDescriptions[label]) return 
staticLabelDescriptions[label]; + + const parsed = parseModuleLabel(label); + if (parsed) { + if (parsed.segment === "core") { + return `Auto module: ${parsed.prefix} core files changed.`; + } + return `Auto module: ${parsed.prefix}/${parsed.segment} changed.`; + } + + return "Auto-managed label."; +} + +async function ensureLabel(name, existing = null) { + const expectedColor = colorForLabel(name); + const expectedDescription = descriptionForLabel(name); + try { + const current = existing || (await github.rest.issues.getLabel({ owner, repo, name })).data; + const currentColor = (current.color || "").toUpperCase(); + const currentDescription = (current.description || "").trim(); + if (currentColor !== expectedColor || currentDescription !== expectedDescription) { + await github.rest.issues.updateLabel({ + owner, + repo, + name, + new_name: name, + color: expectedColor, + description: expectedDescription, + }); + } + } catch (error) { + if (error.status !== 404) throw error; + await github.rest.issues.createLabel({ + owner, + repo, + name, + color: expectedColor, + description: expectedDescription, + }); + } +} + +function isManagedLabel(label) { + if (label === manualRiskOverrideLabel) return true; + if (sizeLabels.includes(label) || computedRiskLabels.includes(label)) return true; + if (managedPathLabelSet.has(label)) return true; + if (contributorTierLabels.includes(label)) return true; + if (managedModulePrefixes.some((prefix) => label.startsWith(prefix))) return true; + return false; +} + +async function ensureManagedRepoLabelsMetadata() { + const repoLabels = await github.paginate(github.rest.issues.listLabelsForRepo, { + owner, + repo, + per_page: 100, + }); + + for (const existingLabel of repoLabels) { + const labelName = existingLabel.name || ""; + if (!isManagedLabel(labelName)) continue; + await ensureLabel(labelName, existingLabel); + } +} + +function selectContributorTier(mergedCount) { + const matchedTier = contributorTierRules.find((rule) => 
mergedCount >= rule.minMergedPRs); + return matchedTier ? matchedTier.label : null; +} + +if (context.eventName === "workflow_dispatch") { + const mode = (context.payload.inputs?.mode || "audit").toLowerCase(); + const shouldRepair = mode === "repair"; + const repoLabels = await github.paginate(github.rest.issues.listLabelsForRepo, { + owner, + repo, + per_page: 100, + }); + + let managedScanned = 0; + const drifts = []; + + for (const existingLabel of repoLabels) { + const labelName = existingLabel.name || ""; + if (!isManagedLabel(labelName)) continue; + managedScanned += 1; + + const expectedColor = colorForLabel(labelName); + const expectedDescription = descriptionForLabel(labelName); + const currentColor = (existingLabel.color || "").toUpperCase(); + const currentDescription = (existingLabel.description || "").trim(); + if (currentColor !== expectedColor || currentDescription !== expectedDescription) { + drifts.push({ + name: labelName, + currentColor, + expectedColor, + currentDescription, + expectedDescription, + }); + if (shouldRepair) { + await ensureLabel(labelName, existingLabel); + } + } + } + + core.summary + .addHeading("Managed Label Governance", 2) + .addRaw(`Mode: ${shouldRepair ? 
"repair" : "audit"}`) + .addEOL() + .addRaw(`Managed labels scanned: ${managedScanned}`) + .addEOL() + .addRaw(`Drifts found: ${drifts.length}`) + .addEOL(); + + if (drifts.length > 0) { + const sample = drifts.slice(0, 30).map((entry) => [ + entry.name, + `${entry.currentColor} -> ${entry.expectedColor}`, + `${entry.currentDescription || "(blank)"} -> ${entry.expectedDescription}`, + ]); + core.summary.addTable([ + [{ data: "Label", header: true }, { data: "Color", header: true }, { data: "Description", header: true }], + ...sample, + ]); + if (drifts.length > sample.length) { + core.summary + .addRaw(`Additional drifts not shown: ${drifts.length - sample.length}`) + .addEOL(); + } + } + + await core.summary.write(); + + if (!shouldRepair && drifts.length > 0) { + core.info(`Managed-label metadata drifts detected: ${drifts.length}. Re-run with mode=repair to auto-fix.`); + } else if (shouldRepair) { + core.info(`Managed-label metadata repair applied to ${drifts.length} labels.`); + } else { + core.info("No managed-label metadata drift detected."); + } + + return; +} + +const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: pr.number, + per_page: 100, +}); + +const detectedModuleLabels = new Set(); +for (const file of files) { + const path = (file.filename || "").toLowerCase(); + for (const rule of moduleNamespaceRules) { + if (!path.startsWith(rule.root)) continue; + + const relative = path.slice(rule.root.length); + if (!relative) continue; + + const first = relative.split("/")[0]; + const firstStem = first.endsWith(".rs") ? 
first.slice(0, -3) : first; + let segment = firstStem; + + if (rule.coreEntries.has(first) || rule.coreEntries.has(firstStem)) { + segment = "core"; + } + + segment = normalizeLabelSegment(segment); + if (!segment) continue; + + detectedModuleLabels.add(formatModuleLabel(rule.prefix, segment)); + } +} + +const providerRelevantFiles = files.filter((file) => { + const path = file.filename || ""; + return ( + path.startsWith("src/providers/") || + path.startsWith("src/integrations/") || + path.startsWith("src/onboard/") || + path.startsWith("src/config/") + ); +}); + +if (providerRelevantFiles.length > 0) { + const searchableText = [ + pr.title || "", + pr.body || "", + ...providerRelevantFiles.map((file) => file.filename || ""), + ...providerRelevantFiles.map((file) => file.patch || ""), + ] + .join("\n") + .toLowerCase(); + + for (const keyword of providerKeywordHints) { + if (containsKeyword(searchableText, keyword)) { + detectedModuleLabels.add(formatModuleLabel("provider", keyword)); + } + } +} + +const channelRelevantFiles = files.filter((file) => { + const path = file.filename || ""; + return ( + path.startsWith("src/channels/") || + path.startsWith("src/onboard/") || + path.startsWith("src/config/") + ); +}); + +if (channelRelevantFiles.length > 0) { + const searchableText = [ + pr.title || "", + pr.body || "", + ...channelRelevantFiles.map((file) => file.filename || ""), + ...channelRelevantFiles.map((file) => file.patch || ""), + ] + .join("\n") + .toLowerCase(); + + for (const keyword of channelKeywordHints) { + if (containsKeyword(searchableText, keyword)) { + detectedModuleLabels.add(formatModuleLabel("channel", keyword)); + } + } +} + +const refinedModuleLabels = refineModuleLabels(detectedModuleLabels); +const compactedModuleState = compactModuleLabels(refinedModuleLabels); +const selectedModuleLabels = compactedModuleState.moduleLabels; +const forcePathPrefixes = compactedModuleState.forcePathPrefixes; +const modulePrefixesWithLabels = new Set( + 
[...selectedModuleLabels] + .map((label) => parseModuleLabel(label)?.prefix) + .filter(Boolean) +); + +const { data: currentLabels } = await github.rest.issues.listLabelsOnIssue({ + owner, + repo, + issue_number: pr.number, +}); +const currentLabelNames = currentLabels.map((label) => label.name); +const currentPathLabels = currentLabelNames.filter((label) => managedPathLabelSet.has(label)); +const candidatePathLabels = new Set([...currentPathLabels, ...forcePathPrefixes]); + +const dedupedPathLabels = [...candidatePathLabels].filter((label) => { + if (label === "core") return true; + if (forcePathPrefixes.has(label)) return true; + return !modulePrefixesWithLabels.has(label); +}); + +const excludedLockfiles = new Set(["Cargo.lock"]); +const changedLines = files.reduce((total, file) => { + const path = file.filename || ""; + if (isDocsLike(path) || excludedLockfiles.has(path)) { + return total; + } + return total + (file.additions || 0) + (file.deletions || 0); +}, 0); + +let sizeLabel = "size: XL"; +if (changedLines <= 80) sizeLabel = "size: XS"; +else if (changedLines <= 250) sizeLabel = "size: S"; +else if (changedLines <= 500) sizeLabel = "size: M"; +else if (changedLines <= 1000) sizeLabel = "size: L"; + +const hasHighRiskPath = files.some((file) => { + const path = file.filename || ""; + return ( + path.startsWith("src/security/") || + path.startsWith("src/runtime/") || + path.startsWith("src/gateway/") || + path.startsWith("src/tools/") || + path.startsWith(".github/workflows/") + ); +}); + +const hasMediumRiskPath = files.some((file) => { + const path = file.filename || ""; + return ( + path.startsWith("src/") || + path === "Cargo.toml" || + path === "Cargo.lock" || + path === "deny.toml" || + path.startsWith(".githooks/") + ); +}); + +let riskLabel = "risk: low"; +if (hasHighRiskPath) { + riskLabel = "risk: high"; +} else if (hasMediumRiskPath) { + riskLabel = "risk: medium"; +} + +await ensureManagedRepoLabelsMetadata(); + +const labelsToEnsure = new Set([ 
+ ...sizeLabels, + ...computedRiskLabels, + manualRiskOverrideLabel, + ...managedPathLabels, + ...contributorTierLabels, + ...selectedModuleLabels, +]); + +for (const label of labelsToEnsure) { + await ensureLabel(label); +} + +let contributorTierLabel = null; +const authorLogin = pr.user?.login; +if (authorLogin && pr.user?.type !== "Bot") { + try { + const { data: mergedSearch } = await github.rest.search.issuesAndPullRequests({ + q: `repo:${owner}/${repo} is:pr is:merged author:${authorLogin}`, + per_page: 1, + }); + const mergedCount = mergedSearch.total_count || 0; + contributorTierLabel = selectContributorTier(mergedCount); + } catch (error) { + core.warning(`failed to compute contributor tier label: ${error.message}`); + } +} + +const hasManualRiskOverride = currentLabelNames.includes(manualRiskOverrideLabel); +const keepNonManagedLabels = currentLabelNames.filter((label) => { + if (label === manualRiskOverrideLabel) return true; + if (contributorTierLabels.includes(label)) return false; + if (sizeLabels.includes(label) || computedRiskLabels.includes(label)) return false; + if (managedPathLabelSet.has(label)) return false; + if (managedModulePrefixes.some((prefix) => label.startsWith(prefix))) return false; + return true; +}); + +const manualRiskSelection = + currentLabelNames.find((label) => computedRiskLabels.includes(label)) || riskLabel; + +const moduleLabelList = sortModuleLabels([...selectedModuleLabels]); +const contributorLabelList = contributorTierLabel ? [contributorTierLabel] : []; +const selectedRiskLabels = hasManualRiskOverride + ? 
sortByPriority([manualRiskSelection, manualRiskOverrideLabel], riskPriorityIndex) + : sortByPriority([riskLabel], riskPriorityIndex); +const selectedSizeLabels = sortByPriority([sizeLabel], sizePriorityIndex); +const sortedContributorLabels = sortByPriority(contributorLabelList, contributorPriorityIndex); +const sortedPathLabels = sortByPriority(dedupedPathLabels, pathLabelPriorityIndex); +const sortedKeepNonManagedLabels = [...new Set(keepNonManagedLabels)].sort((left, right) => + left.localeCompare(right) +); + +const nextLabels = [ + ...new Set([ + ...selectedRiskLabels, + ...selectedSizeLabels, + ...sortedContributorLabels, + ...moduleLabelList, + ...sortedPathLabels, + ...sortedKeepNonManagedLabels, + ]), +]; + +await github.rest.issues.setLabels({ + owner, + repo, + issue_number: pr.number, + labels: nextLabels, +}); +}; diff --git a/.github/workflows/scripts/test_benchmarks_pr_comment.js b/.github/workflows/scripts/test_benchmarks_pr_comment.js new file mode 100644 index 0000000..d517141 --- /dev/null +++ b/.github/workflows/scripts/test_benchmarks_pr_comment.js @@ -0,0 +1,57 @@ +// Extracted from test-benchmarks.yml step: Post benchmark summary on PR + +module.exports = async ({ github, context, core }) => { + const fs = require('fs'); + const output = fs.readFileSync('benchmark_output.txt', 'utf8'); + + // Extract Criterion result lines + const lines = output.split('\n').filter(l => + l.includes('time:') || l.includes('change:') || l.includes('Performance') + ); + + if (lines.length === 0) { + core.info('No benchmark results to post.'); + return; + } + + const body = [ + '## 📊 Benchmark Results', + '', + '```', + lines.join('\n'), + '```', + '', + '
Full output', + '', + '```', + output.substring(0, 60000), + '```', + '
', + ].join('\n'); + + // Find and update or create comment + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + }); + + const marker = '## 📊 Benchmark Results'; + const existing = comments.find(c => c.body && c.body.startsWith(marker)); + + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + body, + }); + } +}; diff --git a/.github/workflows/sec-audit.yml b/.github/workflows/sec-audit.yml new file mode 100644 index 0000000..9c1b031 --- /dev/null +++ b/.github/workflows/sec-audit.yml @@ -0,0 +1,597 @@ +name: Sec Audit + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "deny.toml" + - ".gitleaks.toml" + - ".github/security/gitleaks-allowlist-governance.json" + - ".github/security/deny-ignore-governance.json" + - ".github/security/unsafe-audit-governance.json" + - "scripts/ci/install_gitleaks.sh" + - "scripts/ci/install_syft.sh" + - "scripts/ci/deny_policy_guard.py" + - "scripts/ci/secrets_governance_guard.py" + - "scripts/ci/unsafe_debt_audit.py" + - "scripts/ci/unsafe_policy_guard.py" + - "scripts/ci/config/unsafe_debt_policy.toml" + - "scripts/ci/emit_audit_event.py" + - "scripts/ci/security_regression_tests.sh" + - ".github/workflows/sec-audit.yml" + pull_request: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "deny.toml" + - ".gitleaks.toml" + - ".github/security/gitleaks-allowlist-governance.json" + - ".github/security/deny-ignore-governance.json" + - ".github/security/unsafe-audit-governance.json" + - "scripts/ci/install_gitleaks.sh" + - 
"scripts/ci/install_syft.sh" + - "scripts/ci/deny_policy_guard.py" + - "scripts/ci/secrets_governance_guard.py" + - "scripts/ci/unsafe_debt_audit.py" + - "scripts/ci/unsafe_policy_guard.py" + - "scripts/ci/config/unsafe_debt_policy.toml" + - "scripts/ci/emit_audit_event.py" + - "scripts/ci/security_regression_tests.sh" + - ".github/workflows/sec-audit.yml" + merge_group: + branches: [dev, main] + schedule: + - cron: "0 6 * * 1" # Weekly on Monday 6am UTC + workflow_dispatch: + inputs: + full_secret_scan: + description: "Scan full git history for secrets" + required: true + default: false + type: boolean + fail_on_secret_leak: + description: "Fail workflow if secret leaks are detected" + required: true + default: true + type: boolean + fail_on_governance_violation: + description: "Fail workflow if secrets governance policy violations are detected" + required: true + default: true + type: boolean + +concurrency: + group: security-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + security-events: write + actions: read + checks: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + audit: + name: Security Audit + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: rustsec/audit-check@69366f33c96575abad1ee0dba8212993eecbe998 # v2.0.0 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + deny: + name: License & Supply Chain + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Enforce deny policy hygiene + shell: bash + 
run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/deny_policy_guard.py \ + --deny-file deny.toml \ + --governance-file .github/security/deny-ignore-governance.json \ + --output-json artifacts/deny-policy-guard.json \ + --output-md artifacts/deny-policy-guard.md \ + --fail-on-violation + + - uses: EmbarkStudios/cargo-deny-action@3fd3802e88374d3fe9159b834c7714ec57d6c979 # v2 + with: + command: check advisories licenses sources + + - name: Emit deny audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/deny-policy-guard.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type deny_policy_guard \ + --input-json artifacts/deny-policy-guard.json \ + --output-json artifacts/audit-event-deny-policy-guard.json \ + --artifact-name deny-policy-audit-event \ + --retention-days 14 + fi + + - name: Upload deny policy artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: deny-policy-guard + path: artifacts/deny-policy-guard.* + if-no-files-found: ignore + retention-days: 14 + + - name: Upload deny policy audit event + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: deny-policy-audit-event + path: artifacts/audit-event-deny-policy-guard.json + if-no-files-found: ignore + retention-days: 14 + + security-regressions: + name: Security Regression Tests + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: sec-audit-security-regressions + - name: Run security regression suite + shell: bash + run: ./scripts/ci/security_regression_tests.sh + + 
secrets: + name: Secrets Governance (Gitleaks) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Enforce gitleaks allowlist governance + shell: bash + env: + FAIL_ON_GOVERNANCE_INPUT: ${{ github.event.inputs.fail_on_governance_violation || 'true' }} + run: | + set -euo pipefail + mkdir -p artifacts + fail_on_governance="true" + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + fail_on_governance="${FAIL_ON_GOVERNANCE_INPUT}" + fi + cmd=(python3 scripts/ci/secrets_governance_guard.py + --gitleaks-file .gitleaks.toml + --governance-file .github/security/gitleaks-allowlist-governance.json + --output-json artifacts/secrets-governance-guard.json + --output-md artifacts/secrets-governance-guard.md) + if [ "$fail_on_governance" = "true" ]; then + cmd+=(--fail-on-violation) + fi + "${cmd[@]}" + + - name: Publish secrets governance summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/secrets-governance-guard.md ]; then + cat artifacts/secrets-governance-guard.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Secrets governance report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + - name: Emit secrets governance audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/secrets-governance-guard.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type secrets_governance_guard \ + --input-json artifacts/secrets-governance-guard.json \ + --output-json artifacts/audit-event-secrets-governance-guard.json \ + --artifact-name secrets-governance-audit-event \ + --retention-days 14 + fi + + - name: Upload secrets governance artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: secrets-governance-guard + path: artifacts/secrets-governance-guard.* + if-no-files-found: ignore + retention-days: 14 + + - name: Upload secrets governance audit event + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: secrets-governance-audit-event + path: artifacts/audit-event-secrets-governance-guard.json + if-no-files-found: ignore + retention-days: 14 + + - name: Install gitleaks + shell: bash + run: | + set -euo pipefail + mkdir -p "${RUNNER_TEMP}/bin" + ./scripts/ci/install_gitleaks.sh "${RUNNER_TEMP}/bin" + echo "${RUNNER_TEMP}/bin" >> "$GITHUB_PATH" + + - name: Run gitleaks scan + shell: bash + env: + FULL_SECRET_SCAN_INPUT: ${{ github.event.inputs.full_secret_scan || 'false' }} + FAIL_ON_SECRET_LEAK_INPUT: ${{ github.event.inputs.fail_on_secret_leak || 'true' }} + run: | + set -euo pipefail + mkdir -p artifacts + log_opts="" + scan_scope="full-history" + fail_on_leak="true" + + if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then + log_opts="${{ github.event.pull_request.base.sha }}..${GITHUB_SHA}" + scan_scope="diff-range" + elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then + base_sha="${{ github.event.before }}" + if [ -n "$base_sha" ] && [ "$base_sha" != "0000000000000000000000000000000000000000" ]; then + log_opts="${base_sha}..${GITHUB_SHA}" + 
scan_scope="diff-range" + fi + elif [ "${GITHUB_EVENT_NAME}" = "merge_group" ]; then + base_sha="${{ github.event.merge_group.base_sha }}" + if [ -n "$base_sha" ]; then + log_opts="${base_sha}..${GITHUB_SHA}" + scan_scope="diff-range" + fi + elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + if [ "${FULL_SECRET_SCAN_INPUT}" != "true" ]; then + if [ -n "${{ github.sha }}" ]; then + log_opts="${{ github.sha }}~1..${{ github.sha }}" + scan_scope="latest-commit" + fi + fi + fail_on_leak="${FAIL_ON_SECRET_LEAK_INPUT}" + fi + + cmd=(gitleaks git + --config .gitleaks.toml + --redact + --report-format sarif + --report-path artifacts/gitleaks.sarif + --verbose) + if [ -n "$log_opts" ]; then + cmd+=(--log-opts="$log_opts") + fi + + set +e + "${cmd[@]}" + status=$? + set -e + + echo "### Gitleaks scan" >> "$GITHUB_STEP_SUMMARY" + echo "- Scope: ${scan_scope}" >> "$GITHUB_STEP_SUMMARY" + if [ -n "$log_opts" ]; then + echo "- Log range: \`${log_opts}\`" >> "$GITHUB_STEP_SUMMARY" + fi + echo "- Exit code: ${status}" >> "$GITHUB_STEP_SUMMARY" + + cat > artifacts/gitleaks-summary.json <> "$GITHUB_PATH" + + - name: Generate CycloneDX + SPDX SBOM + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + syft dir:. --source-name zeroclaw \ + -o cyclonedx-json=artifacts/zeroclaw.cdx.json \ + -o spdx-json=artifacts/zeroclaw.spdx.json + { + echo "### SBOM snapshot" + echo "- CycloneDX: artifacts/zeroclaw.cdx.json" + echo "- SPDX: artifacts/zeroclaw.spdx.json" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Upload SBOM artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: sbom-snapshot + path: artifacts/zeroclaw.*.json + retention-days: 14 + + - name: Emit SBOM audit event + if: always() + shell: bash + run: | + set -euo pipefail + cat > artifacts/sbom-summary.json <> "$GITHUB_STEP_SUMMARY" + else + echo "Unsafe policy governance report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + - name: Run unsafe debt audit + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/unsafe_debt_audit.py \ + --repo-root . \ + --policy-file scripts/ci/config/unsafe_debt_policy.toml \ + --output-json artifacts/unsafe-debt-audit.json \ + --fail-on-findings \ + --fail-on-excluded-crate-roots + + - name: Publish unsafe debt summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/unsafe-debt-audit.json ]; then + python3 - <<'PY' >> "$GITHUB_STEP_SUMMARY" + import json + from pathlib import Path + + report = json.loads(Path("artifacts/unsafe-debt-audit.json").read_text(encoding="utf-8")) + summary = report.get("summary", {}) + source = report.get("source", {}) + by_pattern = summary.get("by_pattern", {}) + + print("### Unsafe debt audit") + print(f"- Total findings: `{summary.get('total_findings', 0)}`") + print(f"- Files scanned: `{source.get('files_scanned', 0)}`") + print(f"- Crate roots scanned: `{source.get('crate_roots_scanned', 0)}`") + print(f"- Crate roots excluded: `{source.get('crate_roots_excluded', 0)}`") + if by_pattern: + print("- Findings by pattern:") + for pattern_id, count in sorted(by_pattern.items()): + print(f" - `{pattern_id}`: `{count}`") + else: + print("- Findings by pattern: none") + PY + else + echo "Unsafe debt audit JSON report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + - name: Emit unsafe policy governance audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/unsafe-policy-guard.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type unsafe_policy_guard \ + --input-json artifacts/unsafe-policy-guard.json \ + --output-json artifacts/audit-event-unsafe-policy-guard.json \ + --artifact-name unsafe-policy-audit-event \ + --retention-days 14 + fi + + - name: Emit unsafe debt audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/unsafe-debt-audit.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type unsafe_debt_audit \ + --input-json artifacts/unsafe-debt-audit.json \ + --output-json artifacts/audit-event-unsafe-debt-audit.json \ + --artifact-name unsafe-debt-audit-event \ + --retention-days 14 + fi + + - name: Upload unsafe policy guard artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: unsafe-policy-guard + path: artifacts/unsafe-policy-guard.* + if-no-files-found: ignore + retention-days: 14 + + - name: Upload unsafe debt audit artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: unsafe-debt-audit + path: artifacts/unsafe-debt-audit.json + if-no-files-found: ignore + retention-days: 14 + + - name: Upload unsafe policy audit event + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: unsafe-policy-audit-event + path: artifacts/audit-event-unsafe-policy-guard.json + if-no-files-found: ignore + retention-days: 14 + + - name: Upload unsafe debt audit event + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: unsafe-debt-audit-event + path: artifacts/audit-event-unsafe-debt-audit.json + if-no-files-found: ignore + retention-days: 14 + + 
security-required: + name: Security Required Gate + if: always() && (github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'merge_group') + needs: [audit, deny, security-regressions, secrets, sbom, unsafe-debt] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Enforce security gate + shell: bash + run: | + set -euo pipefail + results=( + "audit=${{ needs.audit.result }}" + "deny=${{ needs.deny.result }}" + "security-regressions=${{ needs.security-regressions.result }}" + "secrets=${{ needs.secrets.result }}" + "sbom=${{ needs.sbom.result }}" + "unsafe-debt=${{ needs['unsafe-debt'].result }}" + ) + for item in "${results[@]}"; do + echo "$item" + done + for item in "${results[@]}"; do + result="${item#*=}" + if [ "$result" != "success" ]; then + echo "Security gate failed: $item" + exit 1 + fi + done diff --git a/.github/workflows/sec-codeql.yml b/.github/workflows/sec-codeql.yml new file mode 100644 index 0000000..a2bc484 --- /dev/null +++ b/.github/workflows/sec-codeql.yml @@ -0,0 +1,72 @@ +name: Sec CodeQL + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - ".github/codeql/**" + - ".github/workflows/sec-codeql.yml" + pull_request: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - ".github/codeql/**" + - ".github/workflows/sec-codeql.yml" + merge_group: + branches: [dev, main] + schedule: + - cron: "0 6 * * 1" # Weekly Monday 6am UTC + workflow_dispatch: + +concurrency: + group: codeql-${{ github.event.pull_request.number || github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + security-events: write + actions: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + codeql: + name: CodeQL Analysis + runs-on: [self-hosted, Linux, X64, aws-india, 
blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Initialize CodeQL + uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4 + with: + languages: rust + config-file: ./.github/codeql/codeql-config.yml + queries: security-and-quality + + - name: Set up Rust + uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - name: Build + run: cargo build --workspace --all-targets --locked + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4 + with: + category: "/language:rust" diff --git a/.github/workflows/sec-vorpal-reviewdog.yml b/.github/workflows/sec-vorpal-reviewdog.yml new file mode 100644 index 0000000..6187550 --- /dev/null +++ b/.github/workflows/sec-vorpal-reviewdog.yml @@ -0,0 +1,191 @@ +name: Sec Vorpal Reviewdog + +on: + workflow_dispatch: + inputs: + scan_scope: + description: "File selection mode when source_path is empty" + required: true + type: choice + default: changed + options: + - changed + - all + base_ref: + description: "Base branch/ref for changed diff mode" + required: true + type: string + default: main + source_path: + description: "Optional comma-separated file paths to scan (overrides scan_scope)" + required: false + type: string + include_tests: + description: "Include test/fixture files in scan selection" + required: true + type: choice + default: "false" + options: + - "false" + - "true" + folders_to_ignore: + description: "Optional comma-separated path prefixes to ignore" + required: false + type: string + default: target,node_modules,web/dist,.venv,venv + reporter: + description: "Reviewdog reporter mode" + required: true + type: choice + default: github-pr-check + options: + - github-pr-check + - github-pr-review + filter_mode: + 
description: "Reviewdog filter mode" + required: true + type: choice + default: file + options: + - added + - diff_context + - file + - nofilter + level: + description: "Reviewdog severity level" + required: true + type: choice + default: error + options: + - info + - warning + - error + fail_on_error: + description: "Fail workflow when Vorpal reports findings" + required: true + type: choice + default: "false" + options: + - "false" + - "true" + reviewdog_flags: + description: "Optional extra reviewdog flags" + required: false + type: string + +concurrency: + group: sec-vorpal-reviewdog-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + checks: write + pull-requests: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + vorpal: + name: Vorpal Reviewdog Scan + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Resolve source paths + id: sources + shell: bash + env: + INPUT_SOURCE_PATH: ${{ inputs.source_path }} + INPUT_SCAN_SCOPE: ${{ inputs.scan_scope }} + INPUT_BASE_REF: ${{ inputs.base_ref }} + INPUT_INCLUDE_TESTS: ${{ inputs.include_tests }} + run: | + set -euo pipefail + + strip_space() { + local value="$1" + value="${value//$'\n'/}" + value="${value//$'\r'/}" + value="${value// /}" + echo "$value" + } + + source_override="$(strip_space "${INPUT_SOURCE_PATH}")" + if [ -n "${source_override}" ]; then + normalized="$(echo "${INPUT_SOURCE_PATH}" | tr '\n' ',' | sed -E 's/[[:space:]]+//g; s/,+/,/g; s/^,|,$//g')" + if [ -n "${normalized}" ]; then + { + echo "scan=true" + echo "source_path=${normalized}" + echo "selection=manual" + } >> "${GITHUB_OUTPUT}" + exit 0 + fi + fi + + include_ext='\.(py|js|jsx|ts|tsx)$' + exclude_paths='^(target/|node_modules/|web/node_modules/|dist/|web/dist/|\.venv/|venv/)' + 
exclude_tests='(^|/)(test|tests|__tests__|fixtures|mocks|examples)/|(^|/)test_helpers/|(_test\.py$)|(^|/)test_.*\.py$|(\.spec\.(ts|tsx|js|jsx)$)|(\.test\.(ts|tsx|js|jsx)$)' + + if [ "${INPUT_SCAN_SCOPE}" = "all" ]; then + candidate_files="$(git ls-files)" + else + base_ref="${INPUT_BASE_REF#refs/heads/}" + base_ref="${base_ref#origin/}" + if git fetch --no-tags --depth=1 origin "${base_ref}" >/dev/null 2>&1; then + if merge_base="$(git merge-base HEAD "origin/${base_ref}" 2>/dev/null)"; then + candidate_files="$(git diff --name-only --diff-filter=ACMR "${merge_base}"...HEAD)" + else + echo "Unable to resolve merge-base for origin/${base_ref}; falling back to tracked files." + candidate_files="$(git ls-files)" + fi + else + echo "Unable to fetch origin/${base_ref}; falling back to tracked files." + candidate_files="$(git ls-files)" + fi + fi + + source_files="$(printf '%s\n' "${candidate_files}" | sed '/^$/d' | grep -E "${include_ext}" | grep -Ev "${exclude_paths}" || true)" + if [ "${INPUT_INCLUDE_TESTS}" != "true" ] && [ -n "${source_files}" ]; then + source_files="$(printf '%s\n' "${source_files}" | grep -Ev "${exclude_tests}" || true)" + fi + if [ -z "${source_files}" ]; then + { + echo "scan=false" + echo "source_path=" + echo "selection=none" + } >> "${GITHUB_OUTPUT}" + exit 0 + fi + + source_path="$(printf '%s\n' "${source_files}" | paste -sd, -)" + { + echo "scan=true" + echo "source_path=${source_path}" + echo "selection=auto-${INPUT_SCAN_SCOPE}" + } >> "${GITHUB_OUTPUT}" + + - name: No supported files to scan + if: steps.sources.outputs.scan != 'true' + shell: bash + run: | + echo "No supported files selected for Vorpal scan (extensions: .py .js .jsx .ts .tsx)." 
+ + - name: Run Vorpal with reviewdog + if: steps.sources.outputs.scan == 'true' + uses: Checkmarx/vorpal-reviewdog-github-action@8cc292f337a2f1dea581b4f4bd73852e7becb50d # v1.2.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + source_path: ${{ steps.sources.outputs.source_path }} + folders_to_ignore: ${{ inputs.folders_to_ignore }} + reporter: ${{ inputs.reporter }} + filter_mode: ${{ inputs.filter_mode }} + level: ${{ inputs.level }} + fail_on_error: ${{ inputs.fail_on_error }} + reviewdog_flags: ${{ inputs.reviewdog_flags }} diff --git a/.github/workflows/sync-contributors.yml b/.github/workflows/sync-contributors.yml new file mode 100644 index 0000000..3dc0483 --- /dev/null +++ b/.github/workflows/sync-contributors.yml @@ -0,0 +1,116 @@ +name: Sync Contributors + +on: + workflow_dispatch: + schedule: + # Run every Sunday at 00:00 UTC + - cron: '0 0 * * 0' + +concurrency: + group: update-notice-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: write + pull-requests: write + +jobs: + update-notice: + name: Update NOTICE with new contributors + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Fetch contributors + id: contributors + env: + GH_TOKEN: ${{ github.token }} + run: | + # Fetch all contributors (excluding bots) + gh api \ + --paginate \ + "repos/${{ github.repository }}/contributors" \ + --jq '.[] | select(.type != "Bot") | .login' > /tmp/contributors_raw.txt + + # Sort alphabetically and filter + sort -f < /tmp/contributors_raw.txt > contributors.txt + + # Count contributors + count=$(wc -l < contributors.txt | tr -d ' ') + echo "count=$count" >> "$GITHUB_OUTPUT" + + - name: Generate new NOTICE file + run: | + cat > NOTICE << 'EOF' + ZeroClaw + Copyright 2025 ZeroClaw Labs + + This product includes software developed at ZeroClaw Labs (https://github.com/zeroclaw-labs). 
+ + Contributors + ============ + + The following individuals have contributed to ZeroClaw: + + EOF + + # Append contributors in alphabetical order + sed 's/^/- /' contributors.txt >> NOTICE + + # Add third-party dependencies section + cat >> NOTICE << 'EOF' + + + Third-Party Dependencies + ========================= + + This project uses the following third-party libraries and components, + each licensed under their respective terms: + + See Cargo.lock for a complete list of dependencies and their licenses. + EOF + + - name: Check if NOTICE changed + id: check_diff + run: | + if git diff --quiet NOTICE; then + echo "changed=false" >> "$GITHUB_OUTPUT" + else + echo "changed=true" >> "$GITHUB_OUTPUT" + fi + + - name: Create Pull Request + if: steps.check_diff.outputs.changed == 'true' + env: + GH_TOKEN: ${{ github.token }} + COUNT: ${{ steps.contributors.outputs.count }} + run: | + branch_name="auto/update-notice-$(date +%Y%m%d)" + + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + git checkout -b "$branch_name" + git add NOTICE + git commit -m "chore(notice): update contributor list" + git push origin "$branch_name" + + gh pr create \ + --title "chore(notice): update contributor list" \ + --body "Auto-generated update to NOTICE file with $COUNT contributors." 
\ + --label "chore" \ + --label "docs" \ + --draft || true + + - name: Summary + run: | + echo "## NOTICE Update Results" >> "$GITHUB_STEP_SUMMARY" + echo "" >> "$GITHUB_STEP_SUMMARY" + if [ "${{ steps.check_diff.outputs.changed }}" = "true" ]; then + echo "✅ PR created to update NOTICE" >> "$GITHUB_STEP_SUMMARY" + else + echo "✓ NOTICE file is up to date" >> "$GITHUB_STEP_SUMMARY" + fi + echo "" >> "$GITHUB_STEP_SUMMARY" + echo "**Contributors:** ${{ steps.contributors.outputs.count }}" >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/test-benchmarks.yml b/.github/workflows/test-benchmarks.yml new file mode 100644 index 0000000..9654d9c --- /dev/null +++ b/.github/workflows/test-benchmarks.yml @@ -0,0 +1,53 @@ +name: Test Benchmarks + +on: + schedule: + - cron: "0 3 * * 1" # Weekly Monday 3am UTC + workflow_dispatch: + +concurrency: + group: bench-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + pull-requests: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + benchmarks: + name: Criterion Benchmarks + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + + - name: Run benchmarks + run: cargo bench --locked 2>&1 | tee benchmark_output.txt + + - name: Upload benchmark results + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: benchmark-results + path: | + target/criterion/ + benchmark_output.txt + retention-days: 7 + + - name: Post benchmark summary on PR + if: github.event_name == 'pull_request' + uses: 
actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/test_benchmarks_pr_comment.js'); + await script({ github, context, core }); diff --git a/.github/workflows/test-e2e.yml b/.github/workflows/test-e2e.yml new file mode 100644 index 0000000..97dabf7 --- /dev/null +++ b/.github/workflows/test-e2e.yml @@ -0,0 +1,33 @@ +name: Test E2E + +on: + push: + branches: [dev, main] + workflow_dispatch: + +concurrency: + group: e2e-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + integration-tests: + name: Integration / E2E Tests + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + - name: Run integration / E2E tests + run: cargo test --test agent_e2e --locked --verbose diff --git a/.github/workflows/test-fuzz.yml b/.github/workflows/test-fuzz.yml new file mode 100644 index 0000000..809672a --- /dev/null +++ b/.github/workflows/test-fuzz.yml @@ -0,0 +1,75 @@ +name: Test Fuzz + +on: + schedule: + - cron: "0 2 * * 0" # Weekly Sunday 2am UTC + workflow_dispatch: + inputs: + fuzz_seconds: + description: "Seconds to run each fuzz target" + required: false + default: "300" + +concurrency: + group: fuzz-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + issues: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + fuzz: + name: Fuzz (${{ matrix.target }}) + 
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + target: + - fuzz_config_parse + - fuzz_tool_params + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: nightly + components: llvm-tools-preview + + - name: Install cargo-fuzz + run: cargo install cargo-fuzz --locked + + - name: Run fuzz target + run: | + SECONDS="${{ github.event.inputs.fuzz_seconds || '300' }}" + echo "Fuzzing ${{ matrix.target }} for ${SECONDS}s" + cargo +nightly fuzz run ${{ matrix.target }} -- \ + -max_total_time="${SECONDS}" \ + -max_len=4096 + continue-on-error: true + id: fuzz + + - name: Upload crash artifacts + if: failure() || steps.fuzz.outcome == 'failure' + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: fuzz-crashes-${{ matrix.target }} + path: fuzz/artifacts/${{ matrix.target }}/ + retention-days: 30 + if-no-files-found: ignore + + - name: Report fuzz results + run: | + echo "### Fuzz: ${{ matrix.target }}" >> "$GITHUB_STEP_SUMMARY" + if [ "${{ steps.fuzz.outcome }}" = "failure" ]; then + echo "- :x: Crashes found — see artifacts" >> "$GITHUB_STEP_SUMMARY" + else + echo "- :white_check_mark: No crashes found" >> "$GITHUB_STEP_SUMMARY" + fi diff --git a/.github/workflows/workflow-sanity.yml b/.github/workflows/workflow-sanity.yml new file mode 100644 index 0000000..da9d7f3 --- /dev/null +++ b/.github/workflows/workflow-sanity.yml @@ -0,0 +1,106 @@ +name: Workflow Sanity + +on: + pull_request: + paths: + - ".github/workflows/**" + - ".github/*.yml" + - ".github/*.yaml" + push: + paths: + - ".github/workflows/**" + - ".github/*.yml" + - ".github/*.yaml" + +concurrency: + group: workflow-sanity-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: 
+ GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + no-tabs: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + steps: + - name: Normalize git global hooks config + shell: bash + run: | + set -euo pipefail + git config --global --unset-all core.hooksPath || true + + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Fail on tabs in workflow files + shell: bash + run: | + set -euo pipefail + python3 - <<'PY' + from __future__ import annotations + + import pathlib + import sys + + root = pathlib.Path(".github/workflows") + bad: list[str] = [] + for path in sorted(root.rglob("*.yml")): + if b"\t" in path.read_bytes(): + bad.append(str(path)) + for path in sorted(root.rglob("*.yaml")): + if b"\t" in path.read_bytes(): + bad.append(str(path)) + + if bad: + print("Tabs found in workflow file(s):") + for path in bad: + print(f"- {path}") + sys.exit(1) + PY + + actionlint: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + steps: + - name: Normalize git global hooks config + shell: bash + run: | + set -euo pipefail + git config --global --unset-all core.hooksPath || true + + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Install actionlint binary + shell: bash + run: | + set -euo pipefail + version="1.7.11" + arch="$(uname -m)" + case "$arch" in + x86_64|amd64) archive="actionlint_${version}_linux_amd64.tar.gz" ;; + aarch64|arm64) archive="actionlint_${version}_linux_arm64.tar.gz" ;; + *) + echo "::error::Unsupported architecture: ${arch}" + exit 1 + ;; + esac + + curl -fsSL \ + -o "$RUNNER_TEMP/actionlint.tgz" \ + "https://github.com/rhysd/actionlint/releases/download/v${version}/${archive}" + tar -xzf "$RUNNER_TEMP/actionlint.tgz" -C "$RUNNER_TEMP" actionlint + chmod +x "$RUNNER_TEMP/actionlint" + echo 
"$RUNNER_TEMP" >> "$GITHUB_PATH" + "$RUNNER_TEMP/actionlint" -version + + - name: Lint GitHub workflows + shell: bash + run: actionlint -color From af8743d14f766b3be1b2d06123d0955025bca015 Mon Sep 17 00:00:00 2001 From: louisdevzz Date: Thu, 5 Mar 2026 00:27:50 +0700 Subject: [PATCH 2/2] chore(github): add .githooks and .github configuration from zeroclaw - Add pre-commit and pre-push git hooks - Add GitHub workflows, issue templates, and CI/CD configs - Add CODEOWNERS with louisdevzz as owner and reviewer - Add security, release, and connectivity policies --- .githooks/pre-commit | 8 + .githooks/pre-push | 53 ++ .github/CODEOWNERS | 32 + .github/ISSUE_TEMPLATE/bug_report.yml | 148 ++++ .github/ISSUE_TEMPLATE/config.yml | 17 + .github/ISSUE_TEMPLATE/feature_request.yml | 107 +++ .github/actionlint.yaml | 7 + .github/codeql/codeql-config.yml | 8 + .github/connectivity/probe-contract.json | 70 ++ .github/connectivity/providers.json | 77 ++ .github/dependabot.yml | 52 ++ .github/label-policy.json | 21 + .github/labeler.yml | 147 ++++ .github/pull_request_template.md | 117 +++ .github/release/canary-policy.json | 39 + .github/release/docs-deploy-policy.json | 10 + .github/release/ghcr-tag-policy.json | 18 + .../release/ghcr-vulnerability-policy.json | 17 + .github/release/nightly-owner-routing.json | 9 + .github/release/prerelease-stage-gates.json | 33 + .../release/release-artifact-contract.json | 30 + .github/security/deny-ignore-governance.json | 26 + .../gitleaks-allowlist-governance.json | 56 ++ .github/security/unsafe-audit-governance.json | 5 + .github/workflows/README.md | 36 + .github/workflows/ci-build-fast.yml | 63 ++ .github/workflows/ci-canary-gate.yml | 329 +++++++ .github/workflows/ci-change-audit.yml | 154 ++++ .../workflows/ci-provider-connectivity.yml | 112 +++ .github/workflows/ci-reproducible-build.yml | 121 +++ .github/workflows/ci-rollback.yml | 257 ++++++ .github/workflows/ci-run.yml | 446 ++++++++++ .../workflows/ci-supply-chain-provenance.yml | 
110 +++ .github/workflows/deploy-web.yml | 56 ++ .github/workflows/docs-deploy.yml | 291 +++++++ .github/workflows/feature-matrix.yml | 382 +++++++++ .github/workflows/main-branch-flow.md | 266 ++++++ .github/workflows/nightly-all-features.yml | 187 ++++ .github/workflows/pages-deploy.yml | 64 ++ .github/workflows/pr-auto-response.yml | 89 ++ .github/workflows/pr-check-stale.yml | 49 ++ .github/workflows/pr-check-status.yml | 36 + .github/workflows/pr-intake-checks.yml | 37 + .github/workflows/pr-label-policy-check.yml | 80 ++ .github/workflows/pr-labeler.yml | 56 ++ .github/workflows/pub-docker-img.yml | 352 ++++++++ .github/workflows/pub-prerelease.yml | 259 ++++++ .github/workflows/pub-release.yml | 645 ++++++++++++++ .../scripts/ci_human_review_guard.js | 61 ++ .../scripts/ci_license_file_owner_guard.js | 54 ++ .../scripts/ci_workflow_owner_approval.js | 83 ++ .github/workflows/scripts/lint_feedback.js | 90 ++ .../pr_auto_response_contributor_tier.js | 132 +++ .../pr_auto_response_labeled_routes.js | 94 ++ .../scripts/pr_check_status_nudge.js | 161 ++++ .github/workflows/scripts/pr_intake_checks.js | 202 +++++ .github/workflows/scripts/pr_labeler.js | 805 ++++++++++++++++++ .../scripts/test_benchmarks_pr_comment.js | 57 ++ .github/workflows/sec-audit.yml | 597 +++++++++++++ .github/workflows/sec-codeql.yml | 72 ++ .github/workflows/sec-vorpal-reviewdog.yml | 191 +++++ .github/workflows/sync-contributors.yml | 116 +++ .github/workflows/test-benchmarks.yml | 53 ++ .github/workflows/test-e2e.yml | 33 + .github/workflows/test-fuzz.yml | 75 ++ .github/workflows/workflow-sanity.yml | 106 +++ 66 files changed, 8566 insertions(+) create mode 100755 .githooks/pre-commit create mode 100755 .githooks/pre-push create mode 100644 .github/CODEOWNERS create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml create mode 100644 .github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml create mode 100644 .github/actionlint.yaml create mode 
100644 .github/codeql/codeql-config.yml create mode 100644 .github/connectivity/probe-contract.json create mode 100644 .github/connectivity/providers.json create mode 100644 .github/dependabot.yml create mode 100644 .github/label-policy.json create mode 100644 .github/labeler.yml create mode 100644 .github/pull_request_template.md create mode 100644 .github/release/canary-policy.json create mode 100644 .github/release/docs-deploy-policy.json create mode 100644 .github/release/ghcr-tag-policy.json create mode 100644 .github/release/ghcr-vulnerability-policy.json create mode 100644 .github/release/nightly-owner-routing.json create mode 100644 .github/release/prerelease-stage-gates.json create mode 100644 .github/release/release-artifact-contract.json create mode 100644 .github/security/deny-ignore-governance.json create mode 100644 .github/security/gitleaks-allowlist-governance.json create mode 100644 .github/security/unsafe-audit-governance.json create mode 100644 .github/workflows/README.md create mode 100644 .github/workflows/ci-build-fast.yml create mode 100644 .github/workflows/ci-canary-gate.yml create mode 100644 .github/workflows/ci-change-audit.yml create mode 100644 .github/workflows/ci-provider-connectivity.yml create mode 100644 .github/workflows/ci-reproducible-build.yml create mode 100644 .github/workflows/ci-rollback.yml create mode 100644 .github/workflows/ci-run.yml create mode 100644 .github/workflows/ci-supply-chain-provenance.yml create mode 100644 .github/workflows/deploy-web.yml create mode 100644 .github/workflows/docs-deploy.yml create mode 100644 .github/workflows/feature-matrix.yml create mode 100644 .github/workflows/main-branch-flow.md create mode 100644 .github/workflows/nightly-all-features.yml create mode 100644 .github/workflows/pages-deploy.yml create mode 100644 .github/workflows/pr-auto-response.yml create mode 100644 .github/workflows/pr-check-stale.yml create mode 100644 .github/workflows/pr-check-status.yml create mode 100644 
.github/workflows/pr-intake-checks.yml create mode 100644 .github/workflows/pr-label-policy-check.yml create mode 100644 .github/workflows/pr-labeler.yml create mode 100644 .github/workflows/pub-docker-img.yml create mode 100644 .github/workflows/pub-prerelease.yml create mode 100644 .github/workflows/pub-release.yml create mode 100644 .github/workflows/scripts/ci_human_review_guard.js create mode 100644 .github/workflows/scripts/ci_license_file_owner_guard.js create mode 100644 .github/workflows/scripts/ci_workflow_owner_approval.js create mode 100644 .github/workflows/scripts/lint_feedback.js create mode 100644 .github/workflows/scripts/pr_auto_response_contributor_tier.js create mode 100644 .github/workflows/scripts/pr_auto_response_labeled_routes.js create mode 100644 .github/workflows/scripts/pr_check_status_nudge.js create mode 100644 .github/workflows/scripts/pr_intake_checks.js create mode 100644 .github/workflows/scripts/pr_labeler.js create mode 100644 .github/workflows/scripts/test_benchmarks_pr_comment.js create mode 100644 .github/workflows/sec-audit.yml create mode 100644 .github/workflows/sec-codeql.yml create mode 100644 .github/workflows/sec-vorpal-reviewdog.yml create mode 100644 .github/workflows/sync-contributors.yml create mode 100644 .github/workflows/test-benchmarks.yml create mode 100644 .github/workflows/test-e2e.yml create mode 100644 .github/workflows/test-fuzz.yml create mode 100644 .github/workflows/workflow-sanity.yml diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100755 index 0000000..d162ba3 --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,8 @@ +#!/usr/bin/env bash +set -euo pipefail + +if command -v gitleaks >/dev/null 2>&1; then + gitleaks protect --staged --redact +else + echo "warning: gitleaks not found; skipping staged secret scan" >&2 +fi diff --git a/.githooks/pre-push b/.githooks/pre-push new file mode 100755 index 0000000..f69e1cb --- /dev/null +++ b/.githooks/pre-push @@ -0,0 +1,53 @@ 
+#!/usr/bin/env bash +# +# pre-push hook — runs fmt, clippy, and tests before every push. +# Install: git config core.hooksPath .githooks +# Skip: git push --no-verify + +set -euo pipefail + +echo "==> pre-push: running rust quality gate..." +./scripts/ci/rust_quality_gate.sh || { + echo "FAIL: rust quality gate failed." + exit 1 +} + +if [ "${ZEROCLAW_STRICT_LINT:-0}" = "1" ]; then + echo "==> pre-push: running strict clippy warnings gate (ZEROCLAW_STRICT_LINT=1)..." + ./scripts/ci/rust_quality_gate.sh --strict || { + echo "FAIL: strict clippy warnings gate reported issues." + exit 1 + } +fi + +if [ "${ZEROCLAW_STRICT_DELTA_LINT:-0}" = "1" ]; then + echo "==> pre-push: running strict delta lint gate (ZEROCLAW_STRICT_DELTA_LINT=1)..." + ./scripts/ci/rust_strict_delta_gate.sh || { + echo "FAIL: strict delta lint gate reported issues." + exit 1 + } +fi + +if [ "${ZEROCLAW_DOCS_LINT:-0}" = "1" ]; then + echo "==> pre-push: running docs quality gate (ZEROCLAW_DOCS_LINT=1)..." + ./scripts/ci/docs_quality_gate.sh || { + echo "FAIL: docs quality gate reported issues." + exit 1 + } +fi + +if [ "${ZEROCLAW_DOCS_LINKS:-0}" = "1" ]; then + echo "==> pre-push: running docs links gate (ZEROCLAW_DOCS_LINKS=1)..." + ./scripts/ci/docs_links_gate.sh || { + echo "FAIL: docs links gate reported issues." + exit 1 + } +fi + +echo "==> pre-push: running tests..." +cargo test --locked || { + echo "FAIL: some tests did not pass." + exit 1 +} + +echo "==> pre-push: all checks passed." 
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..cdb9b92 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,32 @@ +# Default owner for all files +* @louisdevzz + +# Important functional modules +/src/agent/** @louisdevzz +/src/providers/** @louisdevzz +/src/channels/** @louisdevzz +/src/tools/** @louisdevzz +/src/gateway/** @louisdevzz +/src/runtime/** @louisdevzz +/src/memory/** @louisdevzz +/Cargo.toml @louisdevzz +/Cargo.lock @louisdevzz + +# Security / tests / CI-CD ownership +/src/security/** @louisdevzz +/tests/** @louisdevzz +/.github/** @louisdevzz +/.github/workflows/** @louisdevzz +/.github/codeql/** @louisdevzz +/.github/dependabot.yml @louisdevzz +/SECURITY.md @louisdevzz +/docs/actions-source-policy.md @louisdevzz +/docs/ci-map.md @louisdevzz + +# Docs & governance +/docs/** @louisdevzz +/AGENTS.md @louisdevzz +/CLAUDE.md @louisdevzz +/CONTRIBUTING.md @louisdevzz +/docs/pr-workflow.md @louisdevzz +/docs/reviewer-playbook.md @louisdevzz diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 0000000..8ac7419 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,148 @@ +name: Bug Report +description: Report a reproducible defect in ZeroClaw +title: "[Bug]: " +labels: + - bug +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to report a bug. + Please provide a minimal reproducible case so maintainers can triage quickly. + Do not include personal/sensitive data; redact and anonymize all logs/payloads. + + - type: input + id: summary + attributes: + label: Summary + description: One-line description of the problem. + placeholder: zeroclaw daemon exits immediately when ... 
+ validations: + required: true + + - type: dropdown + id: component + attributes: + label: Affected component + options: + - runtime/daemon + - provider + - channel + - memory + - security/sandbox + - tooling/ci + - docs + - unknown + validations: + required: true + + - type: dropdown + id: severity + attributes: + label: Severity + options: + - S0 - data loss / security risk + - S1 - workflow blocked + - S2 - degraded behavior + - S3 - minor issue + validations: + required: true + + - type: textarea + id: current + attributes: + label: Current behavior + description: What is happening now? + placeholder: The process exits with ... + validations: + required: true + + - type: textarea + id: expected + attributes: + label: Expected behavior + description: What should happen instead? + placeholder: The daemon should stay alive and ... + validations: + required: true + + - type: textarea + id: reproduce + attributes: + label: Steps to reproduce + description: Please provide exact commands/config. + placeholder: | + 1. zeroclaw onboard --interactive + 2. zeroclaw daemon + 3. Observe crash in logs + render: bash + validations: + required: true + + - type: textarea + id: impact + attributes: + label: Impact + description: Who is affected, how often, and practical consequences. + placeholder: | + Affected users: ... + Frequency: always/intermittent + Consequence: ... + validations: + required: true + + - type: textarea + id: logs + attributes: + label: Logs / stack traces + description: Paste relevant logs (redact secrets, personal identifiers, and sensitive data). 
+ render: text + validations: + required: false + + - type: input + id: version + attributes: + label: ZeroClaw version + placeholder: v0.1.0 / commit SHA + validations: + required: true + + - type: input + id: rust + attributes: + label: Rust version + placeholder: rustc 1.xx.x + validations: + required: true + + - type: input + id: os + attributes: + label: Operating system + placeholder: Ubuntu 24.04 / macOS 15 / Windows 11 + validations: + required: true + + - type: dropdown + id: regression + attributes: + label: Regression? + options: + - Unknown + - Yes, it worked before + - No, first-time setup + validations: + required: true + + - type: checkboxes + id: checks + attributes: + label: Pre-flight checks + options: + - label: I reproduced this on the latest main branch or latest release. + required: true + - label: I redacted secrets/tokens from logs. + required: true + - label: I removed personal identifiers and replaced identity-specific data with neutral placeholders. + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..4de85aa --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,17 @@ +blank_issues_enabled: false +contact_links: + - name: Security vulnerability report + url: https://github.com/zeroclaw-labs/zeroclaw/security/policy + about: Please report security vulnerabilities privately via SECURITY.md policy. + - name: Private vulnerability report template + url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/docs/security/private-vulnerability-report-template.md + about: Use this template when filing a private vulnerability report in Security Advisories. 
+ - name: 私密漏洞报告模板(中文) + url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/docs/security/private-vulnerability-report-template.zh-CN.md + about: 使用该中文模板通过 Security Advisories 进行私密漏洞提交。 + - name: Contribution guide + url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/CONTRIBUTING.md + about: Please read contribution and PR requirements before opening an issue. + - name: PR workflow & reviewer expectations + url: https://github.com/zeroclaw-labs/zeroclaw/blob/main/docs/pr-workflow.md + about: Read risk-based PR tracks, CI gates, and merge criteria before filing feature requests. diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 0000000..25fa32b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,107 @@ +name: Feature Request +description: Propose an improvement or new capability +title: "[Feature]: " +labels: + - enhancement +body: + - type: markdown + attributes: + value: | + Thanks for sharing your idea. + Please focus on user value, constraints, and rollout safety. + Do not include personal/sensitive data; use neutral project-scoped placeholders. + + - type: input + id: summary + attributes: + label: Summary + description: One-line statement of the requested capability. + placeholder: Add a provider-level retry budget override for long-running channels. + validations: + required: true + + - type: textarea + id: problem + attributes: + label: Problem statement + description: What user pain does this solve and why is current behavior insufficient? + placeholder: Teams operating in unstable networks cannot tune retries per provider... + validations: + required: true + + - type: textarea + id: proposal + attributes: + label: Proposed solution + description: Describe preferred behavior and interfaces. + placeholder: Add `[provider.retry]` config and enforce bounds in config validation. 
+ validations: + required: true + + - type: textarea + id: non_goals + attributes: + label: Non-goals / out of scope + description: Clarify what should not be included in the first iteration. + placeholder: No UI changes, no cross-provider dynamic adaptation in v1. + validations: + required: true + + - type: textarea + id: alternatives + attributes: + label: Alternatives considered + description: What alternatives did you evaluate? + placeholder: Keep current behavior, use wrapper scripts, etc. + validations: + required: false + + - type: textarea + id: acceptance + attributes: + label: Acceptance criteria + description: What outcomes would make this request complete? + placeholder: | + - Config key is documented and validated + - Runtime path uses configured retry budget + - Regression tests cover fallback and invalid config + validations: + required: true + + - type: textarea + id: architecture + attributes: + label: Architecture impact + description: Which subsystem(s) are affected? + placeholder: providers/, channels/, memory/, runtime/, security/, docs/ ... + validations: + required: true + + - type: textarea + id: risk + attributes: + label: Risk and rollback + description: Main risk + how to disable/revert quickly. + placeholder: Risk is ... rollback is ... + validations: + required: true + + - type: dropdown + id: breaking + attributes: + label: Breaking change? + options: + - "No" + - "Yes" + validations: + required: true + + - type: checkboxes + id: hygiene + attributes: + label: Data hygiene checks + options: + - label: I removed personal/sensitive data from examples, payloads, and logs. + required: true + - label: I used neutral, project-focused wording and placeholders. 
+ required: true diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml new file mode 100644 index 0000000..3c46a6f --- /dev/null +++ b/.github/actionlint.yaml @@ -0,0 +1,7 @@ +self-hosted-runner: + labels: + - blacksmith-2vcpu-ubuntu-2404 + - aws-india + - hetzner + - Linux + - X64 diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml new file mode 100644 index 0000000..5c82c1b --- /dev/null +++ b/.github/codeql/codeql-config.yml @@ -0,0 +1,8 @@ +# CodeQL configuration for ZeroClaw +# +# We intentionally ignore integration tests under `tests/` because they often +# contain security-focused fixtures (example secrets, malformed payloads, etc.) +# that can trigger false positives in security queries. + +paths-ignore: + - tests/** diff --git a/.github/connectivity/probe-contract.json b/.github/connectivity/probe-contract.json new file mode 100644 index 0000000..4c6b3a2 --- /dev/null +++ b/.github/connectivity/probe-contract.json @@ -0,0 +1,70 @@ +{ + "version": 1, + "description": "Provider/model connectivity probe contract for scheduled CI checks.", + "consecutive_transient_failures_to_escalate": 2, + "providers": [ + { + "name": "OpenAI", + "provider": "openai", + "required": true, + "secret_env": "OPENAI_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Primary reference provider; validates baseline OpenAI-compatible path." + }, + { + "name": "Anthropic", + "provider": "anthropic", + "required": true, + "secret_env": "ANTHROPIC_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Checks non-OpenAI provider fetch path and account health." + }, + { + "name": "Gemini", + "provider": "gemini", + "required": true, + "secret_env": "GEMINI_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Validates Google model discovery endpoint availability." 
+ }, + { + "name": "OpenRouter", + "provider": "openrouter", + "required": true, + "secret_env": "OPENROUTER_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Routes across many providers; signal for aggregator-side health." + }, + { + "name": "Qwen", + "provider": "qwen", + "required": false, + "secret_env": "DASHSCOPE_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Regional provider check; optional for global deployments." + }, + { + "name": "NVIDIA NIM", + "provider": "nvidia", + "required": false, + "secret_env": "NVIDIA_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Optional ecosystem endpoint check." + }, + { + "name": "OpenAI Codex", + "provider": "openai-codex", + "required": false, + "secret_env": "OPENAI_API_KEY", + "timeout_sec": 90, + "retries": 2, + "notes": "Uses OpenAI-compatible models endpoint to verify Codex-profile discovery path." + } + ] +} diff --git a/.github/connectivity/providers.json b/.github/connectivity/providers.json new file mode 100644 index 0000000..559a064 --- /dev/null +++ b/.github/connectivity/providers.json @@ -0,0 +1,77 @@ +{ + "global_timeout_seconds": 8, + "providers": [ + { + "id": "openrouter", + "url": "https://openrouter.ai/api/v1/models", + "method": "GET", + "critical": true + }, + { + "id": "openai", + "url": "https://api.openai.com/v1/models", + "method": "GET", + "critical": true + }, + { + "id": "anthropic", + "url": "https://api.anthropic.com/v1/messages", + "method": "POST", + "critical": true + }, + { + "id": "groq", + "url": "https://api.groq.com/openai/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "deepseek", + "url": "https://api.deepseek.com/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "moonshot", + "url": "https://api.moonshot.ai/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "qwen", + "url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1/models", + "method": "GET", + "critical": false + }, + { + "id": 
"zai", + "url": "https://api.z.ai/api/paas/v4/models", + "method": "GET", + "critical": false + }, + { + "id": "glm", + "url": "https://open.bigmodel.cn/api/paas/v4/models", + "method": "GET", + "critical": false + }, + { + "id": "together", + "url": "https://api.together.xyz/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "fireworks", + "url": "https://api.fireworks.ai/inference/v1/models", + "method": "GET", + "critical": false + }, + { + "id": "cohere", + "url": "https://api.cohere.com/v1/models", + "method": "GET", + "critical": false + } + ] +} diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..eb81c96 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,52 @@ +version: 2 + +updates: + - package-ecosystem: cargo + directory: "/" + schedule: + interval: daily + target-branch: main + open-pull-requests-limit: 3 + labels: + - "dependencies" + groups: + rust-all: + patterns: + - "*" + update-types: + - minor + - patch + + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + target-branch: main + open-pull-requests-limit: 1 + labels: + - "ci" + - "dependencies" + groups: + actions-all: + patterns: + - "*" + update-types: + - minor + - patch + + - package-ecosystem: docker + directory: "/" + schedule: + interval: daily + target-branch: main + open-pull-requests-limit: 1 + labels: + - "ci" + - "dependencies" + groups: + docker-all: + patterns: + - "*" + update-types: + - minor + - patch diff --git a/.github/label-policy.json b/.github/label-policy.json new file mode 100644 index 0000000..e8b254f --- /dev/null +++ b/.github/label-policy.json @@ -0,0 +1,21 @@ +{ + "contributor_tier_color": "2ED9FF", + "contributor_tiers": [ + { + "label": "distinguished contributor", + "min_merged_prs": 50 + }, + { + "label": "principal contributor", + "min_merged_prs": 20 + }, + { + "label": "experienced contributor", + "min_merged_prs": 10 + }, + { + "label": "trusted contributor", + 
"min_merged_prs": 5 + } + ] +} diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000..21e851f --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,147 @@ +"docs": + - changed-files: + - any-glob-to-any-file: + - "docs/**" + - "**/*.md" + - "**/*.mdx" + - "LICENSE" + - ".markdownlint-cli2.yaml" + +"dependencies": + - changed-files: + - any-glob-to-any-file: + - "Cargo.toml" + - "Cargo.lock" + - "deny.toml" + - ".github/dependabot.yml" + +"ci": + - changed-files: + - any-glob-to-any-file: + - ".github/**" + - ".githooks/**" + +"core": + - changed-files: + - any-glob-to-any-file: + - "src/*.rs" + +"agent": + - changed-files: + - any-glob-to-any-file: + - "src/agent/**" + +"channel": + - changed-files: + - any-glob-to-any-file: + - "src/channels/**" + +"gateway": + - changed-files: + - any-glob-to-any-file: + - "src/gateway/**" + +"config": + - changed-files: + - any-glob-to-any-file: + - "src/config/**" + +"cron": + - changed-files: + - any-glob-to-any-file: + - "src/cron/**" + +"daemon": + - changed-files: + - any-glob-to-any-file: + - "src/daemon/**" + +"doctor": + - changed-files: + - any-glob-to-any-file: + - "src/doctor/**" + +"health": + - changed-files: + - any-glob-to-any-file: + - "src/health/**" + +"heartbeat": + - changed-files: + - any-glob-to-any-file: + - "src/heartbeat/**" + +"integration": + - changed-files: + - any-glob-to-any-file: + - "src/integrations/**" + +"memory": + - changed-files: + - any-glob-to-any-file: + - "src/memory/**" + +"security": + - changed-files: + - any-glob-to-any-file: + - "src/security/**" + +"runtime": + - changed-files: + - any-glob-to-any-file: + - "src/runtime/**" + +"onboard": + - changed-files: + - any-glob-to-any-file: + - "src/onboard/**" + +"provider": + - changed-files: + - any-glob-to-any-file: + - "src/providers/**" + +"service": + - changed-files: + - any-glob-to-any-file: + - "src/service/**" + +"skillforge": + - changed-files: + - any-glob-to-any-file: + - "src/skillforge/**" + 
+"skills": + - changed-files: + - any-glob-to-any-file: + - "src/skills/**" + +"tool": + - changed-files: + - any-glob-to-any-file: + - "src/tools/**" + +"tunnel": + - changed-files: + - any-glob-to-any-file: + - "src/tunnel/**" + +"observability": + - changed-files: + - any-glob-to-any-file: + - "src/observability/**" + +"tests": + - changed-files: + - any-glob-to-any-file: + - "tests/**" + +"scripts": + - changed-files: + - any-glob-to-any-file: + - "scripts/**" + +"dev": + - changed-files: + - any-glob-to-any-file: + - "dev/**" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..fe3cd6f --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,117 @@ +## Summary + +Describe this PR in 2-5 bullets: + +- Base branch target (`main` by default; use `dev` only when maintainers explicitly request integration batching): +- Problem: +- Why it matters: +- What changed: +- What did **not** change (scope boundary): + +## Label Snapshot (required) + +- Risk label (`risk: low|medium|high`): +- Size label (`size: XS|S|M|L|XL`, auto-managed/read-only): +- Scope labels (`core|agent|channel|config|cron|daemon|doctor|gateway|health|heartbeat|integration|memory|observability|onboard|provider|runtime|security|service|skillforge|skills|tool|tunnel|docs|dependencies|ci|tests|scripts|dev`, comma-separated): +- Module labels (`<scope>: <module>`, for example `channel: telegram`, `provider: kimi`, `tool: shell`): +- Contributor tier label (`trusted contributor|experienced contributor|principal contributor|distinguished contributor`, auto-managed/read-only; author merged PRs >=5/10/20/50): +- If any auto-label is incorrect, note requested correction: + +## Change Metadata + +- Change type (`bug|feature|refactor|docs|security|chore`): +- Primary scope (`runtime|provider|channel|memory|security|ci|docs|multi`): + +## Linked Issue + +- Closes # +- Related # +- Depends on # (if stacked) +- Existing overlapping PR(s) reviewed for this issue (list `#<number> by @<handle>` or `N/A`): + - Supersedes # (if replacing older PR) + - Linear issue key(s) (required, e.g. `RMN-123`): + - Linear issue URL(s): + +## Supersede Attribution (required when `Supersedes #` is used) + +- Superseded PRs + authors (`#<number> by @<handle>`, one per line): + - Integrated scope by source PR (what was materially carried forward): + - `Co-authored-by` trailers added for materially incorporated contributors? (`Yes/No`) + - If `No`, explain why (for example: inspiration-only, no direct code/design carry-over): + - Trailer format check (separate lines, no escaped `\n`): (`Pass/Fail`) + +## Validation Evidence (required) + +Commands and result summary: + +```bash +cargo fmt --all -- --check +cargo clippy --all-targets -- -D warnings +cargo test +``` + +- Evidence provided (test/log/trace/screenshot/perf): +- If any command is intentionally skipped, explain why: + +## Security Impact (required) + +- New permissions/capabilities? (`Yes/No`) +- New external network calls? (`Yes/No`) +- Secrets/tokens handling changed? (`Yes/No`) +- File system access scope changed? (`Yes/No`) +- If any `Yes`, describe risk and mitigation: + +## Privacy and Data Hygiene (required) + +- Data-hygiene status (`pass|needs-follow-up`): +- Redaction/anonymization notes: +- Neutral wording confirmation (use ZeroClaw/project-native labels if identity-like wording is needed): + +## Compatibility / Migration + +- Backward compatible? (`Yes/No`) +- Config/env changes? (`Yes/No`) +- Migration needed? (`Yes/No`) +- If yes, exact upgrade steps: + +## i18n Follow-Through (required when docs or user-facing wording changes) + +- i18n follow-through triggered? (`Yes/No`) +- If `Yes`, locale navigation parity updated in `README*`, `docs/README*`, and `docs/SUMMARY.md` for supported locales (`en`, `zh-CN`, `ja`, `ru`, `fr`, `vi`)? (`Yes/No`) +- If `Yes`, localized runtime-contract docs updated where equivalents exist (minimum for `fr`/`vi`: `commands-reference`, `config-reference`, `troubleshooting`)? 
(`Yes/No/N.A.`) +- If `Yes`, Vietnamese canonical docs under `docs/i18n/vi/**` synced and compatibility shims under `docs/*.vi.md` validated? (`Yes/No/N.A.`) +- If any `No`/`N.A.`, link follow-up issue/PR and explain scope decision: + +## Human Verification (required) + +What was personally validated beyond CI: + +- Verified scenarios: +- Edge cases checked: +- What was not verified: + +## Side Effects / Blast Radius (required) + +- Affected subsystems/workflows: +- Potential unintended effects: +- Guardrails/monitoring for early detection: + +## Agent Collaboration Notes (recommended) + +- Agent tools used (if any): +- Workflow/plan summary (if any): +- Verification focus: +- Confirmation: naming + architecture boundaries followed (`AGENTS.md` + `CONTRIBUTING.md`): + +## Rollback Plan (required) + +- Fast rollback command/path: +- Feature flags or config toggles (if any): +- Observable failure symptoms: + +## Risks and Mitigations + +List real risks in this PR (or write `None`). + +- Risk: + - Mitigation: diff --git a/.github/release/canary-policy.json b/.github/release/canary-policy.json new file mode 100644 index 0000000..e032311 --- /dev/null +++ b/.github/release/canary-policy.json @@ -0,0 +1,39 @@ +{ + "schema_version": "zeroclaw.canary-policy.v1", + "release_channel": "stable", + "observation_window_minutes": 60, + "minimum_sample_size": 500, + "cohorts": [ + { + "name": "canary-5pct", + "traffic_percent": 5, + "duration_minutes": 20 + }, + { + "name": "canary-20pct", + "traffic_percent": 20, + "duration_minutes": 20 + }, + { + "name": "canary-50pct", + "traffic_percent": 50, + "duration_minutes": 20 + }, + { + "name": "canary-100pct", + "traffic_percent": 100, + "duration_minutes": 60 + } + ], + "observability_signals": [ + "error_rate", + "crash_rate", + "p95_latency_ms", + "sample_size" + ], + "thresholds": { + "max_error_rate": 0.02, + "max_crash_rate": 0.01, + "max_p95_latency_ms": 1200 + } +} diff --git a/.github/release/docs-deploy-policy.json 
b/.github/release/docs-deploy-policy.json new file mode 100644 index 0000000..ba8db88 --- /dev/null +++ b/.github/release/docs-deploy-policy.json @@ -0,0 +1,10 @@ +{ + "schema_version": "zeroclaw.docs-deploy-policy.v1", + "production_branch": "main", + "allow_manual_production_dispatch": true, + "require_preview_evidence_on_manual_production": true, + "allow_manual_rollback_dispatch": true, + "rollback_ref_must_be_ancestor_of_production_branch": true, + "docs_preview_retention_days": 14, + "docs_guard_artifact_retention_days": 21 +} diff --git a/.github/release/ghcr-tag-policy.json b/.github/release/ghcr-tag-policy.json new file mode 100644 index 0000000..bbac3ff --- /dev/null +++ b/.github/release/ghcr-tag-policy.json @@ -0,0 +1,18 @@ +{ + "schema_version": "zeroclaw.ghcr-tag-policy.v1", + "release_tag_regex": "^v[0-9]+\\.[0-9]+\\.[0-9]+$", + "sha_tag_prefix": "sha-", + "sha_tag_length": 12, + "latest_tag": "latest", + "require_latest_on_release": true, + "immutable_tag_classes": [ + "release", + "sha" + ], + "rollback_priority": [ + "sha", + "release" + ], + "contract_artifact_retention_days": 21, + "scan_artifact_retention_days": 14 +} diff --git a/.github/release/ghcr-vulnerability-policy.json b/.github/release/ghcr-vulnerability-policy.json new file mode 100644 index 0000000..64209b0 --- /dev/null +++ b/.github/release/ghcr-vulnerability-policy.json @@ -0,0 +1,17 @@ +{ + "schema_version": "zeroclaw.ghcr-vulnerability-policy.v1", + "required_tag_classes": [ + "release", + "sha", + "latest" + ], + "blocking_severities": [ + "HIGH", + "CRITICAL" + ], + "max_blocking_findings_per_tag": 0, + "require_blocking_count_parity": true, + "require_artifact_id_parity": true, + "scan_artifact_retention_days": 14, + "audit_artifact_retention_days": 21 +} diff --git a/.github/release/nightly-owner-routing.json b/.github/release/nightly-owner-routing.json new file mode 100644 index 0000000..a9d44ea --- /dev/null +++ b/.github/release/nightly-owner-routing.json @@ -0,0 +1,9 @@ 
+{ + "schema_version": "zeroclaw.nightly-owner-routing.v1", + "owners": { + "default": "@louisdevzz", + "whatsapp-web": "@louisdevzz", + "browser-native": "@louisdevzz", + "nightly-all-features": "@louisdevzz" + } +} diff --git a/.github/release/prerelease-stage-gates.json b/.github/release/prerelease-stage-gates.json new file mode 100644 index 0000000..e2614ae --- /dev/null +++ b/.github/release/prerelease-stage-gates.json @@ -0,0 +1,33 @@ +{ + "schema_version": "zeroclaw.prerelease-stage-gates.v1", + "stage_order": ["alpha", "beta", "rc", "stable"], + "required_previous_stage": { + "beta": "alpha", + "rc": "beta", + "stable": "rc" + }, + "required_checks": { + "alpha": [ + "CI Required Gate", + "Security Audit" + ], + "beta": [ + "CI Required Gate", + "Security Audit", + "Feature Matrix Summary" + ], + "rc": [ + "CI Required Gate", + "Security Audit", + "Feature Matrix Summary", + "Nightly Summary & Routing" + ], + "stable": [ + "CI Required Gate", + "Security Audit", + "Feature Matrix Summary", + "Verify Artifact Set", + "Nightly Summary & Routing" + ] + } +} diff --git a/.github/release/release-artifact-contract.json b/.github/release/release-artifact-contract.json new file mode 100644 index 0000000..1459588 --- /dev/null +++ b/.github/release/release-artifact-contract.json @@ -0,0 +1,30 @@ +{ + "schema_version": "zeroclaw.release-artifact-contract.v1", + "release_archive_patterns": [ + "zeroclaw-x86_64-unknown-linux-gnu.tar.gz", + "zeroclaw-x86_64-unknown-linux-musl.tar.gz", + "zeroclaw-aarch64-unknown-linux-gnu.tar.gz", + "zeroclaw-aarch64-unknown-linux-musl.tar.gz", + "zeroclaw-armv7-unknown-linux-gnueabihf.tar.gz", + "zeroclaw-armv7-linux-androideabi.tar.gz", + "zeroclaw-aarch64-linux-android.tar.gz", + "zeroclaw-x86_64-unknown-freebsd.tar.gz", + "zeroclaw-x86_64-apple-darwin.tar.gz", + "zeroclaw-aarch64-apple-darwin.tar.gz", + "zeroclaw-x86_64-pc-windows-msvc.zip" + ], + "required_manifest_files": [ + "release-manifest.json", + "release-manifest.md", + 
"SHA256SUMS" + ], + "required_sbom_files": [ + "zeroclaw.cdx.json", + "zeroclaw.spdx.json" + ], + "required_notice_files": [ + "LICENSE-APACHE", + "LICENSE-MIT", + "NOTICE" + ] +} diff --git a/.github/security/deny-ignore-governance.json b/.github/security/deny-ignore-governance.json new file mode 100644 index 0000000..d959274 --- /dev/null +++ b/.github/security/deny-ignore-governance.json @@ -0,0 +1,26 @@ +{ + "schema_version": "zeroclaw.deny-governance.v1", + "advisories": [ + { + "id": "RUSTSEC-2025-0141", + "owner": "repo-maintainers", + "reason": "Transitive via probe-rs in current release path; tracked for replacement when probe-rs updates.", + "ticket": "RMN-21", + "expires_on": "2026-12-31" + }, + { + "id": "RUSTSEC-2024-0384", + "owner": "repo-maintainers", + "reason": "Upstream rust-nostr advisory mitigation is still in progress; monitor until released fix lands.", + "ticket": "RMN-21", + "expires_on": "2026-12-31" + }, + { + "id": "RUSTSEC-2024-0388", + "owner": "repo-maintainers", + "reason": "Transitive via matrix-sdk indexeddb dependency chain in current matrix release line; track removal when upstream drops derivative.", + "ticket": "RMN-21", + "expires_on": "2026-12-31" + } + ] +} diff --git a/.github/security/gitleaks-allowlist-governance.json b/.github/security/gitleaks-allowlist-governance.json new file mode 100644 index 0000000..4ec7714 --- /dev/null +++ b/.github/security/gitleaks-allowlist-governance.json @@ -0,0 +1,56 @@ +{ + "schema_version": "zeroclaw.secrets-governance.v1", + "paths": [ + { + "pattern": "src/security/leak_detector\\.rs", + "owner": "repo-maintainers", + "reason": "Fixture patterns are intentionally embedded for regression tests in leak detector logic.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + { + "pattern": "src/agent/loop_\\.rs", + "owner": "repo-maintainers", + "reason": "Contains escaped template snippets used for command orchestration and parser coverage.", + "ticket": "RMN-13", + "expires_on": 
"2026-12-31" + }, + { + "pattern": "src/security/secrets\\.rs", + "owner": "repo-maintainers", + "reason": "Contains detector test vectors and redaction examples required for secret scanning tests.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + { + "pattern": "docs/(i18n/vi/|vi/)?zai-glm-setup\\.md", + "owner": "repo-maintainers", + "reason": "Documentation contains literal environment variable placeholders for onboarding commands.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + { + "pattern": "\\.github/workflows/pub-release\\.yml", + "owner": "repo-maintainers", + "reason": "Release workflow emits masked authorization header examples during registry smoke checks.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + } + ], + "regexes": [ + { + "pattern": "Authorization: Bearer \\$\\{[^}]+\\}", + "owner": "repo-maintainers", + "reason": "Intentional placeholder used in docs/workflow snippets for safe header examples.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + }, + { + "pattern": "curl -sS -o /tmp/ghcr-release-manifest\\.json -w \"%\\{http_code\\}\"", + "owner": "repo-maintainers", + "reason": "Release smoke command string is non-secret telemetry and should not be flagged as credential leakage.", + "ticket": "RMN-13", + "expires_on": "2026-12-31" + } + ] +} diff --git a/.github/security/unsafe-audit-governance.json b/.github/security/unsafe-audit-governance.json new file mode 100644 index 0000000..e8edb6c --- /dev/null +++ b/.github/security/unsafe-audit-governance.json @@ -0,0 +1,5 @@ +{ + "schema_version": "zeroclaw.unsafe-audit-governance.v1", + "ignore_paths": [], + "ignore_pattern_ids": [] +} diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 0000000..fe3b3d8 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,36 @@ +# Workflow Directory Layout + +GitHub Actions only loads workflow entry files from: + +- `.github/workflows/*.yml` +- `.github/workflows/*.yaml` + 
+Subdirectories are not valid locations for workflow entry files. + +Repository convention: + +1. Keep runnable workflow entry files at `.github/workflows/` root. +2. Keep workflow-only helper scripts under `.github/workflows/scripts/`. +3. Keep cross-tooling/local CI scripts under `scripts/ci/` when they are used outside Actions. + +Workflow behavior documentation in this directory: + +- `.github/workflows/main-branch-flow.md` + +Current workflow helper scripts: + +- `.github/workflows/scripts/ci_workflow_owner_approval.js` +- `.github/workflows/scripts/ci_license_file_owner_guard.js` +- `.github/workflows/scripts/lint_feedback.js` +- `.github/workflows/scripts/pr_auto_response_contributor_tier.js` +- `.github/workflows/scripts/pr_auto_response_labeled_routes.js` +- `.github/workflows/scripts/pr_check_status_nudge.js` +- `.github/workflows/scripts/pr_intake_checks.js` +- `.github/workflows/scripts/pr_labeler.js` +- `.github/workflows/scripts/test_benchmarks_pr_comment.js` + +Release/CI policy assets introduced for advanced delivery lanes: + +- `.github/release/nightly-owner-routing.json` +- `.github/release/canary-policy.json` +- `.github/release/prerelease-stage-gates.json` diff --git a/.github/workflows/ci-build-fast.yml b/.github/workflows/ci-build-fast.yml new file mode 100644 index 0000000..a9cab2b --- /dev/null +++ b/.github/workflows/ci-build-fast.yml @@ -0,0 +1,63 @@ +name: CI Build (Fast) + +# Optional fast release build that runs alongside the normal Build (Smoke) job. +# This workflow is informational and does not gate merges. 
+ +on: + push: + branches: [dev, main] + pull_request: + branches: [dev, main] + +concurrency: + group: ci-fast-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + changes: + name: Detect Change Scope + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + outputs: + rust_changed: ${{ steps.scope.outputs.rust_changed }} + docs_only: ${{ steps.scope.outputs.docs_only }} + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + - name: Detect docs-only changes + id: scope + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + BASE_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event.before }} + run: ./scripts/ci/detect_change_scope.sh + + build-fast: + name: Build (Fast) + needs: [changes] + if: needs.changes.outputs.rust_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 25 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: fast-build + cache-targets: true + + - name: Build release binary + run: cargo build --release --locked --verbose diff --git a/.github/workflows/ci-canary-gate.yml b/.github/workflows/ci-canary-gate.yml new file mode 100644 index 0000000..9fcfdff --- /dev/null +++ b/.github/workflows/ci-canary-gate.yml @@ -0,0 +1,329 @@ +name: CI Canary Gate + +on: + workflow_dispatch: + inputs: + mode: + description: "dry-run computes decision only; execute enables canary dispatch" + required: true + default: dry-run 
+ type: choice + options: + - dry-run + - execute + candidate_tag: + description: "Candidate release tag (e.g. v0.1.8-rc.1 or v0.1.8)" + required: false + default: "" + type: string + candidate_sha: + description: "Optional explicit candidate SHA" + required: false + default: "" + type: string + error_rate: + description: "Observed canary error rate (0.0-1.0)" + required: true + default: "0.0" + type: string + crash_rate: + description: "Observed canary crash rate (0.0-1.0)" + required: true + default: "0.0" + type: string + p95_latency_ms: + description: "Observed canary p95 latency in milliseconds" + required: true + default: "0" + type: string + sample_size: + description: "Observed canary sample size" + required: true + default: "0" + type: string + emit_repository_dispatch: + description: "Emit canary decision repository_dispatch event" + required: true + default: false + type: boolean + trigger_rollback_on_abort: + description: "Automatically dispatch CI Rollback Guard when canary decision is abort" + required: true + default: true + type: boolean + rollback_branch: + description: "Rollback integration branch used by CI Rollback Guard dispatch" + required: true + default: dev + type: choice + options: + - dev + - main + rollback_target_ref: + description: "Optional explicit rollback target ref passed to CI Rollback Guard" + required: false + default: "" + type: string + fail_on_violation: + description: "Fail on policy violations" + required: true + default: true + type: boolean + schedule: + - cron: "45 7 * * 1" # Weekly Monday 07:45 UTC + +concurrency: + group: canary-gate-${{ github.event.inputs.candidate_tag || github.ref || github.run_id }} + cancel-in-progress: false + +permissions: + contents: read + actions: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + canary-plan: + name: Canary Plan + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + 
timeout-minutes: 20 + outputs: + mode: ${{ steps.inputs.outputs.mode }} + candidate_tag: ${{ steps.inputs.outputs.candidate_tag }} + candidate_sha: ${{ steps.inputs.outputs.candidate_sha }} + trigger_rollback_on_abort: ${{ steps.inputs.outputs.trigger_rollback_on_abort }} + rollback_branch: ${{ steps.inputs.outputs.rollback_branch }} + rollback_target_ref: ${{ steps.inputs.outputs.rollback_target_ref }} + decision: ${{ steps.extract.outputs.decision }} + ready_to_execute: ${{ steps.extract.outputs.ready_to_execute }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Resolve canary inputs + id: inputs + shell: bash + run: | + set -euo pipefail + + mode="dry-run" + candidate_tag="" + candidate_sha="" + error_rate="0.0" + crash_rate="0.0" + p95_latency_ms="0" + sample_size="0" + trigger_rollback_on_abort="true" + rollback_branch="dev" + rollback_target_ref="" + fail_on_violation="true" + + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + mode="${{ github.event.inputs.mode || 'dry-run' }}" + candidate_tag="${{ github.event.inputs.candidate_tag || '' }}" + candidate_sha="${{ github.event.inputs.candidate_sha || '' }}" + error_rate="${{ github.event.inputs.error_rate || '0.0' }}" + crash_rate="${{ github.event.inputs.crash_rate || '0.0' }}" + p95_latency_ms="${{ github.event.inputs.p95_latency_ms || '0' }}" + sample_size="${{ github.event.inputs.sample_size || '0' }}" + trigger_rollback_on_abort="${{ github.event.inputs.trigger_rollback_on_abort || 'true' }}" + rollback_branch="${{ github.event.inputs.rollback_branch || 'dev' }}" + rollback_target_ref="${{ github.event.inputs.rollback_target_ref || '' }}" + fail_on_violation="${{ github.event.inputs.fail_on_violation || 'true' }}" + else + git fetch --tags --force origin + candidate_tag="$(git tag --list 'v*' --sort=-version:refname | head -n1)" + if [ -n "$candidate_tag" ]; then + candidate_sha="$(git rev-parse 
"${candidate_tag}^{commit}")" + fi + fi + + { + echo "mode=${mode}" + echo "candidate_tag=${candidate_tag}" + echo "candidate_sha=${candidate_sha}" + echo "error_rate=${error_rate}" + echo "crash_rate=${crash_rate}" + echo "p95_latency_ms=${p95_latency_ms}" + echo "sample_size=${sample_size}" + echo "trigger_rollback_on_abort=${trigger_rollback_on_abort}" + echo "rollback_branch=${rollback_branch}" + echo "rollback_target_ref=${rollback_target_ref}" + echo "fail_on_violation=${fail_on_violation}" + } >> "$GITHUB_OUTPUT" + + - name: Run canary guard + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + + args=() + if [ "${{ steps.inputs.outputs.fail_on_violation }}" = "true" ]; then + args+=(--fail-on-violation) + fi + + python3 scripts/ci/canary_guard.py \ + --policy-file .github/release/canary-policy.json \ + --candidate-tag "${{ steps.inputs.outputs.candidate_tag }}" \ + --candidate-sha "${{ steps.inputs.outputs.candidate_sha }}" \ + --mode "${{ steps.inputs.outputs.mode }}" \ + --error-rate "${{ steps.inputs.outputs.error_rate }}" \ + --crash-rate "${{ steps.inputs.outputs.crash_rate }}" \ + --p95-latency-ms "${{ steps.inputs.outputs.p95_latency_ms }}" \ + --sample-size "${{ steps.inputs.outputs.sample_size }}" \ + --output-json artifacts/canary-guard.json \ + --output-md artifacts/canary-guard.md \ + "${args[@]}" + + - name: Extract canary decision outputs + id: extract + shell: bash + run: | + set -euo pipefail + decision="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/canary-guard.json', encoding='utf-8')) + print(data.get('decision', 'hold')) + PY + )" + ready_to_execute="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/canary-guard.json', encoding='utf-8')) + print(str(bool(data.get('ready_to_execute', False))).lower()) + PY + )" + echo "decision=${decision}" >> "$GITHUB_OUTPUT" + echo "ready_to_execute=${ready_to_execute}" >> "$GITHUB_OUTPUT" + + - name: Emit canary audit event + if: always() + shell: 
bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type canary_guard \ + --input-json artifacts/canary-guard.json \ + --output-json artifacts/audit-event-canary-guard.json \ + --artifact-name canary-guard \ + --retention-days 21 + + - name: Publish canary summary + if: always() + shell: bash + run: | + set -euo pipefail + cat artifacts/canary-guard.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload canary artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: canary-guard + path: | + artifacts/canary-guard.json + artifacts/canary-guard.md + artifacts/audit-event-canary-guard.json + if-no-files-found: error + retention-days: 21 + + canary-execute: + name: Canary Execute + needs: [canary-plan] + if: github.event_name == 'workflow_dispatch' && needs.canary-plan.outputs.mode == 'execute' && needs.canary-plan.outputs.ready_to_execute == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + permissions: + contents: write + actions: write + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Create canary marker tag + shell: bash + run: | + set -euo pipefail + marker_tag="canary-${{ needs.canary-plan.outputs.candidate_tag }}-${{ github.run_id }}" + git fetch --tags --force origin + git tag -a "$marker_tag" "${{ needs.canary-plan.outputs.candidate_sha }}" -m "Canary decision marker from run ${{ github.run_id }}" + git push origin "$marker_tag" + echo "Created marker tag: $marker_tag" >> "$GITHUB_STEP_SUMMARY" + + - name: Emit canary repository dispatch + if: github.event.inputs.emit_repository_dispatch == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + await github.rest.repos.createDispatchEvent({ + owner: context.repo.owner, + repo: context.repo.repo, + event_type: `canary_${{ 
needs.canary-plan.outputs.decision }}`, + client_payload: { + candidate_tag: "${{ needs.canary-plan.outputs.candidate_tag }}", + candidate_sha: "${{ needs.canary-plan.outputs.candidate_sha }}", + decision: "${{ needs.canary-plan.outputs.decision }}", + run_id: context.runId, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + source_sha: context.sha + } + }); + + - name: Trigger rollback guard workflow on abort + if: needs.canary-plan.outputs.decision == 'abort' && needs.canary-plan.outputs.trigger_rollback_on_abort == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const rollbackBranch = "${{ needs.canary-plan.outputs.rollback_branch }}" || "dev"; + const rollbackTargetRef = `${{ needs.canary-plan.outputs.rollback_target_ref }}`.trim(); + const workflowRef = process.env.GITHUB_REF_NAME || "dev"; + + const inputs = { + branch: rollbackBranch, + mode: "execute", + allow_non_ancestor: "false", + fail_on_violation: "true", + create_marker_tag: "true", + emit_repository_dispatch: "true", + }; + + if (rollbackTargetRef.length > 0) { + inputs.target_ref = rollbackTargetRef; + } + + await github.rest.actions.createWorkflowDispatch({ + owner: context.repo.owner, + repo: context.repo.repo, + workflow_id: "ci-rollback.yml", + ref: workflowRef, + inputs, + }); + + - name: Publish rollback trigger summary + if: needs.canary-plan.outputs.decision == 'abort' + shell: bash + run: | + set -euo pipefail + if [ "${{ needs.canary-plan.outputs.trigger_rollback_on_abort }}" = "true" ]; then + { + echo "### Canary Abort Rollback Trigger" + echo "- CI Rollback Guard dispatch: triggered" + echo "- Rollback branch: \`${{ needs.canary-plan.outputs.rollback_branch }}\`" + if [ -n "${{ needs.canary-plan.outputs.rollback_target_ref }}" ]; then + echo "- Rollback target ref: \`${{ needs.canary-plan.outputs.rollback_target_ref }}\`" + else + echo "- Rollback target ref: _auto (latest release tag strategy)_" + fi + } >> "$GITHUB_STEP_SUMMARY" 
+ else + { + echo "### Canary Abort Rollback Trigger" + echo "- CI Rollback Guard dispatch: skipped (trigger_rollback_on_abort=false)" + } >> "$GITHUB_STEP_SUMMARY" + fi diff --git a/.github/workflows/ci-change-audit.yml b/.github/workflows/ci-change-audit.yml new file mode 100644 index 0000000..1fa4970 --- /dev/null +++ b/.github/workflows/ci-change-audit.yml @@ -0,0 +1,154 @@ +name: CI/CD Change Audit + +on: + pull_request: + branches: [dev, main] + paths: + - ".github/workflows/**" + - ".github/release/**" + - ".github/codeql/**" + - "scripts/ci/**" + - ".github/dependabot.yml" + - "deny.toml" + - ".gitleaks.toml" + push: + branches: [dev, main] + paths: + - ".github/workflows/**" + - ".github/release/**" + - ".github/codeql/**" + - "scripts/ci/**" + - ".github/dependabot.yml" + - "deny.toml" + - ".gitleaks.toml" + workflow_dispatch: + inputs: + base_sha: + description: "Optional base SHA (default: HEAD~1)" + required: false + default: "" + type: string + fail_on_policy: + description: "Fail when audit policy violations are found" + required: true + default: true + type: boolean + +concurrency: + group: ci-change-audit-${{ github.event.pull_request.number || github.sha || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + audit: + name: CI Change Audit + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Resolve base/head commits + id: refs + shell: bash + run: | + set -euo pipefail + head_sha="$(git rev-parse HEAD)" + if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then + # For pull_request events, checkout uses refs/pull/*/merge; HEAD^1 is the + # effective base commit for this synthesized merge and avoids stale base.sha. 
+ if git rev-parse --verify HEAD^1 >/dev/null 2>&1; then + base_sha="$(git rev-parse HEAD^1)" + else + base_sha="${{ github.event.pull_request.base.sha }}" + fi + elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then + base_sha="${{ github.event.before }}" + else + base_sha="${{ github.event.inputs.base_sha || '' }}" + if [ -z "$base_sha" ]; then + base_sha="$(git rev-parse HEAD~1)" + fi + fi + echo "base_sha=$base_sha" >> "$GITHUB_OUTPUT" + echo "head_sha=$head_sha" >> "$GITHUB_OUTPUT" + + - name: Run CI helper script unit tests + shell: bash + run: | + set -euo pipefail + python3 -m unittest discover -s scripts/ci/tests -p 'test_*.py' -v + + - name: Generate CI change audit + shell: bash + env: + BASE_SHA: ${{ steps.refs.outputs.base_sha }} + HEAD_SHA: ${{ steps.refs.outputs.head_sha }} + run: | + set -euo pipefail + mkdir -p artifacts + fail_on_policy="true" + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + fail_on_policy="${{ github.event.inputs.fail_on_policy || 'true' }}" + fi + cmd=(python3 scripts/ci/ci_change_audit.py + --base-sha "$BASE_SHA" + --head-sha "$HEAD_SHA" + --output-json artifacts/ci-change-audit.json + --output-md artifacts/ci-change-audit.md) + if [ "$fail_on_policy" = "true" ]; then + cmd+=(--fail-on-violations) + fi + "${cmd[@]}" + + - name: Emit normalized audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ci-change-audit.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type ci_change_audit \ + --input-json artifacts/ci-change-audit.json \ + --output-json artifacts/audit-event-ci-change-audit.json \ + --artifact-name ci-change-audit-event \ + --retention-days 14 + fi + + - name: Upload audit artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: ci-change-audit + path: artifacts/ci-change-audit.* + retention-days: 14 + + - name: Publish audit summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ 
-f artifacts/ci-change-audit.md ]; then + cat artifacts/ci-change-audit.md >> "$GITHUB_STEP_SUMMARY" + else + echo "CI change audit report was not generated." >> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload audit event artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: ci-change-audit-event + path: artifacts/audit-event-ci-change-audit.json + if-no-files-found: ignore + retention-days: 14 diff --git a/.github/workflows/ci-provider-connectivity.yml b/.github/workflows/ci-provider-connectivity.yml new file mode 100644 index 0000000..701f923 --- /dev/null +++ b/.github/workflows/ci-provider-connectivity.yml @@ -0,0 +1,112 @@ +name: CI Provider Connectivity + +on: + schedule: + - cron: "30 */6 * * *" # Every 6 hours + workflow_dispatch: + inputs: + fail_on_critical: + description: "Fail run when critical endpoints are unreachable" + required: true + default: false + type: boolean + pull_request: + branches: [dev, main] + paths: + - ".github/workflows/ci-provider-connectivity.yml" + - ".github/connectivity/providers.json" + - "scripts/ci/provider_connectivity_matrix.py" + push: + branches: [dev, main] + paths: + - ".github/workflows/ci-provider-connectivity.yml" + - ".github/connectivity/providers.json" + - "scripts/ci/provider_connectivity_matrix.py" + +concurrency: + group: provider-connectivity-${{ github.event.pull_request.number || github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + probe: + name: Provider Connectivity Probe + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Run connectivity matrix probe + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + 
fail_on_critical="false" + case "${GITHUB_EVENT_NAME}" in + schedule) + fail_on_critical="true" + ;; + workflow_dispatch) + fail_on_critical="${{ github.event.inputs.fail_on_critical || 'false' }}" + ;; + esac + + cmd=(python3 scripts/ci/provider_connectivity_matrix.py + --config .github/connectivity/providers.json + --output-json artifacts/provider-connectivity-matrix.json + --output-md artifacts/provider-connectivity-matrix.md) + if [ "$fail_on_critical" = "true" ]; then + cmd+=(--fail-on-critical) + fi + "${cmd[@]}" + + - name: Emit normalized audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/provider-connectivity-matrix.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type provider_connectivity \ + --input-json artifacts/provider-connectivity-matrix.json \ + --output-json artifacts/audit-event-provider-connectivity.json \ + --artifact-name provider-connectivity-audit-event \ + --retention-days 14 + fi + + - name: Upload connectivity artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: provider-connectivity-matrix + path: artifacts/provider-connectivity-matrix.* + retention-days: 14 + + - name: Publish summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/provider-connectivity-matrix.md ]; then + cat artifacts/provider-connectivity-matrix.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Provider connectivity report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload audit event artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: provider-connectivity-audit-event + path: artifacts/audit-event-provider-connectivity.json + if-no-files-found: ignore + retention-days: 14 diff --git a/.github/workflows/ci-reproducible-build.yml b/.github/workflows/ci-reproducible-build.yml new file mode 100644 index 0000000..9deb0d6 --- /dev/null +++ b/.github/workflows/ci-reproducible-build.yml @@ -0,0 +1,121 @@ +name: CI Reproducible Build + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "scripts/ci/reproducible_build_check.sh" + - ".github/workflows/ci-reproducible-build.yml" + pull_request: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "scripts/ci/reproducible_build_check.sh" + - ".github/workflows/ci-reproducible-build.yml" + schedule: + - cron: "45 5 * * 1" # Weekly Monday 05:45 UTC + workflow_dispatch: + inputs: + fail_on_drift: + description: "Fail workflow if deterministic hash drift is detected" + required: true + default: true + type: boolean + allow_build_id_drift: + description: "Treat GNU build-id-only drift as non-blocking" + required: true + default: true + type: boolean + +concurrency: + group: repro-build-${{ github.event.pull_request.number || github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + reproducibility: + name: Reproducible Build Probe + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 45 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Setup Rust + uses: 
dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - name: Run reproducible build check + shell: bash + run: | + set -euo pipefail + fail_on_drift="false" + allow_build_id_drift="true" + if [ "${GITHUB_EVENT_NAME}" = "schedule" ]; then + fail_on_drift="true" + elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + fail_on_drift="${{ github.event.inputs.fail_on_drift || 'true' }}" + allow_build_id_drift="${{ github.event.inputs.allow_build_id_drift || 'true' }}" + fi + FAIL_ON_DRIFT="$fail_on_drift" \ + ALLOW_BUILD_ID_DRIFT="$allow_build_id_drift" \ + OUTPUT_DIR="artifacts" \ + ./scripts/ci/reproducible_build_check.sh + + - name: Emit normalized audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/reproducible-build.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type reproducible_build \ + --input-json artifacts/reproducible-build.json \ + --output-json artifacts/audit-event-reproducible-build.json \ + --artifact-name reproducible-build-audit-event \ + --retention-days 14 + fi + + - name: Upload reproducibility artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: reproducible-build + path: artifacts/reproducible-build* + retention-days: 14 + + - name: Upload audit event artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: reproducible-build-audit-event + path: artifacts/audit-event-reproducible-build.json + if-no-files-found: ignore + retention-days: 14 + + - name: Publish summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/reproducible-build.md ]; then + cat artifacts/reproducible-build.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Reproducible build report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi diff --git a/.github/workflows/ci-rollback.yml b/.github/workflows/ci-rollback.yml new file mode 100644 index 0000000..cc6cde2 --- /dev/null +++ b/.github/workflows/ci-rollback.yml @@ -0,0 +1,257 @@ +name: CI Rollback Guard + +on: + workflow_dispatch: + inputs: + branch: + description: "Integration branch this rollback targets" + required: true + default: dev + type: choice + options: + - dev + - main + mode: + description: "dry-run only plans; execute enables rollback marker/dispatch actions" + required: true + default: dry-run + type: choice + options: + - dry-run + - execute + target_ref: + description: "Optional explicit rollback target (tag/sha/ref). Empty = latest matching tag." + required: false + default: "" + type: string + allow_non_ancestor: + description: "Allow target not being ancestor of current head (warning-only)" + required: true + default: false + type: boolean + fail_on_violation: + description: "Fail workflow when guard violations are detected" + required: true + default: true + type: boolean + create_marker_tag: + description: "In execute mode, create and push rollback marker tag" + required: true + default: false + type: boolean + emit_repository_dispatch: + description: "In execute mode, emit repository_dispatch event `rollback_execute`" + required: true + default: false + type: boolean + schedule: + - cron: "15 7 * * 1" # Weekly Monday 07:15 UTC + +concurrency: + group: ci-rollback-${{ github.event.inputs.branch || 'dev' }} + cancel-in-progress: false + +permissions: + contents: read + actions: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + rollback-plan: + name: Rollback Guard Plan + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + outputs: + branch: ${{ steps.plan.outputs.branch }} + mode: ${{ steps.plan.outputs.mode }} + target_sha: ${{ steps.plan.outputs.target_sha }} + 
target_ref: ${{ steps.plan.outputs.target_ref }} + ready_to_execute: ${{ steps.plan.outputs.ready_to_execute }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + ref: ${{ github.event.inputs.branch || 'dev' }} + + - name: Build rollback plan + id: plan + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + + branch_input="dev" + mode_input="dry-run" + target_ref_input="" + allow_non_ancestor="false" + fail_on_violation="true" + + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + branch_input="${{ github.event.inputs.branch || 'dev' }}" + mode_input="${{ github.event.inputs.mode || 'dry-run' }}" + target_ref_input="${{ github.event.inputs.target_ref || '' }}" + allow_non_ancestor="${{ github.event.inputs.allow_non_ancestor || 'false' }}" + fail_on_violation="${{ github.event.inputs.fail_on_violation || 'true' }}" + fi + + cmd=(python3 scripts/ci/rollback_guard.py + --repo-root . + --branch "$branch_input" + --mode "$mode_input" + --strategy latest-release-tag + --tag-pattern "v*" + --output-json artifacts/rollback-plan.json + --output-md artifacts/rollback-plan.md) + + if [ -n "$target_ref_input" ]; then + cmd+=(--target-ref "$target_ref_input") + fi + if [ "$allow_non_ancestor" = "true" ]; then + cmd+=(--allow-non-ancestor) + fi + if [ "$fail_on_violation" = "true" ]; then + cmd+=(--fail-on-violation) + fi + + "${cmd[@]}" + + target_sha="$(python3 - <<'PY' + import json + d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8")) + print(d.get("target_sha", "")) + PY + )" + target_ref="$(python3 - <<'PY' + import json + d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8")) + print(d.get("target_ref", "")) + PY + )" + ready_to_execute="$(python3 - <<'PY' + import json + d = json.load(open("artifacts/rollback-plan.json", "r", encoding="utf-8")) + print(str(d.get("ready_to_execute", False)).lower()) + PY + )" + + { + echo 
"branch=$branch_input" + echo "mode=$mode_input" + echo "target_sha=$target_sha" + echo "target_ref=$target_ref" + echo "ready_to_execute=$ready_to_execute" + } >> "$GITHUB_OUTPUT" + + - name: Emit rollback audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/rollback-plan.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type rollback_guard \ + --input-json artifacts/rollback-plan.json \ + --output-json artifacts/audit-event-rollback-guard.json \ + --artifact-name ci-rollback-plan \ + --retention-days 21 + fi + + - name: Upload rollback artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ci-rollback-plan + path: | + artifacts/rollback-plan.* + artifacts/audit-event-rollback-guard.json + if-no-files-found: ignore + retention-days: 21 + + - name: Publish rollback summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/rollback-plan.md ]; then + cat artifacts/rollback-plan.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Rollback plan markdown report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + rollback-execute: + name: Rollback Execute Actions + needs: [rollback-plan] + if: github.event_name == 'workflow_dispatch' && needs.rollback-plan.outputs.mode == 'execute' && needs.rollback-plan.outputs.ready_to_execute == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + permissions: + contents: write + actions: read + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + ref: ${{ needs.rollback-plan.outputs.branch }} + + - name: Fetch tags + shell: bash + run: | + set -euo pipefail + git fetch --tags --force origin + + - name: Create rollback marker tag + id: marker + if: github.event.inputs.create_marker_tag == 'true' + shell: bash + run: | + set -euo pipefail + target_sha="${{ needs.rollback-plan.outputs.target_sha }}" + if [ -z "$target_sha" ]; then + echo "Rollback guard did not resolve target_sha." + exit 1 + fi + marker_tag="rollback-${{ needs.rollback-plan.outputs.branch }}-${{ github.run_id }}" + git tag -a "$marker_tag" "$target_sha" -m "Rollback marker from run ${{ github.run_id }}" + git push origin "$marker_tag" + echo "marker_tag=$marker_tag" >> "$GITHUB_OUTPUT" + + - name: Emit rollback repository dispatch + if: github.event.inputs.emit_repository_dispatch == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + await github.rest.repos.createDispatchEvent({ + owner: context.repo.owner, + repo: context.repo.repo, + event_type: "rollback_execute", + client_payload: { + branch: "${{ needs.rollback-plan.outputs.branch }}", + target_ref: "${{ needs.rollback-plan.outputs.target_ref }}", + target_sha: "${{ needs.rollback-plan.outputs.target_sha }}", + run_id: context.runId, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + source_sha: context.sha + } + }); + + - name: Publish execute summary + if: always() + shell: bash + run: | + set 
-euo pipefail + { + echo "### Rollback Execute Actions" + echo "- Branch: \`${{ needs.rollback-plan.outputs.branch }}\`" + echo "- Target ref: \`${{ needs.rollback-plan.outputs.target_ref }}\`" + echo "- Target sha: \`${{ needs.rollback-plan.outputs.target_sha }}\`" + if [ -n "${{ steps.marker.outputs.marker_tag || '' }}" ]; then + echo "- Marker tag: \`${{ steps.marker.outputs.marker_tag }}\`" + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/ci-run.yml b/.github/workflows/ci-run.yml new file mode 100644 index 0000000..fd74bf4 --- /dev/null +++ b/.github/workflows/ci-run.yml @@ -0,0 +1,446 @@ +name: CI Run + +on: + push: + branches: [dev, main] + pull_request: + branches: [dev, main] + merge_group: + branches: [dev, main] + +concurrency: + group: ci-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + changes: + name: Detect Change Scope + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + outputs: + docs_only: ${{ steps.scope.outputs.docs_only }} + docs_changed: ${{ steps.scope.outputs.docs_changed }} + rust_changed: ${{ steps.scope.outputs.rust_changed }} + workflow_changed: ${{ steps.scope.outputs.workflow_changed }} + docs_files: ${{ steps.scope.outputs.docs_files }} + base_sha: ${{ steps.scope.outputs.base_sha }} + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Detect docs-only changes + id: scope + shell: bash + env: + EVENT_NAME: ${{ github.event_name }} + BASE_SHA: ${{ github.event_name == 'pull_request' && github.event.pull_request.base.sha || github.event_name == 'merge_group' && github.event.merge_group.base_sha || github.event.before }} + run: ./scripts/ci/detect_change_scope.sh + + lint: + name: Lint Gate (Format + Clippy + Strict Delta) 
+ needs: [changes] + if: needs.changes.outputs.rust_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 25 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + components: rustfmt, clippy + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: ci-run-lint + - name: Run rust quality gate + run: ./scripts/ci/rust_quality_gate.sh + - name: Run strict lint delta gate + env: + BASE_SHA: ${{ needs.changes.outputs.base_sha }} + run: ./scripts/ci/rust_strict_delta_gate.sh + + test: + name: Test + needs: [changes, lint] + if: needs.changes.outputs.rust_changed == 'true' && needs.lint.result == 'success' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: ci-run-test + - name: Run tests + run: cargo test --locked --verbose + + build: + name: Build (Smoke) + needs: [changes] + if: needs.changes.outputs.rust_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: ci-run-build + cache-targets: true + - name: Build binary (smoke check) + run: cargo build --profile release-fast --locked 
--verbose + - name: Check binary size + run: bash scripts/ci/check_binary_size.sh target/release-fast/zeroclaw + + flake-probe: + name: Test Flake Retry Probe + needs: [changes, lint, test] + if: always() && needs.changes.outputs.rust_changed == 'true' && (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'ci:full')) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 25 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: ci-run-flake-probe + - name: Probe flaky failure via single retry + shell: bash + env: + INITIAL_TEST_RESULT: ${{ needs.test.result }} + BLOCK_ON_FLAKE: ${{ vars.CI_BLOCK_ON_FLAKE_SUSPECTED || 'false' }} + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/flake_retry_probe.py \ + --initial-result "${INITIAL_TEST_RESULT}" \ + --retry-command "cargo test --locked --verbose" \ + --output-json artifacts/flake-probe.json \ + --output-md artifacts/flake-probe.md \ + --block-on-flake "${BLOCK_ON_FLAKE}" + - name: Publish flake probe summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/flake-probe.md ]; then + cat artifacts/flake-probe.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Flake probe report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + - name: Upload flake probe artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: test-flake-probe + path: artifacts/flake-probe.* + if-no-files-found: ignore + retention-days: 14 + + docs-only: + name: Docs-Only Fast Path + needs: [changes] + if: needs.changes.outputs.docs_only == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Skip heavy jobs for docs-only change + run: echo "Docs-only change detected. Rust lint/test/build skipped." + + non-rust: + name: Non-Rust Fast Path + needs: [changes] + if: needs.changes.outputs.docs_only != 'true' && needs.changes.outputs.rust_changed != 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Skip Rust jobs for non-Rust change scope + run: echo "No Rust-impacting files changed. Rust lint/test/build skipped." + + docs-quality: + name: Docs Quality + needs: [changes] + if: needs.changes.outputs.docs_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Markdown lint (changed lines only) + env: + BASE_SHA: ${{ needs.changes.outputs.base_sha }} + DOCS_FILES: ${{ needs.changes.outputs.docs_files }} + run: ./scripts/ci/docs_quality_gate.sh + + - name: Collect added links + id: collect_links + shell: bash + env: + BASE_SHA: ${{ needs.changes.outputs.base_sha }} + DOCS_FILES: ${{ needs.changes.outputs.docs_files }} + run: | + set -euo pipefail + python3 ./scripts/ci/collect_changed_links.py \ + --base "$BASE_SHA" \ + --docs-files "$DOCS_FILES" \ + --output .ci-added-links.txt + count=$(wc -l < .ci-added-links.txt | tr -d ' ') + echo "count=$count" >> "$GITHUB_OUTPUT" + if [ "$count" -gt 0 ]; then + echo "Added links queued 
for check:" + cat .ci-added-links.txt + else + echo "No added links found in changed docs lines." + fi + + - name: Link check (offline, added links only) + if: steps.collect_links.outputs.count != '0' + uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2 + with: + fail: true + args: >- + --offline + --no-progress + --format detailed + .ci-added-links.txt + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Skip link check (no added links) + if: steps.collect_links.outputs.count == '0' + run: echo "No added links in changed docs lines. Link check skipped." + + lint-feedback: + name: Lint Feedback + if: github.event_name == 'pull_request' + needs: [changes, lint, docs-quality] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: write + issues: write + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Post actionable lint failure summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + RUST_CHANGED: ${{ needs.changes.outputs.rust_changed }} + DOCS_CHANGED: ${{ needs.changes.outputs.docs_changed }} + LINT_RESULT: ${{ needs.lint.result }} + LINT_DELTA_RESULT: ${{ needs.lint.result }} + DOCS_RESULT: ${{ needs.docs-quality.result }} + with: + script: | + const script = require('./.github/workflows/scripts/lint_feedback.js'); + await script({github, context, core}); + + workflow-owner-approval: + name: Workflow Owner Approval + needs: [changes] + if: github.event_name == 'pull_request' && needs.changes.outputs.workflow_changed == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: read + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Require owner approval for workflow file changes + uses: 
actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + WORKFLOW_OWNER_LOGINS: ${{ vars.WORKFLOW_OWNER_LOGINS }} + with: + script: | + const script = require('./.github/workflows/scripts/ci_workflow_owner_approval.js'); + await script({ github, context, core }); + + human-review-approval: + name: Human Review Approval + needs: [changes] + if: github.event_name == 'pull_request' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: read + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ github.event.pull_request.base.sha }} + + - name: Require at least one human approving review + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + HUMAN_REVIEW_BOT_LOGINS: ${{ vars.HUMAN_REVIEW_BOT_LOGINS }} + with: + script: | + const script = require('./.github/workflows/scripts/ci_human_review_guard.js'); + await script({ github, context, core }); + + license-file-owner-guard: + name: License File Owner Guard + needs: [changes] + if: github.event_name == 'pull_request' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: read + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Enforce owner-only edits for root license files + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/ci_license_file_owner_guard.js'); + await script({ github, context, core }); + ci-required: + name: CI Required Gate + if: always() + needs: [changes, lint, test, build, flake-probe, docs-only, non-rust, docs-quality, lint-feedback, workflow-owner-approval, human-review-approval, license-file-owner-guard] + runs-on: [self-hosted, Linux, X64, aws-india, 
blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Enforce required status + shell: bash + run: | + set -euo pipefail + + event_name="${{ github.event_name }}" + rust_changed="${{ needs.changes.outputs.rust_changed }}" + docs_changed="${{ needs.changes.outputs.docs_changed }}" + workflow_changed="${{ needs.changes.outputs.workflow_changed }}" + docs_result="${{ needs.docs-quality.result }}" + workflow_owner_result="${{ needs.workflow-owner-approval.result }}" + human_review_result="${{ needs.human-review-approval.result }}" + license_owner_result="${{ needs.license-file-owner-guard.result }}" + + if [ "${{ needs.changes.outputs.docs_only }}" = "true" ]; then + echo "workflow_owner_approval=${workflow_owner_result}" + echo "human_review_approval=${human_review_result}" + echo "license_file_owner_guard=${license_owner_result}" + if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then + echo "Workflow files changed but workflow owner approval gate did not pass." + exit 1 + fi + if [ "$event_name" = "pull_request" ] && [ "$human_review_result" != "success" ]; then + echo "Human review approval guard did not pass." + exit 1 + fi + if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then + echo "License file owner guard did not pass." + exit 1 + fi + if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then + echo "Docs-only change detected, but docs-quality did not pass." + exit 1 + fi + echo "Docs-only fast path passed." 
+ exit 0 + fi + + if [ "$rust_changed" != "true" ]; then + echo "rust_changed=false (non-rust fast path)" + echo "workflow_owner_approval=${workflow_owner_result}" + echo "human_review_approval=${human_review_result}" + echo "license_file_owner_guard=${license_owner_result}" + if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then + echo "Workflow files changed but workflow owner approval gate did not pass." + exit 1 + fi + if [ "$event_name" = "pull_request" ] && [ "$human_review_result" != "success" ]; then + echo "Human review approval guard did not pass." + exit 1 + fi + if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then + echo "License file owner guard did not pass." + exit 1 + fi + if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then + echo "Non-rust change touched docs, but docs-quality did not pass." + exit 1 + fi + echo "Non-rust fast path passed." + exit 0 + fi + + lint_result="${{ needs.lint.result }}" + lint_strict_delta_result="${{ needs.lint.result }}" + test_result="${{ needs.test.result }}" + build_result="${{ needs.build.result }}" + flake_result="${{ needs.flake-probe.result }}" + + echo "lint=${lint_result}" + echo "lint_strict_delta=${lint_strict_delta_result}" + echo "test=${test_result}" + echo "build=${build_result}" + echo "flake_probe=${flake_result}" + echo "docs=${docs_result}" + echo "workflow_owner_approval=${workflow_owner_result}" + echo "human_review_approval=${human_review_result}" + echo "license_file_owner_guard=${license_owner_result}" + + if [ "$event_name" = "pull_request" ] && [ "$workflow_changed" = "true" ] && [ "$workflow_owner_result" != "success" ]; then + echo "Workflow files changed but workflow owner approval gate did not pass." + exit 1 + fi + + if [ "$event_name" = "pull_request" ] && [ "$human_review_result" != "success" ]; then + echo "Human review approval guard did not pass." 
+ exit 1 + fi + + if [ "$event_name" = "pull_request" ] && [ "$license_owner_result" != "success" ]; then + echo "License file owner guard did not pass." + exit 1 + fi + + if [ "$event_name" = "pull_request" ]; then + if [ "$lint_result" != "success" ] || [ "$lint_strict_delta_result" != "success" ] || [ "$test_result" != "success" ] || [ "$build_result" != "success" ]; then + echo "Required PR CI jobs did not pass." + exit 1 + fi + if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then + echo "PR changed docs, but docs-quality did not pass." + exit 1 + fi + echo "PR required checks passed." + exit 0 + fi + + if [ "$lint_result" != "success" ] || [ "$lint_strict_delta_result" != "success" ] || [ "$test_result" != "success" ] || [ "$build_result" != "success" ]; then + echo "Required push CI jobs did not pass." + exit 1 + fi + + if [ "$flake_result" != "success" ]; then + echo "Flake probe did not pass under current blocking policy." + exit 1 + fi + + if [ "$docs_changed" = "true" ] && [ "$docs_result" != "success" ]; then + echo "Push changed docs, but docs-quality did not pass." + exit 1 + fi + + echo "Push required checks passed." 
diff --git a/.github/workflows/ci-supply-chain-provenance.yml b/.github/workflows/ci-supply-chain-provenance.yml new file mode 100644 index 0000000..55eb28c --- /dev/null +++ b/.github/workflows/ci-supply-chain-provenance.yml @@ -0,0 +1,110 @@ +name: CI Supply Chain Provenance + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "scripts/ci/generate_provenance.py" + - ".github/workflows/ci-supply-chain-provenance.yml" + workflow_dispatch: + schedule: + - cron: "20 6 * * 1" # Weekly Monday 06:20 UTC + +concurrency: + group: supply-chain-provenance-${{ github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + id-token: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + provenance: + name: Build + Provenance Bundle + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 35 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Setup Rust + uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - name: Build release-fast artifact + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + host_target="$(rustc -vV | sed -n 's/^host: //p')" + cargo build --profile release-fast --locked --target "$host_target" + cp "target/${host_target}/release-fast/zeroclaw" "artifacts/zeroclaw-${host_target}" + sha256sum "artifacts/zeroclaw-${host_target}" > "artifacts/zeroclaw-${host_target}.sha256" + + - name: Generate provenance statement + shell: bash + run: | + set -euo pipefail + host_target="$(rustc -vV | sed -n 's/^host: //p')" + python3 scripts/ci/generate_provenance.py \ + --artifact "artifacts/zeroclaw-${host_target}" \ + --subject-name "zeroclaw-${host_target}" \ + --output "artifacts/provenance-${host_target}.intoto.json" 
+ + - name: Install cosign + uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0 + + - name: Sign provenance bundle + shell: bash + run: | + set -euo pipefail + host_target="$(rustc -vV | sed -n 's/^host: //p')" + statement="artifacts/provenance-${host_target}.intoto.json" + cosign sign-blob --yes \ + --bundle="${statement}.sigstore.json" \ + --output-signature="${statement}.sig" \ + --output-certificate="${statement}.pem" \ + "${statement}" + + - name: Emit normalized audit event + shell: bash + run: | + set -euo pipefail + host_target="$(rustc -vV | sed -n 's/^host: //p')" + python3 scripts/ci/emit_audit_event.py \ + --event-type supply_chain_provenance \ + --input-json "artifacts/provenance-${host_target}.intoto.json" \ + --output-json "artifacts/audit-event-supply-chain-provenance.json" \ + --artifact-name supply-chain-provenance \ + --retention-days 30 + + - name: Upload provenance artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: supply-chain-provenance + path: artifacts/* + retention-days: 30 + + - name: Publish summary + shell: bash + run: | + set -euo pipefail + host_target="$(rustc -vV | sed -n 's/^host: //p')" + { + echo "### Supply Chain Provenance" + echo "- Target: \`${host_target}\`" + echo "- Artifact: \`artifacts/zeroclaw-${host_target}\`" + echo "- Statement: \`artifacts/provenance-${host_target}.intoto.json\`" + echo "- Signature: \`artifacts/provenance-${host_target}.intoto.json.sig\`" + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/deploy-web.yml b/.github/workflows/deploy-web.yml new file mode 100644 index 0000000..8ad35e6 --- /dev/null +++ b/.github/workflows/deploy-web.yml @@ -0,0 +1,56 @@ +name: Deploy Web to GitHub Pages + +on: + push: + branches: [main] + paths: + - 'web/**' + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + build: + 
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 + + - name: Setup Node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + + - name: Install dependencies + working-directory: ./web + run: npm ci + + - name: Build + working-directory: ./web + run: npm run build + + - name: Setup Pages + uses: actions/configure-pages@1f0c5cde4bc74cd7e1254d0cb4de8d49e9068c7d + + - name: Upload artifact + uses: actions/upload-pages-artifact@56afc609e74202658d3ffba0e8f6dda462b719fa + with: + path: ./web/dist + + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + needs: build + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e diff --git a/.github/workflows/docs-deploy.yml b/.github/workflows/docs-deploy.yml new file mode 100644 index 0000000..a344c7b --- /dev/null +++ b/.github/workflows/docs-deploy.yml @@ -0,0 +1,291 @@ +name: Docs Deploy + +on: + pull_request: + branches: [dev, main] + paths: + - "docs/**" + - "README*.md" + - ".github/workflows/docs-deploy.yml" + - "scripts/ci/docs_quality_gate.sh" + - "scripts/ci/collect_changed_links.py" + - ".github/release/docs-deploy-policy.json" + - "scripts/ci/docs_deploy_guard.py" + push: + branches: [dev, main] + paths: + - "docs/**" + - "README*.md" + - ".github/workflows/docs-deploy.yml" + - "scripts/ci/docs_quality_gate.sh" + - "scripts/ci/collect_changed_links.py" + - ".github/release/docs-deploy-policy.json" + - "scripts/ci/docs_deploy_guard.py" + workflow_dispatch: + inputs: + deploy_target: + description: "preview uploads artifact only; production deploys to Pages" + required: true + default: preview + type: choice + options: + - preview + - production + 
preview_evidence_run_url: + description: "Required for manual production deploys when policy enforces preview promotion evidence" + required: false + default: "" + rollback_ref: + description: "Optional rollback source ref (tag/sha/ref) for manual production dispatch" + required: false + default: "" + +concurrency: + group: docs-deploy-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + docs-quality: + name: Docs Quality Gate + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + outputs: + docs_files: ${{ steps.scope.outputs.docs_files }} + base_sha: ${{ steps.scope.outputs.base_sha }} + deploy_target: ${{ steps.deploy_guard.outputs.deploy_target }} + deploy_mode: ${{ steps.deploy_guard.outputs.deploy_mode }} + source_ref: ${{ steps.deploy_guard.outputs.source_ref }} + production_branch_ref: ${{ steps.deploy_guard.outputs.production_branch_ref }} + ready_to_deploy: ${{ steps.deploy_guard.outputs.ready_to_deploy }} + docs_preview_retention_days: ${{ steps.deploy_guard.outputs.docs_preview_retention_days }} + docs_guard_artifact_retention_days: ${{ steps.deploy_guard.outputs.docs_guard_artifact_retention_days }} + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Resolve docs diff scope + id: scope + shell: bash + run: | + set -euo pipefail + + base_sha="" + docs_files="" + + if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then + base_sha="${{ github.event.pull_request.base.sha }}" + docs_files="$(git diff --name-only "$base_sha" HEAD | awk '/\.md$|\.mdx$|^README/ {print}')" + elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then + base_sha="${{ github.event.before }}" + if [ -n "$base_sha" ] && [ "$base_sha" != "0000000000000000000000000000000000000000" ]; then + docs_files="$(git 
diff --name-only "$base_sha" HEAD | awk '/\.md$|\.mdx$|^README/ {print}')" + fi + else + docs_files="$(git ls-files 'docs/**/*.md' 'README*.md')" + fi + + { + echo "base_sha=${base_sha}" + echo "docs_files<<EOF" + echo "${docs_files}" + echo "EOF" + } >> "$GITHUB_OUTPUT" + + - name: Validate docs deploy contract + id: deploy_guard + shell: bash + env: + INPUT_DEPLOY_TARGET: ${{ github.event.inputs.deploy_target || '' }} + INPUT_PREVIEW_EVIDENCE_RUN_URL: ${{ github.event.inputs.preview_evidence_run_url || '' }} + INPUT_ROLLBACK_REF: ${{ github.event.inputs.rollback_ref || '' }} + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/docs_deploy_guard.py \ + --repo-root "$PWD" \ + --event-name "${GITHUB_EVENT_NAME}" \ + --git-ref "${GITHUB_REF}" \ + --git-sha "${GITHUB_SHA}" \ + --input-deploy-target "${INPUT_DEPLOY_TARGET}" \ + --input-preview-evidence-run-url "${INPUT_PREVIEW_EVIDENCE_RUN_URL}" \ + --input-rollback-ref "${INPUT_ROLLBACK_REF}" \ + --policy-file .github/release/docs-deploy-policy.json \ + --output-json artifacts/docs-deploy-guard.json \ + --output-md artifacts/docs-deploy-guard.md \ + --github-output-file "$GITHUB_OUTPUT" \ + --fail-on-violation + + - name: Emit docs deploy guard audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/docs-deploy-guard.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type docs_deploy_guard \ + --input-json artifacts/docs-deploy-guard.json \ + --output-json artifacts/audit-event-docs-deploy-guard.json \ + --artifact-name docs-deploy-guard \ + --retention-days 21 + fi + + - name: Publish docs deploy guard summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/docs-deploy-guard.md ]; then + cat artifacts/docs-deploy-guard.md >> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload docs deploy guard artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: docs-deploy-guard + path: | + 
artifacts/docs-deploy-guard.json + artifacts/docs-deploy-guard.md + artifacts/audit-event-docs-deploy-guard.json + if-no-files-found: ignore + retention-days: ${{ steps.deploy_guard.outputs.docs_guard_artifact_retention_days || 21 }} + + - name: Markdown quality gate + env: + BASE_SHA: ${{ steps.scope.outputs.base_sha }} + DOCS_FILES: ${{ steps.scope.outputs.docs_files }} + run: ./scripts/ci/docs_quality_gate.sh + + - name: Collect added links + id: links + if: github.event_name != 'workflow_dispatch' + shell: bash + env: + BASE_SHA: ${{ steps.scope.outputs.base_sha }} + DOCS_FILES: ${{ steps.scope.outputs.docs_files }} + run: | + set -euo pipefail + python3 ./scripts/ci/collect_changed_links.py \ + --base "$BASE_SHA" \ + --docs-files "$DOCS_FILES" \ + --output .ci-added-links.txt + count=$(wc -l < .ci-added-links.txt | tr -d ' ') + echo "count=$count" >> "$GITHUB_OUTPUT" + + - name: Link check (added links) + if: github.event_name != 'workflow_dispatch' && steps.links.outputs.count != '0' + uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # v2 + with: + fail: true + args: >- + --offline + --no-progress + --format detailed + .ci-added-links.txt + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Skip link check (none added) + if: github.event_name != 'workflow_dispatch' && steps.links.outputs.count == '0' + run: echo "No added links detected in changed docs lines." + + docs-preview: + name: Docs Preview Artifact + needs: [docs-quality] + if: github.event_name == 'pull_request' || (github.event_name == 'workflow_dispatch' && github.event.inputs.deploy_target == 'preview') + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Build preview bundle + shell: bash + run: | + set -euo pipefail + rm -rf site + mkdir -p site/docs + cp -R docs/. 
site/docs/ + cp README.md site/README.md + cat > site/index.md <<'EOF' + # ZeroClaw Docs Preview + + This preview bundle is produced by `.github/workflows/docs-deploy.yml`. + + - [Repository README](./README.md) + - [Docs Home](./docs/README.md) + EOF + + - name: Upload preview artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: docs-preview + path: site/** + if-no-files-found: error + retention-days: ${{ needs.docs-quality.outputs.docs_preview_retention_days || 14 }} + + docs-deploy: + name: Deploy Docs to GitHub Pages + needs: [docs-quality] + if: needs.docs-quality.outputs.deploy_target == 'production' && needs.docs-quality.outputs.ready_to_deploy == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + permissions: + contents: read + pages: write + id-token: write + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.docs-quality.outputs.source_ref }} + + - name: Build deploy bundle + shell: bash + run: | + set -euo pipefail + rm -rf site + mkdir -p site/docs + cp -R docs/. site/docs/ + cp README.md site/README.md + cat > site/index.md <<'EOF' + # ZeroClaw Documentation + + This site is deployed automatically from `main` by `.github/workflows/docs-deploy.yml`. 
+ + - [Repository README](./README.md) + - [Docs Home](./docs/README.md) + EOF + + - name: Publish deploy source summary + shell: bash + run: | + { + echo "## Docs Deploy Source" + echo "- Deploy mode: \`${{ needs.docs-quality.outputs.deploy_mode }}\`" + echo "- Source ref: \`${{ needs.docs-quality.outputs.source_ref }}\`" + echo "- Production branch ref: \`${{ needs.docs-quality.outputs.production_branch_ref }}\`" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Setup Pages + uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b # v5 + + - name: Upload Pages artifact + uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4 + with: + path: site + + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4 diff --git a/.github/workflows/feature-matrix.yml b/.github/workflows/feature-matrix.yml new file mode 100644 index 0000000..8002724 --- /dev/null +++ b/.github/workflows/feature-matrix.yml @@ -0,0 +1,382 @@ +name: Feature Matrix + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "tests/**" + - "scripts/ci/nightly_matrix_report.py" + - ".github/release/nightly-owner-routing.json" + - ".github/workflows/feature-matrix.yml" + pull_request: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "tests/**" + - "scripts/ci/nightly_matrix_report.py" + - ".github/release/nightly-owner-routing.json" + - ".github/workflows/feature-matrix.yml" + merge_group: + branches: [dev, main] + schedule: + - cron: "30 4 * * 1" # Weekly Monday 04:30 UTC + - cron: "15 3 * * *" # Daily 03:15 UTC (nightly profile) + workflow_dispatch: + inputs: + profile: + description: "compile = merge-gate matrix, nightly = integration-oriented lane commands" + required: true + default: compile + type: choice + options: + - compile + - nightly + fail_on_failure: + description: "Fail summary job 
when any lane fails" + required: true + default: true + type: boolean + +concurrency: + group: feature-matrix-${{ github.event.pull_request.number || github.ref || github.run_id }}-${{ github.event.inputs.profile || 'auto' }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + resolve-profile: + name: Resolve Matrix Profile + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + outputs: + profile: ${{ steps.resolve.outputs.profile }} + lane_job_prefix: ${{ steps.resolve.outputs.lane_job_prefix }} + summary_job_name: ${{ steps.resolve.outputs.summary_job_name }} + lane_retention_days: ${{ steps.resolve.outputs.lane_retention_days }} + lane_timeout_minutes: ${{ steps.resolve.outputs.lane_timeout_minutes }} + max_attempts: ${{ steps.resolve.outputs.max_attempts }} + summary_artifact_name: ${{ steps.resolve.outputs.summary_artifact_name }} + summary_json_name: ${{ steps.resolve.outputs.summary_json_name }} + summary_md_name: ${{ steps.resolve.outputs.summary_md_name }} + lane_artifact_prefix: ${{ steps.resolve.outputs.lane_artifact_prefix }} + fail_on_failure: ${{ steps.resolve.outputs.fail_on_failure }} + collect_history: ${{ steps.resolve.outputs.collect_history }} + steps: + - name: Resolve effective profile + id: resolve + shell: bash + run: | + set -euo pipefail + + profile="compile" + fail_on_failure="true" + lane_job_prefix="Matrix Lane" + summary_job_name="Feature Matrix Summary" + lane_retention_days="21" + lane_timeout_minutes="55" + max_attempts="1" + summary_artifact_name="feature-matrix-summary" + summary_json_name="feature-matrix-summary.json" + summary_md_name="feature-matrix-summary.md" + lane_artifact_prefix="feature-matrix" + collect_history="false" + + if [ "${GITHUB_EVENT_NAME}" = "schedule" ] && [ "${{ github.event.schedule }}" = "15 3 * * *" ]; then + profile="nightly" + 
elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + profile="${{ github.event.inputs.profile || 'compile' }}" + fail_on_failure="${{ github.event.inputs.fail_on_failure || 'true' }}" + fi + + if [ "$profile" = "nightly" ]; then + lane_job_prefix="Nightly Lane" + summary_job_name="Nightly Summary & Routing" + lane_retention_days="30" + lane_timeout_minutes="70" + max_attempts="2" + summary_artifact_name="nightly-all-features-summary" + summary_json_name="nightly-summary.json" + summary_md_name="nightly-summary.md" + lane_artifact_prefix="nightly-lane" + collect_history="true" + fi + + { + echo "profile=${profile}" + echo "lane_job_prefix=${lane_job_prefix}" + echo "summary_job_name=${summary_job_name}" + echo "lane_retention_days=${lane_retention_days}" + echo "lane_timeout_minutes=${lane_timeout_minutes}" + echo "max_attempts=${max_attempts}" + echo "summary_artifact_name=${summary_artifact_name}" + echo "summary_json_name=${summary_json_name}" + echo "summary_md_name=${summary_md_name}" + echo "lane_artifact_prefix=${lane_artifact_prefix}" + echo "fail_on_failure=${fail_on_failure}" + echo "collect_history=${collect_history}" + } >> "$GITHUB_OUTPUT" + + feature-check: + name: ${{ needs.resolve-profile.outputs.lane_job_prefix }} (${{ matrix.name }}) + needs: [resolve-profile] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: ${{ fromJSON(needs.resolve-profile.outputs.lane_timeout_minutes) }} + strategy: + fail-fast: false + matrix: + include: + - name: default + compile_command: cargo check --locked + nightly_command: cargo test --locked --test agent_e2e --verbose + install_libudev: false + - name: whatsapp-web + compile_command: cargo check --locked --no-default-features --features whatsapp-web + nightly_command: cargo check --locked --no-default-features --features whatsapp-web --verbose + install_libudev: false + - name: browser-native + compile_command: cargo check --locked --no-default-features 
--features browser-native + nightly_command: cargo check --locked --no-default-features --features browser-native --verbose + install_libudev: false + - name: nightly-all-features + compile_command: cargo check --locked --all-features + nightly_command: cargo test --locked --all-features --test agent_e2e --verbose + install_libudev: true + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: feature-matrix-${{ matrix.name }} + + - name: Ensure Linux deps for all-features lane + if: matrix.install_libudev + shell: bash + run: | + set -euo pipefail + + if command -v pkg-config >/dev/null 2>&1 && pkg-config --exists libudev; then + echo "libudev development headers already available; skipping apt install." + exit 0 + fi + + echo "Installing missing libudev build dependencies..." + for attempt in 1 2 3; do + if sudo apt-get update -qq -o DPkg::Lock::Timeout=300 && \ + sudo apt-get install -y --no-install-recommends --no-upgrade -o DPkg::Lock::Timeout=300 libudev-dev pkg-config; then + echo "Dependency installation succeeded on attempt ${attempt}." + exit 0 + fi + if [ "$attempt" -eq 3 ]; then + echo "Failed to install libudev-dev/pkg-config after ${attempt} attempts." >&2 + exit 1 + fi + echo "Dependency installation failed on attempt ${attempt}; retrying in 10s..." 
+ sleep 10 + done + + - name: Run matrix lane command + id: lane + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + + profile="${{ needs.resolve-profile.outputs.profile }}" + lane_command="${{ matrix.compile_command }}" + if [ "$profile" = "nightly" ]; then + lane_command="${{ matrix.nightly_command }}" + fi + + max_attempts="${{ needs.resolve-profile.outputs.max_attempts }}" + attempt=1 + status=1 + + started_at="$(date +%s)" + while [ "$attempt" -le "$max_attempts" ]; do + echo "Running lane command (attempt ${attempt}/${max_attempts}): ${lane_command}" + set +e + bash -lc "${lane_command}" + status=$? + set -e + if [ "$status" -eq 0 ]; then + break + fi + if [ "$attempt" -lt "$max_attempts" ]; then + sleep 5 + fi + attempt="$((attempt + 1))" + done + finished_at="$(date +%s)" + duration="$((finished_at - started_at))" + + lane_status="success" + if [ "$status" -ne 0 ]; then + lane_status="failure" + fi + + cat > "artifacts/nightly-result-${{ matrix.name }}.json" <> "$GITHUB_STEP_SUMMARY" + + echo "lane_status=${lane_status}" >> "$GITHUB_OUTPUT" + echo "lane_exit_code=${status}" >> "$GITHUB_OUTPUT" + + - name: Upload lane report + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ${{ needs.resolve-profile.outputs.lane_artifact_prefix }}-${{ matrix.name }} + path: artifacts/nightly-result-${{ matrix.name }}.json + if-no-files-found: error + retention-days: ${{ fromJSON(needs.resolve-profile.outputs.lane_retention_days) }} + + - name: Enforce lane success + if: steps.lane.outputs.lane_status != 'success' + shell: bash + run: | + set -euo pipefail + code="${{ steps.lane.outputs.lane_exit_code }}" + if [[ "$code" =~ ^[0-9]+$ ]]; then + # shellcheck disable=SC2242 + exit "$code" + fi + echo "Invalid lane exit code: $code" >&2 + exit 1 + + summary: + name: ${{ needs.resolve-profile.outputs.summary_job_name }} + needs: [resolve-profile, feature-check] + if: always() + runs-on: [self-hosted, 
Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Download lane reports + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: artifacts + + - name: Collect recent nightly history + if: needs.resolve-profile.outputs.collect_history == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const fs = require("fs"); + const path = require("path"); + + const workflowId = "feature-matrix.yml"; + const owner = context.repo.owner; + const repo = context.repo.repo; + + const events = ["schedule", "workflow_dispatch"]; + let runs = []; + for (const event of events) { + const resp = await github.rest.actions.listWorkflowRuns({ + owner, + repo, + workflow_id: workflowId, + branch: "dev", + event, + per_page: 20, + }); + runs = runs.concat(resp.data.workflow_runs || []); + } + + const currentRunId = context.runId; + runs = runs + .filter((run) => run.id !== currentRunId && run.status === "completed") + .sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()) + .slice(0, 3) + .map((run) => ({ + run_id: run.id, + url: run.html_url, + event: run.event, + conclusion: run.conclusion || "unknown", + created_at: run.created_at, + head_sha: run.head_sha, + display_title: run.display_title || "", + })); + + fs.mkdirSync("artifacts", { recursive: true }); + fs.writeFileSync( + path.join("artifacts", "nightly-history.json"), + `${JSON.stringify(runs, null, 2)}\n`, + { encoding: "utf8" } + ); + + - name: Aggregate matrix summary + shell: bash + run: | + set -euo pipefail + args=( + --input-dir artifacts + --owners-file .github/release/nightly-owner-routing.json + --output-json "artifacts/${{ needs.resolve-profile.outputs.summary_json_name }}" + --output-md "artifacts/${{ needs.resolve-profile.outputs.summary_md_name }}" + ) + + if [ "${{ 
needs.resolve-profile.outputs.collect_history }}" = "true" ] && [ -f artifacts/nightly-history.json ]; then + args+=(--history-file artifacts/nightly-history.json) + fi + + if [ "${{ needs.resolve-profile.outputs.fail_on_failure }}" = "true" ]; then + args+=(--fail-on-failure) + fi + + python3 scripts/ci/nightly_matrix_report.py "${args[@]}" + + - name: Publish summary + shell: bash + run: | + set -euo pipefail + cat "artifacts/${{ needs.resolve-profile.outputs.summary_md_name }}" >> "$GITHUB_STEP_SUMMARY" + + - name: Upload summary artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ${{ needs.resolve-profile.outputs.summary_artifact_name }} + path: | + artifacts/${{ needs.resolve-profile.outputs.summary_json_name }} + artifacts/${{ needs.resolve-profile.outputs.summary_md_name }} + artifacts/nightly-history.json + if-no-files-found: error + retention-days: ${{ fromJSON(needs.resolve-profile.outputs.lane_retention_days) }} diff --git a/.github/workflows/main-branch-flow.md b/.github/workflows/main-branch-flow.md new file mode 100644 index 0000000..07cb147 --- /dev/null +++ b/.github/workflows/main-branch-flow.md @@ -0,0 +1,266 @@ +# Main Branch Delivery Flows + +This document explains what runs when code is proposed to `dev`/`main`, merged to `main`, and released. 
+ +Use this with: + +- [`docs/ci-map.md`](../../docs/ci-map.md) +- [`docs/pr-workflow.md`](../../docs/pr-workflow.md) +- [`docs/release-process.md`](../../docs/release-process.md) + +## Event Summary + +| Event | Main workflows | +| --- | --- | +| PR activity (`pull_request_target`) | `pr-intake-checks.yml`, `pr-labeler.yml`, `pr-auto-response.yml` | +| PR activity (`pull_request`) | `ci-run.yml`, `sec-audit.yml`, plus path-scoped workflows | +| Push to `dev`/`main` | `ci-run.yml`, `sec-audit.yml`, plus path-scoped workflows | +| Tag push (`v*`) | `pub-release.yml` publish mode, `pub-docker-img.yml` publish job | +| Scheduled/manual | `pub-release.yml` verification mode, `sec-codeql.yml`, `feature-matrix.yml`, `test-fuzz.yml`, `pr-check-stale.yml`, `pr-check-status.yml`, `sync-contributors.yml`, `test-benchmarks.yml`, `test-e2e.yml` | + +## Runtime and Docker Matrix + +Observed averages below are from recent completed runs (sampled from GitHub Actions on February 17, 2026). Values are directional, not SLA. + +| Workflow | Typical trigger in main flow | Avg runtime | Docker build? | Docker run? | Docker push? 
| +| --- | --- | ---:| --- | --- | --- | +| `pr-intake-checks.yml` | PR open/update (`pull_request_target`) | 14.5s | No | No | No | +| `pr-labeler.yml` | PR open/update (`pull_request_target`) | 53.7s | No | No | No | +| `pr-auto-response.yml` | PR/issue automation | 24.3s | No | No | No | +| `ci-run.yml` | PR + push to `dev`/`main` | 74.7s | No | No | No | +| `sec-audit.yml` | PR + push to `dev`/`main` | 127.2s | No | No | No | +| `workflow-sanity.yml` | Workflow-file changes | 34.2s | No | No | No | +| `pr-label-policy-check.yml` | Label policy/automation changes | 14.7s | No | No | No | +| `pub-docker-img.yml` (`pull_request`) | Docker build-input PR changes | 240.4s | Yes | Yes | No | +| `pub-docker-img.yml` (`push`) | tag push `v*` | 139.9s | Yes | No | Yes | +| `pub-release.yml` | Tag push `v*` (publish) + manual/scheduled verification (no publish) | N/A in recent sample | No | No | No | + +Notes: + +1. `pub-docker-img.yml` is the only workflow in the main PR/push path that builds Docker images. +2. Container runtime verification (`docker run`) occurs in PR smoke only. +3. Container registry push occurs on tag pushes (`v*`) only. +4. `ci-run.yml` "Build (Smoke)" builds Rust binaries, not Docker images. + +## Step-By-Step + +### 1) PR from branch in this repository -> `dev` + +1. Contributor opens or updates PR against `dev`. +2. `pull_request_target` automation runs (typical runtime): + - `pr-intake-checks.yml` posts intake warnings/errors. + - `pr-labeler.yml` sets size/risk/scope labels. + - `pr-auto-response.yml` runs first-interaction and label routes. +3. 
`pull_request` CI workflows start: + - `ci-run.yml` + - `feature-matrix.yml` (Rust/workflow path scope) + - `sec-audit.yml` + - `sec-codeql.yml` (if Rust/codeql paths changed) + - path-scoped workflows if matching files changed: + - `pub-docker-img.yml` (Docker build-input paths only) + - `docs-deploy.yml` (docs + README markdown paths; deploy contract guard enforces promotion + rollback ref policy) + - `workflow-sanity.yml` (workflow files only) + - `pr-label-policy-check.yml` (label-policy files only) + - `ci-change-audit.yml` (CI/security path changes) + - `ci-provider-connectivity.yml` (probe config/script/workflow changes) + - `ci-reproducible-build.yml` (Rust/build reproducibility paths) +4. In `ci-run.yml`, `changes` computes: + - `docs_only` + - `docs_changed` + - `rust_changed` + - `workflow_changed` +5. `build` runs for Rust-impacting changes. +6. On PRs, full lint/test/docs checks run when PR has label `ci:full`: + - `lint` + - `lint-strict-delta` + - `test` + - `flake-probe` (single-retry telemetry; optional block via `CI_BLOCK_ON_FLAKE_SUSPECTED`) + - `docs-quality` +7. If `.github/workflows/**` changed, `workflow-owner-approval` must pass. +8. If root license files (`LICENSE-APACHE`, `LICENSE-MIT`) changed, `license-file-owner-guard` allows only PR author `willsarg`. +9. `lint-feedback` posts actionable comment if lint/docs gates fail. +10. `CI Required Gate` aggregates results to final pass/fail. +11. Maintainer merges PR once checks and review policy are satisfied. +12. Merge emits a `push` event on `dev` (see scenario 4). + +### 2) PR from fork -> `dev` + +1. External contributor opens PR from `fork/` into `zeroclaw:dev`. +2. 
Immediately on `opened`: + - `pull_request_target` workflows start with base-repo context and base-repo token: + - `pr-intake-checks.yml` + - `pr-labeler.yml` + - `pr-auto-response.yml` + - `pull_request` workflows are queued for the fork head commit: + - `ci-run.yml` + - `sec-audit.yml` + - path-scoped workflows (`pub-docker-img.yml`, `workflow-sanity.yml`, `pr-label-policy-check.yml`) if changed files match. +3. Fork-specific permission behavior in `pull_request` workflows: + - token is restricted (read-focused), so jobs that try to write PR comments/status extras can be limited. + - secrets from the base repo are not exposed to fork PR `pull_request` jobs. +4. Approval gate possibility: + - if Actions settings require maintainer approval for fork workflows, the `pull_request` run stays in `action_required`/waiting state until approved. +5. Event fan-out after labeling: + - `pr-labeler.yml` and manual label changes emit `labeled`/`unlabeled` events. + - those events retrigger `pull_request_target` automation (`pr-labeler.yml` and `pr-auto-response.yml`), creating extra run volume/noise. +6. When contributor pushes new commits to fork branch (`synchronize`): + - reruns: `pr-intake-checks.yml`, `pr-labeler.yml`, `ci-run.yml`, `sec-audit.yml`, and matching path-scoped PR workflows. + - does not rerun `pr-auto-response.yml` unless label/open events occur. +7. `ci-run.yml` execution details for fork PR: + - `changes` computes `docs_only`, `docs_changed`, `rust_changed`, `workflow_changed`. + - `build` runs for Rust-impacting changes. + - `lint`/`lint-strict-delta`/`test`/`docs-quality` run on PR when `ci:full` label exists. + - `workflow-owner-approval` runs when `.github/workflows/**` changed. + - `CI Required Gate` emits final pass/fail for the PR head. +8. Fork PR merge blockers to check first when diagnosing stalls: + - run approval pending for fork workflows. + - `workflow-owner-approval` failing on workflow-file changes. 
+ - `license-file-owner-guard` failing when root license files are modified by non-owner PR author. + - `CI Required Gate` failure caused by upstream jobs. + - repeated `pull_request_target` reruns from label churn causing noisy signals. +9. After merge, normal `push` workflows on `dev` execute (scenario 4). + +### 3) PR to `main` (direct or from `dev`) + +1. Contributor or maintainer opens PR with base `main`. +2. `ci-run.yml` and `sec-audit.yml` run on the PR, plus any path-scoped workflows. +3. Maintainer merges PR once checks and review policy pass. +4. Merge emits a `push` event on `main`. + +### 4) Push/Merge Queue to `dev` or `main` (including after merge) + +1. Commit reaches `dev` or `main` (usually from a merged PR), or merge queue creates a `merge_group` validation commit. +2. `ci-run.yml` runs on `push` and `merge_group`. +3. `feature-matrix.yml` runs on `push` for Rust/workflow paths and on `merge_group`. +4. `sec-audit.yml` runs on `push` and `merge_group`. +5. `sec-codeql.yml` runs on `push`/`merge_group` when Rust/codeql paths change (path-scoped on push). +6. `ci-supply-chain-provenance.yml` runs on push when Rust/build provenance paths change. +7. Path-filtered workflows run only if touched files match their filters. +8. In `ci-run.yml`, push/merge-group behavior differs from PR behavior: + - Rust path: `lint`, `lint-strict-delta`, `test`, `build` are expected. + - Docs/non-rust paths: fast-path behavior applies. +9. `CI Required Gate` computes overall push/merge-group result. + +## Docker Publish Logic + +Workflow: `.github/workflows/pub-docker-img.yml` + +### PR behavior + +1. Triggered on `pull_request` to `dev` or `main` when Docker build-input paths change. +2. Runs `PR Docker Smoke` job: + - Builds local smoke image with Blacksmith builder. + - Verifies container with `docker run ... --version`. +3. Typical runtime in recent sample: ~240.4s. +4. No registry push happens on PR events. + +### Push behavior + +1. 
`publish` job runs on tag pushes `v*` only. +2. Workflow trigger includes semantic version tag pushes (`v*`) only. +3. Login to `ghcr.io` uses `${{ github.actor }}` and `${{ secrets.GITHUB_TOKEN }}`. +4. Tag computation includes semantic tag from pushed git tag (`vX.Y.Z`) + SHA tag (`sha-<12>`) + `latest`. +5. Multi-platform publish is used for tag pushes (`linux/amd64,linux/arm64`). +6. `scripts/ci/ghcr_publish_contract_guard.py` validates anonymous pullability and digest parity across `vX.Y.Z`, `sha-<12>`, and `latest`, then emits rollback candidate mapping evidence. +7. Trivy scans are emitted for version, SHA, and latest references. +8. `scripts/ci/ghcr_vulnerability_gate.py` validates Trivy JSON outputs against `.github/release/ghcr-vulnerability-policy.json` and emits audit-event evidence. +9. Typical runtime in recent sample: ~139.9s. +10. Result: pushed image tags under `ghcr.io/<owner>/<repo>` with publish-contract + vulnerability-gate + scan artifacts. + +Important: Docker publish now requires a `v*` tag push; regular `dev`/`main` branch pushes do not publish images. + +## Release Logic + +Workflow: `.github/workflows/pub-release.yml` + +1. Trigger modes: + - Tag push `v*` -> publish mode. + - Manual dispatch -> verification-only or publish mode (input-driven). + - Weekly schedule -> verification-only mode. +2. `prepare` resolves release context (`release_ref`, `release_tag`, publish/draft mode) and runs `scripts/ci/release_trigger_guard.py`. + - publish mode enforces actor authorization, stable annotated tag policy, `origin/main` ancestry, and `release_tag` == `Cargo.toml` version at the tag commit. + - trigger provenance is emitted as `release-trigger-guard` artifacts. +3. `build-release` builds matrix artifacts across Linux/macOS/Windows targets. +4. 
`verify-artifacts` runs `scripts/ci/release_artifact_guard.py` against `.github/release/release-artifact-contract.json` in verify-stage mode (archive contract required; manifest/SBOM/notice checks intentionally skipped) and uploads `release-artifact-guard-verify` evidence. +5. In publish mode, workflow generates SBOM (`CycloneDX` + `SPDX`), `SHA256SUMS`, and a checksum provenance statement (`zeroclaw.sha256sums.intoto.json`) plus audit-event envelope. +6. In publish mode, after manifest generation, workflow reruns `release_artifact_guard.py` in full-contract mode and emits `release-artifact-guard.publish.json` plus `audit-event-release-artifact-guard-publish.json`. +7. In publish mode, workflow keyless-signs release artifacts and composes a supply-chain release-notes preface via `release_notes_with_supply_chain_refs.py`. +8. In publish mode, workflow verifies GHCR release-tag availability. +9. In publish mode, workflow creates/updates the GitHub Release for the resolved tag and commit-ish, combining generated supply-chain preface with GitHub auto-generated commit notes. + +Pre-release path: + +1. Pre-release tags (`vX.Y.Z-alpha.N`, `vX.Y.Z-beta.N`, `vX.Y.Z-rc.N`) trigger `.github/workflows/pub-prerelease.yml`. +2. `scripts/ci/prerelease_guard.py` enforces stage progression, `origin/main` ancestry, and Cargo version/tag alignment. +3. In publish mode, prerelease assets are attached to a GitHub prerelease for the stage tag. + +Canary policy lane: + +1. `.github/workflows/ci-canary-gate.yml` runs weekly or manually. +2. `scripts/ci/canary_guard.py` evaluates metrics against `.github/release/canary-policy.json`. +3. Decision output is explicit (`promote`, `hold`, `abort`) with auditable artifacts and optional dispatch signal. + +## Merge/Policy Notes + +1. Workflow-file changes (`.github/workflows/**`) activate owner-approval gate in `ci-run.yml`. +2. PR lint/test strictness is intentionally controlled by `ci:full` label. +3. 
`pr-intake-checks.yml` now blocks PRs missing a Linear issue key (`RMN-*`, `CDV-*`, `COM-*`) to keep execution mapped to Linear. +4. `sec-audit.yml` runs on PR/push/merge queue (`merge_group`), plus scheduled weekly. +5. `ci-change-audit.yml` enforces pinned `uses:` references for CI/security workflow changes. +6. `sec-audit.yml` includes deny policy hygiene checks (`deny_policy_guard.py`) before cargo-deny. +7. `sec-audit.yml` includes gitleaks allowlist governance checks (`secrets_governance_guard.py`) against `.github/security/gitleaks-allowlist-governance.json`. +8. `ci-reproducible-build.yml` and `ci-supply-chain-provenance.yml` provide scheduled supply-chain assurance signals outside release-only windows. +9. Some workflows are operational and non-merge-path (`pr-check-stale`, `pr-check-status`, `sync-contributors`, etc.). +10. Workflow-specific JavaScript helpers are organized under `.github/workflows/scripts/`. +11. `ci-run.yml` includes cache partitioning (`prefix-key`) across lint/test/build/flake-probe lanes to reduce cache contention. +12. `ci-rollback.yml` provides a guarded rollback planning lane (scheduled dry-run + manual execute controls) with audit artifacts. 
+ +## Mermaid Diagrams + +### PR to Dev + +```mermaid +flowchart TD + A["PR opened or updated -> dev"] --> B["pull_request_target lane"] + B --> B1["pr-intake-checks.yml"] + B --> B2["pr-labeler.yml"] + B --> B3["pr-auto-response.yml"] + A --> C["pull_request CI lane"] + C --> C1["ci-run.yml"] + C --> C2["sec-audit.yml"] + C --> C3["pub-docker-img.yml (if Docker paths changed)"] + C --> C4["workflow-sanity.yml (if workflow files changed)"] + C --> C5["pr-label-policy-check.yml (if policy files changed)"] + C1 --> D["CI Required Gate"] + D --> E{"Checks + review policy pass?"} + E -->|No| F["PR stays open"] + E -->|Yes| G["Merge PR"] + G --> H["push event on dev"] +``` + +### Main Delivery and Release + +```mermaid +flowchart TD + D0["Commit reaches dev"] --> B0["ci-run.yml"] + D0 --> C0["sec-audit.yml"] + PRM["PR to main"] --> QM["ci-run.yml + sec-audit.yml (+ path-scoped)"] + QM --> M["Merge to main"] + M --> A["Commit reaches main"] + A --> B["ci-run.yml"] + A --> C["sec-audit.yml"] + A --> D["path-scoped workflows (if matched)"] + T["Tag push v*"] --> R["pub-release.yml"] + W["Manual/Scheduled release verify"] --> R + T --> DP["pub-docker-img.yml publish job"] + R --> R1["Artifacts + SBOM + checksums + signatures + GitHub Release"] + W --> R2["Verification build only (no GitHub Release publish)"] + DP --> P1["Push ghcr image tags (version + sha + latest)"] +``` + +## Quick Troubleshooting + +1. Unexpected skipped jobs: inspect `scripts/ci/detect_change_scope.sh` outputs. +2. Workflow-change PR blocked: verify `WORKFLOW_OWNER_LOGINS` and approvals. +3. Fork PR appears stalled: check whether Actions run approval is pending. +4. Docker not published: confirm a `v*` tag was pushed to the intended commit. 
diff --git a/.github/workflows/nightly-all-features.yml b/.github/workflows/nightly-all-features.yml new file mode 100644 index 0000000..caee4a2 --- /dev/null +++ b/.github/workflows/nightly-all-features.yml @@ -0,0 +1,187 @@ +name: Nightly All-Features + +on: + schedule: + - cron: "15 3 * * *" # Daily 03:15 UTC + workflow_dispatch: + inputs: + fail_on_failure: + description: "Fail workflow when any nightly lane fails" + required: true + default: true + type: boolean + +concurrency: + group: nightly-all-features-${{ github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + nightly-lanes: + name: Nightly Lane (${{ matrix.name }}) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 70 + strategy: + fail-fast: false + matrix: + include: + - name: default + command: cargo test --locked --test agent_e2e --verbose + install_libudev: false + - name: whatsapp-web + command: cargo check --locked --no-default-features --features whatsapp-web --verbose + install_libudev: false + - name: browser-native + command: cargo check --locked --no-default-features --features browser-native --verbose + install_libudev: false + - name: nightly-all-features + command: cargo test --locked --all-features --test agent_e2e --verbose + install_libudev: true + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: nightly-all-features-${{ matrix.name }} + + - name: Ensure Linux deps for all-features lane + if: matrix.install_libudev + shell: bash + run: | + set -euo pipefail + + if command -v pkg-config 
>/dev/null 2>&1 && pkg-config --exists libudev; then + echo "libudev development headers already available; skipping apt install." + exit 0 + fi + + echo "Installing missing libudev build dependencies..." + for attempt in 1 2 3; do + if sudo apt-get update -qq -o DPkg::Lock::Timeout=300 && \ + sudo apt-get install -y --no-install-recommends --no-upgrade -o DPkg::Lock::Timeout=300 libudev-dev pkg-config; then + echo "Dependency installation succeeded on attempt ${attempt}." + exit 0 + fi + if [ "$attempt" -eq 3 ]; then + echo "Failed to install libudev-dev/pkg-config after ${attempt} attempts." >&2 + exit 1 + fi + echo "Dependency installation failed on attempt ${attempt}; retrying in 10s..." + sleep 10 + done + + - name: Run nightly lane command + id: lane + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + started_at="$(date +%s)" + set +e + bash -lc "${{ matrix.command }}" + status=$? + set -e + finished_at="$(date +%s)" + duration="$((finished_at - started_at))" + + lane_status="success" + if [ "$status" -ne 0 ]; then + lane_status="failure" + fi + + cat > "artifacts/nightly-result-${{ matrix.name }}.json" <<EOF + {"lane": "${{ matrix.name }}", "status": "${lane_status}", "exit_code": ${status}, "duration_seconds": ${duration}} + EOF + echo "Nightly lane ${{ matrix.name }}: ${lane_status} (exit ${status}, ${duration}s)" >> "$GITHUB_STEP_SUMMARY" + + echo "lane_status=${lane_status}" >> "$GITHUB_OUTPUT" + echo "lane_exit_code=${status}" >> "$GITHUB_OUTPUT" + + - name: Upload nightly lane artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: nightly-lane-${{ matrix.name }} + path: artifacts/nightly-result-${{ matrix.name }}.json + if-no-files-found: error + retention-days: 30 + + nightly-summary: + name: Nightly Summary & Routing + needs: [nightly-lanes] + if: always() + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Download nightly artifacts + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: artifacts + + - name: 
Aggregate nightly report + shell: bash + env: + FAIL_ON_FAILURE_INPUT: ${{ github.event.inputs.fail_on_failure || 'true' }} + run: | + set -euo pipefail + fail_on_failure="true" + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + fail_on_failure="${FAIL_ON_FAILURE_INPUT}" + fi + + args=() + if [ "$fail_on_failure" = "true" ]; then + args+=(--fail-on-failure) + fi + + python3 scripts/ci/nightly_matrix_report.py \ + --input-dir artifacts \ + --owners-file .github/release/nightly-owner-routing.json \ + --output-json artifacts/nightly-summary.json \ + --output-md artifacts/nightly-summary.md \ + "${args[@]}" + + - name: Publish nightly summary + shell: bash + run: | + set -euo pipefail + cat artifacts/nightly-summary.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload nightly summary artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: nightly-all-features-summary + path: | + artifacts/nightly-summary.json + artifacts/nightly-summary.md + if-no-files-found: error + retention-days: 30 diff --git a/.github/workflows/pages-deploy.yml b/.github/workflows/pages-deploy.yml new file mode 100644 index 0000000..34fca0b --- /dev/null +++ b/.github/workflows/pages-deploy.yml @@ -0,0 +1,64 @@ +name: Deploy GitHub Pages + +on: + push: + branches: + - main + paths: + - site/** + - docs/** + - README.md + - .github/workflows/pages-deploy.yml + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: github-pages + cancel-in-progress: true + +jobs: + build: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + cache: npm + cache-dependency-path: site/package-lock.json + + - name: Install Dependencies + working-directory: site + run: npm ci + + - name: Build Site + working-directory: 
site + run: npm run build + + - name: Configure Pages + uses: actions/configure-pages@v5 + + - name: Upload Artifact + uses: actions/upload-pages-artifact@v3 + with: + path: gh-pages + + deploy: + needs: build + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/pr-auto-response.yml b/.github/workflows/pr-auto-response.yml new file mode 100644 index 0000000..9cf1a7c --- /dev/null +++ b/.github/workflows/pr-auto-response.yml @@ -0,0 +1,89 @@ +name: PR Auto Responder + +on: + issues: + types: [opened, reopened, labeled, unlabeled] + pull_request_target: + branches: [dev, main] + types: [opened, labeled, unlabeled] + +permissions: {} + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + LABEL_POLICY_PATH: .github/label-policy.json + +jobs: + contributor-tier-issues: + if: >- + (github.event_name == 'issues' && + (github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'labeled' || github.event.action == 'unlabeled')) || + (github.event_name == 'pull_request_target' && + (github.event.action == 'labeled' || github.event.action == 'unlabeled')) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + issues: write + pull-requests: write + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Apply contributor tier label for issue author + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + LABEL_POLICY_PATH: .github/label-policy.json + with: + script: | + const script = require('./.github/workflows/scripts/pr_auto_response_contributor_tier.js'); + await script({ github, context, core }); + 
first-interaction: + if: github.event.action == 'opened' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + issues: write + pull-requests: write + steps: + - name: Greet first-time contributors + uses: actions/first-interaction@a1db7729b356323c7988c20ed6f0d33fe31297be # v1 + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + issue_message: | + Thanks for opening this issue. + + Before maintainers triage it, please confirm: + - Repro steps are complete and run on latest `main` + - Environment details are included (OS, Rust version, ZeroClaw version) + - Sensitive values are redacted + + This helps us keep issue throughput high and response latency low. + pr_message: | + Thanks for contributing to ZeroClaw. + + For faster review, please ensure: + - PR template sections are fully completed + - `cargo fmt --all -- --check`, `cargo clippy --all-targets -- -D warnings`, and `cargo test` are included + - If automation/agents were used heavily, add brief workflow notes + - Scope is focused (prefer one concern per PR) + + See `CONTRIBUTING.md` and `docs/pr-workflow.md` for full collaboration rules. 
+ + labeled-routes: + if: github.event.action == 'labeled' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + issues: write + pull-requests: write + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Handle label-driven responses + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/pr_auto_response_labeled_routes.js'); + await script({ github, context, core }); diff --git a/.github/workflows/pr-check-stale.yml b/.github/workflows/pr-check-stale.yml new file mode 100644 index 0000000..7c29077 --- /dev/null +++ b/.github/workflows/pr-check-stale.yml @@ -0,0 +1,49 @@ +name: PR Check Stale + +on: + schedule: + - cron: "20 2 * * *" + workflow_dispatch: + +permissions: {} + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + +jobs: + stale: + permissions: + issues: write + pull-requests: write + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Mark stale issues and pull requests + uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + days-before-issue-stale: 21 + days-before-issue-close: 7 + days-before-pr-stale: 14 + days-before-pr-close: 7 + stale-issue-label: stale + stale-pr-label: stale + exempt-issue-labels: security,pinned,no-stale,no-pr-hygiene,maintainer + exempt-pr-labels: no-stale,no-pr-hygiene,maintainer + remove-stale-when-updated: true + exempt-all-assignees: true + operations-per-run: 300 + stale-issue-message: | + This issue was automatically marked as stale due to inactivity. + Please provide an update, reproduction details, or current status to keep it open. + close-issue-message: | + Closing this issue due to inactivity. 
+ If the problem still exists on the latest `main`, please open a new issue with fresh repro steps. + close-issue-reason: not_planned + stale-pr-message: | + This PR was automatically marked as stale due to inactivity. + Please rebase/update and post the latest validation results. + close-pr-message: | + Closing this PR due to inactivity. + Maintainers can reopen once the branch is updated and validation is provided. diff --git a/.github/workflows/pr-check-status.yml b/.github/workflows/pr-check-status.yml new file mode 100644 index 0000000..5fcdab2 --- /dev/null +++ b/.github/workflows/pr-check-status.yml @@ -0,0 +1,36 @@ +name: PR Check Status + +on: + schedule: + - cron: "15 8 * * *" # Once daily at 8:15am UTC + workflow_dispatch: + +permissions: {} + +concurrency: + group: pr-check-status + cancel-in-progress: true + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + +jobs: + nudge-stale-prs: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + permissions: + contents: read + pull-requests: write + issues: write + env: + STALE_HOURS: "48" + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - name: Nudge PRs that need rebase or CI refresh + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/pr_check_status_nudge.js'); + await script({ github, context, core }); diff --git a/.github/workflows/pr-intake-checks.yml b/.github/workflows/pr-intake-checks.yml new file mode 100644 index 0000000..1e84dcc --- /dev/null +++ b/.github/workflows/pr-intake-checks.yml @@ -0,0 +1,37 @@ +name: PR Intake Checks + +on: + pull_request_target: + branches: [dev, main] + types: [opened, reopened, synchronize, edited, ready_for_review] + +concurrency: + group: pr-intake-checks-${{ github.event.pull_request.number || github.run_id }} + 
cancel-in-progress: true + +permissions: + contents: read + pull-requests: write + issues: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + intake: + name: Intake Checks + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Run safe PR intake checks + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/pr_intake_checks.js'); + await script({ github, context, core }); diff --git a/.github/workflows/pr-label-policy-check.yml b/.github/workflows/pr-label-policy-check.yml new file mode 100644 index 0000000..613071f --- /dev/null +++ b/.github/workflows/pr-label-policy-check.yml @@ -0,0 +1,80 @@ +name: PR Label Policy Check + +on: + pull_request: + paths: + - ".github/label-policy.json" + - ".github/workflows/pr-labeler.yml" + - ".github/workflows/pr-auto-response.yml" + push: + paths: + - ".github/label-policy.json" + - ".github/workflows/pr-labeler.yml" + - ".github/workflows/pr-auto-response.yml" + +concurrency: + group: pr-label-policy-check-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + contributor-tier-consistency: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Verify shared label policy and workflow wiring + shell: bash + run: | + set -euo pipefail + python3 - <<'PY' + import json + import re + from pathlib import Path + + policy_path = Path('.github/label-policy.json') + policy = 
json.loads(policy_path.read_text(encoding='utf-8')) + color = str(policy.get('contributor_tier_color', '')).upper() + rules = policy.get('contributor_tiers', []) + if not re.fullmatch(r'[0-9A-F]{6}', color): + raise SystemExit('invalid contributor_tier_color in .github/label-policy.json') + if not rules: + raise SystemExit('contributor_tiers must not be empty in .github/label-policy.json') + + labels = set() + prev_min = None + for entry in rules: + label = str(entry.get('label', '')).strip().lower() + min_merged = int(entry.get('min_merged_prs', 0)) + if not label.endswith('contributor'): + raise SystemExit(f'invalid contributor tier label: {label}') + if label in labels: + raise SystemExit(f'duplicate contributor tier label: {label}') + if prev_min is not None and min_merged > prev_min: + raise SystemExit('contributor_tiers must be sorted descending by min_merged_prs') + labels.add(label) + prev_min = min_merged + + workflow_paths = [ + Path('.github/workflows/pr-labeler.yml'), + Path('.github/workflows/pr-auto-response.yml'), + ] + for workflow in workflow_paths: + text = workflow.read_text(encoding='utf-8') + if '.github/label-policy.json' not in text: + raise SystemExit(f'{workflow} must load .github/label-policy.json') + if re.search(r'contributorTierColor\s*=\s*"[0-9A-Fa-f]{6}"', text): + raise SystemExit(f'{workflow} contains hardcoded contributorTierColor') + + print('label policy file is valid and workflow consumers are wired to shared policy') + PY diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml new file mode 100644 index 0000000..2e60988 --- /dev/null +++ b/.github/workflows/pr-labeler.yml @@ -0,0 +1,56 @@ +name: PR Labeler + +on: + pull_request_target: + branches: [dev, main] + types: [opened, reopened, synchronize, edited, labeled, unlabeled] + workflow_dispatch: + inputs: + mode: + description: "Run mode for managed-label governance" + required: true + default: "audit" + type: choice + options: + - audit + - repair + 
+concurrency: + group: pr-labeler-${{ github.event.pull_request.number || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + pull-requests: write + issues: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + LABEL_POLICY_PATH: .github/label-policy.json + +jobs: + label: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Apply path labels + if: github.event_name == 'pull_request_target' + uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1 + continue-on-error: true + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + sync-labels: true + + - name: Apply size/risk/module labels + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + continue-on-error: true + env: + LABEL_POLICY_PATH: .github/label-policy.json + with: + script: | + const script = require('./.github/workflows/scripts/pr_labeler.js'); + await script({ github, context, core }); diff --git a/.github/workflows/pub-docker-img.yml b/.github/workflows/pub-docker-img.yml new file mode 100644 index 0000000..0942182 --- /dev/null +++ b/.github/workflows/pub-docker-img.yml @@ -0,0 +1,352 @@ +name: Pub Docker Img + +on: + push: + tags: ["v*"] + pull_request: + branches: [dev, main] + paths: + - "Dockerfile" + - ".dockerignore" + - "docker-compose.yml" + - "rust-toolchain.toml" + - "dev/config.template.toml" + - ".github/workflows/pub-docker-img.yml" + - ".github/release/ghcr-tag-policy.json" + - ".github/release/ghcr-vulnerability-policy.json" + - "scripts/ci/ghcr_publish_contract_guard.py" + - "scripts/ci/ghcr_vulnerability_gate.py" + workflow_dispatch: + +concurrency: + group: docker-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + 
GIT_CONFIG_VALUE_0: /dev/null + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + pr-smoke: + name: PR Docker Smoke + if: github.event_name == 'workflow_dispatch' || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 25 + permissions: + contents: read + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Setup Buildx + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 + + - name: Extract metadata (tags, labels) + if: github.event_name == 'pull_request' + id: meta + uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=pr + + - name: Build smoke image + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6 + with: + context: . 
+ push: false + load: true + provenance: false + sbom: false + tags: zeroclaw-pr-smoke:latest + labels: ${{ steps.meta.outputs.labels || '' }} + platforms: linux/amd64 + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Verify image + run: docker run --rm zeroclaw-pr-smoke:latest --version + + publish: + name: Build and Push Docker Image + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'zeroclaw-labs/zeroclaw' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 45 + permissions: + contents: read + packages: write + security-events: write + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Setup Buildx + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3 + + - name: Log in to Container Registry + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Compute tags + id: meta + shell: bash + run: | + set -euo pipefail + IMAGE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}" + SHA_SUFFIX="sha-${GITHUB_SHA::12}" + SHA_TAG="${IMAGE}:${SHA_SUFFIX}" + LATEST_SUFFIX="latest" + LATEST_TAG="${IMAGE}:${LATEST_SUFFIX}" + if [[ "${GITHUB_REF}" != refs/tags/v* ]]; then + echo "::error::Docker publish is restricted to v* tag pushes." + exit 1 + fi + + RELEASE_TAG="${GITHUB_REF#refs/tags/}" + VERSION_TAG="${IMAGE}:${RELEASE_TAG}" + TAGS="${VERSION_TAG},${SHA_TAG},${LATEST_TAG}" + + { + echo "tags=${TAGS}" + echo "release_tag=${RELEASE_TAG}" + echo "sha_tag=${SHA_SUFFIX}" + echo "latest_tag=${LATEST_SUFFIX}" + } >> "$GITHUB_OUTPUT" + + - name: Build and push Docker image + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6 + with: + context: . 
+ push: true + tags: ${{ steps.meta.outputs.tags }} + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Set GHCR package visibility to public + shell: bash + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + set -euo pipefail + owner="${GITHUB_REPOSITORY_OWNER,,}" + repo="${GITHUB_REPOSITORY#*/}" + + # Package path can vary depending on repository/package linkage. + candidates=( + "$repo" + "${owner}%2F${repo}" + ) + + for scope in orgs users; do + for pkg in "${candidates[@]}"; do + code="$(curl -sS -o /tmp/ghcr-visibility.json -w "%{http_code}" \ + -X PATCH \ + -H "Authorization: Bearer ${GH_TOKEN}" \ + -H "Accept: application/vnd.github+json" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + "https://api.github.com/${scope}/${owner}/packages/container/${pkg}/visibility" \ + -d '{"visibility":"public"}' || true)" + + if [ "$code" = "200" ] || [ "$code" = "204" ]; then + echo "GHCR package visibility is public (${scope}/${owner}/${pkg})." + exit 0 + fi + + echo "Visibility attempt ${scope}/${owner}/${pkg} returned HTTP ${code}." + done + done + + echo "::warning::Unable to update GHCR visibility via API in this run; proceeding to GHCR publish contract verification." 
+ + - name: Validate GHCR publish contract + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/ghcr_publish_contract_guard.py \ + --repository "${GITHUB_REPOSITORY,,}" \ + --release-tag "${{ steps.meta.outputs.release_tag }}" \ + --sha "${GITHUB_SHA}" \ + --policy-file .github/release/ghcr-tag-policy.json \ + --output-json artifacts/ghcr-publish-contract.json \ + --output-md artifacts/ghcr-publish-contract.md \ + --fail-on-violation + + - name: Emit GHCR publish contract audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ghcr-publish-contract.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type ghcr_publish_contract \ + --input-json artifacts/ghcr-publish-contract.json \ + --output-json artifacts/audit-event-ghcr-publish-contract.json \ + --artifact-name ghcr-publish-contract \ + --retention-days 21 + fi + + - name: Publish GHCR contract summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ghcr-publish-contract.md ]; then + cat artifacts/ghcr-publish-contract.md >> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload GHCR publish contract artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ghcr-publish-contract + path: | + artifacts/ghcr-publish-contract.json + artifacts/ghcr-publish-contract.md + artifacts/audit-event-ghcr-publish-contract.json + if-no-files-found: ignore + retention-days: 21 + + - name: Scan published image for vulnerabilities (Trivy) + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + + TAG_NAME="${{ steps.meta.outputs.release_tag }}" + SHA_TAG="${{ steps.meta.outputs.sha_tag }}" + LATEST_TAG="${{ steps.meta.outputs.latest_tag }}" + IMAGE_BASE="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}" + VERSION_REF="${IMAGE_BASE}:${TAG_NAME}" + SHA_REF="${IMAGE_BASE}:${SHA_TAG}" + LATEST_REF="${IMAGE_BASE}:${LATEST_TAG}" + 
SARIF_OUT="artifacts/trivy-${TAG_NAME}.sarif" + TABLE_OUT="artifacts/trivy-${TAG_NAME}.txt" + JSON_OUT="artifacts/trivy-${TAG_NAME}.json" + SHA_TABLE_OUT="artifacts/trivy-${SHA_TAG}.txt" + SHA_JSON_OUT="artifacts/trivy-${SHA_TAG}.json" + LATEST_TABLE_OUT="artifacts/trivy-${LATEST_TAG}.txt" + LATEST_JSON_OUT="artifacts/trivy-${LATEST_TAG}.json" + + scan_trivy() { + local image_ref="$1" + local output_prefix="$2" + + docker run --rm \ + -v "$PWD/artifacts:/work" \ + aquasec/trivy:0.58.2 image \ + --quiet \ + --ignore-unfixed \ + --severity HIGH,CRITICAL \ + --format json \ + --output "/work/${output_prefix}.json" \ + "${image_ref}" + + docker run --rm \ + -v "$PWD/artifacts:/work" \ + aquasec/trivy:0.58.2 image \ + --quiet \ + --ignore-unfixed \ + --severity HIGH,CRITICAL \ + --format table \ + --output "/work/${output_prefix}.txt" \ + "${image_ref}" + } + + docker run --rm \ + -v "$PWD/artifacts:/work" \ + aquasec/trivy:0.58.2 image \ + --quiet \ + --ignore-unfixed \ + --severity HIGH,CRITICAL \ + --format sarif \ + --output "/work/trivy-${TAG_NAME}.sarif" \ + "${VERSION_REF}" + + scan_trivy "${VERSION_REF}" "trivy-${TAG_NAME}" + scan_trivy "${SHA_REF}" "trivy-${SHA_TAG}" + scan_trivy "${LATEST_REF}" "trivy-${LATEST_TAG}" + + echo "Generated Trivy reports:" + ls -1 "$SARIF_OUT" "$TABLE_OUT" "$JSON_OUT" "$SHA_TABLE_OUT" "$SHA_JSON_OUT" "$LATEST_TABLE_OUT" "$LATEST_JSON_OUT" + + - name: Validate GHCR vulnerability gate + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/ghcr_vulnerability_gate.py \ + --release-tag "${{ steps.meta.outputs.release_tag }}" \ + --sha-tag "${{ steps.meta.outputs.sha_tag }}" \ + --latest-tag "${{ steps.meta.outputs.latest_tag }}" \ + --release-report-json "artifacts/trivy-${{ steps.meta.outputs.release_tag }}.json" \ + --sha-report-json "artifacts/trivy-${{ steps.meta.outputs.sha_tag }}.json" \ + --latest-report-json "artifacts/trivy-${{ steps.meta.outputs.latest_tag }}.json" \ + --policy-file 
.github/release/ghcr-vulnerability-policy.json \ + --output-json artifacts/ghcr-vulnerability-gate.json \ + --output-md artifacts/ghcr-vulnerability-gate.md \ + --fail-on-violation + + - name: Emit GHCR vulnerability gate audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ghcr-vulnerability-gate.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type ghcr_vulnerability_gate \ + --input-json artifacts/ghcr-vulnerability-gate.json \ + --output-json artifacts/audit-event-ghcr-vulnerability-gate.json \ + --artifact-name ghcr-vulnerability-gate \ + --retention-days 21 + fi + + - name: Publish GHCR vulnerability summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/ghcr-vulnerability-gate.md ]; then + cat artifacts/ghcr-vulnerability-gate.md >> "$GITHUB_STEP_SUMMARY" + fi + + - name: Upload GHCR vulnerability gate artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ghcr-vulnerability-gate + path: | + artifacts/ghcr-vulnerability-gate.json + artifacts/ghcr-vulnerability-gate.md + artifacts/audit-event-ghcr-vulnerability-gate.json + if-no-files-found: ignore + retention-days: 21 + + - name: Upload Trivy SARIF + if: always() + uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4 + with: + sarif_file: artifacts/trivy-${{ github.ref_name }}.sarif + category: ghcr-trivy + + - name: Upload Trivy report artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ghcr-trivy-report + path: | + artifacts/trivy-${{ github.ref_name }}.sarif + artifacts/trivy-${{ github.ref_name }}.txt + artifacts/trivy-${{ github.ref_name }}.json + artifacts/trivy-sha-*.txt + artifacts/trivy-sha-*.json + artifacts/trivy-latest.txt + artifacts/trivy-latest.json + if-no-files-found: ignore + retention-days: 14 diff --git 
a/.github/workflows/pub-prerelease.yml b/.github/workflows/pub-prerelease.yml new file mode 100644 index 0000000..01c0830 --- /dev/null +++ b/.github/workflows/pub-prerelease.yml @@ -0,0 +1,259 @@ +name: Pub Pre-release + +on: + push: + tags: + - "v*-alpha.*" + - "v*-beta.*" + - "v*-rc.*" + workflow_dispatch: + inputs: + tag: + description: "Existing pre-release tag (e.g. v0.1.8-rc.1)" + required: true + default: "" + type: string + mode: + description: "dry-run validates/builds only; publish creates prerelease" + required: true + default: dry-run + type: choice + options: + - dry-run + - publish + draft: + description: "Create prerelease as draft" + required: true + default: true + type: boolean + +concurrency: + group: prerelease-${{ github.ref || github.run_id }} + cancel-in-progress: false + +permissions: + contents: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + prerelease-guard: + name: Pre-release Guard + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + outputs: + release_tag: ${{ steps.vars.outputs.release_tag }} + mode: ${{ steps.vars.outputs.mode }} + draft: ${{ steps.vars.outputs.draft }} + ready_to_publish: ${{ steps.extract.outputs.ready_to_publish }} + stage: ${{ steps.extract.outputs.stage }} + transition_outcome: ${{ steps.extract.outputs.transition_outcome }} + latest_stage: ${{ steps.extract.outputs.latest_stage }} + latest_stage_tag: ${{ steps.extract.outputs.latest_stage_tag }} + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Resolve prerelease inputs + id: vars + shell: bash + run: | + set -euo pipefail + if [ "${GITHUB_EVENT_NAME}" = "push" ]; then + release_tag="${GITHUB_REF_NAME}" + mode="publish" + draft="false" + else + release_tag="${{ inputs.tag }}" + mode="${{ inputs.mode }}" + draft="${{ 
inputs.draft }}" + fi + + { + echo "release_tag=${release_tag}" + echo "mode=${mode}" + echo "draft=${draft}" + } >> "$GITHUB_OUTPUT" + + - name: Validate prerelease stage gate + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/prerelease_guard.py \ + --repo-root . \ + --tag "${{ steps.vars.outputs.release_tag }}" \ + --stage-config-file .github/release/prerelease-stage-gates.json \ + --mode "${{ steps.vars.outputs.mode }}" \ + --output-json artifacts/prerelease-guard.json \ + --output-md artifacts/prerelease-guard.md \ + --fail-on-violation + + - name: Extract prerelease outputs + id: extract + shell: bash + run: | + set -euo pipefail + ready_to_publish="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + print(str(bool(data.get('ready_to_publish', False))).lower()) + PY + )" + stage="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + print(data.get('stage', 'unknown')) + PY + )" + transition_outcome="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + transition = data.get('transition') or {} + print(transition.get('outcome', 'unknown')) + PY + )" + latest_stage="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + history = data.get('stage_history') or {} + print(history.get('latest_stage', 'unknown')) + PY + )" + latest_stage_tag="$(python3 - <<'PY' + import json + data = json.load(open('artifacts/prerelease-guard.json', encoding='utf-8')) + history = data.get('stage_history') or {} + print(history.get('latest_tag', 'unknown')) + PY + )" + { + echo "ready_to_publish=${ready_to_publish}" + echo "stage=${stage}" + echo "transition_outcome=${transition_outcome}" + echo "latest_stage=${latest_stage}" + echo "latest_stage_tag=${latest_stage_tag}" + } >> "$GITHUB_OUTPUT" + + - name: Emit 
prerelease audit event + if: always() + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type prerelease_guard \ + --input-json artifacts/prerelease-guard.json \ + --output-json artifacts/audit-event-prerelease-guard.json \ + --artifact-name prerelease-guard \ + --retention-days 21 + + - name: Publish prerelease summary + if: always() + shell: bash + run: | + set -euo pipefail + cat artifacts/prerelease-guard.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload prerelease guard artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: prerelease-guard + path: | + artifacts/prerelease-guard.json + artifacts/prerelease-guard.md + artifacts/audit-event-prerelease-guard.json + if-no-files-found: error + retention-days: 21 + + build-prerelease: + name: Build Pre-release Artifact + needs: [prerelease-guard] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 45 + steps: + - name: Checkout tag + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.prerelease-guard.outputs.release_tag }} + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: prerelease-${{ needs.prerelease-guard.outputs.release_tag }} + cache-targets: true + + - name: Build release-fast binary + shell: bash + run: | + set -euo pipefail + cargo build --profile release-fast --locked --target x86_64-unknown-linux-gnu + + - name: Package prerelease artifact + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + cp target/x86_64-unknown-linux-gnu/release-fast/zeroclaw artifacts/zeroclaw + tar czf artifacts/zeroclaw-x86_64-unknown-linux-gnu.tar.gz -C artifacts zeroclaw + rm artifacts/zeroclaw + + - name: Generate manifest + checksums + shell: 
bash + run: | + set -euo pipefail + python3 scripts/ci/release_manifest.py \ + --artifacts-dir artifacts \ + --release-tag "${{ needs.prerelease-guard.outputs.release_tag }}" \ + --output-json artifacts/prerelease-manifest.json \ + --output-md artifacts/prerelease-manifest.md \ + --checksums-path artifacts/SHA256SUMS \ + --fail-empty + + - name: Publish prerelease build summary + shell: bash + run: | + set -euo pipefail + cat artifacts/prerelease-manifest.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload prerelease build artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: prerelease-artifacts + path: artifacts/* + if-no-files-found: error + retention-days: 14 + + publish-prerelease: + name: Publish GitHub Pre-release + needs: [prerelease-guard, build-prerelease] + if: needs.prerelease-guard.outputs.ready_to_publish == 'true' + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 15 + steps: + - name: Download prerelease artifacts + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: prerelease-artifacts + path: artifacts + + - name: Create or update GitHub pre-release + uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2 + with: + tag_name: ${{ needs.prerelease-guard.outputs.release_tag }} + prerelease: true + draft: ${{ needs.prerelease-guard.outputs.draft == 'true' }} + generate_release_notes: true + files: | + artifacts/**/* + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pub-release.yml b/.github/workflows/pub-release.yml new file mode 100644 index 0000000..db0ec27 --- /dev/null +++ b/.github/workflows/pub-release.yml @@ -0,0 +1,645 @@ +name: Pub Release + +on: + push: + tags: ["v*"] + workflow_dispatch: + inputs: + release_ref: + description: "Git ref (branch, tag, or SHA) to build" + required: false + default: "main" + type: string + publish_release: 
+ description: "Publish a GitHub release (false = verification build only)" + required: false + default: false + type: boolean + release_tag: + description: "Existing release tag (required when publish_release=true), e.g. v0.1.1" + required: false + default: "" + type: string + draft: + description: "Create release as draft (manual publish only)" + required: false + default: true + type: boolean + schedule: + # Weekly release-readiness verification on default branch (no publish) + - cron: "17 8 * * 1" + +concurrency: + group: release-${{ github.ref || github.run_id }} + cancel-in-progress: false + +permissions: + contents: write + packages: read + id-token: write # Required for cosign keyless signing via OIDC + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + prepare: + name: Prepare Release Context + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + outputs: + release_ref: ${{ steps.vars.outputs.release_ref }} + release_tag: ${{ steps.vars.outputs.release_tag }} + publish_release: ${{ steps.vars.outputs.publish_release }} + draft_release: ${{ steps.vars.outputs.draft_release }} + steps: + - name: Resolve release inputs + id: vars + shell: bash + run: | + set -euo pipefail + + event_name="${GITHUB_EVENT_NAME}" + publish_release="false" + draft_release="false" + + if [[ "$event_name" == "push" ]]; then + release_ref="${GITHUB_REF_NAME}" + release_tag="${GITHUB_REF_NAME}" + publish_release="true" + elif [[ "$event_name" == "workflow_dispatch" ]]; then + release_ref="${{ inputs.release_ref }}" + publish_release="${{ inputs.publish_release }}" + draft_release="${{ inputs.draft }}" + + if [[ "$publish_release" == "true" ]]; then + release_tag="${{ inputs.release_tag }}" + if [[ -z "$release_tag" ]]; then + echo "::error::release_tag is required when publish_release=true" + exit 1 + fi + release_ref="$release_tag" + else + 
release_tag="verify-${GITHUB_SHA::12}" + fi + else + # schedule + release_ref="main" + release_tag="verify-${GITHUB_SHA::12}" + fi + + { + echo "release_ref=${release_ref}" + echo "release_tag=${release_tag}" + echo "publish_release=${publish_release}" + echo "draft_release=${draft_release}" + } >> "$GITHUB_OUTPUT" + + { + echo "### Release Context" + echo "- event: ${event_name}" + echo "- release_ref: ${release_ref}" + echo "- release_tag: ${release_tag}" + echo "- publish_release: ${publish_release}" + echo "- draft_release: ${draft_release}" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Validate release trigger and authorization guard + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/release_trigger_guard.py \ + --repo-root . \ + --repository "${GITHUB_REPOSITORY}" \ + --event-name "${GITHUB_EVENT_NAME}" \ + --actor "${GITHUB_ACTOR}" \ + --release-ref "${{ steps.vars.outputs.release_ref }}" \ + --release-tag "${{ steps.vars.outputs.release_tag }}" \ + --publish-release "${{ steps.vars.outputs.publish_release }}" \ + --authorized-actors "${{ vars.RELEASE_AUTHORIZED_ACTORS || 'willsarg,theonlyhennygod,chumyin' }}" \ + --authorized-tagger-emails "${{ vars.RELEASE_AUTHORIZED_TAGGER_EMAILS || '' }}" \ + --require-annotated-tag true \ + --output-json artifacts/release-trigger-guard.json \ + --output-md artifacts/release-trigger-guard.md \ + --fail-on-violation + + - name: Emit release trigger audit event + if: always() + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type release_trigger_guard \ + --input-json artifacts/release-trigger-guard.json \ + --output-json artifacts/audit-event-release-trigger-guard.json \ + --artifact-name release-trigger-guard \ + --retention-days 30 + + - name: Publish release trigger guard summary + if: always() + shell: bash + run: | + set -euo pipefail + cat 
artifacts/release-trigger-guard.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload release trigger guard artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: release-trigger-guard + path: | + artifacts/release-trigger-guard.json + artifacts/release-trigger-guard.md + artifacts/audit-event-release-trigger-guard.json + if-no-files-found: error + retention-days: 30 + + build-release: + name: Build ${{ matrix.target }} + needs: [prepare] + runs-on: ${{ matrix.os }} + timeout-minutes: 40 + strategy: + fail-fast: false + matrix: + include: + # Keep GNU Linux release artifacts on Ubuntu 22.04 to preserve + # a broadly compatible GLIBC baseline for user distributions. + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-unknown-linux-gnu + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-unknown-linux-musl + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + use_cross: true + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: aarch64-unknown-linux-gnu + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: gcc-aarch64-linux-gnu + linker_env: CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER + linker: aarch64-linux-gnu-gcc + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: aarch64-unknown-linux-musl + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + use_cross: true + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: armv7-unknown-linux-gnueabihf + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: gcc-arm-linux-gnueabihf + linker_env: CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER + linker: 
arm-linux-gnueabihf-gcc + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: armv7-linux-androideabi + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + android_ndk: true + android_api: 21 + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: aarch64-linux-android + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + android_ndk: true + android_api: 21 + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-unknown-freebsd + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + use_cross: true + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-apple-darwin + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: aarch64-apple-darwin + artifact: zeroclaw + archive_ext: tar.gz + cross_compiler: "" + linker_env: "" + linker: "" + - os: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + target: x86_64-pc-windows-msvc + artifact: zeroclaw.exe + archive_ext: zip + cross_compiler: "" + linker_env: "" + linker: "" + + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.prepare.outputs.release_ref }} + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + targets: ${{ matrix.target }} + + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + if: runner.os != 'Windows' + + - name: Install cross for cross-built targets + if: matrix.use_cross + run: | + cargo install cross --git https://github.com/cross-rs/cross + + - name: Install cross-compilation toolchain (Linux) + if: runner.os == 
'Linux' && matrix.cross_compiler != '' + run: | + sudo apt-get update -qq + sudo apt-get install -y "${{ matrix.cross_compiler }}" + + - name: Setup Android NDK + if: matrix.android_ndk + shell: bash + run: | + set -euo pipefail + NDK_VERSION="r26d" + NDK_ZIP="android-ndk-${NDK_VERSION}-linux.zip" + NDK_URL="https://dl.google.com/android/repository/${NDK_ZIP}" + NDK_ROOT="${RUNNER_TEMP}/android-ndk" + NDK_HOME="${NDK_ROOT}/android-ndk-${NDK_VERSION}" + + sudo apt-get update -qq + sudo apt-get install -y unzip + + mkdir -p "${NDK_ROOT}" + curl -fsSL "${NDK_URL}" -o "${RUNNER_TEMP}/${NDK_ZIP}" + unzip -q "${RUNNER_TEMP}/${NDK_ZIP}" -d "${NDK_ROOT}" + + echo "ANDROID_NDK_HOME=${NDK_HOME}" >> "$GITHUB_ENV" + echo "${NDK_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin" >> "$GITHUB_PATH" + + - name: Configure Android toolchain + if: matrix.android_ndk + shell: bash + run: | + echo "Setting up Android NDK toolchain for ${{ matrix.target }}" + NDK_HOME="${ANDROID_NDK_HOME:-}" + if [[ -z "$NDK_HOME" ]]; then + echo "::error::ANDROID_NDK_HOME was not configured." + exit 1 + fi + TOOLCHAIN="$NDK_HOME/toolchains/llvm/prebuilt/linux-x86_64/bin" + + # Add to path for linker resolution + echo "$TOOLCHAIN" >> "$GITHUB_PATH" + + # Set linker environment variables + if [[ "${{ matrix.target }}" == "armv7-linux-androideabi" ]]; then + ARMV7_CC="${TOOLCHAIN}/armv7a-linux-androideabi${{ matrix.android_api }}-clang" + ARMV7_CXX="${TOOLCHAIN}/armv7a-linux-androideabi${{ matrix.android_api }}-clang++" + + # Some crates still probe legacy compiler names (arm-linux-androideabi-clang). 
+ ln -sf "$ARMV7_CC" "${TOOLCHAIN}/arm-linux-androideabi-clang" + ln -sf "$ARMV7_CXX" "${TOOLCHAIN}/arm-linux-androideabi-clang++" + + { + echo "CARGO_TARGET_ARMV7_LINUX_ANDROIDEABI_LINKER=${ARMV7_CC}" + echo "CC_armv7_linux_androideabi=${ARMV7_CC}" + echo "CXX_armv7_linux_androideabi=${ARMV7_CXX}" + echo "AR_armv7_linux_androideabi=${TOOLCHAIN}/llvm-ar" + } >> "$GITHUB_ENV" + elif [[ "${{ matrix.target }}" == "aarch64-linux-android" ]]; then + AARCH64_CC="${TOOLCHAIN}/aarch64-linux-android${{ matrix.android_api }}-clang" + AARCH64_CXX="${TOOLCHAIN}/aarch64-linux-android${{ matrix.android_api }}-clang++" + + { + echo "CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER=${AARCH64_CC}" + echo "CC_aarch64_linux_android=${AARCH64_CC}" + echo "CXX_aarch64_linux_android=${AARCH64_CXX}" + echo "AR_aarch64_linux_android=${TOOLCHAIN}/llvm-ar" + } >> "$GITHUB_ENV" + fi + + - name: Build release + shell: bash + env: + LINKER_ENV: ${{ matrix.linker_env }} + LINKER: ${{ matrix.linker }} + USE_CROSS: ${{ matrix.use_cross }} + run: | + if [ -n "$LINKER_ENV" ] && [ -n "$LINKER" ]; then + echo "Using linker override: $LINKER_ENV=$LINKER" + export "$LINKER_ENV=$LINKER" + fi + if [ "$USE_CROSS" = "true" ]; then + echo "Using cross for MUSL target" + cross build --profile release-fast --locked --target ${{ matrix.target }} + else + cargo build --profile release-fast --locked --target ${{ matrix.target }} + fi + + - name: Check binary size (Unix) + if: runner.os != 'Windows' + run: bash scripts/ci/check_binary_size.sh "target/${{ matrix.target }}/release-fast/${{ matrix.artifact }}" "${{ matrix.target }}" + + - name: Package (Unix) + if: runner.os != 'Windows' + run: | + cd target/${{ matrix.target }}/release-fast + tar czf ../../../zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }} ${{ matrix.artifact }} + + - name: Package (Windows) + if: runner.os == 'Windows' + run: | + cd target/${{ matrix.target }}/release-fast + 7z a ../../../zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }} 
${{ matrix.artifact }} + + - name: Upload artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: zeroclaw-${{ matrix.target }} + path: zeroclaw-${{ matrix.target }}.${{ matrix.archive_ext }} + retention-days: 7 + + verify-artifacts: + name: Verify Artifact Set + needs: [prepare, build-release] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.prepare.outputs.release_ref }} + + - name: Download all artifacts + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: artifacts + + - name: Validate release archive contract (verify stage) + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/release_artifact_guard.py \ + --artifacts-dir artifacts \ + --contract-file .github/release/release-artifact-contract.json \ + --output-json artifacts/release-artifact-guard.verify.json \ + --output-md artifacts/release-artifact-guard.verify.md \ + --allow-extra-archives \ + --skip-manifest-files \ + --skip-sbom-files \ + --skip-notice-files \ + --fail-on-violation + + - name: Emit verify-stage artifact guard audit event + if: always() + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type release_artifact_guard_verify \ + --input-json artifacts/release-artifact-guard.verify.json \ + --output-json artifacts/audit-event-release-artifact-guard-verify.json \ + --artifact-name release-artifact-guard-verify \ + --retention-days 21 + + - name: Publish verify-stage artifact guard summary + if: always() + shell: bash + run: | + set -euo pipefail + cat artifacts/release-artifact-guard.verify.md >> "$GITHUB_STEP_SUMMARY" + + - name: Upload verify-stage artifact guard reports + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: 
release-artifact-guard-verify + path: | + artifacts/release-artifact-guard.verify.json + artifacts/release-artifact-guard.verify.md + artifacts/audit-event-release-artifact-guard-verify.json + if-no-files-found: error + retention-days: 21 + + publish: + name: Publish Release + if: needs.prepare.outputs.publish_release == 'true' + needs: [prepare, verify-artifacts] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 45 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + ref: ${{ needs.prepare.outputs.release_ref }} + + - name: Download all artifacts + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + path: artifacts + + - name: Install syft + shell: bash + run: | + set -euo pipefail + mkdir -p "${RUNNER_TEMP}/bin" + ./scripts/ci/install_syft.sh "${RUNNER_TEMP}/bin" + echo "${RUNNER_TEMP}/bin" >> "$GITHUB_PATH" + + - name: Generate SBOM (CycloneDX) + run: | + syft dir:. 
--source-name zeroclaw -o cyclonedx-json=artifacts/zeroclaw.cdx.json -o spdx-json=artifacts/zeroclaw.spdx.json + { + echo "### SBOM Generated" + echo "- CycloneDX: zeroclaw.cdx.json" + echo "- SPDX: zeroclaw.spdx.json" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Attach license and notice files + run: | + cp LICENSE-APACHE artifacts/LICENSE-APACHE + cp LICENSE-MIT artifacts/LICENSE-MIT + cp NOTICE artifacts/NOTICE + + - name: Generate release manifest + checksums + shell: bash + env: + RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }} + run: | + set -euo pipefail + python3 scripts/ci/release_manifest.py \ + --artifacts-dir artifacts \ + --release-tag "${RELEASE_TAG}" \ + --output-json artifacts/release-manifest.json \ + --output-md artifacts/release-manifest.md \ + --checksums-path artifacts/SHA256SUMS \ + --fail-empty + + - name: Generate SHA256SUMS provenance statement + shell: bash + env: + RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }} + run: | + set -euo pipefail + python3 scripts/ci/generate_provenance.py \ + --artifact artifacts/SHA256SUMS \ + --subject-name "zeroclaw-${RELEASE_TAG}-sha256sums" \ + --output artifacts/zeroclaw.sha256sums.intoto.json + + - name: Emit SHA256SUMS provenance audit event + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type release_sha256sums_provenance \ + --input-json artifacts/zeroclaw.sha256sums.intoto.json \ + --output-json artifacts/audit-event-release-sha256sums-provenance.json \ + --artifact-name release-sha256sums-provenance \ + --retention-days 30 + + - name: Validate release artifact contract (publish stage) + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/release_artifact_guard.py \ + --artifacts-dir artifacts \ + --contract-file .github/release/release-artifact-contract.json \ + --output-json artifacts/release-artifact-guard.publish.json \ + --output-md artifacts/release-artifact-guard.publish.md \ + --allow-extra-archives \ + 
--allow-extra-manifest-files \ + --allow-extra-sbom-files \ + --allow-extra-notice-files \ + --fail-on-violation + + - name: Emit publish-stage artifact guard audit event + if: always() + shell: bash + run: | + set -euo pipefail + python3 scripts/ci/emit_audit_event.py \ + --event-type release_artifact_guard_publish \ + --input-json artifacts/release-artifact-guard.publish.json \ + --output-json artifacts/audit-event-release-artifact-guard-publish.json \ + --artifact-name release-artifact-guard-publish \ + --retention-days 30 + + - name: Publish artifact guard summary + shell: bash + run: | + set -euo pipefail + cat artifacts/release-artifact-guard.publish.md >> "$GITHUB_STEP_SUMMARY" + + - name: Publish release manifest summary + shell: bash + run: | + set -euo pipefail + cat artifacts/release-manifest.md >> "$GITHUB_STEP_SUMMARY" + + - name: Install cosign + uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0 + + - name: Sign artifacts with cosign (keyless) + shell: bash + run: | + set -euo pipefail + while IFS= read -r -d '' file; do + cosign sign-blob --yes \ + --bundle="${file}.sigstore.json" \ + --output-signature="${file}.sig" \ + --output-certificate="${file}.pem" \ + "$file" + done < <(find artifacts -type f ! -name '*.sig' ! -name '*.pem' ! 
-name '*.sigstore.json' -print0) + + - name: Compose release-notes supply-chain references + shell: bash + env: + RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }} + run: | + set -euo pipefail + python3 scripts/ci/release_notes_with_supply_chain_refs.py \ + --artifacts-dir artifacts \ + --repository "${GITHUB_REPOSITORY}" \ + --release-tag "${RELEASE_TAG}" \ + --output-json artifacts/release-notes-supply-chain.json \ + --output-md artifacts/release-notes-supply-chain.md \ + --fail-on-missing + + - name: Publish release-notes supply-chain summary + shell: bash + run: | + set -euo pipefail + cat artifacts/release-notes-supply-chain.md >> "$GITHUB_STEP_SUMMARY" + + - name: Verify GHCR release tag availability + shell: bash + env: + RELEASE_TAG: ${{ needs.prepare.outputs.release_tag }} + run: | + set -euo pipefail + repo="${GITHUB_REPOSITORY,,}" + manifest_url="https://ghcr.io/v2/${repo}/manifests/${RELEASE_TAG}" + accept_header="application/vnd.oci.image.index.v1+json, application/vnd.docker.distribution.manifest.v2+json" + max_attempts=75 + sleep_seconds=20 + + for attempt in $(seq 1 "$max_attempts"); do + token_resp="$(curl -sS "https://ghcr.io/token?scope=repository:${repo}:pull" || true)" + token="$(echo "$token_resp" | sed -n 's/.*"token":"\([^"]*\)".*/\1/p')" + + if [ -z "$token" ]; then + code="000" + else + code="$(curl -sS -o /tmp/ghcr-release-manifest.json -w "%{http_code}" \ + -H "Authorization: Bearer ${token}" \ + -H "Accept: ${accept_header}" \ + "${manifest_url}" || true)" + fi + + if [ "$code" = "200" ]; then + echo "GHCR release tag is available: ${repo}:${RELEASE_TAG}" + exit 0 + fi + + if [ "$attempt" -lt "$max_attempts" ]; then + echo "Waiting for GHCR tag ${repo}:${RELEASE_TAG} (attempt ${attempt}/${max_attempts}, HTTP ${code})..." + sleep "$sleep_seconds" + fi + done + + echo "::error::GHCR tag ${repo}:${RELEASE_TAG} was not available before release publish timeout." 
+ cat /tmp/ghcr-release-manifest.json || true + exit 1 + + - name: Create GitHub Release + uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2 + with: + tag_name: ${{ needs.prepare.outputs.release_tag }} + draft: ${{ needs.prepare.outputs.draft_release == 'true' }} + body_path: artifacts/release-notes-supply-chain.md + generate_release_notes: true + files: | + artifacts/**/* + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/scripts/ci_human_review_guard.js b/.github/workflows/scripts/ci_human_review_guard.js new file mode 100644 index 0000000..b13923b --- /dev/null +++ b/.github/workflows/scripts/ci_human_review_guard.js @@ -0,0 +1,61 @@ +// Enforce at least one human approval on pull requests. +// Used by .github/workflows/ci-run.yml via actions/github-script. + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const prNumber = context.payload.pull_request?.number; + if (!prNumber) { + core.setFailed("Missing pull_request context."); + return; + } + + const botAllowlist = new Set( + (process.env.HUMAN_REVIEW_BOT_LOGINS || "github-actions[bot],dependabot[bot],coderabbitai[bot]") + .split(",") + .map((value) => value.trim().toLowerCase()) + .filter(Boolean), + ); + + const isBotAccount = (login, accountType) => { + if (!login) return false; + if ((accountType || "").toLowerCase() === "bot") return true; + if (login.endsWith("[bot]")) return true; + return botAllowlist.has(login); + }; + + const reviews = await github.paginate(github.rest.pulls.listReviews, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + + const latestReviewByUser = new Map(); + const decisiveStates = new Set(["APPROVED", "CHANGES_REQUESTED", "DISMISSED"]); + for (const review of reviews) { + const login = review.user?.login?.toLowerCase(); + if (!login) continue; + if (!decisiveStates.has(review.state)) continue; + 
latestReviewByUser.set(login, { + state: review.state, + type: review.user?.type || "", + }); + } + + const humanApprovers = []; + for (const [login, review] of latestReviewByUser.entries()) { + if (review.state !== "APPROVED") continue; + if (isBotAccount(login, review.type)) continue; + humanApprovers.push(login); + } + + if (humanApprovers.length === 0) { + core.setFailed( + "No human approving review found. At least one non-bot approval is required before merge.", + ); + return; + } + + core.info(`Human approval check passed. Approver(s): ${humanApprovers.join(", ")}`); +}; diff --git a/.github/workflows/scripts/ci_license_file_owner_guard.js b/.github/workflows/scripts/ci_license_file_owner_guard.js new file mode 100644 index 0000000..ee0befa --- /dev/null +++ b/.github/workflows/scripts/ci_license_file_owner_guard.js @@ -0,0 +1,54 @@ +// Enforce ownership rules for root license files in PRs. + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const prNumber = context.payload.pull_request?.number; + const prAuthor = context.payload.pull_request?.user?.login?.toLowerCase() || ""; + + if (!prNumber) { + core.setFailed("Missing pull_request context."); + return; + } + + const ownerAllowlist = ["willsarg"]; + + if (ownerAllowlist.length === 0) { + core.setFailed("License owner allowlist is empty."); + return; + } + + const protectedFiles = new Set(["LICENSE-APACHE", "LICENSE-MIT"]); + const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + + const changedProtectedFiles = files + .map((file) => file.filename) + .filter((name) => protectedFiles.has(name)); + + if (changedProtectedFiles.length === 0) { + core.info("No protected root license files changed in this PR."); + return; + } + + core.info(`Protected license files changed:\n- ${changedProtectedFiles.join("\n- ")}`); + core.info(`Allowed license file 
editors: ${ownerAllowlist.join(", ")}`); + + if (!prAuthor) { + core.setFailed("Unable to resolve PR author login."); + return; + } + + if (!ownerAllowlist.includes(prAuthor)) { + core.setFailed( + `Root license files (${changedProtectedFiles.join(", ")}) can only be changed by ${ownerAllowlist.join(", ")}. PR author is @${prAuthor}.`, + ); + return; + } + + core.info(`License file edit authorized for PR author: @${prAuthor}`); +}; diff --git a/.github/workflows/scripts/ci_workflow_owner_approval.js b/.github/workflows/scripts/ci_workflow_owner_approval.js new file mode 100644 index 0000000..2f3bf29 --- /dev/null +++ b/.github/workflows/scripts/ci_workflow_owner_approval.js @@ -0,0 +1,83 @@ +// Extracted from ci-run.yml step: Require owner approval for workflow file changes + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const prNumber = context.payload.pull_request?.number; + const prAuthor = context.payload.pull_request?.user?.login?.toLowerCase() || ""; + if (!prNumber) { + core.setFailed("Missing pull_request context."); + return; + } + + const baseOwners = ["theonlyhennygod", "willsarg", "chumyin"]; + const configuredOwners = (process.env.WORKFLOW_OWNER_LOGINS || "") + .split(",") + .map((login) => login.trim().toLowerCase()) + .filter(Boolean); + const ownerAllowlist = [...new Set([...baseOwners, ...configuredOwners])]; + + if (ownerAllowlist.length === 0) { + core.setFailed("Workflow owner allowlist is empty."); + return; + } + + core.info(`Workflow owner allowlist: ${ownerAllowlist.join(", ")}`); + + const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + + const workflowFiles = files + .map((file) => file.filename) + .filter((name) => name.startsWith(".github/workflows/")); + + if (workflowFiles.length === 0) { + core.info("No workflow files changed in this PR."); + return; + } + + 
core.info(`Workflow files changed:\n- ${workflowFiles.join("\n- ")}`); + + if (prAuthor && ownerAllowlist.includes(prAuthor)) { + core.info(`Workflow PR authored by allowlisted owner: @${prAuthor}`); + return; + } + + const reviews = await github.paginate(github.rest.pulls.listReviews, { + owner, + repo, + pull_number: prNumber, + per_page: 100, + }); + + const latestReviewByUser = new Map(); + for (const review of reviews) { + const login = review.user?.login; + if (!login) continue; + latestReviewByUser.set(login.toLowerCase(), review.state); + } + + const approvedUsers = [...latestReviewByUser.entries()] + .filter(([, state]) => state === "APPROVED") + .map(([login]) => login); + + if (approvedUsers.length === 0) { + core.setFailed("Workflow files changed but no approving review is present."); + return; + } + + const ownerApprover = approvedUsers.find((login) => ownerAllowlist.includes(login)); + if (!ownerApprover) { + core.setFailed( + `Workflow files changed. Approvals found (${approvedUsers.join(", ")}), but none match workflow owner allowlist.`, + ); + return; + } + + core.info(`Workflow owner approval present: @${ownerApprover}`); + +}; diff --git a/.github/workflows/scripts/lint_feedback.js b/.github/workflows/scripts/lint_feedback.js new file mode 100644 index 0000000..8b90161 --- /dev/null +++ b/.github/workflows/scripts/lint_feedback.js @@ -0,0 +1,90 @@ +// Post actionable lint failure summary as a PR comment. +// Used by the lint-feedback CI job via actions/github-script. 
+// +// Required environment variables: +// RUST_CHANGED — "true" if Rust files changed +// DOCS_CHANGED — "true" if docs files changed +// LINT_RESULT — result of the lint job +// LINT_DELTA_RESULT — result of the strict delta lint job +// DOCS_RESULT — result of the docs-quality job + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const issueNumber = context.payload.pull_request?.number; + if (!issueNumber) return; + + const marker = ""; + const rustChanged = process.env.RUST_CHANGED === "true"; + const docsChanged = process.env.DOCS_CHANGED === "true"; + const lintResult = process.env.LINT_RESULT || "skipped"; + const lintDeltaResult = process.env.LINT_DELTA_RESULT || "skipped"; + const docsResult = process.env.DOCS_RESULT || "skipped"; + + const failures = []; + if (rustChanged && !["success", "skipped"].includes(lintResult)) { + failures.push("`Lint Gate (Format + Clippy)` failed."); + } + if (rustChanged && !["success", "skipped"].includes(lintDeltaResult)) { + failures.push("`Lint Gate (Strict Delta)` failed."); + } + if (docsChanged && !["success", "skipped"].includes(docsResult)) { + failures.push("`Docs Quality` failed."); + } + + const comments = await github.paginate(github.rest.issues.listComments, { + owner, + repo, + issue_number: issueNumber, + per_page: 100, + }); + const existing = comments.find((comment) => (comment.body || "").includes(marker)); + + if (failures.length === 0) { + if (existing) { + await github.rest.issues.deleteComment({ + owner, + repo, + comment_id: existing.id, + }); + } + core.info("No lint/docs gate failures. 
No feedback comment required."); + return; + } + + const runUrl = `${context.serverUrl}/${owner}/${repo}/actions/runs/${context.runId}`; + const body = [ + marker, + "### CI lint feedback", + "", + "This PR failed one or more fast lint/documentation gates:", + "", + ...failures.map((item) => `- ${item}`), + "", + "Open the failing logs in this run:", + `- ${runUrl}`, + "", + "Local fix commands:", + "- `./scripts/ci/rust_quality_gate.sh`", + "- `./scripts/ci/rust_strict_delta_gate.sh`", + "- `./scripts/ci/docs_quality_gate.sh`", + "", + "After fixes, push a new commit and CI will re-run automatically.", + ].join("\n"); + + if (existing) { + await github.rest.issues.updateComment({ + owner, + repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body, + }); + } +}; diff --git a/.github/workflows/scripts/pr_auto_response_contributor_tier.js b/.github/workflows/scripts/pr_auto_response_contributor_tier.js new file mode 100644 index 0000000..76dc0fb --- /dev/null +++ b/.github/workflows/scripts/pr_auto_response_contributor_tier.js @@ -0,0 +1,132 @@ +// Extracted from pr-auto-response.yml step: Apply contributor tier label for issue author + +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const issue = context.payload.issue; + const pullRequest = context.payload.pull_request; + const target = issue ?? 
pullRequest; + async function loadContributorTierPolicy() { + const policyPath = process.env.LABEL_POLICY_PATH || ".github/label-policy.json"; + const fallback = { + contributorTierColor: "2ED9FF", + contributorTierRules: [ + { label: "distinguished contributor", minMergedPRs: 50 }, + { label: "principal contributor", minMergedPRs: 20 }, + { label: "experienced contributor", minMergedPRs: 10 }, + { label: "trusted contributor", minMergedPRs: 5 }, + ], + }; + try { + const { data } = await github.rest.repos.getContent({ + owner, + repo, + path: policyPath, + ref: context.payload.repository?.default_branch || "main", + }); + const json = JSON.parse(Buffer.from(data.content, "base64").toString("utf8")); + const contributorTierRules = (json.contributor_tiers || []).map((entry) => ({ + label: String(entry.label || "").trim(), + minMergedPRs: Number(entry.min_merged_prs || 0), + })); + const contributorTierColor = String(json.contributor_tier_color || "").toUpperCase(); + if (!contributorTierColor || contributorTierRules.length === 0) { + return fallback; + } + return { contributorTierColor, contributorTierRules }; + } catch (error) { + core.warning(`failed to load ${policyPath}, using fallback policy: ${error.message}`); + return fallback; + } + } + + const { contributorTierColor, contributorTierRules } = await loadContributorTierPolicy(); + const contributorTierLabels = contributorTierRules.map((rule) => rule.label); + const managedContributorLabels = new Set(contributorTierLabels); + const action = context.payload.action; + const changedLabel = context.payload.label?.name; + + if (!target) return; + if ((action === "labeled" || action === "unlabeled") && !managedContributorLabels.has(changedLabel)) { + return; + } + + const author = target.user; + if (!author || author.type === "Bot") return; + + function contributorTierDescription(rule) { + return `Contributor with ${rule.minMergedPRs}+ merged PRs.`; + } + + async function ensureContributorTierLabels() { + for (const 
rule of contributorTierRules) { + const label = rule.label; + const expectedDescription = contributorTierDescription(rule); + try { + const { data: existing } = await github.rest.issues.getLabel({ owner, repo, name: label }); + const currentColor = (existing.color || "").toUpperCase(); + const currentDescription = (existing.description || "").trim(); + if (currentColor !== contributorTierColor || currentDescription !== expectedDescription) { + await github.rest.issues.updateLabel({ + owner, + repo, + name: label, + new_name: label, + color: contributorTierColor, + description: expectedDescription, + }); + } + } catch (error) { + if (error.status !== 404) throw error; + await github.rest.issues.createLabel({ + owner, + repo, + name: label, + color: contributorTierColor, + description: expectedDescription, + }); + } + } + } + + function selectContributorTier(mergedCount) { + const matchedTier = contributorTierRules.find((rule) => mergedCount >= rule.minMergedPRs); + return matchedTier ? matchedTier.label : null; + } + + let contributorTierLabel = null; + try { + const { data: mergedSearch } = await github.rest.search.issuesAndPullRequests({ + q: `repo:${owner}/${repo} is:pr is:merged author:${author.login}`, + per_page: 1, + }); + const mergedCount = mergedSearch.total_count || 0; + contributorTierLabel = selectContributorTier(mergedCount); + } catch (error) { + core.warning(`failed to evaluate contributor tier status: ${error.message}`); + return; + } + + await ensureContributorTierLabels(); + + const { data: currentLabels } = await github.rest.issues.listLabelsOnIssue({ + owner, + repo, + issue_number: target.number, + }); + const keepLabels = currentLabels + .map((label) => label.name) + .filter((label) => !contributorTierLabels.includes(label)); + + if (contributorTierLabel) { + keepLabels.push(contributorTierLabel); + } + + await github.rest.issues.setLabels({ + owner, + repo, + issue_number: target.number, + labels: [...new Set(keepLabels)], + }); + +}; diff 
--git a/.github/workflows/scripts/pr_auto_response_labeled_routes.js b/.github/workflows/scripts/pr_auto_response_labeled_routes.js new file mode 100644 index 0000000..eb5410e --- /dev/null +++ b/.github/workflows/scripts/pr_auto_response_labeled_routes.js @@ -0,0 +1,94 @@ +// Extracted from pr-auto-response.yml step: Handle label-driven responses + +module.exports = async ({ github, context, core }) => { + const label = context.payload.label?.name; + if (!label) return; + + const issue = context.payload.issue; + const pullRequest = context.payload.pull_request; + const target = issue ?? pullRequest; + if (!target) return; + + const isIssue = Boolean(issue); + const issueNumber = target.number; + const owner = context.repo.owner; + const repo = context.repo.repo; + + const rules = [ + { + label: "r:support", + close: true, + closeIssuesOnly: true, + closeReason: "not_planned", + message: + "This looks like a usage/support request. Please use README + docs first, then open a focused bug with repro details if behavior is incorrect.", + }, + { + label: "r:needs-repro", + close: false, + message: + "Thanks for the report. Please add deterministic repro steps, exact environment, and redacted logs so maintainers can triage quickly.", + }, + { + label: "invalid", + close: true, + closeIssuesOnly: true, + closeReason: "not_planned", + message: + "Closing as invalid based on current information. If this is still relevant, open a new issue with updated evidence and reproducible steps.", + }, + { + label: "duplicate", + close: true, + closeIssuesOnly: true, + closeReason: "not_planned", + message: + "Closing as duplicate. 
Please continue discussion in the canonical linked issue/PR.", + }, + ]; + + const rule = rules.find((entry) => entry.label === label); + if (!rule) return; + + const marker = ``; + const comments = await github.paginate(github.rest.issues.listComments, { + owner, + repo, + issue_number: issueNumber, + per_page: 100, + }); + + const alreadyCommented = comments.some((comment) => + (comment.body || "").includes(marker) + ); + + if (!alreadyCommented) { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: issueNumber, + body: `${rule.message}\n\n${marker}`, + }); + } + + if (!rule.close) return; + if (rule.closeIssuesOnly && !isIssue) return; + if (target.state === "closed") return; + + if (isIssue) { + await github.rest.issues.update({ + owner, + repo, + issue_number: issueNumber, + state: "closed", + state_reason: rule.closeReason || "not_planned", + }); + } else { + await github.rest.issues.update({ + owner, + repo, + issue_number: issueNumber, + state: "closed", + }); + } +}; diff --git a/.github/workflows/scripts/pr_check_status_nudge.js b/.github/workflows/scripts/pr_check_status_nudge.js new file mode 100644 index 0000000..1d81215 --- /dev/null +++ b/.github/workflows/scripts/pr_check_status_nudge.js @@ -0,0 +1,161 @@ +// Extracted from pr-check-status.yml step: Nudge PRs that need rebase or CI refresh + +module.exports = async ({ github, context, core }) => { + const staleHours = Number(process.env.STALE_HOURS || "48"); + const ignoreLabels = new Set(["no-stale", "stale", "maintainer", "no-pr-hygiene"]); + const marker = ""; + const owner = context.repo.owner; + const repo = context.repo.repo; + + const openPrs = await github.paginate(github.rest.pulls.list, { + owner, + repo, + state: "open", + per_page: 100, + }); + + const activePrs = openPrs.filter((pr) => { + if (pr.draft) { + return false; + } + + const labels = new Set((pr.labels || []).map((label) => label.name)); + return ![...ignoreLabels].some((label) => labels.has(label)); + 
}); + + core.info(`Scanning ${activePrs.length} open PR(s) for hygiene nudges.`); + + let nudged = 0; + let skipped = 0; + + for (const pr of activePrs) { + const { data: headCommit } = await github.rest.repos.getCommit({ + owner, + repo, + ref: pr.head.sha, + }); + + const headCommitAt = + headCommit.commit?.committer?.date || headCommit.commit?.author?.date; + if (!headCommitAt) { + skipped += 1; + core.info(`#${pr.number}: missing head commit timestamp, skipping.`); + continue; + } + + const ageHours = (Date.now() - new Date(headCommitAt).getTime()) / 3600000; + if (ageHours < staleHours) { + skipped += 1; + continue; + } + + const { data: prDetail } = await github.rest.pulls.get({ + owner, + repo, + pull_number: pr.number, + }); + + const isBehindBase = prDetail.mergeable_state === "behind"; + + const { data: checkRunsData } = await github.rest.checks.listForRef({ + owner, + repo, + ref: pr.head.sha, + per_page: 100, + }); + + const ciGateRuns = (checkRunsData.check_runs || []) + .filter((run) => run.name === "CI Required Gate") + .sort((a, b) => { + const aTime = new Date(a.started_at || a.completed_at || a.created_at).getTime(); + const bTime = new Date(b.started_at || b.completed_at || b.created_at).getTime(); + return bTime - aTime; + }); + + let ciState = "missing"; + if (ciGateRuns.length > 0) { + const latest = ciGateRuns[0]; + if (latest.status !== "completed") { + ciState = "in_progress"; + } else if (["success", "neutral", "skipped"].includes(latest.conclusion || "")) { + ciState = "success"; + } else { + ciState = String(latest.conclusion || "failure"); + } + } + + const ciMissing = ciState === "missing"; + const ciFailing = !["success", "in_progress", "missing"].includes(ciState); + + if (!isBehindBase && !ciMissing && !ciFailing) { + skipped += 1; + continue; + } + + const reasons = []; + if (isBehindBase) { + reasons.push("- Branch is behind `main` (please rebase or merge the latest base branch)."); + } + if (ciMissing) { + reasons.push("- No `CI 
Required Gate` run was found for the current head commit."); + } + if (ciFailing) { + reasons.push(`- Latest \`CI Required Gate\` result is \`${ciState}\`.`); + } + + const shortSha = pr.head.sha.slice(0, 12); + const body = [ + marker, + `Hi @${pr.user.login}, friendly automation nudge from PR hygiene.`, + "", + `This PR has had no new commits for **${Math.floor(ageHours)}h** and still needs an update before merge:`, + "", + ...reasons, + "", + "### Recommended next steps", + "1. Rebase your branch on `main`.", + "2. Push the updated branch and re-run checks (or use **Re-run failed jobs**).", + "3. Post fresh validation output in this PR thread.", + "", + "Maintainers: apply `no-stale` to opt out for accepted-but-blocked work.", + `Head SHA: \`${shortSha}\``, + ].join("\n"); + + const { data: comments } = await github.rest.issues.listComments({ + owner, + repo, + issue_number: pr.number, + per_page: 100, + }); + + const existing = comments.find( + (comment) => comment.user?.type === "Bot" && comment.body?.includes(marker), + ); + + if (existing) { + if (existing.body === body) { + skipped += 1; + continue; + } + + await github.rest.issues.updateComment({ + owner, + repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: pr.number, + body, + }); + } + + nudged += 1; + core.info(`#${pr.number}: hygiene nudge posted/updated.`); + } + + core.info(`Done. Nudged=${nudged}, skipped=${skipped}`); +}; diff --git a/.github/workflows/scripts/pr_intake_checks.js b/.github/workflows/scripts/pr_intake_checks.js new file mode 100644 index 0000000..33d188f --- /dev/null +++ b/.github/workflows/scripts/pr_intake_checks.js @@ -0,0 +1,202 @@ +// Run safe intake checks for PR events and maintain a single sticky comment. +// Used by .github/workflows/pr-intake-checks.yml via actions/github-script. 
+ +module.exports = async ({ github, context, core }) => { + const owner = context.repo.owner; + const repo = context.repo.repo; + const pr = context.payload.pull_request; + if (!pr) return; + + const marker = ""; + const legacyMarker = ""; + const requiredSections = [ + "## Summary", + "## Validation Evidence (required)", + "## Security Impact (required)", + "## Privacy and Data Hygiene (required)", + "## Rollback Plan (required)", + ]; + const body = pr.body || ""; + const linearKeyRegex = /\b(?:RMN|CDV|COM)-\d+\b/g; + const linearKeys = Array.from( + new Set([...(pr.title.match(linearKeyRegex) || []), ...(body.match(linearKeyRegex) || [])]), + ); + + const missingSections = requiredSections.filter((section) => !body.includes(section)); + const missingFields = []; + const requiredFieldChecks = [ + ["summary problem", /- Problem:\s*\S+/m], + ["summary why it matters", /- Why it matters:\s*\S+/m], + ["summary what changed", /- What changed:\s*\S+/m], + ["validation commands", /Commands and result summary:\s*[\s\S]*```/m], + ["security risk/mitigation", /- New permissions\/capabilities\?\s*\(`Yes\/No`\):\s*\S+/m], + ["privacy status", /- Data-hygiene status\s*\(`pass\|needs-follow-up`\):\s*\S+/m], + ["rollback plan", /- Fast rollback command\/path:\s*\S+/m], + ]; + for (const [name, pattern] of requiredFieldChecks) { + if (!pattern.test(body)) { + missingFields.push(name); + } + } + + const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: pr.number, + per_page: 100, + }); + + const formatWarnings = []; + const dangerousProblems = []; + for (const file of files) { + const patch = file.patch || ""; + if (!patch) continue; + const lines = patch.split("\n"); + for (let idx = 0; idx < lines.length; idx += 1) { + const line = lines[idx]; + if (!line.startsWith("+") || line.startsWith("+++")) continue; + const added = line.slice(1); + const lineNo = idx + 1; + if (/\t/.test(added)) { + 
formatWarnings.push(`${file.filename}:patch#${lineNo} contains tab characters`); + } + if (/[ \t]+$/.test(added)) { + formatWarnings.push(`${file.filename}:patch#${lineNo} contains trailing whitespace`); + } + if (/^(<<<<<<<|=======|>>>>>>>)/.test(added)) { + dangerousProblems.push(`${file.filename}:patch#${lineNo} contains merge conflict markers`); + } + } + } + + const workflowFilesChanged = files + .map((file) => file.filename) + .filter((name) => name.startsWith(".github/workflows/")); + + const advisoryFindings = []; + const blockingFindings = []; + if (missingSections.length > 0) { + advisoryFindings.push(`Missing required PR template sections: ${missingSections.join(", ")}`); + } + if (missingFields.length > 0) { + advisoryFindings.push(`Incomplete required PR template fields: ${missingFields.join(", ")}`); + } + if (formatWarnings.length > 0) { + advisoryFindings.push(`Formatting issues in added lines (${formatWarnings.length})`); + } + if (dangerousProblems.length > 0) { + blockingFindings.push(`Dangerous patch markers found (${dangerousProblems.length})`); + } + if (linearKeys.length === 0) { + advisoryFindings.push( + "Missing Linear issue key reference (`RMN-`, `CDV-`, or `COM-`) in PR title/body (recommended for traceability, non-blocking).", + ); + } + + const comments = await github.paginate(github.rest.issues.listComments, { + owner, + repo, + issue_number: pr.number, + per_page: 100, + }); + const existing = comments.find((comment) => { + const body = comment.body || ""; + return body.includes(marker) || body.includes(legacyMarker); + }); + + if (advisoryFindings.length === 0 && blockingFindings.length === 0) { + if (existing) { + await github.rest.issues.deleteComment({ + owner, + repo, + comment_id: existing.id, + }); + } + core.info("PR intake sanity checks passed."); + return; + } + + const runUrl = `${context.serverUrl}/${owner}/${repo}/actions/runs/${context.runId}`; + const advisoryDetails = []; + if (formatWarnings.length > 0) { + 
advisoryDetails.push(...formatWarnings.slice(0, 20).map((entry) => `- ${entry}`)); + if (formatWarnings.length > 20) { + advisoryDetails.push(`- ...and ${formatWarnings.length - 20} more issue(s)`); + } + } + const blockingDetails = []; + if (dangerousProblems.length > 0) { + blockingDetails.push(...dangerousProblems.slice(0, 20).map((entry) => `- ${entry}`)); + if (dangerousProblems.length > 20) { + blockingDetails.push(`- ...and ${dangerousProblems.length - 20} more issue(s)`); + } + } + + const isBlocking = blockingFindings.length > 0; + + const ownerApprovalNote = workflowFilesChanged.length > 0 + ? [ + "", + "Workflow files changed in this PR:", + ...workflowFilesChanged.map((name) => `- \`${name}\``), + "", + "Reminder: workflow changes require owner approval via `CI Required Gate`.", + ].join("\n") + : ""; + + const commentBody = [ + marker, + isBlocking + ? "### PR intake checks failed (blocking)" + : "### PR intake checks found warnings (non-blocking)", + "", + isBlocking + ? "Fast safe checks found blocking safety issues:" + : "Fast safe checks found advisory issues. CI lint/test/build gates still enforce merge quality.", + ...(blockingFindings.length > 0 ? blockingFindings.map((entry) => `- ${entry}`) : []), + ...(advisoryFindings.length > 0 ? advisoryFindings.map((entry) => `- ${entry}`) : []), + "", + "Action items:", + "1. Complete required PR template sections/fields.", + "2. (Recommended) Link this PR to one active Linear issue key (`RMN-xxx`/`CDV-xxx`/`COM-xxx`) for traceability.", + "3. Remove tabs, trailing whitespace, and merge conflict markers from added lines.", + "4. Re-run local checks before pushing:", + " - `./scripts/ci/rust_quality_gate.sh`", + " - `./scripts/ci/rust_strict_delta_gate.sh`", + " - `./scripts/ci/docs_quality_gate.sh`", + "", + `Detected Linear keys: ${linearKeys.length > 0 ? linearKeys.join(", ") : "none"}`, + "", + `Run logs: ${runUrl}`, + "", + "Detected blocking line issues (sample):", + ...(blockingDetails.length > 0 ? 
blockingDetails : ["- none"]), + "", + "Detected advisory line issues (sample):", + ...(advisoryDetails.length > 0 ? advisoryDetails : ["- none"]), + ownerApprovalNote, + ].join("\n"); + + if (existing) { + await github.rest.issues.updateComment({ + owner, + repo, + comment_id: existing.id, + body: commentBody, + }); + } else { + await github.rest.issues.createComment({ + owner, + repo, + issue_number: pr.number, + body: commentBody, + }); + } + + if (isBlocking) { + core.setFailed("PR intake sanity checks found blocking issues. See sticky comment for details."); + return; + } + + core.info("PR intake sanity checks found advisory issues only."); +}; diff --git a/.github/workflows/scripts/pr_labeler.js b/.github/workflows/scripts/pr_labeler.js new file mode 100644 index 0000000..7232606 --- /dev/null +++ b/.github/workflows/scripts/pr_labeler.js @@ -0,0 +1,805 @@ +// Apply managed PR labels (size/risk/path/module/contributor tiers). +// Extracted from pr-labeler workflow inline github-script for maintainability. 
+ +module.exports = async ({ github, context, core }) => { +const pr = context.payload.pull_request; +const owner = context.repo.owner; +const repo = context.repo.repo; +const action = context.payload.action; +const changedLabel = context.payload.label?.name; + +const sizeLabels = ["size: XS", "size: S", "size: M", "size: L", "size: XL"]; +const computedRiskLabels = ["risk: low", "risk: medium", "risk: high"]; +const manualRiskOverrideLabel = "risk: manual"; +const managedEnforcedLabels = new Set([ + ...sizeLabels, + manualRiskOverrideLabel, + ...computedRiskLabels, +]); +if ((action === "labeled" || action === "unlabeled") && !managedEnforcedLabels.has(changedLabel)) { + core.info(`skip non-size/risk label event: ${changedLabel || "unknown"}`); + return; +} + +async function loadContributorTierPolicy() { + const policyPath = process.env.LABEL_POLICY_PATH || ".github/label-policy.json"; + const fallback = { + contributorTierColor: "2ED9FF", + contributorTierRules: [ + { label: "distinguished contributor", minMergedPRs: 50 }, + { label: "principal contributor", minMergedPRs: 20 }, + { label: "experienced contributor", minMergedPRs: 10 }, + { label: "trusted contributor", minMergedPRs: 5 }, + ], + }; + try { + const { data } = await github.rest.repos.getContent({ + owner, + repo, + path: policyPath, + ref: context.payload.repository?.default_branch || "main", + }); + const json = JSON.parse(Buffer.from(data.content, "base64").toString("utf8")); + const contributorTierRules = (json.contributor_tiers || []).map((entry) => ({ + label: String(entry.label || "").trim(), + minMergedPRs: Number(entry.min_merged_prs || 0), + })); + const contributorTierColor = String(json.contributor_tier_color || "").toUpperCase(); + if (!contributorTierColor || contributorTierRules.length === 0) { + return fallback; + } + return { contributorTierColor, contributorTierRules }; + } catch (error) { + core.warning(`failed to load ${policyPath}, using fallback policy: ${error.message}`); + 
return fallback; + } +} + +const { contributorTierColor, contributorTierRules } = await loadContributorTierPolicy(); +const contributorTierLabels = contributorTierRules.map((rule) => rule.label); + +const managedPathLabels = [ + "docs", + "dependencies", + "ci", + "core", + "agent", + "channel", + "config", + "cron", + "daemon", + "doctor", + "gateway", + "health", + "heartbeat", + "integration", + "memory", + "observability", + "onboard", + "provider", + "runtime", + "security", + "service", + "skillforge", + "skills", + "tool", + "tunnel", + "tests", + "scripts", + "dev", +]; +const managedPathLabelSet = new Set(managedPathLabels); + +const moduleNamespaceRules = [ + { root: "src/agent/", prefix: "agent", coreEntries: new Set(["mod.rs"]) }, + { root: "src/channels/", prefix: "channel", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/config/", prefix: "config", coreEntries: new Set(["mod.rs", "schema.rs"]) }, + { root: "src/cron/", prefix: "cron", coreEntries: new Set(["mod.rs"]) }, + { root: "src/daemon/", prefix: "daemon", coreEntries: new Set(["mod.rs"]) }, + { root: "src/doctor/", prefix: "doctor", coreEntries: new Set(["mod.rs"]) }, + { root: "src/gateway/", prefix: "gateway", coreEntries: new Set(["mod.rs"]) }, + { root: "src/health/", prefix: "health", coreEntries: new Set(["mod.rs"]) }, + { root: "src/heartbeat/", prefix: "heartbeat", coreEntries: new Set(["mod.rs"]) }, + { root: "src/integrations/", prefix: "integration", coreEntries: new Set(["mod.rs", "registry.rs"]) }, + { root: "src/memory/", prefix: "memory", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/observability/", prefix: "observability", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/onboard/", prefix: "onboard", coreEntries: new Set(["mod.rs"]) }, + { root: "src/providers/", prefix: "provider", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/runtime/", prefix: "runtime", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { 
root: "src/security/", prefix: "security", coreEntries: new Set(["mod.rs"]) }, + { root: "src/service/", prefix: "service", coreEntries: new Set(["mod.rs"]) }, + { root: "src/skillforge/", prefix: "skillforge", coreEntries: new Set(["mod.rs"]) }, + { root: "src/skills/", prefix: "skills", coreEntries: new Set(["mod.rs"]) }, + { root: "src/tools/", prefix: "tool", coreEntries: new Set(["mod.rs", "traits.rs"]) }, + { root: "src/tunnel/", prefix: "tunnel", coreEntries: new Set(["mod.rs"]) }, +]; +const managedModulePrefixes = [...new Set(moduleNamespaceRules.map((rule) => `${rule.prefix}:`))]; +const orderedOtherLabelStyles = [ + { label: "health", color: "8EC9B8" }, + { label: "tool", color: "7FC4B6" }, + { label: "agent", color: "86C4A2" }, + { label: "memory", color: "8FCB99" }, + { label: "channel", color: "7EB6F2" }, + { label: "service", color: "95C7B6" }, + { label: "integration", color: "8DC9AE" }, + { label: "tunnel", color: "9FC8B3" }, + { label: "config", color: "AABCD0" }, + { label: "observability", color: "84C9D0" }, + { label: "docs", color: "8FBBE0" }, + { label: "dev", color: "B9C1CC" }, + { label: "tests", color: "9DC8C7" }, + { label: "skills", color: "BFC89B" }, + { label: "skillforge", color: "C9C39B" }, + { label: "provider", color: "958DF0" }, + { label: "runtime", color: "A3ADD8" }, + { label: "heartbeat", color: "C0C88D" }, + { label: "daemon", color: "C8C498" }, + { label: "doctor", color: "C1CF9D" }, + { label: "onboard", color: "D2BF86" }, + { label: "cron", color: "D2B490" }, + { label: "ci", color: "AEB4CE" }, + { label: "dependencies", color: "9FB1DE" }, + { label: "gateway", color: "B5A8E5" }, + { label: "security", color: "E58D85" }, + { label: "core", color: "C8A99B" }, + { label: "scripts", color: "C9B49F" }, +]; +const otherLabelDisplayOrder = orderedOtherLabelStyles.map((entry) => entry.label); +const modulePrefixSet = new Set(moduleNamespaceRules.map((rule) => rule.prefix)); +const modulePrefixPriority = 
otherLabelDisplayOrder.filter((label) => modulePrefixSet.has(label)); +const pathLabelPriority = [...otherLabelDisplayOrder]; +const riskDisplayOrder = ["risk: high", "risk: medium", "risk: low", "risk: manual"]; +const sizeDisplayOrder = ["size: XS", "size: S", "size: M", "size: L", "size: XL"]; +const contributorDisplayOrder = [ + "distinguished contributor", + "principal contributor", + "experienced contributor", + "trusted contributor", +]; +const modulePrefixPriorityIndex = new Map( + modulePrefixPriority.map((prefix, index) => [prefix, index]) +); +const pathLabelPriorityIndex = new Map( + pathLabelPriority.map((label, index) => [label, index]) +); +const riskPriorityIndex = new Map( + riskDisplayOrder.map((label, index) => [label, index]) +); +const sizePriorityIndex = new Map( + sizeDisplayOrder.map((label, index) => [label, index]) +); +const contributorPriorityIndex = new Map( + contributorDisplayOrder.map((label, index) => [label, index]) +); + +const otherLabelColors = Object.fromEntries( + orderedOtherLabelStyles.map((entry) => [entry.label, entry.color]) +); +const staticLabelColors = { + "size: XS": "E7CDD3", + "size: S": "E1BEC7", + "size: M": "DBB0BB", + "size: L": "D4A2AF", + "size: XL": "CE94A4", + "risk: low": "97D3A6", + "risk: medium": "E4C47B", + "risk: high": "E98E88", + "risk: manual": "B7A4E0", + ...otherLabelColors, +}; +const staticLabelDescriptions = { + "size: XS": "Auto size: <=80 non-doc changed lines.", + "size: S": "Auto size: 81-250 non-doc changed lines.", + "size: M": "Auto size: 251-500 non-doc changed lines.", + "size: L": "Auto size: 501-1000 non-doc changed lines.", + "size: XL": "Auto size: >1000 non-doc changed lines.", + "risk: low": "Auto risk: docs/chore-only paths.", + "risk: medium": "Auto risk: src/** or dependency/config changes.", + "risk: high": "Auto risk: security/runtime/gateway/tools/workflows.", + "risk: manual": "Maintainer override: keep selected risk label.", + docs: "Auto scope: docs/markdown/template 
files changed.", + dependencies: "Auto scope: dependency manifest/lock/policy changed.", + ci: "Auto scope: CI/workflow/hook files changed.", + core: "Auto scope: root src/*.rs files changed.", + agent: "Auto scope: src/agent/** changed.", + channel: "Auto scope: src/channels/** changed.", + config: "Auto scope: src/config/** changed.", + cron: "Auto scope: src/cron/** changed.", + daemon: "Auto scope: src/daemon/** changed.", + doctor: "Auto scope: src/doctor/** changed.", + gateway: "Auto scope: src/gateway/** changed.", + health: "Auto scope: src/health/** changed.", + heartbeat: "Auto scope: src/heartbeat/** changed.", + integration: "Auto scope: src/integrations/** changed.", + memory: "Auto scope: src/memory/** changed.", + observability: "Auto scope: src/observability/** changed.", + onboard: "Auto scope: src/onboard/** changed.", + provider: "Auto scope: src/providers/** changed.", + runtime: "Auto scope: src/runtime/** changed.", + security: "Auto scope: src/security/** changed.", + service: "Auto scope: src/service/** changed.", + skillforge: "Auto scope: src/skillforge/** changed.", + skills: "Auto scope: src/skills/** changed.", + tool: "Auto scope: src/tools/** changed.", + tunnel: "Auto scope: src/tunnel/** changed.", + tests: "Auto scope: tests/** changed.", + scripts: "Auto scope: scripts/** changed.", + dev: "Auto scope: dev/** changed.", +}; +for (const label of contributorTierLabels) { + staticLabelColors[label] = contributorTierColor; + const rule = contributorTierRules.find((entry) => entry.label === label); + if (rule) { + staticLabelDescriptions[label] = `Contributor with ${rule.minMergedPRs}+ merged PRs.`; + } +} + +const modulePrefixColors = Object.fromEntries( + modulePrefixPriority.map((prefix) => [ + `${prefix}:`, + otherLabelColors[prefix] || "BFDADC", + ]) +); + +const providerKeywordHints = [ + "deepseek", + "moonshot", + "kimi", + "qwen", + "mistral", + "doubao", + "baichuan", + "yi", + "siliconflow", + "vertex", + "azure", + 
"perplexity", + "venice", + "vercel", + "cloudflare", + "synthetic", + "opencode", + "zai", + "glm", + "minimax", + "bedrock", + "qianfan", + "groq", + "together", + "fireworks", + "novita", + "cohere", + "openai", + "openrouter", + "anthropic", + "gemini", + "ollama", +]; + +const channelKeywordHints = [ + "telegram", + "discord", + "slack", + "whatsapp", + "matrix", + "irc", + "imessage", + "email", + "cli", +]; + +function isDocsLike(path) { + return ( + path.startsWith("docs/") || + path.endsWith(".md") || + path.endsWith(".mdx") || + path === "LICENSE" || + path === ".markdownlint-cli2.yaml" || + path === ".github/pull_request_template.md" || + path.startsWith(".github/ISSUE_TEMPLATE/") + ); +} + +function normalizeLabelSegment(segment) { + return (segment || "") + .toLowerCase() + .replace(/\.rs$/g, "") + .replace(/[^a-z0-9_-]+/g, "-") + .replace(/^[-_]+|[-_]+$/g, "") + .slice(0, 40); +} + +function containsKeyword(text, keyword) { + const escaped = keyword.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + const pattern = new RegExp(`(^|[^a-z0-9_])${escaped}([^a-z0-9_]|$)`, "i"); + return pattern.test(text); +} + +function formatModuleLabel(prefix, segment) { + return `${prefix}: ${segment}`; +} + +function parseModuleLabel(label) { + if (typeof label !== "string") return null; + const match = label.match(/^([^:]+):\s*(.+)$/); + if (!match) return null; + const prefix = match[1].trim().toLowerCase(); + const segment = (match[2] || "").trim().toLowerCase(); + if (!prefix || !segment) return null; + return { prefix, segment }; +} + +function sortByPriority(labels, priorityIndex) { + return [...new Set(labels)].sort((left, right) => { + const leftPriority = priorityIndex.has(left) ? priorityIndex.get(left) : Number.MAX_SAFE_INTEGER; + const rightPriority = priorityIndex.has(right) + ? 
priorityIndex.get(right) + : Number.MAX_SAFE_INTEGER; + if (leftPriority !== rightPriority) return leftPriority - rightPriority; + return left.localeCompare(right); + }); +} + +function sortModuleLabels(labels) { + return [...new Set(labels)].sort((left, right) => { + const leftParsed = parseModuleLabel(left); + const rightParsed = parseModuleLabel(right); + if (!leftParsed || !rightParsed) return left.localeCompare(right); + + const leftPrefixPriority = modulePrefixPriorityIndex.has(leftParsed.prefix) + ? modulePrefixPriorityIndex.get(leftParsed.prefix) + : Number.MAX_SAFE_INTEGER; + const rightPrefixPriority = modulePrefixPriorityIndex.has(rightParsed.prefix) + ? modulePrefixPriorityIndex.get(rightParsed.prefix) + : Number.MAX_SAFE_INTEGER; + + if (leftPrefixPriority !== rightPrefixPriority) { + return leftPrefixPriority - rightPrefixPriority; + } + if (leftParsed.prefix !== rightParsed.prefix) { + return leftParsed.prefix.localeCompare(rightParsed.prefix); + } + + const leftIsCore = leftParsed.segment === "core"; + const rightIsCore = rightParsed.segment === "core"; + if (leftIsCore !== rightIsCore) return leftIsCore ? 
1 : -1; + + return leftParsed.segment.localeCompare(rightParsed.segment); + }); +} + +function refineModuleLabels(rawLabels) { + const refined = new Set(rawLabels); + const segmentsByPrefix = new Map(); + + for (const label of rawLabels) { + const parsed = parseModuleLabel(label); + if (!parsed) continue; + if (!segmentsByPrefix.has(parsed.prefix)) { + segmentsByPrefix.set(parsed.prefix, new Set()); + } + segmentsByPrefix.get(parsed.prefix).add(parsed.segment); + } + + for (const [prefix, segments] of segmentsByPrefix) { + const hasSpecificSegment = [...segments].some((segment) => segment !== "core"); + if (hasSpecificSegment) { + refined.delete(formatModuleLabel(prefix, "core")); + } + } + + return refined; +} + +function compactModuleLabels(labels) { + const groupedSegments = new Map(); + const compactedModuleLabels = new Set(); + const forcePathPrefixes = new Set(); + + for (const label of labels) { + const parsed = parseModuleLabel(label); + if (!parsed) { + compactedModuleLabels.add(label); + continue; + } + if (!groupedSegments.has(parsed.prefix)) { + groupedSegments.set(parsed.prefix, new Set()); + } + groupedSegments.get(parsed.prefix).add(parsed.segment); + } + + for (const [prefix, segments] of groupedSegments) { + const uniqueSegments = [...new Set([...segments].filter(Boolean))]; + if (uniqueSegments.length === 0) continue; + + if (uniqueSegments.length === 1) { + compactedModuleLabels.add(formatModuleLabel(prefix, uniqueSegments[0])); + } else { + forcePathPrefixes.add(prefix); + } + } + + return { + moduleLabels: compactedModuleLabels, + forcePathPrefixes, + }; +} + +function colorForLabel(label) { + if (staticLabelColors[label]) return staticLabelColors[label]; + const matchedPrefix = Object.keys(modulePrefixColors).find((prefix) => label.startsWith(prefix)); + if (matchedPrefix) return modulePrefixColors[matchedPrefix]; + return "BFDADC"; +} + +function descriptionForLabel(label) { + if (staticLabelDescriptions[label]) return 
staticLabelDescriptions[label]; + + const parsed = parseModuleLabel(label); + if (parsed) { + if (parsed.segment === "core") { + return `Auto module: ${parsed.prefix} core files changed.`; + } + return `Auto module: ${parsed.prefix}/${parsed.segment} changed.`; + } + + return "Auto-managed label."; +} + +async function ensureLabel(name, existing = null) { + const expectedColor = colorForLabel(name); + const expectedDescription = descriptionForLabel(name); + try { + const current = existing || (await github.rest.issues.getLabel({ owner, repo, name })).data; + const currentColor = (current.color || "").toUpperCase(); + const currentDescription = (current.description || "").trim(); + if (currentColor !== expectedColor || currentDescription !== expectedDescription) { + await github.rest.issues.updateLabel({ + owner, + repo, + name, + new_name: name, + color: expectedColor, + description: expectedDescription, + }); + } + } catch (error) { + if (error.status !== 404) throw error; + await github.rest.issues.createLabel({ + owner, + repo, + name, + color: expectedColor, + description: expectedDescription, + }); + } +} + +function isManagedLabel(label) { + if (label === manualRiskOverrideLabel) return true; + if (sizeLabels.includes(label) || computedRiskLabels.includes(label)) return true; + if (managedPathLabelSet.has(label)) return true; + if (contributorTierLabels.includes(label)) return true; + if (managedModulePrefixes.some((prefix) => label.startsWith(prefix))) return true; + return false; +} + +async function ensureManagedRepoLabelsMetadata() { + const repoLabels = await github.paginate(github.rest.issues.listLabelsForRepo, { + owner, + repo, + per_page: 100, + }); + + for (const existingLabel of repoLabels) { + const labelName = existingLabel.name || ""; + if (!isManagedLabel(labelName)) continue; + await ensureLabel(labelName, existingLabel); + } +} + +function selectContributorTier(mergedCount) { + const matchedTier = contributorTierRules.find((rule) => 
mergedCount >= rule.minMergedPRs); + return matchedTier ? matchedTier.label : null; +} + +if (context.eventName === "workflow_dispatch") { + const mode = (context.payload.inputs?.mode || "audit").toLowerCase(); + const shouldRepair = mode === "repair"; + const repoLabels = await github.paginate(github.rest.issues.listLabelsForRepo, { + owner, + repo, + per_page: 100, + }); + + let managedScanned = 0; + const drifts = []; + + for (const existingLabel of repoLabels) { + const labelName = existingLabel.name || ""; + if (!isManagedLabel(labelName)) continue; + managedScanned += 1; + + const expectedColor = colorForLabel(labelName); + const expectedDescription = descriptionForLabel(labelName); + const currentColor = (existingLabel.color || "").toUpperCase(); + const currentDescription = (existingLabel.description || "").trim(); + if (currentColor !== expectedColor || currentDescription !== expectedDescription) { + drifts.push({ + name: labelName, + currentColor, + expectedColor, + currentDescription, + expectedDescription, + }); + if (shouldRepair) { + await ensureLabel(labelName, existingLabel); + } + } + } + + core.summary + .addHeading("Managed Label Governance", 2) + .addRaw(`Mode: ${shouldRepair ? 
"repair" : "audit"}`) + .addEOL() + .addRaw(`Managed labels scanned: ${managedScanned}`) + .addEOL() + .addRaw(`Drifts found: ${drifts.length}`) + .addEOL(); + + if (drifts.length > 0) { + const sample = drifts.slice(0, 30).map((entry) => [ + entry.name, + `${entry.currentColor} -> ${entry.expectedColor}`, + `${entry.currentDescription || "(blank)"} -> ${entry.expectedDescription}`, + ]); + core.summary.addTable([ + [{ data: "Label", header: true }, { data: "Color", header: true }, { data: "Description", header: true }], + ...sample, + ]); + if (drifts.length > sample.length) { + core.summary + .addRaw(`Additional drifts not shown: ${drifts.length - sample.length}`) + .addEOL(); + } + } + + await core.summary.write(); + + if (!shouldRepair && drifts.length > 0) { + core.info(`Managed-label metadata drifts detected: ${drifts.length}. Re-run with mode=repair to auto-fix.`); + } else if (shouldRepair) { + core.info(`Managed-label metadata repair applied to ${drifts.length} labels.`); + } else { + core.info("No managed-label metadata drift detected."); + } + + return; +} + +const files = await github.paginate(github.rest.pulls.listFiles, { + owner, + repo, + pull_number: pr.number, + per_page: 100, +}); + +const detectedModuleLabels = new Set(); +for (const file of files) { + const path = (file.filename || "").toLowerCase(); + for (const rule of moduleNamespaceRules) { + if (!path.startsWith(rule.root)) continue; + + const relative = path.slice(rule.root.length); + if (!relative) continue; + + const first = relative.split("/")[0]; + const firstStem = first.endsWith(".rs") ? 
first.slice(0, -3) : first; + let segment = firstStem; + + if (rule.coreEntries.has(first) || rule.coreEntries.has(firstStem)) { + segment = "core"; + } + + segment = normalizeLabelSegment(segment); + if (!segment) continue; + + detectedModuleLabels.add(formatModuleLabel(rule.prefix, segment)); + } +} + +const providerRelevantFiles = files.filter((file) => { + const path = file.filename || ""; + return ( + path.startsWith("src/providers/") || + path.startsWith("src/integrations/") || + path.startsWith("src/onboard/") || + path.startsWith("src/config/") + ); +}); + +if (providerRelevantFiles.length > 0) { + const searchableText = [ + pr.title || "", + pr.body || "", + ...providerRelevantFiles.map((file) => file.filename || ""), + ...providerRelevantFiles.map((file) => file.patch || ""), + ] + .join("\n") + .toLowerCase(); + + for (const keyword of providerKeywordHints) { + if (containsKeyword(searchableText, keyword)) { + detectedModuleLabels.add(formatModuleLabel("provider", keyword)); + } + } +} + +const channelRelevantFiles = files.filter((file) => { + const path = file.filename || ""; + return ( + path.startsWith("src/channels/") || + path.startsWith("src/onboard/") || + path.startsWith("src/config/") + ); +}); + +if (channelRelevantFiles.length > 0) { + const searchableText = [ + pr.title || "", + pr.body || "", + ...channelRelevantFiles.map((file) => file.filename || ""), + ...channelRelevantFiles.map((file) => file.patch || ""), + ] + .join("\n") + .toLowerCase(); + + for (const keyword of channelKeywordHints) { + if (containsKeyword(searchableText, keyword)) { + detectedModuleLabels.add(formatModuleLabel("channel", keyword)); + } + } +} + +const refinedModuleLabels = refineModuleLabels(detectedModuleLabels); +const compactedModuleState = compactModuleLabels(refinedModuleLabels); +const selectedModuleLabels = compactedModuleState.moduleLabels; +const forcePathPrefixes = compactedModuleState.forcePathPrefixes; +const modulePrefixesWithLabels = new Set( + 
[...selectedModuleLabels] + .map((label) => parseModuleLabel(label)?.prefix) + .filter(Boolean) +); + +const { data: currentLabels } = await github.rest.issues.listLabelsOnIssue({ + owner, + repo, + issue_number: pr.number, +}); +const currentLabelNames = currentLabels.map((label) => label.name); +const currentPathLabels = currentLabelNames.filter((label) => managedPathLabelSet.has(label)); +const candidatePathLabels = new Set([...currentPathLabels, ...forcePathPrefixes]); + +const dedupedPathLabels = [...candidatePathLabels].filter((label) => { + if (label === "core") return true; + if (forcePathPrefixes.has(label)) return true; + return !modulePrefixesWithLabels.has(label); +}); + +const excludedLockfiles = new Set(["Cargo.lock"]); +const changedLines = files.reduce((total, file) => { + const path = file.filename || ""; + if (isDocsLike(path) || excludedLockfiles.has(path)) { + return total; + } + return total + (file.additions || 0) + (file.deletions || 0); +}, 0); + +let sizeLabel = "size: XL"; +if (changedLines <= 80) sizeLabel = "size: XS"; +else if (changedLines <= 250) sizeLabel = "size: S"; +else if (changedLines <= 500) sizeLabel = "size: M"; +else if (changedLines <= 1000) sizeLabel = "size: L"; + +const hasHighRiskPath = files.some((file) => { + const path = file.filename || ""; + return ( + path.startsWith("src/security/") || + path.startsWith("src/runtime/") || + path.startsWith("src/gateway/") || + path.startsWith("src/tools/") || + path.startsWith(".github/workflows/") + ); +}); + +const hasMediumRiskPath = files.some((file) => { + const path = file.filename || ""; + return ( + path.startsWith("src/") || + path === "Cargo.toml" || + path === "Cargo.lock" || + path === "deny.toml" || + path.startsWith(".githooks/") + ); +}); + +let riskLabel = "risk: low"; +if (hasHighRiskPath) { + riskLabel = "risk: high"; +} else if (hasMediumRiskPath) { + riskLabel = "risk: medium"; +} + +await ensureManagedRepoLabelsMetadata(); + +const labelsToEnsure = new Set([ 
+ ...sizeLabels, + ...computedRiskLabels, + manualRiskOverrideLabel, + ...managedPathLabels, + ...contributorTierLabels, + ...selectedModuleLabels, +]); + +for (const label of labelsToEnsure) { + await ensureLabel(label); +} + +let contributorTierLabel = null; +const authorLogin = pr.user?.login; +if (authorLogin && pr.user?.type !== "Bot") { + try { + const { data: mergedSearch } = await github.rest.search.issuesAndPullRequests({ + q: `repo:${owner}/${repo} is:pr is:merged author:${authorLogin}`, + per_page: 1, + }); + const mergedCount = mergedSearch.total_count || 0; + contributorTierLabel = selectContributorTier(mergedCount); + } catch (error) { + core.warning(`failed to compute contributor tier label: ${error.message}`); + } +} + +const hasManualRiskOverride = currentLabelNames.includes(manualRiskOverrideLabel); +const keepNonManagedLabels = currentLabelNames.filter((label) => { + if (label === manualRiskOverrideLabel) return true; + if (contributorTierLabels.includes(label)) return false; + if (sizeLabels.includes(label) || computedRiskLabels.includes(label)) return false; + if (managedPathLabelSet.has(label)) return false; + if (managedModulePrefixes.some((prefix) => label.startsWith(prefix))) return false; + return true; +}); + +const manualRiskSelection = + currentLabelNames.find((label) => computedRiskLabels.includes(label)) || riskLabel; + +const moduleLabelList = sortModuleLabels([...selectedModuleLabels]); +const contributorLabelList = contributorTierLabel ? [contributorTierLabel] : []; +const selectedRiskLabels = hasManualRiskOverride + ? 
sortByPriority([manualRiskSelection, manualRiskOverrideLabel], riskPriorityIndex) + : sortByPriority([riskLabel], riskPriorityIndex); +const selectedSizeLabels = sortByPriority([sizeLabel], sizePriorityIndex); +const sortedContributorLabels = sortByPriority(contributorLabelList, contributorPriorityIndex); +const sortedPathLabels = sortByPriority(dedupedPathLabels, pathLabelPriorityIndex); +const sortedKeepNonManagedLabels = [...new Set(keepNonManagedLabels)].sort((left, right) => + left.localeCompare(right) +); + +const nextLabels = [ + ...new Set([ + ...selectedRiskLabels, + ...selectedSizeLabels, + ...sortedContributorLabels, + ...moduleLabelList, + ...sortedPathLabels, + ...sortedKeepNonManagedLabels, + ]), +]; + +await github.rest.issues.setLabels({ + owner, + repo, + issue_number: pr.number, + labels: nextLabels, +}); +}; diff --git a/.github/workflows/scripts/test_benchmarks_pr_comment.js b/.github/workflows/scripts/test_benchmarks_pr_comment.js new file mode 100644 index 0000000..d517141 --- /dev/null +++ b/.github/workflows/scripts/test_benchmarks_pr_comment.js @@ -0,0 +1,57 @@ +// Extracted from test-benchmarks.yml step: Post benchmark summary on PR + +module.exports = async ({ github, context, core }) => { + const fs = require('fs'); + const output = fs.readFileSync('benchmark_output.txt', 'utf8'); + + // Extract Criterion result lines + const lines = output.split('\n').filter(l => + l.includes('time:') || l.includes('change:') || l.includes('Performance') + ); + + if (lines.length === 0) { + core.info('No benchmark results to post.'); + return; + } + + const body = [ + '## 📊 Benchmark Results', + '', + '```', + lines.join('\n'), + '```', + '', + '
Full output', + '', + '```', + output.substring(0, 60000), + '```', + '
', + ].join('\n'); + + // Find and update or create comment + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + }); + + const marker = '## 📊 Benchmark Results'; + const existing = comments.find(c => c.body && c.body.startsWith(marker)); + + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.pull_request.number, + body, + }); + } +}; diff --git a/.github/workflows/sec-audit.yml b/.github/workflows/sec-audit.yml new file mode 100644 index 0000000..9c1b031 --- /dev/null +++ b/.github/workflows/sec-audit.yml @@ -0,0 +1,597 @@ +name: Sec Audit + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "deny.toml" + - ".gitleaks.toml" + - ".github/security/gitleaks-allowlist-governance.json" + - ".github/security/deny-ignore-governance.json" + - ".github/security/unsafe-audit-governance.json" + - "scripts/ci/install_gitleaks.sh" + - "scripts/ci/install_syft.sh" + - "scripts/ci/deny_policy_guard.py" + - "scripts/ci/secrets_governance_guard.py" + - "scripts/ci/unsafe_debt_audit.py" + - "scripts/ci/unsafe_policy_guard.py" + - "scripts/ci/config/unsafe_debt_policy.toml" + - "scripts/ci/emit_audit_event.py" + - "scripts/ci/security_regression_tests.sh" + - ".github/workflows/sec-audit.yml" + pull_request: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - "deny.toml" + - ".gitleaks.toml" + - ".github/security/gitleaks-allowlist-governance.json" + - ".github/security/deny-ignore-governance.json" + - ".github/security/unsafe-audit-governance.json" + - "scripts/ci/install_gitleaks.sh" + - 
"scripts/ci/install_syft.sh" + - "scripts/ci/deny_policy_guard.py" + - "scripts/ci/secrets_governance_guard.py" + - "scripts/ci/unsafe_debt_audit.py" + - "scripts/ci/unsafe_policy_guard.py" + - "scripts/ci/config/unsafe_debt_policy.toml" + - "scripts/ci/emit_audit_event.py" + - "scripts/ci/security_regression_tests.sh" + - ".github/workflows/sec-audit.yml" + merge_group: + branches: [dev, main] + schedule: + - cron: "0 6 * * 1" # Weekly on Monday 6am UTC + workflow_dispatch: + inputs: + full_secret_scan: + description: "Scan full git history for secrets" + required: true + default: false + type: boolean + fail_on_secret_leak: + description: "Fail workflow if secret leaks are detected" + required: true + default: true + type: boolean + fail_on_governance_violation: + description: "Fail workflow if secrets governance policy violations are detected" + required: true + default: true + type: boolean + +concurrency: + group: security-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +permissions: + contents: read + security-events: write + actions: read + checks: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + audit: + name: Security Audit + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - uses: rustsec/audit-check@69366f33c96575abad1ee0dba8212993eecbe998 # v2.0.0 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + deny: + name: License & Supply Chain + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Enforce deny policy hygiene + shell: bash + 
run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/deny_policy_guard.py \ + --deny-file deny.toml \ + --governance-file .github/security/deny-ignore-governance.json \ + --output-json artifacts/deny-policy-guard.json \ + --output-md artifacts/deny-policy-guard.md \ + --fail-on-violation + + - uses: EmbarkStudios/cargo-deny-action@3fd3802e88374d3fe9159b834c7714ec57d6c979 # v2 + with: + command: check advisories licenses sources + + - name: Emit deny audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/deny-policy-guard.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type deny_policy_guard \ + --input-json artifacts/deny-policy-guard.json \ + --output-json artifacts/audit-event-deny-policy-guard.json \ + --artifact-name deny-policy-audit-event \ + --retention-days 14 + fi + + - name: Upload deny policy artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: deny-policy-guard + path: artifacts/deny-policy-guard.* + if-no-files-found: ignore + retention-days: 14 + + - name: Upload deny policy audit event + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: deny-policy-audit-event + path: artifacts/audit-event-deny-policy-guard.json + if-no-files-found: ignore + retention-days: 14 + + security-regressions: + name: Security Regression Tests + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + with: + prefix-key: sec-audit-security-regressions + - name: Run security regression suite + shell: bash + run: ./scripts/ci/security_regression_tests.sh + + 
secrets: + name: Secrets Governance (Gitleaks) + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Enforce gitleaks allowlist governance + shell: bash + env: + FAIL_ON_GOVERNANCE_INPUT: ${{ github.event.inputs.fail_on_governance_violation || 'true' }} + run: | + set -euo pipefail + mkdir -p artifacts + fail_on_governance="true" + if [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + fail_on_governance="${FAIL_ON_GOVERNANCE_INPUT}" + fi + cmd=(python3 scripts/ci/secrets_governance_guard.py + --gitleaks-file .gitleaks.toml + --governance-file .github/security/gitleaks-allowlist-governance.json + --output-json artifacts/secrets-governance-guard.json + --output-md artifacts/secrets-governance-guard.md) + if [ "$fail_on_governance" = "true" ]; then + cmd+=(--fail-on-violation) + fi + "${cmd[@]}" + + - name: Publish secrets governance summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/secrets-governance-guard.md ]; then + cat artifacts/secrets-governance-guard.md >> "$GITHUB_STEP_SUMMARY" + else + echo "Secrets governance report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + - name: Emit secrets governance audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/secrets-governance-guard.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type secrets_governance_guard \ + --input-json artifacts/secrets-governance-guard.json \ + --output-json artifacts/audit-event-secrets-governance-guard.json \ + --artifact-name secrets-governance-audit-event \ + --retention-days 14 + fi + + - name: Upload secrets governance artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: secrets-governance-guard + path: artifacts/secrets-governance-guard.* + if-no-files-found: ignore + retention-days: 14 + + - name: Upload secrets governance audit event + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: secrets-governance-audit-event + path: artifacts/audit-event-secrets-governance-guard.json + if-no-files-found: ignore + retention-days: 14 + + - name: Install gitleaks + shell: bash + run: | + set -euo pipefail + mkdir -p "${RUNNER_TEMP}/bin" + ./scripts/ci/install_gitleaks.sh "${RUNNER_TEMP}/bin" + echo "${RUNNER_TEMP}/bin" >> "$GITHUB_PATH" + + - name: Run gitleaks scan + shell: bash + env: + FULL_SECRET_SCAN_INPUT: ${{ github.event.inputs.full_secret_scan || 'false' }} + FAIL_ON_SECRET_LEAK_INPUT: ${{ github.event.inputs.fail_on_secret_leak || 'true' }} + run: | + set -euo pipefail + mkdir -p artifacts + log_opts="" + scan_scope="full-history" + fail_on_leak="true" + + if [ "${GITHUB_EVENT_NAME}" = "pull_request" ]; then + log_opts="${{ github.event.pull_request.base.sha }}..${GITHUB_SHA}" + scan_scope="diff-range" + elif [ "${GITHUB_EVENT_NAME}" = "push" ]; then + base_sha="${{ github.event.before }}" + if [ -n "$base_sha" ] && [ "$base_sha" != "0000000000000000000000000000000000000000" ]; then + log_opts="${base_sha}..${GITHUB_SHA}" + 
scan_scope="diff-range" + fi + elif [ "${GITHUB_EVENT_NAME}" = "merge_group" ]; then + base_sha="${{ github.event.merge_group.base_sha }}" + if [ -n "$base_sha" ]; then + log_opts="${base_sha}..${GITHUB_SHA}" + scan_scope="diff-range" + fi + elif [ "${GITHUB_EVENT_NAME}" = "workflow_dispatch" ]; then + if [ "${FULL_SECRET_SCAN_INPUT}" != "true" ]; then + if [ -n "${{ github.sha }}" ]; then + log_opts="${{ github.sha }}~1..${{ github.sha }}" + scan_scope="latest-commit" + fi + fi + fail_on_leak="${FAIL_ON_SECRET_LEAK_INPUT}" + fi + + cmd=(gitleaks git + --config .gitleaks.toml + --redact + --report-format sarif + --report-path artifacts/gitleaks.sarif + --verbose) + if [ -n "$log_opts" ]; then + cmd+=(--log-opts="$log_opts") + fi + + set +e + "${cmd[@]}" + status=$? + set -e + + echo "### Gitleaks scan" >> "$GITHUB_STEP_SUMMARY" + echo "- Scope: ${scan_scope}" >> "$GITHUB_STEP_SUMMARY" + if [ -n "$log_opts" ]; then + echo "- Log range: \`${log_opts}\`" >> "$GITHUB_STEP_SUMMARY" + fi + echo "- Exit code: ${status}" >> "$GITHUB_STEP_SUMMARY" + + cat > artifacts/gitleaks-summary.json <> "$GITHUB_PATH" + + - name: Generate CycloneDX + SPDX SBOM + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + syft dir:. --source-name zeroclaw \ + -o cyclonedx-json=artifacts/zeroclaw.cdx.json \ + -o spdx-json=artifacts/zeroclaw.spdx.json + { + echo "### SBOM snapshot" + echo "- CycloneDX: artifacts/zeroclaw.cdx.json" + echo "- SPDX: artifacts/zeroclaw.spdx.json" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Upload SBOM artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: sbom-snapshot + path: artifacts/zeroclaw.*.json + retention-days: 14 + + - name: Emit SBOM audit event + if: always() + shell: bash + run: | + set -euo pipefail + cat > artifacts/sbom-summary.json <> "$GITHUB_STEP_SUMMARY" + else + echo "Unsafe policy governance report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + - name: Run unsafe debt audit + shell: bash + run: | + set -euo pipefail + mkdir -p artifacts + python3 scripts/ci/unsafe_debt_audit.py \ + --repo-root . \ + --policy-file scripts/ci/config/unsafe_debt_policy.toml \ + --output-json artifacts/unsafe-debt-audit.json \ + --fail-on-findings \ + --fail-on-excluded-crate-roots + + - name: Publish unsafe debt summary + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/unsafe-debt-audit.json ]; then + python3 - <<'PY' >> "$GITHUB_STEP_SUMMARY" + import json + from pathlib import Path + + report = json.loads(Path("artifacts/unsafe-debt-audit.json").read_text(encoding="utf-8")) + summary = report.get("summary", {}) + source = report.get("source", {}) + by_pattern = summary.get("by_pattern", {}) + + print("### Unsafe debt audit") + print(f"- Total findings: `{summary.get('total_findings', 0)}`") + print(f"- Files scanned: `{source.get('files_scanned', 0)}`") + print(f"- Crate roots scanned: `{source.get('crate_roots_scanned', 0)}`") + print(f"- Crate roots excluded: `{source.get('crate_roots_excluded', 0)}`") + if by_pattern: + print("- Findings by pattern:") + for pattern_id, count in sorted(by_pattern.items()): + print(f" - `{pattern_id}`: `{count}`") + else: + print("- Findings by pattern: none") + PY + else + echo "Unsafe debt audit JSON report missing." 
>> "$GITHUB_STEP_SUMMARY" + fi + + - name: Emit unsafe policy governance audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/unsafe-policy-guard.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type unsafe_policy_guard \ + --input-json artifacts/unsafe-policy-guard.json \ + --output-json artifacts/audit-event-unsafe-policy-guard.json \ + --artifact-name unsafe-policy-audit-event \ + --retention-days 14 + fi + + - name: Emit unsafe debt audit event + if: always() + shell: bash + run: | + set -euo pipefail + if [ -f artifacts/unsafe-debt-audit.json ]; then + python3 scripts/ci/emit_audit_event.py \ + --event-type unsafe_debt_audit \ + --input-json artifacts/unsafe-debt-audit.json \ + --output-json artifacts/audit-event-unsafe-debt-audit.json \ + --artifact-name unsafe-debt-audit-event \ + --retention-days 14 + fi + + - name: Upload unsafe policy guard artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: unsafe-policy-guard + path: artifacts/unsafe-policy-guard.* + if-no-files-found: ignore + retention-days: 14 + + - name: Upload unsafe debt audit artifact + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: unsafe-debt-audit + path: artifacts/unsafe-debt-audit.json + if-no-files-found: ignore + retention-days: 14 + + - name: Upload unsafe policy audit event + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: unsafe-policy-audit-event + path: artifacts/audit-event-unsafe-policy-guard.json + if-no-files-found: ignore + retention-days: 14 + + - name: Upload unsafe debt audit event + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: unsafe-debt-audit-event + path: artifacts/audit-event-unsafe-debt-audit.json + if-no-files-found: ignore + retention-days: 14 + + 
security-required: + name: Security Required Gate + if: always() && (github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'merge_group') + needs: [audit, deny, security-regressions, secrets, sbom, unsafe-debt] + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Enforce security gate + shell: bash + run: | + set -euo pipefail + results=( + "audit=${{ needs.audit.result }}" + "deny=${{ needs.deny.result }}" + "security-regressions=${{ needs.security-regressions.result }}" + "secrets=${{ needs.secrets.result }}" + "sbom=${{ needs.sbom.result }}" + "unsafe-debt=${{ needs['unsafe-debt'].result }}" + ) + for item in "${results[@]}"; do + echo "$item" + done + for item in "${results[@]}"; do + result="${item#*=}" + if [ "$result" != "success" ]; then + echo "Security gate failed: $item" + exit 1 + fi + done diff --git a/.github/workflows/sec-codeql.yml b/.github/workflows/sec-codeql.yml new file mode 100644 index 0000000..a2bc484 --- /dev/null +++ b/.github/workflows/sec-codeql.yml @@ -0,0 +1,72 @@ +name: Sec CodeQL + +on: + push: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - ".github/codeql/**" + - ".github/workflows/sec-codeql.yml" + pull_request: + branches: [dev, main] + paths: + - "Cargo.toml" + - "Cargo.lock" + - "src/**" + - "crates/**" + - ".github/codeql/**" + - ".github/workflows/sec-codeql.yml" + merge_group: + branches: [dev, main] + schedule: + - cron: "0 6 * * 1" # Weekly Monday 6am UTC + workflow_dispatch: + +concurrency: + group: codeql-${{ github.event.pull_request.number || github.ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + security-events: write + actions: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + codeql: + name: CodeQL Analysis + runs-on: [self-hosted, Linux, X64, aws-india, 
blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + with: + fetch-depth: 0 + + - name: Initialize CodeQL + uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4 + with: + languages: rust + config-file: ./.github/codeql/codeql-config.yml + queries: security-and-quality + + - name: Set up Rust + uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + + - name: Build + run: cargo build --workspace --all-targets --locked + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4 + with: + category: "/language:rust" diff --git a/.github/workflows/sec-vorpal-reviewdog.yml b/.github/workflows/sec-vorpal-reviewdog.yml new file mode 100644 index 0000000..6187550 --- /dev/null +++ b/.github/workflows/sec-vorpal-reviewdog.yml @@ -0,0 +1,191 @@ +name: Sec Vorpal Reviewdog + +on: + workflow_dispatch: + inputs: + scan_scope: + description: "File selection mode when source_path is empty" + required: true + type: choice + default: changed + options: + - changed + - all + base_ref: + description: "Base branch/ref for changed diff mode" + required: true + type: string + default: main + source_path: + description: "Optional comma-separated file paths to scan (overrides scan_scope)" + required: false + type: string + include_tests: + description: "Include test/fixture files in scan selection" + required: true + type: choice + default: "false" + options: + - "false" + - "true" + folders_to_ignore: + description: "Optional comma-separated path prefixes to ignore" + required: false + type: string + default: target,node_modules,web/dist,.venv,venv + reporter: + description: "Reviewdog reporter mode" + required: true + type: choice + default: github-pr-check + options: + - github-pr-check + - github-pr-review + filter_mode: + 
description: "Reviewdog filter mode" + required: true + type: choice + default: file + options: + - added + - diff_context + - file + - nofilter + level: + description: "Reviewdog severity level" + required: true + type: choice + default: error + options: + - info + - warning + - error + fail_on_error: + description: "Fail workflow when Vorpal reports findings" + required: true + type: choice + default: "false" + options: + - "false" + - "true" + reviewdog_flags: + description: "Optional extra reviewdog flags" + required: false + type: string + +concurrency: + group: sec-vorpal-reviewdog-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + checks: write + pull-requests: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + vorpal: + name: Vorpal Reviewdog Scan + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 20 + steps: + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Resolve source paths + id: sources + shell: bash + env: + INPUT_SOURCE_PATH: ${{ inputs.source_path }} + INPUT_SCAN_SCOPE: ${{ inputs.scan_scope }} + INPUT_BASE_REF: ${{ inputs.base_ref }} + INPUT_INCLUDE_TESTS: ${{ inputs.include_tests }} + run: | + set -euo pipefail + + strip_space() { + local value="$1" + value="${value//$'\n'/}" + value="${value//$'\r'/}" + value="${value// /}" + echo "$value" + } + + source_override="$(strip_space "${INPUT_SOURCE_PATH}")" + if [ -n "${source_override}" ]; then + normalized="$(echo "${INPUT_SOURCE_PATH}" | tr '\n' ',' | sed -E 's/[[:space:]]+//g; s/,+/,/g; s/^,|,$//g')" + if [ -n "${normalized}" ]; then + { + echo "scan=true" + echo "source_path=${normalized}" + echo "selection=manual" + } >> "${GITHUB_OUTPUT}" + exit 0 + fi + fi + + include_ext='\.(py|js|jsx|ts|tsx)$' + exclude_paths='^(target/|node_modules/|web/node_modules/|dist/|web/dist/|\.venv/|venv/)' + 
exclude_tests='(^|/)(test|tests|__tests__|fixtures|mocks|examples)/|(^|/)test_helpers/|(_test\.py$)|(^|/)test_.*\.py$|(\.spec\.(ts|tsx|js|jsx)$)|(\.test\.(ts|tsx|js|jsx)$)' + + if [ "${INPUT_SCAN_SCOPE}" = "all" ]; then + candidate_files="$(git ls-files)" + else + base_ref="${INPUT_BASE_REF#refs/heads/}" + base_ref="${base_ref#origin/}" + if git fetch --no-tags --depth=1 origin "${base_ref}" >/dev/null 2>&1; then + if merge_base="$(git merge-base HEAD "origin/${base_ref}" 2>/dev/null)"; then + candidate_files="$(git diff --name-only --diff-filter=ACMR "${merge_base}"...HEAD)" + else + echo "Unable to resolve merge-base for origin/${base_ref}; falling back to tracked files." + candidate_files="$(git ls-files)" + fi + else + echo "Unable to fetch origin/${base_ref}; falling back to tracked files." + candidate_files="$(git ls-files)" + fi + fi + + source_files="$(printf '%s\n' "${candidate_files}" | sed '/^$/d' | grep -E "${include_ext}" | grep -Ev "${exclude_paths}" || true)" + if [ "${INPUT_INCLUDE_TESTS}" != "true" ] && [ -n "${source_files}" ]; then + source_files="$(printf '%s\n' "${source_files}" | grep -Ev "${exclude_tests}" || true)" + fi + if [ -z "${source_files}" ]; then + { + echo "scan=false" + echo "source_path=" + echo "selection=none" + } >> "${GITHUB_OUTPUT}" + exit 0 + fi + + source_path="$(printf '%s\n' "${source_files}" | paste -sd, -)" + { + echo "scan=true" + echo "source_path=${source_path}" + echo "selection=auto-${INPUT_SCAN_SCOPE}" + } >> "${GITHUB_OUTPUT}" + + - name: No supported files to scan + if: steps.sources.outputs.scan != 'true' + shell: bash + run: | + echo "No supported files selected for Vorpal scan (extensions: .py .js .jsx .ts .tsx)." 
+ + - name: Run Vorpal with reviewdog + if: steps.sources.outputs.scan == 'true' + uses: Checkmarx/vorpal-reviewdog-github-action@8cc292f337a2f1dea581b4f4bd73852e7becb50d # v1.2.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + source_path: ${{ steps.sources.outputs.source_path }} + folders_to_ignore: ${{ inputs.folders_to_ignore }} + reporter: ${{ inputs.reporter }} + filter_mode: ${{ inputs.filter_mode }} + level: ${{ inputs.level }} + fail_on_error: ${{ inputs.fail_on_error }} + reviewdog_flags: ${{ inputs.reviewdog_flags }} diff --git a/.github/workflows/sync-contributors.yml b/.github/workflows/sync-contributors.yml new file mode 100644 index 0000000..3dc0483 --- /dev/null +++ b/.github/workflows/sync-contributors.yml @@ -0,0 +1,116 @@ +name: Sync Contributors + +on: + workflow_dispatch: + schedule: + # Run every Sunday at 00:00 UTC + - cron: '0 0 * * 0' + +concurrency: + group: update-notice-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: write + pull-requests: write + +jobs: + update-notice: + name: Update NOTICE with new contributors + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Fetch contributors + id: contributors + env: + GH_TOKEN: ${{ github.token }} + run: | + # Fetch all contributors (excluding bots) + gh api \ + --paginate \ + "repos/${{ github.repository }}/contributors" \ + --jq '.[] | select(.type != "Bot") | .login' > /tmp/contributors_raw.txt + + # Sort alphabetically and filter + sort -f < /tmp/contributors_raw.txt > contributors.txt + + # Count contributors + count=$(wc -l < contributors.txt | tr -d ' ') + echo "count=$count" >> "$GITHUB_OUTPUT" + + - name: Generate new NOTICE file + run: | + cat > NOTICE << 'EOF' + ZeroClaw + Copyright 2025 ZeroClaw Labs + + This product includes software developed at ZeroClaw Labs (https://github.com/zeroclaw-labs). 
+ + Contributors + ============ + + The following individuals have contributed to ZeroClaw: + + EOF + + # Append contributors in alphabetical order + sed 's/^/- /' contributors.txt >> NOTICE + + # Add third-party dependencies section + cat >> NOTICE << 'EOF' + + + Third-Party Dependencies + ========================= + + This project uses the following third-party libraries and components, + each licensed under their respective terms: + + See Cargo.lock for a complete list of dependencies and their licenses. + EOF + + - name: Check if NOTICE changed + id: check_diff + run: | + if git diff --quiet NOTICE; then + echo "changed=false" >> "$GITHUB_OUTPUT" + else + echo "changed=true" >> "$GITHUB_OUTPUT" + fi + + - name: Create Pull Request + if: steps.check_diff.outputs.changed == 'true' + env: + GH_TOKEN: ${{ github.token }} + COUNT: ${{ steps.contributors.outputs.count }} + run: | + branch_name="auto/update-notice-$(date +%Y%m%d)" + + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + git checkout -b "$branch_name" + git add NOTICE + git commit -m "chore(notice): update contributor list" + git push origin "$branch_name" + + gh pr create \ + --title "chore(notice): update contributor list" \ + --body "Auto-generated update to NOTICE file with $COUNT contributors." 
\ + --label "chore" \ + --label "docs" \ + --draft || true + + - name: Summary + run: | + echo "## NOTICE Update Results" >> "$GITHUB_STEP_SUMMARY" + echo "" >> "$GITHUB_STEP_SUMMARY" + if [ "${{ steps.check_diff.outputs.changed }}" = "true" ]; then + echo "✅ PR created to update NOTICE" >> "$GITHUB_STEP_SUMMARY" + else + echo "✓ NOTICE file is up to date" >> "$GITHUB_STEP_SUMMARY" + fi + echo "" >> "$GITHUB_STEP_SUMMARY" + echo "**Contributors:** ${{ steps.contributors.outputs.count }}" >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/test-benchmarks.yml b/.github/workflows/test-benchmarks.yml new file mode 100644 index 0000000..9654d9c --- /dev/null +++ b/.github/workflows/test-benchmarks.yml @@ -0,0 +1,53 @@ +name: Test Benchmarks + +on: + schedule: + - cron: "0 3 * * 1" # Weekly Monday 3am UTC + workflow_dispatch: + +concurrency: + group: bench-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + pull-requests: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + benchmarks: + name: Criterion Benchmarks + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + + - name: Run benchmarks + run: cargo bench --locked 2>&1 | tee benchmark_output.txt + + - name: Upload benchmark results + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: benchmark-results + path: | + target/criterion/ + benchmark_output.txt + retention-days: 7 + + - name: Post benchmark summary on PR + if: github.event_name == 'pull_request' + uses: 
actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const script = require('./.github/workflows/scripts/test_benchmarks_pr_comment.js'); + await script({ github, context, core }); diff --git a/.github/workflows/test-e2e.yml b/.github/workflows/test-e2e.yml new file mode 100644 index 0000000..97dabf7 --- /dev/null +++ b/.github/workflows/test-e2e.yml @@ -0,0 +1,33 @@ +name: Test E2E + +on: + push: + branches: [dev, main] + workflow_dispatch: + +concurrency: + group: e2e-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + integration-tests: + name: Integration / E2E Tests + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 30 + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: 1.92.0 + - uses: useblacksmith/rust-cache@f53e7f127245d2a269b3d90879ccf259876842d5 # v3 + - name: Run integration / E2E tests + run: cargo test --test agent_e2e --locked --verbose diff --git a/.github/workflows/test-fuzz.yml b/.github/workflows/test-fuzz.yml new file mode 100644 index 0000000..809672a --- /dev/null +++ b/.github/workflows/test-fuzz.yml @@ -0,0 +1,75 @@ +name: Test Fuzz + +on: + schedule: + - cron: "0 2 * * 0" # Weekly Sunday 2am UTC + workflow_dispatch: + inputs: + fuzz_seconds: + description: "Seconds to run each fuzz target" + required: false + default: "300" + +concurrency: + group: fuzz-${{ github.ref }} + cancel-in-progress: true + +permissions: + contents: read + issues: write + +env: + GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + CARGO_TERM_COLOR: always + +jobs: + fuzz: + name: Fuzz (${{ matrix.target }}) + 
runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + target: + - fuzz_config_parse + - fuzz_tool_params + steps: + - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - uses: dtolnay/rust-toolchain@631a55b12751854ce901bb631d5902ceb48146f7 # stable + with: + toolchain: nightly + components: llvm-tools-preview + + - name: Install cargo-fuzz + run: cargo install cargo-fuzz --locked + + - name: Run fuzz target + run: | + SECONDS="${{ github.event.inputs.fuzz_seconds || '300' }}" + echo "Fuzzing ${{ matrix.target }} for ${SECONDS}s" + cargo +nightly fuzz run ${{ matrix.target }} -- \ + -max_total_time="${SECONDS}" \ + -max_len=4096 + continue-on-error: true + id: fuzz + + - name: Upload crash artifacts + if: failure() || steps.fuzz.outcome == 'failure' + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: fuzz-crashes-${{ matrix.target }} + path: fuzz/artifacts/${{ matrix.target }}/ + retention-days: 30 + if-no-files-found: ignore + + - name: Report fuzz results + run: | + echo "### Fuzz: ${{ matrix.target }}" >> "$GITHUB_STEP_SUMMARY" + if [ "${{ steps.fuzz.outcome }}" = "failure" ]; then + echo "- :x: Crashes found — see artifacts" >> "$GITHUB_STEP_SUMMARY" + else + echo "- :white_check_mark: No crashes found" >> "$GITHUB_STEP_SUMMARY" + fi diff --git a/.github/workflows/workflow-sanity.yml b/.github/workflows/workflow-sanity.yml new file mode 100644 index 0000000..da9d7f3 --- /dev/null +++ b/.github/workflows/workflow-sanity.yml @@ -0,0 +1,106 @@ +name: Workflow Sanity + +on: + pull_request: + paths: + - ".github/workflows/**" + - ".github/*.yml" + - ".github/*.yaml" + push: + paths: + - ".github/workflows/**" + - ".github/*.yml" + - ".github/*.yaml" + +concurrency: + group: workflow-sanity-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +permissions: + contents: read + +env: 
+ GIT_CONFIG_COUNT: "1" + GIT_CONFIG_KEY_0: core.hooksPath + GIT_CONFIG_VALUE_0: /dev/null + + +jobs: + no-tabs: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + steps: + - name: Normalize git global hooks config + shell: bash + run: | + set -euo pipefail + git config --global --unset-all core.hooksPath || true + + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Fail on tabs in workflow files + shell: bash + run: | + set -euo pipefail + python3 - <<'PY' + from __future__ import annotations + + import pathlib + import sys + + root = pathlib.Path(".github/workflows") + bad: list[str] = [] + for path in sorted(root.rglob("*.yml")): + if b"\t" in path.read_bytes(): + bad.append(str(path)) + for path in sorted(root.rglob("*.yaml")): + if b"\t" in path.read_bytes(): + bad.append(str(path)) + + if bad: + print("Tabs found in workflow file(s):") + for path in bad: + print(f"- {path}") + sys.exit(1) + PY + + actionlint: + runs-on: [self-hosted, Linux, X64, aws-india, blacksmith-2vcpu-ubuntu-2404, hetzner] + timeout-minutes: 10 + steps: + - name: Normalize git global hooks config + shell: bash + run: | + set -euo pipefail + git config --global --unset-all core.hooksPath || true + + - name: Checkout + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 + + - name: Install actionlint binary + shell: bash + run: | + set -euo pipefail + version="1.7.11" + arch="$(uname -m)" + case "$arch" in + x86_64|amd64) archive="actionlint_${version}_linux_amd64.tar.gz" ;; + aarch64|arm64) archive="actionlint_${version}_linux_arm64.tar.gz" ;; + *) + echo "::error::Unsupported architecture: ${arch}" + exit 1 + ;; + esac + + curl -fsSL \ + -o "$RUNNER_TEMP/actionlint.tgz" \ + "https://github.com/rhysd/actionlint/releases/download/v${version}/${archive}" + tar -xzf "$RUNNER_TEMP/actionlint.tgz" -C "$RUNNER_TEMP" actionlint + chmod +x "$RUNNER_TEMP/actionlint" + echo 
"$RUNNER_TEMP" >> "$GITHUB_PATH" + "$RUNNER_TEMP/actionlint" -version + + - name: Lint GitHub workflows + shell: bash + run: actionlint -color