# NOTE: This file was captured from the GitHub Actions web UI view of the
# "Publish" workflow (run #72). The UI page chrome that preceded the license
# header has been converted to this comment so the file parses as YAML.
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
name: Publish

# Two entry points:
#   - workflow_dispatch: manual release by an operator (commit is validated
#     against master ancestry unless dry_run).
#   - workflow_call: invoked from another workflow in this repo
#     (e.g. post-merge auto-publish), which is trusted to have validated
#     its own trigger.
on:
  workflow_dispatch:
    inputs:
      dry_run:
        description: "Dry run (build/test only, no actual publish)"
        type: boolean
        default: true
      use_latest_ci:
        description: "Use latest CI configuration and scripts from master branch (recommended for compatibility)"
        type: boolean
        required: false
        default: true
      skip_tag_creation:
        description: "Skip creating git tags (useful for re-publishing or testing)"
        type: boolean
        required: false
        default: false
      commit:
        description: "Commit SHA to publish from"
        type: string
        required: true
      publish_crates:
        description: "Rust crates to publish (comma-separated: rust-sdk, rust-cli, rust-binary-protocol, rust-common)"
        type: string
        required: false
        default: ""
      publish_dockerhub:
        description: "Docker images to publish (comma-separated: rust-server, rust-mcp, rust-bench-dashboard, rust-connectors, web-ui)"
        type: string
        required: false
        default: ""
      publish_other:
        description: "Other SDKs to publish (comma-separated: python, node, java, csharp, go)"
        type: string
        required: false
        default: ""
  workflow_call:
    inputs:
      dry_run:
        description: "Dry run (build/test only, no actual publish)"
        type: boolean
        default: false
      commit:
        description: "Commit SHA (defaults to github.sha)"
        type: string
        default: ""
      publish_crates:
        description: "Rust crates to publish (comma-separated)"
        type: string
        default: ""
      publish_dockerhub:
        description: "Docker images to publish (comma-separated)"
        type: string
        default: ""
      publish_other:
        description: "Other SDKs to publish (comma-separated)"
        type: string
        default: ""
      skip_tag_creation:
        description: "Skip git tag creation"
        type: boolean
        default: false
      use_latest_ci:
        description: "Use latest CI from master"
        type: boolean
        default: false
      # NOTE(review): this input exists only on the workflow_call trigger. On a
      # manual workflow_dispatch run, `inputs.create_edge_docker_tag` expands to
      # empty/false, which downstream steps treat as "manual publish" mode —
      # confirm that asymmetry is intended.
      create_edge_docker_tag:
        description: "Create rolling :edge Docker tag"
        type: boolean
        default: false
    # All secrets are optional so partial publishes (e.g. crates only) do not
    # force callers to provide credentials for registries they will not touch.
    secrets:
      CARGO_REGISTRY_TOKEN:
        required: false
      DOCKERHUB_USER:
        required: false
      DOCKERHUB_TOKEN:
        required: false
      PYPI_API_TOKEN:
        required: false
      NPM_TOKEN:
        required: false
      NEXUS_USER:
        required: false
      NEXUS_PW:
        required: false
      JAVA_GPG_SIGNING_KEY:
        required: false
      JAVA_GPG_PASSWORD:
        required: false
      NUGET_API_KEY:
        required: false
env:
  IGGY_CI_BUILD: true

permissions:
  contents: write # For tag creation
  packages: write
  id-token: write

# Static group so two concurrent release dispatches cannot race the
# crates.io / Maven Central / npm / NuGet / DockerHub / PyPI uploads.
# `run_id` in the group was effectively a no-op because it is unique
# per dispatch. `cancel-in-progress: false` keeps any in-flight upload
# running to completion rather than leaving partial state in registries.
concurrency:
  group: publish-release
  cancel-in-progress: false
jobs:
  # Resolves and validates the commit to publish from, and detects whether we
  # were invoked manually (workflow_dispatch) or as a reusable workflow
  # (workflow_call).
  validate:
    name: Validate inputs
    runs-on: ubuntu-latest
    outputs:
      commit: ${{ steps.resolve.outputs.commit }}
      has_targets: ${{ steps.check.outputs.has_targets }}
      is_workflow_call: ${{ steps.detect.outputs.is_workflow_call }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      # Detect whether we are invoked via workflow_call (reusable) or
      # workflow_dispatch (manual). Trust chain: when called via
      # workflow_call, the caller is responsible for having validated its
      # own trigger — the only in-tree caller today is
      # .github/workflows/post-merge.yml, which is branch-filtered to
      # master, so the SHA passed in is by construction a commit on
      # master. Direct workflow_dispatch callers have no such guarantee
      # and must pass through the master-ancestry check in Resolve
      # commit below. Any future direct workflow_call caller added to
      # this repo must preserve the master-ancestry property or the
      # branch gate below will not apply to them.
      - name: Detect trigger type
        id: detect
        run: |
          # BUGFIX: inside a reusable workflow, github.event_name reflects the
          # CALLER's triggering event (e.g. 'push'); it is never the literal
          # string 'workflow_call', so the old `= "workflow_call"` comparison
          # could not match. This workflow's only direct trigger is
          # workflow_dispatch, so any other event name means we were invoked
          # via workflow_call. A caller that is itself dispatch-triggered gets
          # misclassified as manual, which fails SAFE: the stricter
          # master-ancestry gate then applies.
          if [ "${{ github.event_name }}" != "workflow_dispatch" ]; then
            echo "is_workflow_call=true" >> "$GITHUB_OUTPUT"
            echo "📞 Triggered via workflow_call"
          else
            echo "is_workflow_call=false" >> "$GITHUB_OUTPUT"
            echo "🖱️ Triggered via workflow_dispatch"
          fi
      - name: Check if any targets specified
        id: check
        env:
          # User-controlled strings are passed via env (not inline ${{ }})
          # so a crafted input value cannot inject shell syntax into `run`.
          PUBLISH_CRATES: ${{ inputs.publish_crates }}
          PUBLISH_DOCKERHUB: ${{ inputs.publish_dockerhub }}
          PUBLISH_OTHER: ${{ inputs.publish_other }}
        run: |
          if [ -z "$PUBLISH_CRATES" ] && \
             [ -z "$PUBLISH_DOCKERHUB" ] && \
             [ -z "$PUBLISH_OTHER" ]; then
            echo "has_targets=false" >> "$GITHUB_OUTPUT"
          else
            echo "has_targets=true" >> "$GITHUB_OUTPUT"
          fi
      - name: Resolve commit
        id: resolve
        env:
          INPUT_COMMIT: ${{ inputs.commit }}
          IS_WORKFLOW_CALL: ${{ steps.detect.outputs.is_workflow_call }}
          DRY_RUN: ${{ inputs.dry_run }}
          DEFAULT_SHA: ${{ github.sha }}
        run: |
          set -euo pipefail
          COMMIT="${INPUT_COMMIT}"
          if [ -z "$COMMIT" ]; then
            COMMIT="$DEFAULT_SHA"
            echo "ℹ️ No commit specified, using github.sha: $COMMIT"
          fi
          # Validate AND normalize to the canonical full 40-char SHA in one
          # shot. Downstream jobs (rust crates, docker manifests, SDK matrix,
          # tag creation) all need the same fully-qualified SHA so they cannot
          # drift if HEAD moves on the input branch mid-run.
          FULL_SHA=$(git rev-parse --verify "${COMMIT}^{commit}" 2>/dev/null || true)
          if [ -z "$FULL_SHA" ]; then
            echo "❌ Invalid commit: $COMMIT"
            exit 1
          fi
          if ! [[ "$FULL_SHA" =~ ^[0-9a-f]{40}$ ]]; then
            echo "❌ git rev-parse returned non-canonical SHA: $FULL_SHA"
            exit 1
          fi
          if [ "$FULL_SHA" != "$COMMIT" ]; then
            echo "ℹ️ Normalized $COMMIT -> $FULL_SHA"
          fi
          COMMIT="$FULL_SHA"
          # Skip master branch check for workflow_call (caller already verified on master)
          if [ "$IS_WORKFLOW_CALL" = "true" ]; then
            echo "✅ Called from workflow, skipping master check"
          elif [ "$DRY_RUN" = "true" ]; then
            echo "🌵 Dry run, skipping master branch check"
          else
            echo "🔍 Verifying commit is on master branch..."
            git fetch origin master --depth=1000
            if git merge-base --is-ancestor "$COMMIT" origin/master; then
              echo "✅ Commit is on master branch"
            else
              echo "❌ ERROR: Commit $COMMIT is not on the master branch!"
              echo ""
              echo "Publishing is only allowed from commits on the master branch."
              echo "Please ensure your commit has been merged to master before publishing."
              echo ""
              echo "To check which branch contains this commit, run:"
              echo "  git branch -r --contains $COMMIT"
              exit 1
            fi
          fi
          echo "commit=$COMMIT" >> "$GITHUB_OUTPUT"
          echo "✅ Will publish from commit: $COMMIT"
          echo
          echo "Commit details:"
          git log -1 --pretty=format:"  Author: %an <%ae>%n  Date: %ad%n  Subject: %s" "$COMMIT"
plan:
name: Build publish plan
needs: validate
if: needs.validate.outputs.has_targets == 'true'
runs-on: ubuntu-latest
outputs:
targets: ${{ steps.mk.outputs.targets }}
non_rust_targets: ${{ steps.mk.outputs.non_rust_targets }}
non_docker_targets: ${{ steps.mk.outputs.non_docker_targets }}
docker_matrix: ${{ steps.mk.outputs.docker_matrix }}
docker_components: ${{ steps.mk.outputs.docker_components }}
count: ${{ steps.mk.outputs.count }}
has_python: ${{ steps.mk.outputs.has_python }}
has_rust_crates: ${{ steps.mk.outputs.has_rust_crates }}
has_docker: ${{ steps.mk.outputs.has_docker }}
steps:
- name: Download latest copy script from master
if: inputs.use_latest_ci
run: |
# Download the copy script from master branch
curl -sSL "https://raw.githubusercontent.com/${{ github.repository }}/master/scripts/copy-latest-from-master.sh" \
-o /tmp/copy-latest-from-master.sh
chmod +x /tmp/copy-latest-from-master.sh
echo "✅ Downloaded latest copy script from master"
- uses: actions/checkout@v4
with:
ref: ${{ needs.validate.outputs.commit }}
- name: Save and apply latest CI from master
if: inputs.use_latest_ci
run: |
# Save latest files from master (including config)
/tmp/copy-latest-from-master.sh save \
.github \
scripts
# Apply them to current checkout
/tmp/copy-latest-from-master.sh apply
- name: Load publish config
id: cfg
run: |
if ! command -v yq &> /dev/null; then
YQ_VERSION="v4.47.1"
YQ_CHECKSUM="0fb28c6680193c41b364193d0c0fc4a03177aecde51cfc04d506b1517158c2fb"
wget -qO /tmp/yq https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_amd64
echo "${YQ_CHECKSUM} /tmp/yq" | sha256sum -c - || exit 1
chmod +x /tmp/yq && sudo mv /tmp/yq /usr/local/bin/yq
fi
echo "components_b64=$(yq -o=json -I=0 '.components' .github/config/publish.yml | base64 -w0)" >> "$GITHUB_OUTPUT"
- name: Build matrix from inputs
id: mk
uses: actions/github-script@v7
with:
script: |
const componentsB64 = '${{ steps.cfg.outputs.components_b64 }}';
const cfg = JSON.parse(Buffer.from(componentsB64, 'base64').toString('utf-8') || "{}");
const wants = [];
// Parse Rust crates
('${{ inputs.publish_crates }}').split(',').map(s => s.trim()).filter(Boolean).forEach(crate => {
if (['rust-sdk','rust-cli','rust-binary-protocol','rust-common'].includes(crate)) wants.push(crate);
else core.warning(`Unknown crate: ${crate}`);
});
// Parse Docker images
('${{ inputs.publish_dockerhub }}').split(',').map(s => s.trim()).filter(Boolean).forEach(img => {
if (['rust-server','rust-mcp','rust-bench-dashboard','rust-connectors','web-ui'].includes(img)) wants.push(img);
else core.warning(`Unknown Docker image: ${img}`);
});
// Parse other SDKs
('${{ inputs.publish_other }}').split(',').map(s => s.trim()).filter(Boolean).forEach(sdk => {
if (['python','node','java','csharp','go'].includes(sdk)) {
wants.push(`sdk-${sdk}`);
} else {
core.warning(`Unknown SDK: ${sdk}`);
}
});
const toType = (entry) => ({
dockerhub: 'docker',
crates: 'rust',
pypi: 'python',
npm: 'node',
maven: 'java',
nuget: 'csharp',
none: 'go'
}[entry.registry] || 'unknown');
const targets = [];
const nonRustTargets = [];
const seen = new Set();
let hasRustCrates = false;
for (const key of wants) {
if (seen.has(key)) continue;
seen.add(key);
const entry = cfg[key];
if (!entry) { core.warning(`Component '${key}' not found in publish.yml`); continue; }
const target = {
key,
name: key,
type: toType(entry),
registry: entry.registry || '',
package: entry.package || '',
image: entry.image || '',
dockerfile: entry.dockerfile || '',
platforms: Array.isArray(entry.platforms) ? entry.platforms.join(',') : '',
tag_pattern: entry.tag_pattern || '',
version_file: entry.version_file || '',
version_regex: entry.version_regex || ''
};
targets.push(target);
// Separate Rust crates from other targets
if (target.type === 'rust') {
hasRustCrates = true;
// Rust crates are handled by the sequential job
} else {
nonRustTargets.push(target);
}
}
// Separate Docker targets from other non-Rust targets
const dockerTargets = nonRustTargets.filter(t => t.type === 'docker');
const nonDockerTargets = nonRustTargets.filter(t => t.type !== 'docker');
console.log(`Publishing ${targets.length} components:`);
targets.forEach(t => console.log(` - ${t.name} (${t.type}) -> ${t.registry || 'N/A'}`));
console.log(` (${nonRustTargets.length} non-Rust, ${targets.length - nonRustTargets.length} Rust crates)`);
console.log(` (${dockerTargets.length} Docker, ${nonDockerTargets.length} other SDKs)`);
// Output all targets for reference and tag creation
core.setOutput('targets', JSON.stringify(targets.length ? { include: targets } : { include: [{ key: 'noop', type: 'noop' }] }));
// Output only non-Rust targets for the parallel publish job
core.setOutput('non_rust_targets', JSON.stringify(nonRustTargets.length ? { include: nonRustTargets } : { include: [{ key: 'noop', type: 'noop' }] }));
// Output non-Docker, non-Rust targets (SDKs only)
core.setOutput('non_docker_targets', JSON.stringify(nonDockerTargets.length ? { include: nonDockerTargets } : { include: [{ key: 'noop', type: 'noop' }] }));
// Build Docker matrix: components × platforms for native runner builds
const platforms = [
{ platform: 'linux/amd64', arch: 'amd64', runner: 'ubuntu-latest' },
{ platform: 'linux/arm64', arch: 'arm64', runner: 'ubuntu-24.04-arm' }
];
const dockerMatrix = [];
for (const t of dockerTargets) {
for (const p of platforms) {
dockerMatrix.push({ ...t, ...p });
}
}
core.setOutput('docker_matrix', JSON.stringify(dockerMatrix.length ? { include: dockerMatrix } : { include: [{ key: 'noop', type: 'noop' }] }));
core.setOutput('docker_components', JSON.stringify(dockerTargets.length ? { include: dockerTargets } : { include: [{ key: 'noop', type: 'noop' }] }));
core.setOutput('has_docker', String(dockerTargets.length > 0));
core.setOutput('count', String(targets.length));
core.setOutput('has_rust_crates', String(hasRustCrates));
// Check if Python SDK is in targets and extract version
const pythonTarget = targets.find(t => t.key === 'sdk-python');
if (pythonTarget) {
core.setOutput('has_python', 'true');
// Python version will be extracted in the publish job
} else {
core.setOutput('has_python', 'false');
}
check-tags:
name: Check existing tags
needs: [validate, plan]
if: needs.validate.outputs.has_targets == 'true' && fromJson(needs.plan.outputs.targets).include[0].key != 'noop'
runs-on: ubuntu-latest
steps:
- name: Download latest copy script from master
if: inputs.use_latest_ci
run: |
curl -sSL "https://raw.githubusercontent.com/${{ github.repository }}/master/scripts/copy-latest-from-master.sh" \
-o /tmp/copy-latest-from-master.sh
chmod +x /tmp/copy-latest-from-master.sh
echo "✅ Downloaded latest copy script from master"
- name: Checkout at commit
uses: actions/checkout@v4
with:
ref: ${{ needs.validate.outputs.commit }}
# check-tags only runs `git ls-remote --tags` against origin,
# which does not need local history. fetch-depth:1 is enough.
fetch-depth: 1
- name: Save and apply latest CI from master
if: inputs.use_latest_ci
run: |
/tmp/copy-latest-from-master.sh save \
.github \
scripts \
web/Dockerfile \
core/server/Dockerfile \
core/ai/mcp/Dockerfile \
core/connectors/runtime/Dockerfile \
core/bench/dashboard/server/Dockerfile
/tmp/copy-latest-from-master.sh apply
- name: Setup yq
run: |
YQ_VERSION="v4.47.1"
YQ_CHECKSUM="0fb28c6680193c41b364193d0c0fc4a03177aecde51cfc04d506b1517158c2fb"
sudo wget -qO /usr/local/bin/yq https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_amd64
echo "${YQ_CHECKSUM} /usr/local/bin/yq" | sha256sum -c - || exit 1
sudo chmod +x /usr/local/bin/yq
- name: Check for existing tags
run: |
set -euo pipefail
echo "## 🏷️ Tag Existence Check" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [ "${{ inputs.skip_tag_creation }}" = "true" ]; then
echo "### ℹ️ Tag Creation Disabled" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "Tag creation has been explicitly disabled for this run." >> $GITHUB_STEP_SUMMARY
echo "Components will be published without creating git tags." >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
TARGETS_JSON='${{ needs.plan.outputs.targets }}'
EXISTING_TAGS=()
NEW_TAGS=()
WRONG_TARGET_TAGS=() # populated when a tag exists at a commit != this run's target
echo "| Component | Version | Tag | Status |" >> $GITHUB_STEP_SUMMARY
echo "|-----------|---------|-----|--------|" >> $GITHUB_STEP_SUMMARY
# Process substitution (not pipe) so EXISTING_TAGS / NEW_TAGS
# mutations survive the loop: `jq | while ...` puts the loop
# body in a pipe subshell and discards the arrays on exit,
# which silently killed the aggregate summary block below.
while IFS= read -r row; do
_jq() { echo "$row" | base64 -d | jq -r "$1"; }
KEY=$(_jq '.key')
NAME=$(_jq '.name')
TAG_PATTERN=$(_jq '.tag_pattern')
# Skip components without tag patterns
if [ -z "$TAG_PATTERN" ] || [ "$TAG_PATTERN" = "null" ]; then
echo "Skipping $NAME - no tag pattern defined"
continue
fi
# Make script executable if needed
chmod +x scripts/extract-version.sh || true
VERSION=$(scripts/extract-version.sh "$KEY" 2>/dev/null || echo "ERROR")
TAG=$(scripts/extract-version.sh "$KEY" --tag 2>/dev/null || echo "ERROR")
# Consult --should-tag as the single source of truth for
# taggability. It handles SNAPSHOT and "no tag_pattern" in
# one place (extract-version.sh:348-360), which keeps this
# check in sync with the SDK matrix's own should_tag gate.
# A previous inline `[[ $VERSION =~ -SNAPSHOT$ ]]` check
# duplicated the rule and would silently drift when a new
# SDK added another SNAPSHOT-style pre-release marker.
SHOULD_TAG=$(scripts/extract-version.sh "$KEY" --should-tag 2>/dev/null || echo "false")
if [ "$SHOULD_TAG" = "false" ]; then
echo "ℹ️ $NAME: no tag will be created (SNAPSHOT or no tag_pattern)"
echo "| $NAME | $VERSION | _(none)_ | ℹ️ No tag (SNAPSHOT / no pattern) |" >> $GITHUB_STEP_SUMMARY
continue
fi
# In auto-publish mode, stable Docker versions only get :edge tag.
# Versioned git tags require manual publish (workflow_dispatch).
REGISTRY=$(_jq '.registry')
if [ "${{ inputs.create_edge_docker_tag }}" = "true" ] && [ "$REGISTRY" = "dockerhub" ]; then
if [[ ! "$VERSION" =~ -(edge|rc) ]]; then
echo "⏭️ $NAME: Stable Docker version in auto-publish mode, tag will be skipped"
echo "| $NAME | $VERSION | $TAG | ⏭️ Stable (manual publish only) |" >> $GITHUB_STEP_SUMMARY
continue
fi
fi
if [ "$VERSION" = "ERROR" ] || [ "$TAG" = "ERROR" ]; then
echo "❌ Failed to extract version/tag for $NAME"
echo "| $NAME | ERROR | ERROR | ❌ Failed to extract |" >> $GITHUB_STEP_SUMMARY
exit 1
fi
# Check if tag exists on the remote. Querying the REMOTE (not
# local `git rev-parse`) so this operator summary agrees with
# the authoritative view used by create-git-tag: a tag can exist
# on origin without being in this checkout, and the old local
# query hid exactly that case - you would see "will create" in
# the summary and then create-git-tag would refuse the push.
REMOTE_LINE=$(git ls-remote --tags origin "refs/tags/${TAG}" 2>/dev/null || true)
if [ -n "$REMOTE_LINE" ]; then
EXISTING_TAGS+=("$TAG")
REMOTE_PEELED=$(git ls-remote --tags origin "refs/tags/${TAG}^{}" 2>/dev/null | awk '{print $1}')
REMOTE_RAW=$(echo "$REMOTE_LINE" | awk '{print $1}')
EXISTING_SHA="${REMOTE_PEELED:-${REMOTE_RAW}}"
SHORT_SHA=$(echo "$EXISTING_SHA" | head -c 8)
# Fail-fast on wrong-target. A wrong-target tag means
# create-git-tag would hard-fail 20-40 minutes later after
# publishing artifacts to crates.io / PyPI / npm / Maven /
# NuGet / DockerHub. Catching it at check-tags converts that
# into a fast, cheap failure at the top of the run.
# Same-target is still benign (rerun convergence).
#
# skip_tag_creation=true accepts the invariant hole (the
# operator opted out of tag writes) but the wrong-target
# state is still rendered loudly under a DISTINCT cell label,
# so an operator cannot mistake it for a benign same-target
# skip. Operators must reconcile the tag/registry divergence
# manually or run a follow-up without skip_tag_creation to
# converge. The fail-fast exit 1 below is gated on
# skip_tag_creation != true.
if [ "$EXISTING_SHA" != "${{ needs.validate.outputs.commit }}" ]; then
WRONG_TARGET_TAGS+=("$TAG|$SHORT_SHA")
if [ "${{ inputs.skip_tag_creation }}" = "true" ]; then
echo "⚠️ Tag exists on remote at WRONG target: $TAG (points to $SHORT_SHA, not enforced: skip_tag_creation=true)"
echo "| $NAME | $VERSION | $TAG | ⚠️ Wrong target at $SHORT_SHA (NOT enforced) |" >> $GITHUB_STEP_SUMMARY
else
echo "❌ Tag exists on remote at wrong target: $TAG (points to $SHORT_SHA)"
echo "| $NAME | $VERSION | $TAG | ❌ Wrong target at $SHORT_SHA |" >> $GITHUB_STEP_SUMMARY
fi
else
echo "⚠️ Tag exists on remote at same target: $TAG (points to $SHORT_SHA)"
echo "| $NAME | $VERSION | $TAG | ⚠️ Exists at $SHORT_SHA (benign) |" >> $GITHUB_STEP_SUMMARY
fi
else
NEW_TAGS+=("$TAG")
echo "✅ Tag will be created: $TAG"
echo "| $NAME | $VERSION | $TAG | ✅ Will create |" >> $GITHUB_STEP_SUMMARY
fi
done < <(echo "$TARGETS_JSON" | jq -r '.include[] | select(.key!="noop") | @base64')
echo "" >> $GITHUB_STEP_SUMMARY
# Summary
if [ ${#EXISTING_TAGS[@]} -gt 0 ]; then
echo "" >> $GITHUB_STEP_SUMMARY
echo "### ⚠️ Warning: Existing Tags Detected" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "The following tags already exist on the remote:" >> $GITHUB_STEP_SUMMARY
for tag in "${EXISTING_TAGS[@]}"; do
echo "- $tag" >> $GITHUB_STEP_SUMMARY
done
echo "" >> $GITHUB_STEP_SUMMARY
if [ "${{ inputs.dry_run }}" = "false" ]; then
if [ "${{ inputs.skip_tag_creation }}" = "true" ]; then
echo "**Note:** Tag creation is disabled for this run." >> $GITHUB_STEP_SUMMARY
echo "Components will be (re)published, but no git tags will be pushed." >> $GITHUB_STEP_SUMMARY
else
echo "**Tag behavior with \`create-git-tag\` (SHA-match invariant):**" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- If a pre-existing tag points at the SAME commit this run is publishing, the tag step is a no-op (benign skip). The artifact is (re)published; the tag stays." >> $GITHUB_STEP_SUMMARY
echo "- If a pre-existing tag points at a DIFFERENT commit, the tag step hard-fails with recovery instructions. The artifact publish still runs (registries are idempotent) but no tag is pushed." >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "To recover from a wrong-target tag:" >> $GITHUB_STEP_SUMMARY
echo "1. Verify the intended release commit." >> $GITHUB_STEP_SUMMARY
echo "2. Delete the existing tag on origin: \`git push --delete origin <tag>\`" >> $GITHUB_STEP_SUMMARY
echo "3. Rerun this workflow." >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "To republish without touching tags, set \`skip_tag_creation: true\` on the workflow dispatch." >> $GITHUB_STEP_SUMMARY
fi
fi
fi
if [ ${#NEW_TAGS[@]} -eq 0 ] && [ ${#EXISTING_TAGS[@]} -gt 0 ]; then
echo "### ℹ️ No New Tags to Create" >> $GITHUB_STEP_SUMMARY
echo "All specified components are already tagged at some commit. If tags match this run's commit, the rerun converges cleanly. If not, see the wrong-target recovery above or bump versions." >> $GITHUB_STEP_SUMMARY
elif [ ${#NEW_TAGS[@]} -gt 0 ]; then
if [ "${{ inputs.skip_tag_creation }}" = "true" ]; then
echo "### ℹ️ Tags That Would Be Created (Skipped)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "The following tags would be created if tag creation wasn't disabled:" >> $GITHUB_STEP_SUMMARY
else
echo "### ✅ Tags to be Created" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
fi
for tag in "${NEW_TAGS[@]}"; do
echo "- $tag" >> $GITHUB_STEP_SUMMARY
done
fi
# Fail-fast on wrong-target tags. If any tag_pattern resolved to a
# name that already exists on origin at a commit different from
# this run's target, create-git-tag would hard-fail at tag push
# time anyway - but only after spending 20-40 min publishing
# artifacts. Catching it here converts that waste into a fast
# diagnostic at the top of the run. Kept as the LAST thing this
# step does so the operator summary block above is already
# populated before we exit.
if [ "${#WRONG_TARGET_TAGS[@]}" -gt 0 ]; then
{
echo ""
echo "### ❌ Wrong-target tags detected"
echo ""
echo "One or more tags already exist on the remote at a commit DIFFERENT from this run's target (\`${{ needs.validate.outputs.commit }}\`):"
echo ""
for entry in "${WRONG_TARGET_TAGS[@]}"; do
tag="${entry%|*}"
sha="${entry#*|}"
echo "- \`$tag\` currently points at \`$sha\`"
done
echo ""
echo "Failing fast to save 20-40 minutes of wasted publishing work; \`create-git-tag\` would hard-fail at tag push time anyway."
echo ""
echo "Recovery (verify the intended release commit first):"
echo "1. Delete the wrong tag(s) on origin: \`git push --delete origin <tag>\`"
echo "2. Or bump the version(s) and rerun the workflow"
echo "3. Or rerun with \`skip_tag_creation: true\` to republish artifacts only"
} >> $GITHUB_STEP_SUMMARY
echo "❌ Wrong-target tags detected: ${WRONG_TARGET_TAGS[*]}"
echo "See the Wrong-target tags block in the step summary for recovery steps."
if [ "${{ inputs.skip_tag_creation }}" != "true" ]; then
exit 1
fi
echo "ℹ️ skip_tag_creation=true, continuing despite wrong-target tags (operator opt-out)."
fi
build-python-wheels:
name: Build Python wheels
needs: [validate, plan, check-tags]
if: |
needs.validate.outputs.has_targets == 'true' &&
needs.plan.outputs.has_python == 'true'
uses: ./.github/workflows/_build_python_wheels.yml
with:
upload_artifacts: true
use_latest_ci: ${{ inputs.use_latest_ci }}
commit: ${{ needs.validate.outputs.commit }}
# Sequential Rust crate publishing to handle dependencies properly
publish-rust-crates:
name: Publish Rust crates
needs: [validate, plan, check-tags]
if: |
needs.validate.outputs.has_targets == 'true' &&
contains(inputs.publish_crates, 'rust-')
uses: ./.github/workflows/_publish_rust_crates.yml
with:
crates: ${{ inputs.publish_crates }}
dry_run: ${{ inputs.dry_run }}
commit: ${{ needs.validate.outputs.commit }}
use_latest_ci: ${{ inputs.use_latest_ci }}
skip_tag_creation: ${{ inputs.skip_tag_creation }}
secrets:
CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
# Docker publishing on native runners (no QEMU emulation)
publish-docker:
name: Docker ${{ matrix.name }} (${{ matrix.arch }})
needs: [validate, plan, check-tags, build-python-wheels, publish-rust-crates]
if: |
always() &&
needs.validate.outputs.has_targets == 'true' &&
needs.plan.outputs.has_docker == 'true' &&
fromJson(needs.plan.outputs.docker_matrix).include[0].key != 'noop' &&
(needs.build-python-wheels.result == 'success' || needs.build-python-wheels.result == 'skipped') &&
(needs.publish-rust-crates.result == 'success' || needs.publish-rust-crates.result == 'skipped')
runs-on: ${{ matrix.runner }}
timeout-minutes: 60
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.plan.outputs.docker_matrix) }}
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- name: Download latest copy script from master
if: inputs.use_latest_ci
run: |
curl -sSL "https://raw.githubusercontent.com/${{ github.repository }}/master/scripts/copy-latest-from-master.sh" \
-o /tmp/copy-latest-from-master.sh
chmod +x /tmp/copy-latest-from-master.sh
echo "✅ Downloaded latest copy script from master"
- name: Checkout at commit
uses: actions/checkout@v4
with:
ref: ${{ needs.validate.outputs.commit }}
fetch-depth: 0
- name: Save and apply latest CI from master
if: inputs.use_latest_ci
run: |
/tmp/copy-latest-from-master.sh save \
.github \
scripts \
web/Dockerfile \
core/server/Dockerfile \
core/ai/mcp/Dockerfile \
core/connectors/runtime/Dockerfile \
core/bench/dashboard/server/Dockerfile
/tmp/copy-latest-from-master.sh apply
- name: Ensure version extractor is executable
run: |
test -x scripts/extract-version.sh || chmod +x scripts/extract-version.sh
- name: Extract version
id: ver
run: |
VERSION=$(scripts/extract-version.sh "${{ matrix.key }}")
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
echo "✅ Resolved ${{ matrix.key }} -> version=$VERSION"
- name: Determine libc for component
id: libc
run: |
# Connectors runtime must use glibc because it dlopen()s glibc plugins
if [ "${{ matrix.key }}" = "rust-connectors" ]; then
echo "libc=glibc" >> "$GITHUB_OUTPUT"
else
echo "libc=musl" >> "$GITHUB_OUTPUT"
fi
- name: Publish Docker image
id: docker
uses: ./.github/actions/utils/docker-buildx
with:
task: publish
libc: ${{ steps.libc.outputs.libc }}
component: ${{ matrix.key }}
version: ${{ steps.ver.outputs.version }}
platform: ${{ matrix.platform }}
dry_run: ${{ inputs.dry_run }}
gha-cache: "false" # Use registry cache only to save GHA cache space
- name: Export digest
if: ${{ !inputs.dry_run }}
shell: bash
run: |
mkdir -p ${{ runner.temp }}/digests
digest="${{ steps.docker.outputs.digest }}"
if [ -n "$digest" ]; then
touch "${{ runner.temp }}/digests/${digest#sha256:}"
echo "Exported digest: $digest"
else
echo "::error::No digest available"
exit 1
fi
- name: Upload digest
if: ${{ !inputs.dry_run }}
uses: actions/upload-artifact@v4
with:
name: docker-digest-${{ matrix.key }}-${{ matrix.arch }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
retention-days: 1
# Create multi-arch Docker manifests after platform-specific images are pushed
docker-manifests:
name: Docker manifests
needs: [validate, plan, publish-docker]
if: |
always() &&
needs.validate.outputs.has_targets == 'true' &&
needs.plan.outputs.has_docker == 'true' &&
fromJson(needs.plan.outputs.docker_components).include[0].key != 'noop' &&
needs.publish-docker.result == 'success' &&
inputs.dry_run == false
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.plan.outputs.docker_components) }}
env:
DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- uses: actions/checkout@v4
with:
ref: ${{ needs.validate.outputs.commit }}
# create-git-tag's shallow-safe fallback at action.yml:86-96
# will `git fetch --no-tags --depth=1 origin <commit>` if the
# commit is not in the local clone. GitHub allows single-commit
# fetches via allowReachableSHA1InWant=true, so fetch-depth:1
# here is enough - no need to pay for full history.
fetch-depth: 1
- name: Ensure version extractor is executable
run: |
test -x scripts/extract-version.sh || chmod +x scripts/extract-version.sh
- name: Extract version & tag
id: ver
shell: bash
env:
MATRIX_KEY: ${{ matrix.key }}
MATRIX_TAG_PATTERN: ${{ matrix.tag_pattern }}
CREATE_EDGE_DOCKER_TAG: ${{ inputs.create_edge_docker_tag }}
run: |
set -euo pipefail
VERSION=$(scripts/extract-version.sh "$MATRIX_KEY")
TAG=""
if [ -n "$MATRIX_TAG_PATTERN" ] && [ "$MATRIX_TAG_PATTERN" != "null" ]; then
TAG=$(scripts/extract-version.sh "$MATRIX_KEY" --tag)
fi
# Base should_tag rule (SNAPSHOT + tag_pattern presence) is owned by
# extract-version.sh so it stays in sync with the SDK matrix below.
# The Docker-only auto-publish stable-skip rule layers on top: in
# auto-publish mode (create_edge_docker_tag=true) stable versions
# only get the rolling :edge tag, never a versioned git tag. This
# mirrors the manifest push policy a few steps below.
SHOULD_TAG=$(scripts/extract-version.sh "$MATRIX_KEY" --should-tag)
if [ "$SHOULD_TAG" = "true" ] \
&& [ "$CREATE_EDGE_DOCKER_TAG" = "true" ] \
&& [ "$(scripts/extract-version.sh "$MATRIX_KEY" --is-pre-release)" != "true" ]; then
SHOULD_TAG=false
fi
{
echo "version=$VERSION"
echo "tag=$TAG"
echo "should_tag=$SHOULD_TAG"
} >> "$GITHUB_OUTPUT"
echo "✅ Resolved $MATRIX_KEY -> version=$VERSION tag=${TAG:-<none>} should_tag=$SHOULD_TAG"
- name: Resolve image from config
id: config
shell: bash
run: |
if ! command -v yq >/dev/null 2>&1; then
YQ_VERSION="v4.47.1"
YQ_CHECKSUM="0fb28c6680193c41b364193d0c0fc4a03177aecde51cfc04d506b1517158c2fb"
curl -sSL -o /usr/local/bin/yq https://github.com/mikefarah/yq/releases/download/${YQ_VERSION}/yq_linux_amd64
echo "${YQ_CHECKSUM} /usr/local/bin/yq" | sha256sum -c - || exit 1
chmod +x /usr/local/bin/yq
fi
image=$(yq ".components.${{ matrix.key }}.image" .github/config/publish.yml)
echo "image=$image" >> "$GITHUB_OUTPUT"
echo "📦 Image: $image"
- name: Download amd64 digest
uses: actions/download-artifact@v4
with:
name: docker-digest-${{ matrix.key }}-amd64
path: ${{ runner.temp }}/digests
- name: Download arm64 digest
uses: actions/download-artifact@v4
with:
name: docker-digest-${{ matrix.key }}-arm64
path: ${{ runner.temp }}/digests
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Login to Docker Hub
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ env.DOCKERHUB_USER }}
password: ${{ env.DOCKERHUB_TOKEN }}
- name: Create and push manifest
  working-directory: ${{ runner.temp }}/digests
  run: |
    IMAGE="${{ steps.config.outputs.image }}"
    VERSION="${{ steps.ver.outputs.version }}"
    echo "Creating manifests for $IMAGE from digests:"
    ls -la
    # Guard: if the digest artifacts were empty, the unquoted glob would
    # expand to a literal "*" and imagetools would be asked to create a
    # manifest for "IMAGE@sha256:*". Fail fast with a clear error instead.
    digests=(*)
    if [ ! -e "${digests[0]}" ]; then
      echo "::error::No digest files found for ${IMAGE} — were the build artifacts uploaded?"
      exit 1
    fi
    # Helper: create and push one manifest tag referencing every downloaded
    # per-arch digest. $1 is the tag (e.g. edge, latest, or a version).
    push_manifest() {
      docker buildx imagetools create \
        -t "${IMAGE}:$1" \
        $(printf "${IMAGE}@sha256:%s " "${digests[@]}")
      echo "✅ Pushed manifest: ${IMAGE}:$1"
    }
    # Rolling :edge tag only when requested (auto-publish from post-merge).
    if [ "${{ inputs.create_edge_docker_tag }}" = "true" ]; then
      push_manifest "edge"
    fi
    # Versioned tag policy:
    #   - auto-publish (create_edge_docker_tag=true): only pre-release
    #     (-edge/-rc) versions get a versioned tag, matching the original
    #     post-merge behavior
    #   - manual publish: always create the versioned tag
    if [ "${{ inputs.create_edge_docker_tag }}" = "true" ]; then
      if [[ "$VERSION" =~ -(edge|rc) ]]; then
        push_manifest "$VERSION"
      else
        echo "ℹ️ Skipping versioned tag for stable version in auto-publish mode"
      fi
    else
      push_manifest "$VERSION"
    fi
    # :latest only for stable versions in manual publish mode —
    # auto-publish must never move :latest.
    if [ "${{ inputs.create_edge_docker_tag }}" != "true" ] && [[ ! "$VERSION" =~ -(edge|rc) ]]; then
      echo "Creating 'latest' manifest"
      push_manifest "latest"
    fi
- name: Inspect manifest
  run: |
    IMAGE="${{ steps.config.outputs.image }}"
    VERSION="${{ steps.ver.outputs.version }}"
    # Sanity-check the manifest we just pushed. In auto-publish mode a
    # stable version only got the rolling :edge tag, so inspect that one;
    # otherwise inspect the versioned tag.
    INSPECT_TAG="$VERSION"
    if [ "${{ inputs.create_edge_docker_tag }}" = "true" ] && [[ ! "$VERSION" =~ -(edge|rc) ]]; then
      echo "Inspecting :edge manifest (versioned tag was skipped for stable version)"
      INSPECT_TAG="edge"
    fi
    docker buildx imagetools inspect "${IMAGE}:${INSPECT_TAG}"
# Inline per-component tagging: tightly couple the git tag to the
# multi-arch manifest that just shipped. should_tag was computed in the
# version step above and already encodes the SNAPSHOT and auto-publish
# stable-Docker skip rules. dry_run is gated at the job level.
- name: Tag Docker release (${{ matrix.key }})
  # success() prevents tagging when any earlier step in this job failed.
  if: |
    success() &&
    inputs.skip_tag_creation == false &&
    steps.ver.outputs.should_tag == 'true'
  uses: ./.github/actions/utils/create-git-tag
  with:
    tag: ${{ steps.ver.outputs.tag }}
    commit: ${{ needs.validate.outputs.commit }}
    # Annotated-tag message; rendered values come from this matrix row.
    message: |
      Release ${{ matrix.key }} ${{ steps.ver.outputs.version }}
      Component: ${{ matrix.key }}
      Tag: ${{ steps.ver.outputs.tag }}
      Commit: ${{ needs.validate.outputs.commit }}
      Released by: GitHub Actions (workflow ${{ github.run_id }})
# Non-Docker, non-Rust publishing (Python, Node, Java, C#, Go SDKs)
# Note: This job runs in parallel with Docker publishing - no dependency between them
publish:
  name: ${{ matrix.name }}
  needs: [validate, plan, check-tags, build-python-wheels, publish-rust-crates]
  # always() keeps this job eligible even when optional upstream jobs were
  # skipped; the explicit result checks then admit only success-or-skipped
  # upstreams. A single 'noop' sentinel row means the plan produced no
  # non-Docker targets, so the whole job is skipped.
  if: |
    always() &&
    needs.validate.outputs.has_targets == 'true' &&
    fromJson(needs.plan.outputs.non_docker_targets).include[0].key != 'noop' &&
    (needs.build-python-wheels.result == 'success' || needs.build-python-wheels.result == 'skipped') &&
    (needs.publish-rust-crates.result == 'success' || needs.publish-rust-crates.result == 'skipped')
  runs-on: ubuntu-latest
  strategy:
    # Let unrelated SDK rows keep publishing if one registry fails.
    fail-fast: false
    matrix: ${{ fromJson(needs.plan.outputs.non_docker_targets) }}
  env:
    # Credentials for every SDK registry; each matrix row uses only its own.
    CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
    PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
    NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
    NEXUS_USER: ${{ secrets.NEXUS_USER }}
    NEXUS_PW: ${{ secrets.NEXUS_PW }}
    JAVA_GPG_SIGNING_KEY: ${{ secrets.JAVA_GPG_SIGNING_KEY }}
    JAVA_GPG_PASSWORD: ${{ secrets.JAVA_GPG_PASSWORD }}
    NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}
    DRY_RUN: ${{ inputs.dry_run }}
  # NOTE(review): with a matrix, job outputs reflect the LAST row to write
  # them — verify downstream consumers only need a single row's values.
  outputs:
    status: ${{ steps.status.outputs.status }}
    version: ${{ steps.ver.outputs.version }}
    tag: ${{ steps.ver.outputs.tag }}
  steps:
- name: Download latest copy script from master
  if: inputs.use_latest_ci
  run: |
    # -f makes curl exit non-zero on HTTP errors (e.g. a 404 from raw.
    # githubusercontent.com). Without it, the error page would be saved,
    # chmod'd, and the failure would only surface later — confusingly —
    # when the "script" is executed.
    curl -sSfL "https://raw.githubusercontent.com/${{ github.repository }}/master/scripts/copy-latest-from-master.sh" \
      -o /tmp/copy-latest-from-master.sh
    chmod +x /tmp/copy-latest-from-master.sh
    echo "✅ Downloaded latest copy script from master"
# Build from the exact commit being released, not the branch head.
- name: Checkout at commit
  uses: actions/checkout@v4
  with:
    ref: ${{ needs.validate.outputs.commit }}
    # create-git-tag falls back to a shallow fetch when the commit
    # is missing locally, so fetch-depth:1 is sufficient.
    fetch-depth: 1
- name: Save and apply latest CI from master
  if: inputs.use_latest_ci
  run: |
    # save then apply: two-phase protocol of copy-latest-from-master.sh
    # (overlays master's CI config/scripts onto the checked-out commit;
    # see that script for exact semantics).
    /tmp/copy-latest-from-master.sh save \
      .github \
      scripts \
      web/Dockerfile \
      core/server/Dockerfile \
      core/ai/mcp/Dockerfile \
      core/connectors/runtime/Dockerfile \
      core/bench/dashboard/server/Dockerfile
    /tmp/copy-latest-from-master.sh apply
- name: Ensure version extractor is executable
  run: |
    # Defensive: an older release commit may lack the +x bit on the script.
    test -x scripts/extract-version.sh || chmod +x scripts/extract-version.sh
# matrix.type == 'rust' is dead: Rust crates are routed to the
# dedicated publish-rust-crates reusable workflow and never reach
# this SDK matrix (see plan job's hasRustCrates split at L349-354).
# Python uses maturin which depends on Rust, so python rows still
# need the toolchain.
- name: Setup Rust toolchain (if needed)
  if: matrix.type == 'python'
  uses: ./.github/actions/utils/setup-rust-with-cache
# Echo the full matrix row to ease troubleshooting of failed runs.
- name: Debug matrix
  run: echo '${{ toJson(matrix) }}'
- name: Extract version & tag
  id: ver
  shell: bash
  # Values flow in via env rather than inline ${{ }} interpolation so the
  # shell script body is not rewritten with arbitrary matrix text.
  env:
    MATRIX_KEY: ${{ matrix.key }}
    MATRIX_TAG_PATTERN: ${{ matrix.tag_pattern }}
    CREATE_EDGE_DOCKER_TAG: ${{ inputs.create_edge_docker_tag }}
  run: |
    set -euo pipefail
    VERSION=$(scripts/extract-version.sh "$MATRIX_KEY")
    # If a tag pattern exists for this component, ask the script for a tag as well
    if [ -n "$MATRIX_TAG_PATTERN" ] && [ "$MATRIX_TAG_PATTERN" != "null" ]; then
      TAG=$(scripts/extract-version.sh "$MATRIX_KEY" --tag)
    else
      TAG=""
    fi
    # Single source of truth for the SNAPSHOT/no-tag-pattern skip rule.
    SHOULD_TAG=$(scripts/extract-version.sh "$MATRIX_KEY" --should-tag)
    # Symmetric auto-publish stable-skip override matching
    # docker-manifests at L786-791. In auto-publish mode
    # (create_edge_docker_tag=true), stable versions never get
    # versioned git tags, only the rolling :edge Docker tag. Latent
    # today because post-merge.yml filters stable SDK versions out
    # of publish_other before calling publish.yml, but the symmetry
    # with docker-manifests protects against future auto-publish
    # callers that do not pre-filter and keeps the two matrices
    # aligned on taggability rules.
    if [ "$SHOULD_TAG" = "true" ] \
      && [ "$CREATE_EDGE_DOCKER_TAG" = "true" ] \
      && [ "$(scripts/extract-version.sh "$MATRIX_KEY" --is-pre-release)" != "true" ]; then
      SHOULD_TAG=false
    fi
    # Publish version/tag/should_tag for the wait gates and tag step below.
    {
      echo "version=$VERSION"
      echo "tag=$TAG"
      echo "should_tag=$SHOULD_TAG"
    } >> "$GITHUB_OUTPUT"
    echo "✅ Resolved $MATRIX_KEY -> version=$VERSION tag=${TAG:-<none>} should_tag=$SHOULD_TAG"
# ─────────────────────────────────────────
# Python SDK Publishing
# ─────────────────────────────────────────
# Each SDK row dispatches to its composite action with the resolved
# version; dry_run is forwarded so the action decides what to skip.
# Python consumes the prebuilt wheels from the build-python-wheels job.
- name: Publish Python SDK
  if: matrix.type == 'python'
  uses: ./.github/actions/python-maturin/post-merge
  with:
    version: ${{ steps.ver.outputs.version }}
    dry_run: ${{ inputs.dry_run }}
    wheels_artifact: python-wheels-all
    wheels_path: dist
# ─────────────────────────────────────────
# Node SDK Publishing
# ─────────────────────────────────────────
- name: Publish Node SDK
  if: matrix.type == 'node'
  uses: ./.github/actions/node-npm/post-merge
  with:
    version: ${{ steps.ver.outputs.version }}
    dry_run: ${{ inputs.dry_run }}
# ─────────────────────────────────────────
# Java SDK Publishing
# ─────────────────────────────────────────
- name: Publish Java SDK
  if: matrix.type == 'java'
  uses: ./.github/actions/java-gradle/post-merge
  with:
    version: ${{ steps.ver.outputs.version }}
    dry_run: ${{ inputs.dry_run }}
# ─────────────────────────────────────────
# C# SDK Publishing
# ─────────────────────────────────────────
- name: Publish C# SDK
  if: matrix.type == 'csharp'
  uses: ./.github/actions/csharp-dotnet/post-merge
  with:
    version: ${{ steps.ver.outputs.version }}
    dry_run: ${{ inputs.dry_run }}
# ─────────────────────────────────────────
# Go Module (Tag-only)
# ─────────────────────────────────────────
# Go has no registry upload; "publishing" is just preparing the git tag.
- name: Prepare Go tag
  if: matrix.type == 'go'
  uses: ./.github/actions/go/post-merge
  with:
    version: ${{ steps.ver.outputs.version }}
    dry_run: ${{ inputs.dry_run }}
# ─────────────────────────────────────────
# Wait gates: do not push the git tag until the registry is actually
# serving the new version. The Rust path already does this via
# wait-for-crate; mirroring it here closes the "artifact published,
# tag never created" rc1 failure class for foreign SDKs.
#
# Go has no registry wait: the tag IS the release (the Go proxy
# fetches from git on demand).
# ─────────────────────────────────────────
# should_tag gating: if no tag will be pushed for this row there is
# nothing for the wait to protect, so it is skipped entirely.
- name: Wait for PyPI availability
  if: |
    success() &&
    inputs.dry_run == false &&
    matrix.type == 'python' &&
    steps.ver.outputs.should_tag == 'true'
  uses: ./.github/actions/utils/wait-for-url
  with:
    # PyPI JSON API: 200 once the release version is live.
    url: https://pypi.org/pypi/apache-iggy/${{ steps.ver.outputs.version }}/json
    description: apache-iggy ${{ steps.ver.outputs.version }} on PyPI
    max_attempts: "15"
    initial_sleep_seconds: "3"
- name: Wait for npm availability
  if: |
    success() &&
    inputs.dry_run == false &&
    matrix.type == 'node' &&
    steps.ver.outputs.should_tag == 'true'
  uses: ./.github/actions/utils/wait-for-url
  with:
    # npm registry version endpoint: 200 once the version is published.
    url: https://registry.npmjs.org/apache-iggy/${{ steps.ver.outputs.version }}
    description: apache-iggy ${{ steps.ver.outputs.version }} on npm
    max_attempts: "15"
    initial_sleep_seconds: "3"
# Java publishes to ASF Nexus staging via `./gradlew publish`
# (repository.apache.org/service/local/staging/deploy/maven2). The
# staging -> Maven Central handoff requires a Nexus Close+Release
# action which in the Apache governance model is operator-driven
# (often behind a dev@ release vote). No in-tree automation performs
# that handoff today. wait-for-url here polls the downstream Maven
# Central mirror (repo1.maven.org), which will not serve the
# artifact until Close+Release completes.
#
# CRITICAL DESIGN CARVE-OUT — DO NOT REMOVE `continue-on-error: true`
# BELOW without reading this:
#
# This wait step is the ONLY wait gate in the publish chain that uses
# `continue-on-error: true`, and the `Tag SDK release` step further
# down uses `if: success()`. GitHub Actions step semantics:
# continue-on-error=true on a failing step
# -> step outcome = failure
# -> step conclusion = success
# -> downstream `if: success()` STILL evaluates true
# So when Maven Central has not caught up within the ~24-minute
# budget (Central propagation is frequently slow, rarely >45 min),
# the Java git tag is STILL pushed after `gradle publish` returned
# 0 at the staging step above. This matches pre-PR behavior (the
# Java tag was always pushed after `gradle publish` returned 0)
# and is INTENTIONAL: without the carve-out, every Java release
# would time-out at this wait and the operator would have to push
# the tag manually — a worse UX than the current best-effort
# shape, and still subject to the same invariant hole anyway.
#
# Follow-up to remove the carve-out: automate staging -> Central
# via `io.github.gradle-nexus.publish-plugin` with
# `closeAndReleaseStagingRepositories`, then drop the
# continue-on-error below and the Java path will match the
# PyPI/npm/NuGet "wait then tag" shape. This is how Kafka / Camel
# / Pulsar / Beam handle it. Blocked on confirming ASF governance
# allows automated promotion for iggy.
# TODO(#NNNN): track the gradle-nexus-publish-plugin adoption and
# drop `continue-on-error` from the step below once it lands.
- name: Wait for Maven Central availability
  if: |
    success() &&
    inputs.dry_run == false &&
    matrix.type == 'java' &&
    steps.ver.outputs.should_tag == 'true'
  # Best-effort by design — see the carve-out rationale in the comment
  # block directly above this step before touching this line.
  continue-on-error: true
  uses: ./.github/actions/utils/wait-for-url
  with:
    url: https://repo1.maven.org/maven2/org/apache/iggy/iggy/${{ steps.ver.outputs.version }}/iggy-${{ steps.ver.outputs.version }}.pom
    description: org.apache.iggy:iggy:${{ steps.ver.outputs.version }} on Maven Central
    max_attempts: "50"
    initial_sleep_seconds: "5"
    max_sleep_seconds: "30"
# NuGet v3 flat container URLs use lowercase package id and version;
# Apache.Iggy → apache.iggy.
- name: Wait for NuGet availability
  if: |
    success() &&
    inputs.dry_run == false &&
    matrix.type == 'csharp' &&
    steps.ver.outputs.should_tag == 'true'
  uses: ./.github/actions/utils/wait-for-url
  with:
    url: https://api.nuget.org/v3-flatcontainer/apache.iggy/${{ steps.ver.outputs.version }}/apache.iggy.${{ steps.ver.outputs.version }}.nupkg
    description: Apache.Iggy ${{ steps.ver.outputs.version }} on NuGet
    max_attempts: "20"
    initial_sleep_seconds: "3"
# Inline per-row tagging: tightly couple the git tag to the publish that
# just succeeded for THIS matrix row. should_tag was computed by
# extract-version.sh and encodes both the SNAPSHOT skip (mutable Java
# releases) and the "no tag_pattern" skip in one place — keeping the
# rule symmetric with the docker-manifests job above. The wait gates
# above ensure the tag is never pushed before the registry is serving
# the artifact (Go is exempt: the tag IS the release).
- name: Tag SDK release (${{ matrix.key }})
  if: |
    success() &&
    inputs.dry_run == false &&
    inputs.skip_tag_creation == false &&
    steps.ver.outputs.should_tag == 'true'
  uses: ./.github/actions/utils/create-git-tag
  with:
    tag: ${{ steps.ver.outputs.tag }}
    commit: ${{ needs.validate.outputs.commit }}
    message: |
      Release ${{ matrix.key }} ${{ steps.ver.outputs.version }}
      Component: ${{ matrix.key }}
      Tag: ${{ steps.ver.outputs.tag }}
      Commit: ${{ needs.validate.outputs.commit }}
      Released by: GitHub Actions (workflow ${{ github.run_id }})
- name: Set status output
  id: status
  # always() so the job-level `status` output (declared in this job's
  # outputs) is populated even when an earlier step failed.
  if: always()
  run: echo "status=${{ job.status }}" >> "$GITHUB_OUTPUT"
summary:
  name: Publish Summary
  # Fan-in over every publish path so the summary reflects final results.
  needs:
    [
      validate,
      plan,
      check-tags,
      build-python-wheels,
      publish-rust-crates,
      publish-docker,
      docker-manifests,
      publish,
    ]
  # always(): render a summary even when some publish jobs failed or were
  # skipped — but only if there was anything to publish at all.
  if: always() && needs.validate.outputs.has_targets == 'true'
  runs-on: ubuntu-latest
  steps:
- name: Download latest copy script from master
  if: inputs.use_latest_ci
  run: |
    # -f makes curl exit non-zero on HTTP errors (e.g. a 404); without it
    # the error page would be saved and chmod'd, and the failure would only
    # surface later when the "script" is executed.
    curl -sSfL "https://raw.githubusercontent.com/${{ github.repository }}/master/scripts/copy-latest-from-master.sh" \
      -o /tmp/copy-latest-from-master.sh
    chmod +x /tmp/copy-latest-from-master.sh
    echo "✅ Downloaded latest copy script from master"
# Check out the release commit so extract-version.sh reads the same
# sources the publish jobs used.
- uses: actions/checkout@v4
  with:
    ref: ${{ needs.validate.outputs.commit }}
- name: Save and apply latest CI from master
  if: inputs.use_latest_ci
  run: |
    # Only .github and scripts are overlaid here — the summary job never
    # builds Docker images, so no Dockerfiles are needed.
    /tmp/copy-latest-from-master.sh save \
      .github \
      scripts
    /tmp/copy-latest-from-master.sh apply
- name: Ensure version extractor is executable
  run: |
    # Defensive: an older release commit may lack the +x bit on the script.
    test -x scripts/extract-version.sh || chmod +x scripts/extract-version.sh
# Renders a markdown report into the workflow run's summary page:
# configuration table, per-component versions/tags, requested targets,
# and per-job results.
- name: Generate summary
  run: |
    {
      echo "# 📦 Publish Summary"
      echo
      echo "## Configuration"
      echo
      echo "| Setting | Value |"
      echo "|---------|-------|"
      echo "| **Commit** | \`${{ needs.validate.outputs.commit }}\` |"
      echo "| **Dry run** | \`${{ inputs.dry_run }}\` |"
      echo "| **Skip tag creation** | \`${{ inputs.skip_tag_creation }}\` |"
      echo "| **Total components** | ${{ needs.plan.outputs.count }} |"
      echo
      # Extract version information for all requested components
      echo "## Component Versions"
      echo
      echo "| Component | Version | Tag | Registry | Status |"
      echo "|-----------|---------|-----|----------|--------|"
      # Parse the targets from plan job
      TARGETS_JSON='${{ needs.plan.outputs.targets }}'
      # base64-encode each row so embedded whitespace survives the pipe.
      echo "$TARGETS_JSON" | jq -r '.include[] | select(.key!="noop") | @base64' | while read -r row; do
        _jq() { echo "$row" | base64 -d | jq -r "$1"; }
        KEY=$(_jq '.key')
        NAME=$(_jq '.name')
        REGISTRY=$(_jq '.registry')
        TAG_PATTERN=$(_jq '.tag_pattern')
        VERSION=$(scripts/extract-version.sh "$KEY" 2>/dev/null || echo "N/A")
        # Get tag if pattern exists
        TAG=""
        if [ -n "$TAG_PATTERN" ] && [ "$TAG_PATTERN" != "null" ]; then
          TAG=$(scripts/extract-version.sh "$KEY" --tag 2>/dev/null || echo "N/A")
        else
          TAG="N/A"
        fi
        # Determine status emoji based on dry run
        # NOTE(review): this column reflects run mode only, not the actual
        # per-component job result; real outcomes are in "Results" below.
        if [ "${{ inputs.dry_run }}" = "true" ]; then
          STATUS="🔍 Dry run"
        else
          STATUS="✅ Published"
        fi
        # Format registry display
        case "$REGISTRY" in
          crates) REGISTRY_DISPLAY="crates.io" ;;
          dockerhub) REGISTRY_DISPLAY="Docker Hub" ;;
          pypi) REGISTRY_DISPLAY="PyPI" ;;
          npm) REGISTRY_DISPLAY="npm" ;;
          maven) REGISTRY_DISPLAY="Maven" ;;
          nuget) REGISTRY_DISPLAY="NuGet" ;;
          none) REGISTRY_DISPLAY="Tag only" ;;
          *) REGISTRY_DISPLAY="$REGISTRY" ;;
        esac
        echo "| $NAME | \`$VERSION\` | \`$TAG\` | $REGISTRY_DISPLAY | $STATUS |"
      done
      echo
      # Echo back the raw request inputs for traceability.
      if [ -n "${{ inputs.publish_crates }}" ]; then
        echo "### 🦀 Rust Crates Requested"
        echo '```'
        echo "${{ inputs.publish_crates }}"
        echo '```'
      fi
      if [ -n "${{ inputs.publish_dockerhub }}" ]; then
        echo "### 🐳 Docker Images Requested"
        echo '```'
        echo "${{ inputs.publish_dockerhub }}"
        echo '```'
      fi
      if [ -n "${{ inputs.publish_other }}" ]; then
        echo "### 📦 Other SDKs Requested"
        echo '```'
        echo "${{ inputs.publish_other }}"
        echo '```'
      fi
      echo
      echo "## Results"
      echo
      # Python wheels building status
      if [ "${{ needs.plan.outputs.has_python }}" = "true" ]; then
        echo "### Python Wheels Building"
        case "${{ needs.build-python-wheels.result }}" in
          success) echo "✅ **Python wheels built successfully for all platforms**" ;;
          failure) echo "❌ **Python wheel building failed**" ;;
          skipped) echo "⏭️ **Python wheel building was skipped**" ;;
        esac
        echo
      fi
      # Rust crates publishing status
      if [ -n "${{ inputs.publish_crates }}" ]; then
        echo "### Rust Crates Publishing (Sequential)"
        case "${{ needs.publish-rust-crates.result }}" in
          success) echo "✅ **Rust crates published successfully in dependency order**" ;;
          failure) echo "❌ **Rust crates publishing failed - check logs for details**" ;;
          skipped) echo "⏭️ **Rust crates publishing was skipped**" ;;
        esac
        echo
      fi
      # Docker publishing status
      if [ "${{ needs.plan.outputs.has_docker }}" = "true" ]; then
        echo "### Docker Publishing (Native Runners)"
        case "${{ needs.publish-docker.result }}" in
          success) echo "✅ **Docker images built and pushed successfully on native runners**" ;;
          failure) echo "❌ **Docker image publishing failed - check logs for details**" ;;
          skipped) echo "⏭️ **Docker publishing was skipped**" ;;
        esac
        echo
        echo "### Docker Manifests"
        case "${{ needs.docker-manifests.result }}" in
          success) echo "✅ **Multi-arch manifests created successfully**" ;;
          failure) echo "❌ **Manifest creation failed - check logs for details**" ;;
          skipped) echo "⏭️ **Manifest creation was skipped**" ;;
        esac
        echo
      fi
      # SDK publishing status
      echo "### SDK Publishing"
      case "${{ needs.publish.result }}" in
        success) echo "✅ **SDK publishing completed successfully**" ;;
        failure) echo "❌ **SDK publishing failed - check logs for details**" ;;
        cancelled) echo "🚫 **SDK publishing was cancelled**" ;;
        *) echo "⏭️ **SDK publishing was skipped**" ;;
      esac
      if [ "${{ inputs.dry_run }}" = "true" ]; then
        echo
        echo "**ℹ️ This was a dry run - no actual publishing occurred**"
      elif [ "${{ inputs.skip_tag_creation }}" = "true" ]; then
        echo
        echo "**ℹ️ Tag creation was skipped as requested**"
      else
        echo
        echo "**ℹ️ Git tags are created inline with each successful publish; see individual job logs for tag status**"
      fi
      echo
      echo "---"
      echo "*Workflow completed at $(date -u +"%Y-%m-%d %H:%M:%S UTC")*"
    } >> "$GITHUB_STEP_SUMMARY"
notify-failure:
  name: Notify on failure
  # Fan-in over every publish path; failure() fires when any needed job
  # (or one of its dependencies) failed.
  needs:
    [
      validate,
      plan,
      build-python-wheels,
      publish-rust-crates,
      publish-docker,
      docker-manifests,
      publish,
      summary,
    ]
  # Dry runs are exploratory, so they never notify.
  if: failure() && inputs.dry_run == false
  runs-on: ubuntu-latest
  steps:
    - name: Notify failure
      run: |
        echo "❌ Publishing workflow failed!"
        echo "Check the workflow run for details: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"