# test: add golden tests for a partial class template specialization (#4236)
# NOTE(review): this file was pasted from the GitHub blob view; the UI banner
# about hidden/bidirectional Unicode characters has been converted to comments
# so it no longer breaks YAML parsing.
name: Continuous Integration

on:
  push:
    branches:
      # NOTE(review): '*' already matches develop and master; the explicit
      # entries are kept (harmless) and document the primary branches.
      - develop
      - master
      - '*'
    tags:
      - "v*.*.*"
  # pull_request runs the matrix/build on the PR head with the fork-scoped token
  # (no comment perms on base repo).
  pull_request:
    branches:
      - develop

# Cancel in-flight runs for the same repository+ref when a new run starts.
concurrency:
  group: ${{ format('{0}:{1}', github.repository, github.ref) }}
  cancel-in-progress: true
jobs:
  # Generates the build matrices consumed by the build/releases jobs.
  cpp-matrix:
    runs-on: ubuntu-24.04
    container:
      image: ubuntu:24.04
    name: Generate Test Matrix
    outputs:
      matrix: ${{ steps.cpp-matrix.outputs.matrix }}
      llvm-matrix: ${{ steps.llvm-matrix.outputs.llvm-matrix }}
      releases-matrix: ${{ steps.releases-matrix.outputs.releases-matrix }}
    steps:
      - name: Install prerequisites
        run: |
          set -e
          apt-get update
          apt-get install -y git ca-certificates curl nodejs npm
          if ! command -v node >/dev/null 2>&1 && command -v nodejs >/dev/null 2>&1; then
            ln -s /usr/bin/nodejs /usr/bin/node
          fi
      - name: Checkout
        uses: actions/checkout@v4
      - name: Generate Test Matrix
        uses: alandefreitas/cpp-actions/cpp-matrix@v1.9.2
        id: cpp-matrix
        with:
          compilers: |
            gcc >=14
            clang >=18
            msvc >=14.40
            apple-clang *
          standards: '23'
          latest-factors: |
            gcc UBSan
            clang UBSan ASan MSan Coverage
            apple-clang UBSan ASan
          factors: ''
          runs-on: |
            apple-clang: macos-15
          containers: |
            clang: ubuntu:24.04
          build-types: |
            gcc: Release
            clang: Release
            apple-clang: Release
            msvc: RelWithDebInfo
          install: |
            gcc: git cmake ninja-build build-essential pkg-config python3 curl unzip openjdk-11-jdk-headless libncurses-dev libxml2-utils libxml2-dev
            clang: git cmake ninja-build build-essential pkg-config python3 curl unzip openjdk-11-jdk-headless libncurses-dev libxml2-utils libxml2-dev libstdc++-14-dev
            clang Coverage: git cmake ninja-build build-essential pkg-config python3 curl unzip openjdk-11-jdk-headless libncurses-dev libxml2-utils libxml2-dev libstdc++-14-dev elfutils llvm-21-tools
            msvc: ''
          extra-values: |
            # libc++ runtimes: clang + (ASan or MSan) needs instrumented libc++
            # built separately from the main LLVM build
            use-libcxx: {{#if (and (ieq compiler 'clang') (or msan asan)) }}true{{else}}false{{/if}}
            libcxx-runtimes: libcxx{{#if (ne compiler 'msvc')}};libcxxabi{{/if}}
            llvm-runtimes: {{#if (ine use-libcxx 'true') }}{{{ libcxx-runtimes }}}{{/if}}
            # LLVM build configuration
            llvm-hash: dc4cef81d47c7bc4a3c4d58fbacf8a6359683fae
            llvm-build-preset-prefix: {{{lowercase build-type}}}
            llvm-build-preset-os: {{#if (ieq os 'windows') }}win{{else}}unix{{/if}}
            llvm-build-preset: {{{ llvm-build-preset-prefix }}}-{{{ llvm-build-preset-os }}}
            # LLVM cache key: encodes hash, build type, OS, and sanitizer
            # so each distinct LLVM configuration gets its own cache entry.
            # Clang + (ASan or MSan) appends compiler version and sanitizer
            # because those builds include instrumented libc++ runtimes.
            llvm-os-key: {{#if container}}{{{ replace (replace (lowercase container) ":" "-") "." "-" }}}{{else}}{{{ replace (lowercase runs-on) "." "-" }}}{{/if}}
            llvm-archive-sanitizer-str: {{#if (ieq compiler 'clang')}}{{#if ubsan}}ubsan{{else if asan}}asan{{else if msan}}msan{{/if}}{{/if}}
            llvm-archive-basename: llvm-{{{ substr llvm-hash 0 7 }}}-{{{ llvm-build-preset-prefix }}}-{{{ llvm-os-key }}}{{#if (and (ieq compiler 'clang') (or msan asan)) }}-{{{ compiler }}}-{{{ version }}}-{{{ llvm-archive-sanitizer-str }}}{{/if}}
            # LLVM archive (for the llvm-releases job that uploads pre-built LLVM)
            llvm-archive-extension: {{#if (ieq os 'windows') }}7z{{else}}tar.bz2{{/if}}
            llvm-archive-filename: {{{ llvm-archive-basename }}}.{{{ llvm-archive-extension }}}
            # Maps sanitizer factor to LLVM_USE_SANITIZER cmake value
            llvm-sanitizer-config: {{#if (and (ne compiler 'clang') (ne compiler 'apple-clang'))}}{{else if asan}}Address{{else if msan}}MemoryWithOrigins{{/if}}
            # Compiler and linker flags passed to the MrDocs build (not deps).
            # common-flags-base: clang-specific flags (e.g. -gz=zstd for compression)
            # common-flags: adds MSan origin tracking on top of base
            # mrdocs-flags: warnings-as-errors, static linking (gcc), coverage (clang)
            warning-flags: {{#if (eq compiler 'msvc') }}/WX /W4 {{else}}-Werror -Wall {{/if}}
            common-flags-base: {{#if (ieq compiler 'clang')}}-gz=zstd {{/if}}
            common-flags: {{{ common-flags-base }}}{{#if msan }}-fsanitize-memory-track-origins {{/if}}
            common-ccflags: {{{ ccflags }}} {{{ common-flags }}}
            mrdocs-flags: {{{ warning-flags }}}{{#if (and (eq compiler 'gcc') (not asan)) }}-static {{/if}}{{#if (and (ieq compiler 'clang') coverage)}}-fprofile-instr-generate -fcoverage-mapping {{/if}}
            mrdocs-ccflags: {{{ common-ccflags }}} {{{ mrdocs-flags }}}
            # Packaging and release configuration
            mrdocs-package-generators: {{#if (ieq os 'windows') }}7Z ZIP WIX{{else}}TGZ TXZ{{/if}}
            mrdocs-release-package-artifact: release-packages-{{{ lowercase os }}}
            # Bottleneck builds: skip expensive tests to save CI time
            is-bottleneck: {{#if (or msan (and (ieq compiler 'apple-clang') (or asan ubsan))) }}true{{/if}}
            # Sanitizer name for bootstrap's --sanitizer flag
            bootstrap-sanitizer: {{#if asan}}address{{else if msan}}memory{{else if ubsan}}undefined{{else if tsan}}thread{{/if}}
          output-file: matrix.json
          trace-commands: true
          github-token: ${{ secrets.GITHUB_TOKEN }}
      # Set up the version as expected by the LLVM matrix script and @actions/core
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20
      - name: Generate LLVM Test Matrix
        id: llvm-matrix
        run: |
          set -x
          cd .github
          npm ci
          cd ..
          node .github/llvm-matrix.js
      - name: Generate Releases Test Matrix
        id: releases-matrix
        run: |
          set -x
          cd .github
          npm ci
          cd ..
          node .github/releases-matrix.js
| build: | |
| needs: cpp-matrix | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| include: ${{ fromJSON(needs.cpp-matrix.outputs.matrix) }} | |
| defaults: | |
| run: | |
| shell: bash | |
| name: ${{ matrix.name }} | |
| runs-on: ${{ matrix.runs-on }} | |
| container: ${{ matrix.container }} | |
| env: ${{ matrix.env }} | |
| permissions: | |
| contents: write | |
| steps: | |
| - name: Add LLVM apt repository | |
| if: matrix.coverage | |
| run: | | |
| apt-get update && apt-get install -y --no-install-recommends ca-certificates wget gnupg lsb-release | |
| wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc | |
| codename=$(lsb_release -cs) | |
| echo "deb http://apt.llvm.org/$codename/ llvm-toolchain-$codename-21 main" >> /etc/apt/sources.list | |
| - name: Install System Packages | |
| uses: alandefreitas/cpp-actions/package-install@v1.9.2 | |
| if: matrix.install != '' | |
| id: package-install | |
| env: | |
| DEBIAN_FRONTEND: 'noninteractive' | |
| TZ: 'Etc/UTC' | |
| with: | |
| apt-get: ${{ matrix.install }} | |
| - name: Clone MrDocs | |
| uses: actions/checkout@v4 | |
| - name: Configure Git Safe Directory | |
| if: matrix.container != '' | |
| run: git config --global --add safe.directory "$(pwd)" | |
| - name: Setup C++ | |
| uses: alandefreitas/cpp-actions/setup-cpp@v1.9.2 | |
| id: setup-cpp | |
| with: | |
| compiler: ${{ matrix.compiler }} | |
| version: ${{ matrix.version }} | |
| - name: Configure symbolizer paths | |
| if: matrix.compiler != 'msvc' | |
| shell: bash | |
| run: | | |
| set -e | |
| candidates=() | |
| # 1) Anything on PATH | |
| if command -v llvm-symbolizer >/dev/null 2>&1; then | |
| candidates+=("$(command -v llvm-symbolizer)") | |
| fi | |
| uname_out="$(uname -s || true)" | |
| # 2) Platform-specific common locations | |
| case "$uname_out" in | |
| Darwin) | |
| if xcrun --find llvm-symbolizer >/dev/null 2>&1; then | |
| candidates+=("$(xcrun --find llvm-symbolizer)") | |
| fi | |
| candidates+=("/opt/homebrew/opt/llvm/bin/llvm-symbolizer") | |
| ;; | |
| Linux) | |
| for dir in /usr/lib/llvm-* /usr/lib/llvm; do | |
| if [ -x "$dir/bin/llvm-symbolizer" ]; then | |
| candidates+=("$dir/bin/llvm-symbolizer") | |
| fi | |
| done | |
| ;; | |
| MINGW*|MSYS*|CYGWIN*) | |
| for dir in "/c/Program Files/LLVM/bin" "/c/ProgramData/chocolatey/lib/llvm/tools/llvm/bin"; do | |
| if [ -x "$dir/llvm-symbolizer.exe" ]; then | |
| candidates+=("$dir/llvm-symbolizer.exe") | |
| fi | |
| done | |
| ;; | |
| esac | |
| sym="" | |
| for c in "${candidates[@]}"; do | |
| if [ -n "$c" ] && [ -x "$c" ]; then | |
| sym="$c" | |
| break | |
| fi | |
| done | |
| if [ -n "$sym" ]; then | |
| echo "Using llvm-symbolizer at: $sym" | |
| echo "LLVM_SYMBOLIZER_PATH=$sym" >> "$GITHUB_ENV" | |
| echo "ASAN_SYMBOLIZER_PATH=$sym" >> "$GITHUB_ENV" | |
| else | |
| echo "Warning: llvm-symbolizer not found; ASan stacks may be unsymbolized." >&2 | |
| fi | |
| - name: Select Xcode 16.4 | |
| if: matrix.compiler == 'apple-clang' | |
| run: | | |
| set -x | |
| sudo ls -1 /Applications | grep Xcode | |
| sudo xcode-select -s /Applications/Xcode_16.4.app/Contents/Developer | |
| ${{ steps.setup-cpp.outputs.cxx }} -v | |
| ${{ steps.setup-cpp.outputs.cxx }} --print-targets | |
| ${{ steps.setup-cpp.outputs.cxx }} --print-target-triple | |
| # Compute absolute paths and the LLVM cache key. | |
| # Paths depend on the runner's working directory, so they can't be | |
| # set in the matrix template. The cache key is generated by | |
| # bootstrap so the recipe file is the single source of truth for | |
| # the LLVM revision. | |
| - name: Resolve absolute paths | |
| id: paths | |
| run: | | |
| set -euvx | |
| third_party_dir="$(realpath $(pwd)/..)/third-party" | |
| if [[ "${{ runner.os }}" == 'Windows' ]]; then | |
| third_party_dir="$(echo "$third_party_dir" | sed 's/\\/\//g; s|^/d/|D:/|')" | |
| fi | |
| echo "third-party-dir=$third_party_dir" >> $GITHUB_OUTPUT | |
| echo "llvm-path=$third_party_dir/llvm" >> $GITHUB_OUTPUT | |
| - name: Cached LLVM Binaries | |
| id: llvm-cache | |
| uses: actions/cache@v4 | |
| with: | |
| path: ${{ steps.paths.outputs.llvm-path }} | |
| key: ${{ matrix.llvm-archive-basename }} | |
| # Bootstrap handles all dependency installation: LLVM, libc++, | |
| # JerryScript, Lua, Boost.Mp11, Boost.Describe, and libxml2 (MSVC only). | |
| # When LLVM is restored from cache, bootstrap detects the existing | |
| # install directory and skips rebuilding it. | |
| # The --env-file flag writes computed _ROOT paths and flags (libc++, | |
| # sanitizer ldflags) to a file that is sourced into GITHUB_ENV so | |
| # downstream steps use bootstrap as the single source of truth. | |
| - name: Install Dependencies via Bootstrap | |
| env: | |
| PYTHONIOENCODING: utf-8 | |
| run: | | |
| set -eux | |
| python=$(command -v python3 || command -v python) | |
| args=( | |
| --yes | |
| --skip-build --no-build-tests --no-run-configs | |
| --build-type "${{ matrix.build-type }}" | |
| --cc "${{ steps.setup-cpp.outputs.cc || matrix.cc }}" | |
| --cxx "${{ steps.setup-cpp.outputs.cxx || matrix.cxx }}" | |
| # Install deps to the CI cache path so actions/cache can save/restore them | |
| --cache-dir "${{ steps.paths.outputs.third-party-dir }}" | |
| # Write _ROOT paths and flags for GITHUB_ENV injection | |
| --env-file bootstrap-env.txt | |
| ) | |
| # Pass sanitizer flag when a sanitizer is active | |
| if [[ -n "${{ matrix.bootstrap-sanitizer }}" ]]; then | |
| args+=(--sanitizer "${{ matrix.bootstrap-sanitizer }}") | |
| fi | |
| # Pass through common compiler flags for dependency builds | |
| # (e.g. -gz=zstd for clang, -fsanitize-memory-track-origins for msan) | |
| common_flags="${{ matrix.common-flags }}" | |
| if [[ -n "$common_flags" ]]; then | |
| args+=(--cflags "$common_flags" --cxxflags "$common_flags") | |
| fi | |
| # On non-Windows, exclude libxml2 (system libxml2-dev is used instead) | |
| if [[ "${{ runner.os }}" != 'Windows' ]]; then | |
| args+=(--recipe-filter llvm,boost_mp11,boost_describe,jerryscript,lua) | |
| fi | |
| "$python" bootstrap.py "${args[@]}" | |
| # Source bootstrap-computed paths and flags into GITHUB_ENV | |
| if [[ -f bootstrap-env.txt ]]; then | |
| cat bootstrap-env.txt >> "$GITHUB_ENV" | |
| echo "--- bootstrap-env.txt ---" | |
| cat bootstrap-env.txt | |
| fi | |
| - name: Install Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '20' | |
| # Bootstrap's --env-file writes _ROOT paths and computed flags | |
| # (libc++, sanitizer) into GITHUB_ENV. The cmake-workflow step | |
| # references them via ${{ env.* }} so bootstrap is the single | |
| # source of truth for dependency locations and link flags. | |
| - name: CMake Workflow | |
| uses: alandefreitas/cpp-actions/cmake-workflow@v1.9.2 | |
| env: | |
| LLVM_PROFILE_FILE: mrdocs-%b-%m.profraw | |
| with: | |
| cmake-version: '>=3.26' | |
| cxxstd: ${{ matrix.cxxstd }} | |
| cc: ${{ steps.setup-cpp.outputs.cc || matrix.cc }} | |
| ccflags: ${{ matrix.mrdocs-ccflags }} | |
| cxx: ${{ steps.setup-cpp.outputs.cxx || matrix.cxx }} | |
| cxxflags: ${{ matrix.mrdocs-ccflags }} ${{ env.BOOTSTRAP_CXXFLAGS }} | |
| generator: Ninja | |
| toolchain: ${{ steps.package-install.outputs.vcpkg_toolchain || steps.package-install.outputs.vcpkg-toolchain }} | |
| build-type: ${{ matrix.build-type }} | |
| install-prefix: .local | |
| extra-args: | | |
| -D MRDOCS_BUILD_DOCS=OFF | |
| -D MRDOCS_EXPENSIVE_TESTS=${{ matrix.is-bottleneck && 'OFF' || 'ON' }} | |
| -D CMAKE_EXE_LINKER_FLAGS="${{ env.BOOTSTRAP_LDFLAGS }}" | |
| -D LLVM_ROOT="${{ env.LLVM_ROOT }}" | |
| -D jerryscript_ROOT="${{ env.jerryscript_ROOT }}" | |
| -D LUA_ROOT="${{ env.Lua_ROOT }}" | |
| -D Lua_ROOT="${{ env.Lua_ROOT }}" | |
| -D lua_ROOT="${{ env.Lua_ROOT }}" | |
| -D boost_mp11_ROOT="${{ env.boost_mp11_ROOT }}" | |
| -D boost_describe_ROOT="${{ env.boost_describe_ROOT }}" | |
| ${{ env.LibXml2_ROOT && format('-D LibXml2_ROOT="{0}"', env.LibXml2_ROOT) || '' }} | |
| export-compile-commands: true | |
| run-tests: true | |
| install: true | |
| package: ${{ matrix.is-main }} | |
| package-dir: packages | |
| package-generators: ${{ matrix.mrdocs-package-generators }} | |
| package-artifact: false | |
| ctest-timeout: 9000 | |
| - name: Upload GitHub Release Artifacts | |
| if: ${{ matrix.is-main && matrix.compiler != 'clang' }} | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: ${{ matrix.mrdocs-release-package-artifact }} | |
| path: | | |
| build/packages | |
| !build/packages/_CPack_Packages | |
| retention-days: 1 | |
| - name: FlameGraph | |
| uses: alandefreitas/cpp-actions/flamegraph@v1.9.2 | |
| if: matrix.time-trace | |
| with: | |
| build-dir: build | |
| github_token: ${{ secrets.GITHUB_TOKEN }} | |
| - name: Codecov | |
| id: codecov | |
| if: matrix.coverage | |
| run: | | |
| set -euvx | |
| cd build | |
| build_id=$(eu-readelf -n bin/mrdocs-test | awk '/Build ID:/{print $NF}') | |
| llvm-profdata-${{ matrix.major }} merge -sparse mrdocs-${build_id}-*.profraw -o default.profdata | |
| llvm-cov-${{ matrix.major }} export -format=lcov -instr-profile=default.profdata bin/mrdocs-test > "mrdocs-test.raw.info" | |
| # Strip lines annotated with LCOV_EXCL_LINE / LCOV_EXCL_START..STOP | |
| # from the LCOV data, since llvm-cov doesn't process these markers. | |
| python3 -c " | |
| import re, sys | |
| excl_lines = {} | |
| for path in set(re.findall(r'^SF:(.+)$', open('mrdocs-test.raw.info', errors='replace').read(), re.M)): | |
| try: | |
| src = open(path, errors='replace').readlines() | |
| except OSError: | |
| continue | |
| skip = False | |
| for i, line in enumerate(src, 1): | |
| if 'LCOV_EXCL_STOP' in line: | |
| skip = False | |
| if skip or 'LCOV_EXCL_LINE' in line: | |
| excl_lines.setdefault(path, set()).add(i) | |
| if 'LCOV_EXCL_START' in line: | |
| skip = True | |
| cur_sf = None | |
| for line in open('mrdocs-test.raw.info', errors='replace'): | |
| if line.startswith('SF:'): | |
| cur_sf = line[3:].strip() | |
| if line.startswith('DA:'): | |
| lineno = int(line.split(',')[0][3:]) | |
| if cur_sf in excl_lines and lineno in excl_lines[cur_sf]: | |
| continue | |
| if line.startswith('BRDA:'): | |
| lineno = int(line.split(',')[0][5:]) | |
| if cur_sf in excl_lines and lineno in excl_lines[cur_sf]: | |
| continue | |
| sys.stdout.write(line) | |
| " > "mrdocs-test.info" | |
| echo "file=$(realpath "mrdocs-test.info")" >> $GITHUB_OUTPUT | |
| - name: Upload Coverage as Artifact | |
| if: matrix.coverage | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: Coverage | |
| path: ${{ steps.codecov.outputs.file }} | |
| retention-days: 30 | |
| - name: Codecov Upload | |
| uses: codecov/codecov-action@v5 | |
| if: matrix.coverage | |
| with: | |
| fail_ci_if_error: true | |
| files: ${{ steps.codecov.outputs.file }} | |
| flags: cpp | |
| disable_search: true | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| verbose: true | |
| # Utility checks that only need to run once, not per compiler. | |
| # Tests bootstrap Python code, Danger.js rules, and YAML schema. | |
| utility-tests: | |
| runs-on: ubuntu-24.04 | |
| name: Utility Tests | |
| steps: | |
| - name: Clone MrDocs | |
| uses: actions/checkout@v4 | |
| - name: Install Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '20' | |
| - name: Bootstrap Python Tests | |
| run: | | |
| set -eux | |
| pip install --quiet coverage | |
| python3 -m coverage run -m unittest discover -s util/bootstrap/tests/ | |
| python3 -m coverage report | |
| python3 -m coverage report --fail-under=84 | |
| python3 -m coverage xml -o bootstrap-coverage.xml | |
| - name: Upload Bootstrap Coverage to Codecov | |
| uses: codecov/codecov-action@v5 | |
| with: | |
| files: bootstrap-coverage.xml | |
| flags: bootstrap | |
| fail_ci_if_error: true | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| verbose: true | |
| - name: Danger.js Tests | |
| working-directory: util/danger | |
| run: | | |
| npm ci --ignore-scripts | |
| npx vitest run | |
| - name: Check YAML schema | |
| run: | | |
| python3 ./util/generate-yaml-schema.py --check | |
| npx -y -p ajv-cli -- ajv compile -s docs/mrdocs.schema.json | |
| - name: Verify snippet .cpp files match golden tests | |
| shell: bash | |
| run: | | |
| set -euo pipefail | |
| shopt -s nullglob | |
| SRC="docs/website/snippets" | |
| DST="test-files/golden-tests/snippets" | |
| [[ -d "$SRC" ]] || { echo "Source directory not found: $SRC"; exit 2; } | |
| [[ -d "$DST" ]] || { echo "Destination directory not found: $DST"; exit 2; } | |
| missing=() | |
| mismatched=() | |
| while IFS= read -r -d '' src; do | |
| rel="${src#$SRC/}" | |
| dst="$DST/$rel" | |
| if [[ ! -f "$dst" ]]; then | |
| missing+=("$rel") | |
| continue | |
| fi | |
| if ! git diff --no-index --ignore-cr-at-eol --quiet -- "$src" "$dst"; then | |
| mismatched+=("$rel") | |
| fi | |
| done < <(find "$SRC" -type f -name '*.cpp' -print0) | |
| if (( ${#missing[@]} || ${#mismatched[@]} )); then | |
| if (( ${#missing[@]} )); then | |
| echo "Missing corresponding golden files:" | |
| printf ' %s\n' "${missing[@]}" | |
| fi | |
| if (( ${#mismatched[@]} )); then | |
| echo "Content mismatches:" | |
| printf ' %s\n' "${mismatched[@]}" | |
| fi | |
| exit 1 | |
| fi | |
| echo "All snippet .cpp files are present and match." | |
| releases: | |
| needs: [ cpp-matrix, build ] | |
| if: ${{ needs.cpp-matrix.outputs.releases-matrix != '[]' && needs.cpp-matrix.outputs.releases-matrix != '' }} | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| include: ${{ fromJSON(needs.cpp-matrix.outputs.releases-matrix) }} | |
| defaults: | |
| run: | |
| shell: bash | |
| name: ${{ matrix.os }} MrDocs Releases | |
| runs-on: ${{ matrix.runs-on }} | |
| container: ${{ matrix.container }} | |
| permissions: | |
| contents: write | |
| steps: | |
| - name: Resolve absolute paths and cache key | |
| id: paths | |
| run: | | |
| set -euvx | |
| third_party_dir="$(realpath $(pwd)/..)/third-party" | |
| if [[ "${{ runner.os }}" == 'Windows' ]]; then | |
| third_party_dir="$(echo "$third_party_dir" | sed 's/\\/\//g; s|^/d/|D:/|')" | |
| fi | |
| echo "third-party-dir=$third_party_dir" >> $GITHUB_OUTPUT | |
| echo "llvm-path=$third_party_dir/llvm" >> $GITHUB_OUTPUT | |
| - name: Ensure Node | |
| if: matrix.container != '' && env.ACT == 'true' | |
| run: | | |
| set -e | |
| apt-get update | |
| apt-get install -y nodejs npm | |
| if ! command -v node >/dev/null 2>&1 && command -v nodejs >/dev/null 2>&1; then | |
| ln -s /usr/bin/nodejs /usr/bin/node | |
| fi | |
| - name: Install packages | |
| uses: alandefreitas/cpp-actions/package-install@v1.9.2 | |
| id: package-install | |
| with: | |
| apt-get: build-essential asciidoctor cmake bzip2 git rsync | |
| - name: Clone MrDocs | |
| uses: actions/checkout@v4 | |
| with: | |
| fetch-depth: 0 | |
| fetch-tags: true | |
| - name: Set Repository Ownership | |
| run: | | |
| git config --global --add safe.directory "$(pwd)" | |
| - name: Install Node.js | |
| uses: actions/setup-node@v4 | |
| with: | |
| node-version: '18' | |
| - name: Setup Ninja | |
| uses: seanmiddleditch/gha-setup-ninja@v5 | |
| if: ${{ runner.os == 'Windows' }} | |
| - name: Setup C++ | |
| uses: alandefreitas/cpp-actions/setup-cpp@v1.9.2 | |
| id: setup-cpp | |
| with: | |
| compiler: ${{ matrix.compiler }} | |
| version: ${{ matrix.version }} | |
| - name: Cached LLVM Binaries | |
| id: llvm-cache | |
| uses: actions/cache@v4 | |
| with: | |
| path: ${{ steps.paths.outputs.llvm-path }} | |
| key: ${{ matrix.llvm-archive-basename }} | |
| fail-on-cache-miss: true | |
| - name: Download MrDocs package | |
| uses: actions/download-artifact@v4 | |
| with: | |
| name: ${{ matrix.mrdocs-release-package-artifact }} | |
| path: packages | |
| - name: Install MrDocs from Package | |
| run: | | |
| set -euvx | |
| # Delete packages/_CPack_Packages files from previous runs | |
| rm -rf packages/_CPack_Packages | |
| # Print tree structure | |
| find packages -print | sed 's;[^/]*/;|____;g;s;____|; |;g' | |
| dest_dir="${{ steps.paths.outputs.llvm-path }}" | |
| if [[ ${{ runner.os }} != 'Windows' ]]; then | |
| find packages -maxdepth 1 -name 'MrDocs-*.tar.gz' -exec tar -vxzf {} -C $dest_dir --strip-components=1 \; | |
| else | |
| package=$(find packages -maxdepth 1 -name "MrDocs-*.7z" -print -quit) | |
| filename=$(basename "$package") | |
| name="${filename%.*}" | |
| 7z x "${package}" -o${dest_dir} | |
| set +e | |
| robocopy "${dest_dir}/${name}" "${dest_dir}" //move //e //np //nfl | |
| exit_code=$? | |
| set -e | |
| if (( exit_code >= 8 )); then | |
| exit 1 | |
| fi | |
| fi | |
| MRDOCS_ROOT="$dest_dir" | |
| echo -e "MRDOCS_ROOT=$MRDOCS_ROOT" >> $GITHUB_ENV | |
| echo -e "$MRDOCS_ROOT/bin" >> $GITHUB_PATH | |
| $MRDOCS_ROOT/bin/mrdocs --version | |
| - name: Clone Boost.URL | |
| uses: alandefreitas/cpp-actions/boost-clone@v1.9.2 | |
| id: boost-url-clone | |
| with: | |
| branch: develop | |
| modules: url | |
| boost-dir: boost | |
| modules-scan-paths: '"test example"' | |
| modules-exclude-paths: '' | |
| trace-commands: true | |
| - name: Set up llvm-symbolizer | |
| if: ${{ runner.os != 'Windows' }} | |
| run: | | |
| set -x | |
| if [[ $RUNNER_OS == 'macOS' ]]; then | |
| # Step 1: Check if llvm-symbolizer is installed | |
| if ! command -v llvm-symbolizer &> /dev/null; then | |
| echo "llvm-symbolizer is not installed. Installing via Homebrew..." | |
| # Step 2: Install llvm if not installed | |
| if command -v brew &> /dev/null; then | |
| brew install llvm | |
| else | |
| echo "Homebrew is not installed. Please install Homebrew first: https://brew.sh/" | |
| exit 1 | |
| fi | |
| fi | |
| # Step 3: Ensure llvm-symbolizer is in your PATH | |
| llvm_bin_path=$(brew --prefix)/opt/llvm/bin | |
| PATH="$PATH:$llvm_bin_path" | |
| LLVM_SYMBOLIZER_PATH=$(which llvm-symbolizer) | |
| if [ -z "$LLVM_SYMBOLIZER_PATH" ]; then | |
| echo "llvm-symbolizer installation failed or it's not in the PATH." | |
| exit 1 | |
| else | |
| echo "llvm-symbolizer found at: $LLVM_SYMBOLIZER_PATH" | |
| fi | |
| elif [[ $RUNNER_OS == 'Linux' ]]; then | |
| # Step 1: Check if llvm-symbolizer is installed | |
| if ! command -v llvm-symbolizer &> /dev/null; then | |
| echo "llvm-symbolizer is not installed. Installing via apt-get..." | |
| apt-get update | |
| apt-get install -y llvm | |
| fi | |
| # Step 2: Ensure llvm-symbolizer is in your PATH | |
| LLVM_SYMBOLIZER_PATH=$(which llvm-symbolizer) | |
| if [ -z "$LLVM_SYMBOLIZER_PATH" ]; then | |
| echo "llvm-symbolizer installation failed or it's not in the PATH." | |
| exit 1 | |
| else | |
| echo "llvm-symbolizer found at: $LLVM_SYMBOLIZER_PATH" | |
| fi | |
| else | |
| echo "Unsupported OS: $RUNNER_OS" | |
| exit 1 | |
| fi | |
| # Step 4: Export LLVM_SYMBOLIZER_PATH environment variable | |
| export LLVM_SYMBOLIZER_PATH="$LLVM_SYMBOLIZER_PATH" | |
| echo -e "LLVM_SYMBOLIZER_PATH=$LLVM_SYMBOLIZER_PATH" >> $GITHUB_ENV | |
| echo "Environment variable LLVM_SYMBOLIZER_PATH set to: $LLVM_SYMBOLIZER_PATH" | |
| - name: Generate Landing Page | |
| working-directory: docs/website | |
| run: | | |
| npm ci | |
| node render.js | |
| mkdir -p ../../build/website | |
| cp index.html ../../build/website/index.html | |
| cp robots.txt ../../build/website/robots.txt | |
| cp styles.css ../../build/website/styles.css | |
| cp -r assets ../../build/website/assets | |
| - name: Generate Antora UI | |
| working-directory: docs/ui | |
| run: | | |
| # This playbook renders the documentation | |
| # content for the website. It includes | |
| # master, develop, and tags. | |
| GH_TOKEN="${{ secrets.GITHUB_TOKEN }}" | |
| export GH_TOKEN | |
| npm ci | |
| npx gulp lint | |
| npx gulp | |
| # Website publishing gate: | |
| # - Only publish on pushes to master/develop and on tags | |
| # - Only on Linux runners (the publish steps assume GNU tooling) | |
| # | |
| # Use `if: env.PUBLISH_WEBSITE == 'true'` for all steps that | |
| # write into `build/website/` and get deployed to the website. | |
| - name: Set website publish gate | |
| run: | | |
| is_publish_ref='false' | |
| if [[ "${{ github.event_name }}" == 'push' ]]; then | |
| if [[ "${{ github.ref_name }}" == 'master' || "${{ github.ref_name }}" == 'develop' ]]; then | |
| is_publish_ref='true' | |
| fi | |
| if [[ "${{ github.ref }}" == refs/tags/* ]]; then | |
| is_publish_ref='true' | |
| fi | |
| fi | |
| publish_website="$is_publish_ref" | |
| if [[ "${{ runner.os }}" != 'Linux' ]]; then | |
| publish_website='false' | |
| fi | |
| { | |
| echo "IS_PUBLISH_REF=$is_publish_ref" | |
| echo "PUBLISH_WEBSITE=$publish_website" | |
| } >> "$GITHUB_ENV" | |
| - name: Ensure all refs for Antora | |
| if: env.PUBLISH_WEBSITE == 'true' | |
| run: | | |
| set -euo pipefail | |
| # Make sure Antora sees every branch and tag from the upstream repo, | |
| # regardless of who triggered the workflow. | |
| git remote set-url origin https://github.com/cppalliance/mrdocs.git | |
| git fetch --prune --prune-tags origin \ | |
| '+refs/heads/*:refs/remotes/origin/*' \ | |
| '+refs/tags/*:refs/tags/*' | |
| - name: Generate Remote Documentation | |
| # This step fetches and builds develop, master and all tags. That's | |
| # unrelated to a PR, and is only needed for website publishing. So, skip | |
| # it for a PR. | |
| if: github.event_name != 'pull_request' | |
| working-directory: docs | |
| run: | | |
| # This playbook renders the documentation | |
| # content for the website. It includes | |
| # master, develop, and tags. | |
| GH_TOKEN="${{ secrets.GITHUB_TOKEN }}" | |
| export GH_TOKEN | |
| set -x | |
| npm ci | |
| npx antora --clean --fetch antora-playbook.yml --log-level=debug | |
| mkdir -p ../build/website/docs | |
| cp -vr build/site/* ../build/website/docs | |
| - name: Upload Website as Artifact | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: Website ${{ runner.os }} | |
| path: build/website | |
| retention-days: 30 | |
| - name: Generate Local Documentation | |
| working-directory: docs | |
| run: | | |
| # This playbook allows us to render the | |
| # documentation content and visualize it | |
| # before a workflow that pushes to the | |
| # website is triggered. | |
| GH_TOKEN="${{ secrets.GITHUB_TOKEN }}" | |
| export GH_TOKEN | |
| set -x | |
| npm ci | |
| npx antora antora-playbook.yml --attribute branchesarray=HEAD --stacktrace --log-level=debug | |
| mkdir -p ../build/docs-local | |
| cp -vr build/site/* ../build/docs-local | |
| - name: Clone Beman.Optional | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: steve-downey/optional | |
| ref: main | |
| path: beman-optional | |
| - name: Clone Fmt | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: fmtlib/fmt | |
| ref: main | |
| path: fmt | |
| - name: Clone Nlohmann.Json | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: nlohmann/json | |
| ref: develop | |
| path: nlohmann-json | |
| - name: Clone MpUnits | |
| uses: actions/checkout@v4 | |
| with: | |
| repository: mpusz/mp-units | |
| ref: master | |
| path: mp-units | |
| - name: Patch Demo Projects | |
| shell: bash | |
| run: | | |
| set -euo pipefail | |
| set -x | |
| for project in beman-optional fmt nlohmann-json mp-units; do | |
| src="./examples/third-party/$project" | |
| dst="./$project" | |
| [ -d "$src" ] || { echo "Source not found: $src" >&2; exit 1; } | |
| mkdir -p "$dst" | |
| # Mirror contents of $src into $dst, overwriting existing files | |
| tar -C "$src" -cf - . | tar -C "$dst" -xpf - | |
| done | |
| - name: Generate Demos | |
| run: | | |
| set -x | |
| declare -a generators=( | |
| "adoc" | |
| ${{ github.event_name != 'pull_request' && '"xml" | |
| "html"' || '' }} | |
| ) | |
| demo_failures="" | |
| # Generate the demos for each variant and generator | |
| for variant in single multi; do | |
| for generator in "${generators[@]}"; do | |
| [[ $generator = xml && $variant = multi ]] && continue | |
| [[ $variant = multi ]] && multipage="true" || multipage="false" | |
| for project_args in \ | |
| "boost-url|$(pwd)/boost/libs/url/doc/mrdocs.yml|../CMakeLists.txt" \ | |
| "beman-optional|$(pwd)/beman-optional/docs/mrdocs.yml|" \ | |
| "nlohmann-json|$(pwd)/nlohmann-json/docs/mrdocs.yml|" \ | |
| "mp-units|$(pwd)/mp-units/docs/mrdocs.yml|" \ | |
| "fmt|$(pwd)/fmt/doc/mrdocs.yml|" \ | |
| "mrdocs|$(pwd)/docs/mrdocs.yml|$(pwd)/CMakeLists.txt" \ | |
| ; do | |
| IFS='|' read -r project config extra <<< "$project_args" | |
| outdir="$(pwd)/demos/$project/$variant/$generator" | |
| cmd=(mrdocs --config="$config" $extra --output="$outdir" --multipage=$multipage --generator="$generator" --log-level=debug) | |
| if ! "${cmd[@]}"; then | |
| echo "FAILED: $project/$variant/$generator" | |
| demo_failures="$demo_failures $project/$variant/$generator\n ${cmd[*]}\n" | |
| rm -rf "$outdir" | |
| fi | |
| done | |
| done | |
| # Render the asciidoc files to html using asciidoctor | |
| if [[ ${{ runner.os }} == 'Linux' ]]; then | |
| for project in boost-url beman-optional mrdocs fmt nlohmann-json mp-units; do | |
| root="$(pwd)/demos/$project/$variant" | |
| src="$root/adoc" | |
| dst="$root/adoc-asciidoc" | |
| stylesheet="$(pwd)/share/mrdocs/addons/generator/common/layouts/style.css" | |
| # Skip if adoc generation failed for this project | |
| [[ -d "$src" ]] || continue | |
| # Create the top-level output dir | |
| mkdir -p "$dst" | |
| # Find every .adoc (recursively), mirror the directory structure, and render | |
| find "$src" -type f -name '*.adoc' -print0 | | |
| while IFS= read -r -d '' f; do | |
| rel="${f#"$src/"}" # path relative to $src | |
| outdir="$dst/$(dirname "$rel")" # mirror subdir inside $dst | |
| mkdir -p "$outdir" | |
| asciidoctor -a stylesheet="${stylesheet}" -D "$outdir" "$f" | |
| done | |
| done | |
| fi | |
| done | |
| # Compress demos for the artifact | |
| tar -cjf $(pwd)/demos.tar.gz -C $(pwd)/demos --strip-components 1 . | |
| echo "demos_path=$(pwd)/demos.tar.gz" >> $GITHUB_ENV | |
| if [[ -n "$demo_failures" ]]; then | |
| echo "The following demos failed:" | |
| printf "$demo_failures" | |
| exit 1 | |
| fi | |
      - name: Upload Demos as Artifacts
        uses: actions/upload-artifact@v4
        with:
          # Builds from master/develop get a branch-suffixed artifact name so a
          # later run can download them by name for comparison; every other ref
          # uses the plain OS-suffixed name.
          name: demos${{ (contains(fromJSON('["master", "develop"]'), github.ref_name ) && format('-{0}', github.ref_name)) || '' }}-${{ runner.os }}
          path: demos.tar.gz
          # develop and master are retained for longer so that they can be compared
          retention-days: ${{ contains(fromJSON('["master", "develop"]'), github.ref_name) && '30' || '1' }}
      - name: Download Previous Demos
        # Only tagged Linux builds compare against the last develop demos.
        if: startsWith(github.ref, 'refs/tags/') && runner.os == 'Linux'
        id: download-prev-demos
        uses: actions/download-artifact@v4
        # A missing artifact (e.g. first tagged build, or expired retention)
        # must not fail the job — later steps check this step's result.
        continue-on-error: true
        with:
          name: demos-develop-${{ runner.os }}
          path: demos-previous
| - name: Compare demos | |
| if: startsWith(github.ref, 'refs/tags/') && steps.download-prev-demos.outputs.cache-hit == 'true' && runner.os == 'Linux' | |
| id: compare-demos | |
| run: | | |
| set -x | |
| # Define URLs and directories | |
| LOCAL_DEMOS_DIR="./demos/" | |
| PREV_DEMOS_DIR="./demos-previous/" | |
| DIFF_DIR="./demos-diff/" | |
| # Check if PREV_DEMOS_DIR exists and is not empty | |
| if [[ ! -d $PREV_DEMOS_DIR || -z $(ls -A $PREV_DEMOS_DIR) ]]; then | |
| echo "No previous demos found." | |
| echo "diff=false" >> $GITHUB_OUTPUT | |
| exit 0 | |
| fi | |
| # Create directories if they don't exist | |
| mkdir -p $PREV_DEMOS_DIR $DIFF_DIR | |
| # Iterate over the previous files and compare them with the corresponding local files | |
| find $PREV_DEMOS_DIR -type f | while read previous_file; do | |
| # Derive the corresponding local file path | |
| local_file="${LOCAL_DEMOS_DIR}${previous_file#$PREV_DEMOS_DIR}" | |
| diff_output="$DIFF_DIR${previous_file#$PREV_DEMOS_DIR}" | |
| if [[ -f $local_file ]]; then | |
| mkdir -p "$(dirname "$diff_output")" | |
| diff "$previous_file" "$local_file" > "$diff_output" | |
| if [[ ! -s $diff_output ]]; then | |
| rm "$diff_output" | |
| fi | |
| else | |
| echo "LOCAL FILE $local_file DOES NOT EXITS." > "$diff_output" | |
| echo "PREVIOUS CONTENT OF THE FILE WAS:" >> "$diff_output" | |
| cat "$previous_file" >> "$diff_output" | |
| fi | |
| done | |
| # Iterate over the local files to find new files | |
| find $LOCAL_DEMOS_DIR -type f | while read local_file; do | |
| previous_file="${PREV_DEMOS_DIR}${local_file#$LOCAL_DEMOS_DIR}" | |
| diff_output="$DIFF_DIR${local_file#$LOCAL_DEMOS_DIR}" | |
| if [[ ! -f $previous_file ]]; then | |
| echo "PREVIOUS $previous_file DOES NOT EXIST." > "$diff_output" | |
| echo "IT HAS BEEN INCLUDED IN THIS VERSION." >> "$diff_output" | |
| echo "NEW CONTENT OF THE FILE IS:" >> "$diff_output" | |
| fi | |
| done | |
| # Check if the diff directory is empty | |
| if [[ -z $(ls -A $DIFF_DIR) ]]; then | |
| echo "No differences found." | |
| # Store this as an output for the next step | |
| echo "diff=false" >> $GITHUB_OUTPUT | |
| else | |
| # Calculate number of files in the diff directory | |
| N_FILES=$(find $DIFF_DIR -type f | wc -l) | |
| echo "Differences found in $N_FILES output files." | |
| echo "diff=true" >> $GITHUB_OUTPUT | |
| fi | |
| - name: Upload Demo Diff as Artifacts | |
| if: startsWith(github.ref, 'refs/tags/') && steps.download-prev-demos.outputs.cache-hit == 'true' && steps.compare-demos.outputs.diff == 'true' && runner.os == 'Linux' | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: demos-diff | |
| path: demos-diff | |
| retention-days: 30 | |
| - name: Publish Website to GitHub Pages | |
| if: env.PUBLISH_WEBSITE == 'true' | |
| uses: peaceiris/actions-gh-pages@v3 | |
| with: | |
| github_token: ${{ secrets.GITHUB_TOKEN }} | |
| publish_dir: build/website | |
| force_orphan: true | |
      - name: Publish website
        if: env.PUBLISH_WEBSITE == 'true'
        env:
          # Fixed agent socket path so ssh-agent, ssh-add and rsync's ssh all
          # agree on which agent to use.
          SSH_AUTH_SOCK: /tmp/ssh_agent.sock
        run: |
          set -euvx
          # Add SSH key
          # NOTE(review): paths assume the default hosted-runner home
          # (/home/runner); confirm this step never runs inside a container.
          mkdir -p /home/runner/.ssh
          ssh-keyscan dev-websites.cpp.al >> /home/runner/.ssh/known_hosts
          chmod 600 /home/runner/.ssh/known_hosts
          echo "${{ secrets.DEV_WEBSITES_SSH_KEY }}" > /home/runner/.ssh/github_actions
          chmod 600 /home/runner/.ssh/github_actions
          ssh-agent -a $SSH_AUTH_SOCK > /dev/null
          ssh-add /home/runner/.ssh/github_actions
          # Shared rsync options. StrictHostKeyChecking=no makes the keyscan
          # above belt-and-braces rather than strictly required.
          # NOTE(review): --chmod=F0755 marks every uploaded file executable —
          # presumably intentional for this web server; verify.
          rsyncopts=(--recursive --delete --links --times --chmod=D0755,F0755 --compress --compress-choice=zstd --rsh="ssh -o StrictHostKeyChecking=no" --human-readable)
          website_dir="ubuntu@dev-websites.cpp.al:/var/www/mrdox.com"
          demo_dir="$website_dir/demos/${{ github.ref_name }}"
          # Copy files: This step will copy the landing page and the documentation to www.mrdocs.com
          time rsync "${rsyncopts[@]}" --exclude=llvm+clang/ --exclude=demos/ --exclude=roadmap/ $(pwd)/build/website/ "$website_dir"/
          # Copy demos: This step will copy the demos to www.mrdocs.com/demos
          time rsync "${rsyncopts[@]}" $(pwd)/demos/ "$demo_dir"/
      - name: Create changelog
        uses: alandefreitas/cpp-actions/create-changelog@v1.9.2
        with:
          output-path: CHANGELOG.md
          # Thank non-regular contributors only on tagged releases.
          thank-non-regular: ${{ startsWith(github.ref, 'refs/tags/') }}
          github-token: ${{ secrets.GITHUB_TOKEN }}
          limit: 150
          # Only the Linux matrix leg writes the job summary, avoiding
          # duplicate summaries from the other OS legs.
          update-summary: ${{ runner.os == 'Linux' && 'true' || 'false' }}
      - name: Create GitHub Package Release
        # IS_PUBLISH_REF is set by an earlier step not visible in this chunk.
        if: env.IS_PUBLISH_REF == 'true'
        uses: softprops/action-gh-release@v2
        with:
          files: packages/MrDocs-*.*.*-*.*
          # Fail loudly if the packaging step produced no matching artifacts.
          fail_on_unmatched_files: true
          name: ${{ github.ref_name || github.ref }}
          # Non-tag publish refs get a "-release" suffix so they cannot collide
          # with a real version tag created later.
          tag_name: ${{ github.ref_name || github.ref }}${{ ((!startsWith(github.ref, 'refs/tags/')) && '-release') || '' }}
          body_path: CHANGELOG.md
          prerelease: false
          draft: false
          token: ${{ github.token }}
  # Publishes prebuilt LLVM archives — one job per entry of the llvm-matrix
  # produced by the cpp-matrix job.
  llvm-releases:
    needs: [ cpp-matrix, build ]
    # Skip the whole job when the generated matrix is empty or unset.
    if: ${{ needs.cpp-matrix.outputs.llvm-matrix != '[]' && needs.cpp-matrix.outputs.llvm-matrix != '' }}
    strategy:
      # Let the remaining legs finish even if one fails.
      fail-fast: false
      matrix:
        include: ${{ fromJSON(needs.cpp-matrix.outputs.llvm-matrix) }}
    defaults:
      run:
        shell: bash
    name: ${{ matrix.name }} LLVM Release
    runs-on: ${{ matrix.runs-on }}
    container: ${{ matrix.container }}
    env: ${{ matrix.env }}
    permissions:
      # Required to upload release assets.
      contents: write
    steps:
      - name: Ensure Node
        # Only for local `act` runs in a bare container, where the Node runtime
        # required by JavaScript actions may be missing.
        if: matrix.container != '' && env.ACT == 'true'
        run: |
          set -e
          apt-get update
          apt-get install -y nodejs npm
          # Some distros install the binary as `nodejs` only; expose `node`.
          if ! command -v node >/dev/null 2>&1 && command -v nodejs >/dev/null 2>&1; then
            ln -s /usr/bin/nodejs /usr/bin/node
          fi
      - name: Resolve absolute paths and cache key
        id: paths
        run: |
          set -euvx
          # The third-party dir lives next to the workspace checkout.
          third_party_dir=$(realpath $(pwd)/..)/third-party
          if [[ "${{ runner.os }}" == 'Windows' ]]; then
            # Convert MSYS-style paths (backslashes, /d/ prefix) to native form.
            # NOTE(review): the drive letter D is hard-coded — confirm the
            # Windows runners always mount the workspace on D:.
            third_party_dir=$(echo "$third_party_dir" | sed 's/\\/\//g; s|^/d/|D:/|')
          fi
          echo "third-party-dir=$third_party_dir" >> $GITHUB_OUTPUT
          echo "llvm-path=$third_party_dir/llvm" >> $GITHUB_OUTPUT
      - name: Install packages
        # Installs the apt packages requested by this matrix leg.
        uses: alandefreitas/cpp-actions/package-install@v1.9.2
        id: package-install
        with:
          apt-get: ${{ matrix.install }}
| - name: LLVM Binaries | |
| id: llvm-cache | |
| uses: actions/cache@v4 | |
| with: | |
| path: ${{ steps.paths.outputs.llvm-path }} | |
| key: ${{ matrix.llvm-archive-basename }} |