#!/bin/bash

# *******************************************************************************
# Copyright 2024-2025 Arm Limited and affiliates.
# The logic in this script should mirror the upstream build pipelines as closely
# as possible, along with Tool-Solutions specific changes we want to test (e.g.
# installing tbb) or improving local development in a way that doesn't affect
# the result. Currently the upstream logic is defined in
# pytorch/.github/workflows/_binary-build-linux.yml
# and
# pytorch/.github/workflows/generated-linux-aarch64-binary-manywheel-nightly.yml

set -eux -o pipefail

# Toolchain/library versions used throughout the build.
PYTHON_VERSION="3.10"
OPENBLAS_VERSION="v0.3.30"
ACL_VERSION="v52.6.0"

# Manylinux-style CPython tag (e.g. "cp310-cp310") derived from PYTHON_VERSION,
# and the matching interpreter bin directory inside the manylinux builder image.
PYTHON_TAG="cp$(echo "$PYTHON_VERSION" | tr -d .)-cp$(echo "$PYTHON_VERSION" | tr -d .)"
PYTHON_BIN="/opt/python/${PYTHON_TAG}/bin"
# Specify DOCKER_IMAGE_MIRROR if you want to use a mirror of hub.docker.com
IMAGE_NAME="${DOCKER_IMAGE_MIRROR:-}pytorch/manylinux2_28_aarch64-builder:cpu-aarch64-d8be0384e085f551506bd739678109fa0f5ee7ac"
# File used to persist (and later reuse) the id of the build container.
TORCH_BUILD_CONTAINER_ID_FILE="${PWD}/.torch_build_container_id"
OUTPUT_DIR=${OUTPUT_DIR:-"${PWD}/results"}

# Where folders sit locally
PYTORCH_HOST_DIR="${PWD}/pytorch"
OPENSSL_HOST_DIR="/opt/openssl"
PYTORCH_FINAL_PACKAGE_HOST_DIR="${OUTPUT_DIR}"

# Where folders sit mounted in the container
PYTORCH_ROOT=/pytorch
UTILS=/utils
COMMON_UTILS=/common_utils
PYTORCH_FINAL_PACKAGE_DIR=/artifacts

# Want a CPU build
DESIRED_CUDA=cpu
GPU_ARCH_TYPE=cpu-aarch64

# Affects the number of jobs used in install_acl.sh and install_openblas.sh
# (leave two cores free for the rest of the system).
MAX_JOBS=${MAX_JOBS:-$(nproc --ignore=2)}
# Reuse a previously created build container if we stored its id, otherwise
# leave the id empty so the inspect below fails and a fresh container is made.
if [ -f "$TORCH_BUILD_CONTAINER_ID_FILE" ]; then
  TORCH_BUILD_CONTAINER=$(cat "$TORCH_BUILD_CONTAINER_ID_FILE")
else
  # NOTE(review): the original else-branch lines were elided from this view of
  # the file — confirm the fallback against the full script.
  TORCH_BUILD_CONTAINER=""
fi

if ! docker container inspect "$TORCH_BUILD_CONTAINER" >/dev/null 2>&1; then
  # Based on environment used in pytorch/.github/workflows/_binary-build-linux.yml
  # and pytorch/.github/workflows/generated-linux-aarch64-binary-manywheel-nightly.yml
  # NOTE(review): several -e flags were elided from this view of the file
  # (presumably DESIRED_CUDA, GPU_ARCH_TYPE, the desired python, etc.) —
  # confirm the full list before relying on it.
  TORCH_BUILD_CONTAINER=$(docker run -t -d \
    -e MAX_JOBS=${MAX_JOBS} \
    -e OPENBLAS_VERSION=${OPENBLAS_VERSION} \
    -e ACL_VERSION=${ACL_VERSION} \
    -e BINARY_ENV_FILE=/tmp/env \
    -e PYTORCH_FINAL_PACKAGE_DIR="${PYTORCH_FINAL_PACKAGE_DIR}" \
    -e PYTORCH_ROOT="${PYTORCH_ROOT}" \
    -e SKIP_ALL_TESTS=1 \
    -e OPENSSL_ROOT_DIR="${OPENSSL_HOST_DIR}" \
    -e CMAKE_INCLUDE_PATH="${OPENSSL_HOST_DIR}/include" \
    -v "${PYTORCH_HOST_DIR}:${PYTORCH_ROOT}" \
    -v "${PYTORCH_FINAL_PACKAGE_HOST_DIR}:${PYTORCH_FINAL_PACKAGE_DIR}" \
    -v "${PWD}/utils:${UTILS}" \
    -v "${PWD}/../utils:${COMMON_UTILS}" \
    -w / \
    "${IMAGE_NAME}")

  # Populate /tmp/env with the upstream binary-build environment. Currently
  # changes in these scripts will not be applied without a clean build, which
  # is not ideal for dev work. But we have to balance this with extra
  # time/network traffic when rebuilding many times.
  docker exec $TORCH_BUILD_CONTAINER bash "${PYTORCH_ROOT}/.circleci/scripts/binary_populate_env.sh"

  # Install scons for ACL build
  docker exec $TORCH_BUILD_CONTAINER ${PYTHON_BIN}/python3 -m pip install scons==4.7.0
  docker exec $TORCH_BUILD_CONTAINER ln -sf ${PYTHON_BIN}/scons /usr/local/bin

  # Affected by ACL_VERSION set as an environment variable above
  echo "Overriding Arm Compute Library version: ${ACL_VERSION}"
  docker exec "$TORCH_BUILD_CONTAINER" "${PYTORCH_ROOT}/.ci/docker/common/install_acl.sh"

  # Affected by OPENBLAS_VERSION set as an environment variable above
  echo "Installing OpenBLAS version: ${OPENBLAS_VERSION}"
  docker exec "$TORCH_BUILD_CONTAINER" "${PYTORCH_ROOT}/.ci/docker/common/install_openblas.sh"

  echo "Storing torch build container ID in ${TORCH_BUILD_CONTAINER_ID_FILE} for reuse: ${TORCH_BUILD_CONTAINER}"
  echo "$TORCH_BUILD_CONTAINER" > "${TORCH_BUILD_CONTAINER_ID_FILE}"
else
  # Container already exists: make sure it is running before we exec into it.
  docker restart $TORCH_BUILD_CONTAINER
fi
# If there are multiple wheels in the dist directory, an old wheel can be
# erroneously copied to results, so we clear the directory to be sure
docker exec $TORCH_BUILD_CONTAINER rm -rf "${PYTORCH_ROOT}/dist"

# We set OVERRIDE_PACKAGE_VERSION to be based on the date of the latest torch
# commit, this allows us to also install the matching torch* packages, set in
# the Dockerfile. This is what PyTorch does in its nightly pipeline, see
# pytorch/.ci/aarch64_linux/aarch64_wheel_ci_build.py for this logic.
build_date=$(cd "$PYTORCH_HOST_DIR" && git log --pretty=format:%cs -1 | tr -d '-')
version=$(tr -d "[:space:]" < "$PYTORCH_HOST_DIR/version.txt")
# ${version%??} drops the last two characters of version.txt (presumably a
# pre-release suffix such as "a0" — TODO confirm against version.txt format),
# then appends the nightly-style .dev<date> and an optional +<TORCH_RELEASE_ID>.
OVERRIDE_PACKAGE_VERSION="${version%??}.dev${build_date}${TORCH_RELEASE_ID:+"+$TORCH_RELEASE_ID"}"

# Run the upstream manywheel build inside the container with the populated env.
docker exec $TORCH_BUILD_CONTAINER bash -lc "
  source /tmp/env &&
  BUILD_TEST=0 \
  OVERRIDE_PACKAGE_VERSION=$OVERRIDE_PACKAGE_VERSION \
  bash ${PYTORCH_ROOT}/.ci/manywheel/build.sh
"

# directories generated by the docker container are owned by root, so transfer
# ownership to the invoking user (/artifacts is the container-side mount of
# PYTORCH_FINAL_PACKAGE_HOST_DIR)
docker exec $TORCH_BUILD_CONTAINER chown -R "$(id -u)":"$(id -g)" "${PYTORCH_ROOT}" /artifacts