
set -eux -o pipefail

+ docker_exec () {
+     docker exec "$TORCH_BUILD_CONTAINER" "$@"
+ }
+
PYTHON_VERSION="3.12"
OPENBLAS_VERSION="v0.3.30"
ACL_VERSION="v52.8.0"

- PYTHON_TAG="cp$(echo "$PYTHON_VERSION" | tr -d .)-cp$(echo "$PYTHON_VERSION" | tr -d .)"
- PYTHON_BIN="/opt/python/${PYTHON_TAG}/bin"
-
# Specify DOCKER_IMAGE_MIRROR if you want to use a mirror of hub.docker.com
IMAGE_NAME="${DOCKER_IMAGE_MIRROR:-}pytorch/manylinux2_28_aarch64-builder:cpu-aarch64-69d4c1f80b5e7da224d4f9c2170ef100e75dfe03"
TORCH_BUILD_CONTAINER_ID_FILE="${PWD}/.torch_build_container_id"

- # Output dir for PyTorch wheel and other artifacts
- OUTPUT_DIR=${OUTPUT_DIR:-"${PWD}/results"}
+ PYTHON_TAG="cp$(echo "$PYTHON_VERSION" | tr -d .)-cp$(echo "$PYTHON_VERSION" | tr -d .)"
+ PYTHON_CONTAINER_BIN="/opt/python/${PYTHON_TAG}/bin"
+
+ # Output dir for PyTorch wheel and other artifacts. Renamed with the "_LOCAL_DIR"
+ # suffix for consistency with other variables
+ OUTPUT_LOCAL_DIR="${OUTPUT_DIR:-"${PWD}/results"}"

# Where folders sit locally
- PYTORCH_HOST_DIR="${PWD}/pytorch"
- OPENSSL_HOST_DIR="/opt/openssl"
- PYTORCH_FINAL_PACKAGE_HOST_DIR="${OUTPUT_DIR}"
+ PYTORCH_LOCAL_DIR="${PWD}/pytorch"
+ PYTORCH_FINAL_PACKAGE_LOCAL_DIR="${OUTPUT_LOCAL_DIR}"

- # Where folders sit mounted in the container
- PYTORCH_ROOT=/pytorch
- UTILS=/utils
- COMMON_UTILS=/common_utils
- PYTORCH_FINAL_PACKAGE_DIR=/artifacts
+ # Where folders sit in the container
+ PYTORCH_CONTAINER_DIR=/pytorch
+ PYTORCH_FINAL_PACKAGE_CONTAINER_DIR=/artifacts
+ OPENSSL_CONTAINER_DIR=/opt/openssl

# Enable ccache support by default.
# NOTE: The default behaviour is to have a project-specific cache directory that we cache
# build artefacts inside and can be easily wiped. These build artefacts are specific to the
# manylinux builder container (and thus compilers) that we use to build the torch wheel. As
# such, you may not want to populate the global ccache cache with them. However, if you wish
- # to do so, simply set CCACHE_HOST_DIR to that directory.
- CCACHE_HOST_DIR=${CCACHE_HOST_DIR:-"${PWD}/.ccache"}
- LOCAL_CCACHE_DIR=/.ccache
+ # to do so, simply set CCACHE_LOCAL_DIR to that directory.
+ CCACHE_LOCAL_DIR="${CCACHE_LOCAL_DIR:-"${PWD}/.ccache"}"
+ CCACHE_CONTAINER_DIR=/.ccache
CCACHE_MAXSIZE=${CCACHE_MAXSIZE:-}

# If the user wants to use ccache for build caching
@@ -58,10 +61,10 @@ if [[ "$*" == *--disable-ccache* ]]; then
    ccache_args+=(-e USE_CCACHE=0)
else
    ccache_args+=(-e USE_CCACHE=1)
-     mkdir -p "${CCACHE_HOST_DIR}"
+     mkdir -p "${CCACHE_LOCAL_DIR}"
    ccache_args+=(
-         -e CCACHE_DIR="${LOCAL_CCACHE_DIR}"
-         -v "${CCACHE_HOST_DIR}:${LOCAL_CCACHE_DIR}"
+         -e CCACHE_DIR="${CCACHE_CONTAINER_DIR}"
+         -v "${CCACHE_LOCAL_DIR}:${CCACHE_CONTAINER_DIR}"
    )
fi

@@ -73,14 +76,14 @@ GPU_ARCH_TYPE=cpu-aarch64
MAX_JOBS=${MAX_JOBS:-$(nproc --ignore=2)}

if [ -f "$TORCH_BUILD_CONTAINER_ID_FILE" ]; then
-     TORCH_BUILD_CONTAINER=$(cat $TORCH_BUILD_CONTAINER_ID_FILE)
+     TORCH_BUILD_CONTAINER=$(cat "$TORCH_BUILD_CONTAINER_ID_FILE")
    echo "Found an existing torch build container id: $TORCH_BUILD_CONTAINER"
else
    TORCH_BUILD_CONTAINER=""
    echo "Did not find torch build container id in $(readlink -f $TORCH_BUILD_CONTAINER_ID_FILE), we will create one later"
fi

- if ! docker container inspect $TORCH_BUILD_CONTAINER >/dev/null 2>&1 ; then
+ if ! docker container inspect "$TORCH_BUILD_CONTAINER" >/dev/null 2>&1 ; then
    # Based on environment used in pytorch/.github/workflows/_binary-build-linux.yml
    # and pytorch/.github/workflows/generated-linux-aarch64-binary-manywheel-nightly.yml
    TORCH_BUILD_CONTAINER=$(docker run -t -d \
@@ -94,79 +97,80 @@ if ! docker container inspect $TORCH_BUILD_CONTAINER >/dev/null 2>&1 ; then
        -e GITHUB_ACTIONS=0 \
        -e GPU_ARCH_TYPE=${GPU_ARCH_TYPE} \
        -e PACKAGE_TYPE=manywheel \
-         -e PYTORCH_FINAL_PACKAGE_DIR="${PYTORCH_FINAL_PACKAGE_DIR}" \
-         -e PYTORCH_ROOT="${PYTORCH_ROOT}" \
+         -e PYTORCH_FINAL_PACKAGE_DIR="${PYTORCH_FINAL_PACKAGE_CONTAINER_DIR}" \
+         -e PYTORCH_ROOT="${PYTORCH_CONTAINER_DIR}" \
        -e SKIP_ALL_TESTS=1 \
-         -e OPENSSL_ROOT_DIR="${OPENSSL_HOST_DIR}" \
-         -e CMAKE_INCLUDE_PATH="${OPENSSL_HOST_DIR}/include" \
+         -e OPENSSL_ROOT_DIR="${OPENSSL_CONTAINER_DIR}" \
+         -e CMAKE_INCLUDE_PATH="${OPENSSL_CONTAINER_DIR}/include" \
        "${ccache_args[@]}" \
-         -v "${PYTORCH_HOST_DIR}:${PYTORCH_ROOT}" \
-         -v "${PYTORCH_FINAL_PACKAGE_HOST_DIR}:${PYTORCH_FINAL_PACKAGE_DIR}" \
-         -v "${PWD}/utils:${UTILS}" \
-         -v "${PWD}/../utils:${COMMON_UTILS}" \
+         -v "${PYTORCH_LOCAL_DIR}:${PYTORCH_CONTAINER_DIR}" \
+         -v "${PYTORCH_FINAL_PACKAGE_LOCAL_DIR}:${PYTORCH_FINAL_PACKAGE_CONTAINER_DIR}" \
        -w / \
        "${IMAGE_NAME}")

    # Provide ccache support
    if [[ "$*" != *--disable-ccache* ]]; then
-         docker exec "$TORCH_BUILD_CONTAINER" yum install -y ccache || true
+         docker_exec yum install -y ccache || true
        if [ -n "${CCACHE_MAXSIZE}" ]; then
-             docker exec "$TORCH_BUILD_CONTAINER" ccache --max-size="$CCACHE_MAXSIZE" || true
+             docker_exec ccache --max-size="$CCACHE_MAXSIZE" || true
        fi
-         docker exec "$TORCH_BUILD_CONTAINER" ccache -z || true
-         docker exec "$TORCH_BUILD_CONTAINER" ccache -o compression=true || true
-         docker exec "$TORCH_BUILD_CONTAINER" ccache -o compression_level=6 || true
-         docker exec "$TORCH_BUILD_CONTAINER" ccache -s || true
+         docker_exec ccache -z || true
+         docker_exec ccache -o compression=true || true
+         docker_exec ccache -o compression_level=6 || true
+         docker_exec ccache -s || true
    fi

    # Currently changes in these scripts will not be applied without a clean
    # build, which is not ideal for dev work. But we have to balance this with
    # extra time/network traffic when rebuilding many times.
-     docker exec $TORCH_BUILD_CONTAINER bash "${PYTORCH_ROOT}/.circleci/scripts/binary_populate_env.sh"
+     docker_exec bash "${PYTORCH_CONTAINER_DIR}/.circleci/scripts/binary_populate_env.sh"

-     # Install scons for ACL build
-     docker exec $TORCH_BUILD_CONTAINER ${PYTHON_BIN}/python3 -m pip install scons==4.7.0
-     docker exec $TORCH_BUILD_CONTAINER ln -sf ${PYTHON_BIN}/scons /usr/local/bin
+     # Install scons for the Compute Library (ACL) build
+     docker_exec ${PYTHON_CONTAINER_BIN}/python3 -m pip install scons==4.7.0
+     docker_exec ln -sf "${PYTHON_CONTAINER_BIN}/scons" /usr/local/bin

-     # The Docker image comes with a pre-built version of ACL, but we want to build our own
-     # so we remove the provided version here
-     docker exec "$TORCH_BUILD_CONTAINER" rm -rf /acl
+     # The Docker image comes with a pre-built version of ACL, but we
+     # want to build our own so we remove the provided version here
+     docker_exec rm -rf /acl

    # Affected by ACL_VERSION set as an environment variable above
    echo "Overriding Arm Compute Library version: ${ACL_VERSION}"
-     docker exec "$TORCH_BUILD_CONTAINER" "${PYTORCH_ROOT}/.ci/docker/common/install_acl.sh"
+     docker_exec "${PYTORCH_CONTAINER_DIR}/.ci/docker/common/install_acl.sh"

    # Affected by OPENBLAS_VERSION set as an environment variable above
    echo "Installing OpenBLAS version: ${OPENBLAS_VERSION}"
-     docker exec "$TORCH_BUILD_CONTAINER" "${PYTORCH_ROOT}/.ci/docker/common/install_openblas.sh"
+     docker_exec "${PYTORCH_CONTAINER_DIR}/.ci/docker/common/install_openblas.sh"

    echo "Storing torch build container ID in ${TORCH_BUILD_CONTAINER_ID_FILE} for reuse: ${TORCH_BUILD_CONTAINER}"
-     echo $TORCH_BUILD_CONTAINER > "${TORCH_BUILD_CONTAINER_ID_FILE}"
+     echo "$TORCH_BUILD_CONTAINER" > "${TORCH_BUILD_CONTAINER_ID_FILE}"
else
-     docker restart $TORCH_BUILD_CONTAINER
+     docker restart "$TORCH_BUILD_CONTAINER"
fi

# If there are multiple wheels in the dist directory, an old wheel can be
# erroneously copied to results, so we clear the directory to be sure
- docker exec $TORCH_BUILD_CONTAINER rm -rf "${PYTORCH_ROOT}/dist"
+ docker_exec rm -rf "${PYTORCH_CONTAINER_DIR}/dist"

# We set OVERRIDE_PACKAGE_VERSION to be based on the date of the latest torch
# commit, this allows us to also install the matching torch* packages, set in
# the Dockerfile. This is what PyTorch does in its nightly pipeline, see
# pytorch/.ci/aarch64_linux/aarch64_wheel_ci_build.py for this logic.
- build_date=$(cd $PYTORCH_HOST_DIR && git log --pretty=format:%cs -1 | tr -d '-')
- version=$(cat $PYTORCH_HOST_DIR/version.txt| tr -d "[:space:]")
+ build_date=$(cd "$PYTORCH_LOCAL_DIR" && git log --pretty=format:%cs -1 | tr -d '-')
+ version=$(cat "$PYTORCH_LOCAL_DIR/version.txt" | tr -d "[:space:]")
OVERRIDE_PACKAGE_VERSION="${version%??}.dev${build_date}${TORCH_RELEASE_ID:+"+$TORCH_RELEASE_ID"}"

- docker exec $TORCH_BUILD_CONTAINER bash -lc "
+ # Build the wheel!
+ docker_exec bash -lc "
    source /tmp/env &&
    BUILD_TEST=0 \
    DO_SETUP_PY_CLEAN_BEFORE_BUILD=0 \
    WIPE_RH_CUDA_AFTER_BUILD=0 \
    OVERRIDE_PACKAGE_VERSION=$OVERRIDE_PACKAGE_VERSION \
-     bash ${PYTORCH_ROOT}/.ci/manywheel/build.sh
+     bash ${PYTORCH_CONTAINER_DIR}/.ci/manywheel/build.sh
"

- # directories generated by the docker container are owned by root, so transfer ownership to user
- docker exec "$TORCH_BUILD_CONTAINER" chown -R "$(id -u)":"$(id -g)" \
-     "${PYTORCH_ROOT}" "${PYTORCH_FINAL_PACKAGE_DIR}" "${LOCAL_CCACHE_DIR}"
+ # Directories generated by the docker container are owned by root, so transfer ownership to user
+ docker_exec chown -R "$(id -u)":"$(id -g)" \
+     "${PYTORCH_CONTAINER_DIR}" \
+     "${PYTORCH_FINAL_PACKAGE_CONTAINER_DIR}" \
+     "${CCACHE_CONTAINER_DIR}"
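
For reference, a minimal sketch of how the OVERRIDE_PACKAGE_VERSION expansion in the script plays out. The inputs below (a version.txt of 2.9.0a0, a commit date of 2025-01-01, and the TORCH_RELEASE_ID value) are hypothetical and only chosen to illustrate the parameter expansions; they are not taken from this commit:

# Illustrative only: mirrors the OVERRIDE_PACKAGE_VERSION logic above with hard-coded inputs.
version="2.9.0a0"         # assumed contents of pytorch/version.txt, whitespace stripped
build_date="20250101"     # assumed commit date (%cs) with the dashes removed by tr
TORCH_RELEASE_ID="local1" # optional; when unset or empty, no "+..." suffix is appended
echo "${version%??}.dev${build_date}${TORCH_RELEASE_ID:+"+$TORCH_RELEASE_ID"}"
# ${version%??} drops the last two characters ("a0" here), so this prints:
# 2.9.0.dev20250101+local1

Dropping the two-character suffix and appending ".dev<date>" produces the same dated version scheme the comment above attributes to PyTorch's nightly pipeline, which is what lets the matching torch* packages be installed alongside the wheel.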