---
# GitHub Actions workflow: linux/amd64 (Zig-CC)
# Builds the XLA PJRT CPU plugin with zig cc targeting glibc 2.34,
# packages it as tarballs, and uploads them as release assets.
name: "linux/amd64 (Zig-CC)"

# Manually triggered only; no push/PR triggers.
# NOTE: `on` is a YAML 1.1 boolean key; GitHub's loader handles it —
# suppress yamllint `truthy` here if linting.
on:
  workflow_dispatch:

# Required so the release-upload step can attach assets with GITHUB_TOKEN.
permissions:
  contents: write
jobs:
  build-and-package-linux-amd64:
    name: "Build PJRT (Zig Target glibc 2.34)"
    runs-on: ubuntu-latest  # Fastest runner, no queue issues
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      # The XLA build needs more scratch space than the stock runner offers.
      - name: "Free disk space"
        run: |
          sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc
          df -h

      - uses: actions/checkout@v4

      - name: "Install Zig"
        uses: mlugg/setup-zig@v2
        with:
          version: "latest"  # Latest full release.

      - name: "Clone and checkout XLA"
        run: |
          git clone https://github.com/openxla/xla xla
          cd xla
          # XLA_COMMIT_HASH.txt lives at this repo's root (one level above xla/).
          XLA_HASH=$(cat ../XLA_COMMIT_HASH.txt)
          git checkout "$XLA_HASH"
          echo "XLA_HASH=$XLA_HASH" >> "$GITHUB_ENV"

      - name: "Set up Python 3.11"
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
          cache: "pip"
          # This lock file only exists after the XLA clone step above.
          cache-dependency-path: "xla/requirements_lock_3_11.txt"

      - name: "Install Python dependencies"
        run: |
          pip install --upgrade pip
          pip install six numpy

      - name: "Setup Bazelisk"
        uses: bazelbuild/setup-bazelisk@v3

      # (fixed typo: was "Setup Zip as C++ tools")
      - name: "Setup Zig as C++ tools"
        run: |
          # 1. Create a fake LLVM bin directory
          export TOOLCHAIN_PATH="$(pwd)/zig-bin"
          echo "TOOLCHAIN_PATH=$TOOLCHAIN_PATH" >> "$GITHUB_ENV"
          mkdir -p "${TOOLCHAIN_PATH}"
          ZIG_EXE=$(which zig)
          # 2. Create wrappers for CC and CXX with the target embedded.
          # Use '-x c' and '-x c++' to force Zig to handle stdin (-) correctly.
          # -Wno-unused-command-line-argument keeps the output clean.
          {
            echo "#!/bin/bash"
            echo "$ZIG_EXE cc -target x86_64-linux-gnu.2.34 -x c -Wno-unused-command-line-argument \"\$@\""
          } > "${TOOLCHAIN_PATH}/clang"
          {
            echo "#!/bin/bash"
            echo "$ZIG_EXE c++ -target x86_64-linux-gnu.2.34 -x c++ -Wno-unused-command-line-argument \"\$@\""
          } > "${TOOLCHAIN_PATH}/clang++"
          chmod +x "${TOOLCHAIN_PATH}/clang" "${TOOLCHAIN_PATH}/clang++"
          # 3. Symlink other required tools directly to zig.
          # Zig dispatches on argv[0], so the symlink name selects the tool.
          for tool in ld.lld llvm-ar llvm-nm llvm-objcopy llvm-objdump llvm-strip; do
            ln -s "$ZIG_EXE" "${TOOLCHAIN_PATH}/${tool}"
          done

      - name: "Build PJRT C API CPU plugin"
        run: |
          export CC="$TOOLCHAIN_PATH/clang"
          export CXX="$TOOLCHAIN_PATH/clang++"
          export PATH="$TOOLCHAIN_PATH:$PATH"
          cd xla
          # Pass the fake clang path; Bazel will look in its folder for ld.lld, etc.
          python3 ./configure.py \
            --backend CPU \
            --os LINUX \
            --host_compiler CLANG \
            --clang_path "$CC"
          # Build: force Bazel to use our wrappers for everything.
          bazel build -c opt \
            --action_env=CC="$CC" \
            --action_env=CXX="$CXX" \
            --host_action_env=CC="$CC" \
            --host_action_env=CXX="$CXX" \
            --linkopt="-fuse-ld=lld" \
            --local_ram_resources=HOST_RAM*.7 \
            --local_cpu_resources=HOST_CPUS-1 \
            //xla/pjrt/c:pjrt_c_api_cpu_plugin.so

      - name: "Verify glibc requirements"
        run: |
          # This should now show 2.34 or LOWER.
          # grep exits nonzero if no GLIBC reference is found, failing the step.
          objdump -p xla/bazel-bin/xla/pjrt/c/pjrt_c_api_cpu_plugin.so | grep GLIBC

      - name: "Package the binary (tar)"
        run: |
          # NOTE(review): RELEASE_VERSION is never set anywhere in this
          # workflow (the tag_name env lookup below is commented out), so the
          # packaged file is currently named pjrt_c_api_cpu__plugin.so.
          # Confirm whether it should be derived from the release tag.
          TARBALL_NAME="pjrt_cpu_linux_amd64.tar.gz"
          TARBALL_NAME_AMAZONLINUX="pjrt_cpu_amazonlinux_amd64.tar.gz"
          BINARY_DIR="xla/bazel-bin/xla/pjrt/c"
          BINARY_NAME="pjrt_c_api_cpu_plugin.so"
          NEW_BINARY_NAME="pjrt_c_api_cpu_${RELEASE_VERSION}_plugin.so"
          # Create tarball
          mv "${BINARY_DIR}/${BINARY_NAME}" "${NEW_BINARY_NAME}"
          tar -czvf "${TARBALL_NAME}" "${NEW_BINARY_NAME}"
          echo "Successfully created ${TARBALL_NAME} containing ${NEW_BINARY_NAME}"
          echo "TARBALL_NAME=${TARBALL_NAME}" >> "$GITHUB_ENV"
          # Same binary for AmazonLinux 2023 (hard link, no extra copy on disk)
          ln "${TARBALL_NAME}" "${TARBALL_NAME_AMAZONLINUX}"
          echo "TARBALL_NAME_AMAZONLINUX=${TARBALL_NAME_AMAZONLINUX}" >> "$GITHUB_ENV"

      - name: "Upload release asset"
        uses: softprops/action-gh-release@v2
        with:
          # tag_name: ${{ env.RELEASE_VERSION }}
          tag_name: v0.83.4-rc0
          files: |
            ${{ env.TARBALL_NAME }}
            ${{ env.TARBALL_NAME_AMAZONLINUX }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}