Skip to content

Commit 22034f5

Browse files
committed
Add new workflow with Basic options for Linux platform (CUDA 12.4, 12.6, 12.8)
1 parent 5eac234 commit 22034f5

File tree

5 files changed

+350
-2
lines changed

5 files changed

+350
-2
lines changed
Lines changed: 116 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
1+
name: Build Wheels(CU124) for Linux(Basic)
2+
3+
on:
4+
workflow_dispatch: # Manual trigger
5+
6+
permissions:
7+
contents: write
8+
9+
jobs:
10+
build_wheels:
11+
name: Build Wheel ${{ matrix.os }} ${{ matrix.pyver }} ${{ matrix.cuda }} ${{ matrix.releasetag == 'wheels' && 'AVX2' || matrix.releasetag }}
12+
runs-on: ubuntu-22.04
13+
container: nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04
14+
strategy:
15+
matrix: # Define the build matrix directly here
16+
os: ["ubuntu-22.04"]
17+
pyver: ["3.10", "3.11", "3.12", "3.13"] # Python versions
18+
cuda: ["12.4.1"]
19+
releasetag: ["Basic"] # Controls CMAKE_ARGS for CPU features (even in CUDA build)
20+
cudaarch: ["all"] # Controls target CUDA architectures for nvcc
21+
22+
defaults:
23+
run:
24+
shell: bash
25+
26+
env:
27+
CUDAVER: ${{ matrix.cuda }}
28+
AVXVER: ${{ matrix.releasetag }}
29+
CUDAARCHVER: ${{ matrix.cudaarch }}
30+
31+
steps:
32+
- name: Install dependencies
33+
run: |
34+
apt update
35+
apt install -y build-essential ccache cmake curl git libgomp1 libjpeg-dev libssl-dev
36+
37+
- uses: actions/checkout@v4 # Checkout code
38+
with:
39+
submodules: "recursive"
40+
41+
# from astral-sh/setup-uv
42+
- name: Install the latest version of uv and set the python version
43+
uses: astral-sh/setup-uv@v6
44+
with:
45+
python-version: ${{ matrix.pyver }}
46+
activate-environment: true
47+
enable-cache: true
48+
49+
- run: nvcc -V
50+
51+
- name: Build Wheel With Cmake # Main build step: configures and builds the wheel
52+
env:
53+
LD_LIBRARY_PATH: "/usr/local/cuda/lib64:/usr/local/cuda/compat:/usr/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}"
54+
VERBOSE: 1 # Enable verbose build output
55+
CUDA_HOME: "/usr/local/cuda/" # Set CUDA_HOME
56+
CUDA_PATH: "${PATH}"
57+
CUDA_TOOLKIT_ROOT_DIR: "/usr/local/cuda/" # Set CUDA_TOOLKIT_ROOT_DIR
58+
run: |
59+
echo "VERBOSE=1" >> $GITHUB_ENV # Enable verbose build output for troubleshooting
60+
find /usr/ -name 'libcuda.so.*'
61+
echo $LD_LIBRARY_PATH
62+
63+
# Add project-specific and feature flags
64+
CMAKE_ARGS="-DGGML_CUDA=on -DCMAKE_CUDA_ARCHITECTURES='70-real;75-real;80-real;86-real;87-real;89-real'"
65+
CMAKE_ARGS="-DGGML_CUDA_FORCE_MMQ=on ${CMAKE_ARGS}"
66+
CMAKE_ARGS="${CMAKE_ARGS} -DLLAMA_CURL=off -DLLAMA_OPENSSL=on -DLLAMA_HTTPLIB=on"
67+
68+
# Basic options for compiling without AVX instructions
69+
if [ "${AVXVER}" = "Basic" ]; then
70+
CMAKE_ARGS="${CMAKE_ARGS} -DGGML_AVX=off -DGGML_AVX2=off -DGGML_AVX512=off -DGGML_FMA=off -DGGML_F16C=off"
71+
fi
72+
73+
# Export CMAKE_ARGS environment variable so the python -m build command can use it
74+
echo ${CMAKE_ARGS}
75+
echo "CMAKE_ARGS=${CMAKE_ARGS}" >> $GITHUB_ENV
76+
77+
# Run the Python build command to generate the wheel
78+
uv pip install build setuptools wheel packaging
79+
# uv pip install -U torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu126
80+
CMAKE_ARGS=${CMAKE_ARGS} uv build --wheel
81+
82+
# --- Post-build steps to get info for release tag ---
83+
84+
# Find the generated wheel file in the 'dist' directory using bash
85+
# Assumes only one wheel is generated per build configuration run
86+
wheel_file=$(ls dist/*.whl | head -n 1)
87+
88+
# Extract the package version (e.g., 1.2.3) from the wheel filename
89+
# Filename format is typically: package_name-version-tag-specificators.whl
90+
# Using basename and cut to split by '-' and get the second field
91+
tag_ver=$(basename "$wheel_file" | cut -d'-' -f 2)
92+
echo "TAG_VERSION=$tag_ver" >> $GITHUB_ENV # Store version in env for release step
93+
94+
# Extract the short CUDA version (e.g., 124) from the full version (e.g., 12.4.1) from the matrix variable
95+
cuda_ver_short=$(echo "${CUDAVER}" | cut -d'.' -f 1,2 | sed 's/\.//g')
96+
echo "CUDA_VERSION=$cuda_ver_short" >> $GITHUB_ENV # Store short CUDA version in env
97+
98+
99+
- name: Get Current Date # Step to get current date for the release tag
100+
id: get-date
101+
run: |
102+
# Get date in YYYYMMDD format using bash date command
103+
currentDate=$(date +%Y%m%d)
104+
# Store the date in environment variable for the release step
105+
echo "BUILD_DATE=$currentDate" >> $GITHUB_ENV
106+
107+
- uses: softprops/action-gh-release@v2.2.2 # Action to create a GitHub Release
108+
with:
109+
files: dist/* # Upload the generated wheel files from the dist directory
110+
# Define the release tag name using the collected environment variables
111+
# Format: v<package_version>-cu<short_cuda_version>-<avx_tag>-linux-<build_date>
112+
tag_name: v${{ env.TAG_VERSION }}-cu${{ env.CUDA_VERSION }}-${{ env.AVXVER }}-linux-${{ env.BUILD_DATE }} # Release tag format for Linux
113+
# Note: This action will create a new release tag if it doesn't exist,
114+
# or upload assets to an existing tag. Be mindful of potential tag name conflicts.
115+
env:
116+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Use the secret provided by GitHub Actions for authentication

.github/workflows/build-wheels-cu124-linux.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ jobs:
3232
- name: Install dependencies
3333
run: |
3434
apt update
35-
apt install -y build-essential cmake ccache curl git libgomp1 libjpeg-dev libssl-dev
35+
apt install -y build-essential ccache cmake curl git libgomp1 libjpeg-dev libssl-dev
3636
3737
- uses: actions/checkout@v4 # Checkout code
3838
with:
Lines changed: 116 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
1+
name: Build Wheels(CU126) for Linux(Basic)
2+
3+
on:
4+
workflow_dispatch: # Manual trigger
5+
6+
permissions:
7+
contents: write
8+
9+
jobs:
10+
build_wheels:
11+
name: Build Wheel ${{ matrix.os }} ${{ matrix.pyver }} ${{ matrix.cuda }} ${{ matrix.releasetag == 'wheels' && 'AVX2' || matrix.releasetag }}
12+
runs-on: ubuntu-22.04
13+
container: nvidia/cuda:12.6.3-cudnn-devel-ubuntu22.04
14+
strategy:
15+
matrix: # Define the build matrix directly here
16+
os: ["ubuntu-22.04"]
17+
pyver: ["3.10", "3.11", "3.12", "3.13"] # Python versions
18+
cuda: ["12.6.3"]
19+
releasetag: ["Basic"] # Controls CMAKE_ARGS for CPU features (even in CUDA build)
20+
cudaarch: ["all"] # Controls target CUDA architectures for nvcc
21+
22+
defaults:
23+
run:
24+
shell: bash
25+
26+
env:
27+
CUDAVER: ${{ matrix.cuda }}
28+
AVXVER: ${{ matrix.releasetag }}
29+
CUDAARCHVER: ${{ matrix.cudaarch }}
30+
31+
steps:
32+
- name: Install dependencies
33+
run: |
34+
apt update
35+
apt install -y build-essential ccache cmake curl git libgomp1 libjpeg-dev libssl-dev
36+
37+
- uses: actions/checkout@v4 # Checkout code
38+
with:
39+
submodules: "recursive"
40+
41+
# from astral-sh/setup-uv
42+
- name: Install the latest version of uv and set the python version
43+
uses: astral-sh/setup-uv@v6
44+
with:
45+
python-version: ${{ matrix.pyver }}
46+
activate-environment: true
47+
enable-cache: true
48+
49+
- run: nvcc -V
50+
51+
- name: Build Wheel With Cmake # Main build step: configures and builds the wheel
52+
env:
53+
LD_LIBRARY_PATH: "/usr/local/cuda/lib64:/usr/local/cuda/compat:/usr/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}"
54+
VERBOSE: 1 # Enable verbose build output
55+
CUDA_HOME: "/usr/local/cuda/" # Set CUDA_HOME
56+
CUDA_PATH: "${PATH}"
57+
CUDA_TOOLKIT_ROOT_DIR: "/usr/local/cuda/" # Set CUDA_TOOLKIT_ROOT_DIR
58+
run: |
59+
echo "VERBOSE=1" >> $GITHUB_ENV # Enable verbose build output for troubleshooting
60+
find /usr/ -name 'libcuda.so.*'
61+
echo $LD_LIBRARY_PATH
62+
63+
# Add project-specific and feature flags
64+
CMAKE_ARGS="-DGGML_CUDA=on -DCMAKE_CUDA_ARCHITECTURES='70-real;75-real;80-real;86-real;87-real;89-real'"
65+
CMAKE_ARGS="-DGGML_CUDA_FORCE_MMQ=on ${CMAKE_ARGS}"
66+
CMAKE_ARGS="${CMAKE_ARGS} -DLLAMA_CURL=off -DLLAMA_OPENSSL=on -DLLAMA_HTTPLIB=on"
67+
68+
# Basic options for compiling without AVX instructions
69+
if [ "${AVXVER}" = "Basic" ]; then
70+
CMAKE_ARGS="${CMAKE_ARGS} -DGGML_AVX=off -DGGML_AVX2=off -DGGML_AVX512=off -DGGML_FMA=off -DGGML_F16C=off"
71+
fi
72+
73+
# Export CMAKE_ARGS environment variable so the python -m build command can use it
74+
echo ${CMAKE_ARGS}
75+
echo "CMAKE_ARGS=${CMAKE_ARGS}" >> $GITHUB_ENV
76+
77+
# Run the Python build command to generate the wheel
78+
uv pip install build setuptools wheel packaging
79+
# uv pip install -U torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu126
80+
CMAKE_ARGS=${CMAKE_ARGS} uv build --wheel
81+
82+
# --- Post-build steps to get info for release tag ---
83+
84+
# Find the generated wheel file in the 'dist' directory using bash
85+
# Assumes only one wheel is generated per build configuration run
86+
wheel_file=$(ls dist/*.whl | head -n 1)
87+
88+
# Extract the package version (e.g., 1.2.3) from the wheel filename
89+
# Filename format is typically: package_name-version-tag-specificators.whl
90+
# Using basename and cut to split by '-' and get the second field
91+
tag_ver=$(basename "$wheel_file" | cut -d'-' -f 2)
92+
echo "TAG_VERSION=$tag_ver" >> $GITHUB_ENV # Store version in env for release step
93+
94+
# Extract the short CUDA version (e.g., 126) from the full version (e.g., 12.6.3) from the matrix variable
95+
cuda_ver_short=$(echo "${CUDAVER}" | cut -d'.' -f 1,2 | sed 's/\.//g')
96+
echo "CUDA_VERSION=$cuda_ver_short" >> $GITHUB_ENV # Store short CUDA version in env
97+
98+
99+
- name: Get Current Date # Step to get current date for the release tag
100+
id: get-date
101+
run: |
102+
# Get date in YYYYMMDD format using bash date command
103+
currentDate=$(date +%Y%m%d)
104+
# Store the date in environment variable for the release step
105+
echo "BUILD_DATE=$currentDate" >> $GITHUB_ENV
106+
107+
- uses: softprops/action-gh-release@v2.2.2 # Action to create a GitHub Release
108+
with:
109+
files: dist/* # Upload the generated wheel files from the dist directory
110+
# Define the release tag name using the collected environment variables
111+
# Format: v<package_version>-cu<short_cuda_version>-<avx_tag>-linux-<build_date>
112+
tag_name: v${{ env.TAG_VERSION }}-cu${{ env.CUDA_VERSION }}-${{ env.AVXVER }}-linux-${{ env.BUILD_DATE }} # Release tag format for Linux
113+
# Note: This action will create a new release tag if it doesn't exist,
114+
# or upload assets to an existing tag. Be mindful of potential tag name conflicts.
115+
env:
116+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Use the secret provided by GitHub Actions for authentication

.github/workflows/build-wheels-cu126-linux.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ jobs:
3232
- name: Install dependencies
3333
run: |
3434
apt update
35-
apt install -y build-essential cmake ccache curl git libgomp1 libjpeg-dev libssl-dev
35+
apt install -y build-essential ccache cmake curl git libgomp1 libjpeg-dev libssl-dev
3636
3737
- uses: actions/checkout@v4 # Checkout code
3838
with:
Lines changed: 116 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,116 @@
1+
name: Build Wheels(CU128) for Linux(Basic)
2+
3+
on:
4+
workflow_dispatch: # Manual trigger
5+
6+
permissions:
7+
contents: write
8+
9+
jobs:
10+
build_wheels:
11+
name: Build Wheel ${{ matrix.os }} ${{ matrix.pyver }} ${{ matrix.cuda }} ${{ matrix.releasetag == 'wheels' && 'AVX2' || matrix.releasetag }}
12+
runs-on: ubuntu-22.04
13+
container: nvidia/cuda:12.8.1-cudnn-devel-ubuntu22.04
14+
strategy:
15+
matrix: # Define the build matrix directly here
16+
os: ["ubuntu-22.04"]
17+
pyver: ["3.10", "3.11", "3.12", "3.13"] # Python versions
18+
cuda: ["12.8.1"]
19+
releasetag: ["Basic"] # Controls CMAKE_ARGS for CPU features (even in CUDA build)
20+
cudaarch: ["all"] # Controls target CUDA architectures for nvcc
21+
22+
defaults:
23+
run:
24+
shell: bash
25+
26+
env:
27+
CUDAVER: ${{ matrix.cuda }}
28+
AVXVER: ${{ matrix.releasetag }}
29+
CUDAARCHVER: ${{ matrix.cudaarch }}
30+
31+
steps:
32+
- name: Install dependencies
33+
run: |
34+
apt update
35+
apt install -y build-essential ccache cmake curl git libgomp1 libjpeg-dev libssl-dev
36+
37+
- uses: actions/checkout@v4 # Checkout code
38+
with:
39+
submodules: "recursive"
40+
41+
# from astral-sh/setup-uv
42+
- name: Install the latest version of uv and set the python version
43+
uses: astral-sh/setup-uv@v6
44+
with:
45+
python-version: ${{ matrix.pyver }}
46+
activate-environment: true
47+
enable-cache: true
48+
49+
- run: nvcc -V
50+
51+
- name: Build Wheel With Cmake # Main build step: configures and builds the wheel
52+
env:
53+
LD_LIBRARY_PATH: "/usr/local/cuda/lib64:/usr/local/cuda/compat:/usr/lib/x86_64-linux-gnu:${LD_LIBRARY_PATH}"
54+
VERBOSE: 1 # Enable verbose build output
55+
CUDA_HOME: "/usr/local/cuda/" # Set CUDA_HOME
56+
CUDA_PATH: "${PATH}"
57+
CUDA_TOOLKIT_ROOT_DIR: "/usr/local/cuda/" # Set CUDA_TOOLKIT_ROOT_DIR
58+
run: |
59+
echo "VERBOSE=1" >> $GITHUB_ENV # Enable verbose build output for troubleshooting
60+
find /usr/ -name 'libcuda.so.*'
61+
echo $LD_LIBRARY_PATH
62+
63+
# Add project-specific and feature flags
64+
CMAKE_ARGS="-DGGML_CUDA=on -DCMAKE_CUDA_ARCHITECTURES='75-real;80-real;86-real;87-real;89-real;90-real;100-real;101-real;120-real'"
65+
CMAKE_ARGS="-DGGML_CUDA_FORCE_MMQ=on ${CMAKE_ARGS}"
66+
CMAKE_ARGS="${CMAKE_ARGS} -DLLAMA_CURL=off -DLLAMA_OPENSSL=on -DLLAMA_HTTPLIB=on"
67+
68+
# Basic options for compiling without AVX instructions
69+
if [ "${AVXVER}" = "Basic" ]; then
70+
CMAKE_ARGS="${CMAKE_ARGS} -DGGML_AVX=off -DGGML_AVX2=off -DGGML_AVX512=off -DGGML_FMA=off -DGGML_F16C=off"
71+
fi
72+
73+
# Export CMAKE_ARGS environment variable so the python -m build command can use it
74+
echo ${CMAKE_ARGS}
75+
echo "CMAKE_ARGS=${CMAKE_ARGS}" >> $GITHUB_ENV
76+
77+
# Run the Python build command to generate the wheel
78+
uv pip install build setuptools wheel packaging
79+
# uv pip install -U torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cu128
80+
CMAKE_ARGS=${CMAKE_ARGS} uv build --wheel
81+
82+
# --- Post-build steps to get info for release tag ---
83+
84+
# Find the generated wheel file in the 'dist' directory using bash
85+
# Assumes only one wheel is generated per build configuration run
86+
wheel_file=$(ls dist/*.whl | head -n 1)
87+
88+
# Extract the package version (e.g., 1.2.3) from the wheel filename
89+
# Filename format is typically: package_name-version-tag-specificators.whl
90+
# Using basename and cut to split by '-' and get the second field
91+
tag_ver=$(basename "$wheel_file" | cut -d'-' -f 2)
92+
echo "TAG_VERSION=$tag_ver" >> $GITHUB_ENV # Store version in env for release step
93+
94+
# Extract the short CUDA version (e.g., 128) from the full version (e.g., 12.8.1) from the matrix variable
95+
cuda_ver_short=$(echo "${CUDAVER}" | cut -d'.' -f 1,2 | sed 's/\.//g')
96+
echo "CUDA_VERSION=$cuda_ver_short" >> $GITHUB_ENV # Store short CUDA version in env
97+
98+
99+
- name: Get Current Date # Step to get current date for the release tag
100+
id: get-date
101+
run: |
102+
# Get date in YYYYMMDD format using bash date command
103+
currentDate=$(date +%Y%m%d)
104+
# Store the date in environment variable for the release step
105+
echo "BUILD_DATE=$currentDate" >> $GITHUB_ENV
106+
107+
- uses: softprops/action-gh-release@v2.2.2 # Action to create a GitHub Release
108+
with:
109+
files: dist/* # Upload the generated wheel files from the dist directory
110+
# Define the release tag name using the collected environment variables
111+
# Format: v<package_version>-cu<short_cuda_version>-<avx_tag>-linux-<build_date>
112+
tag_name: v${{ env.TAG_VERSION }}-cu${{ env.CUDA_VERSION }}-${{ env.AVXVER }}-linux-${{ env.BUILD_DATE }} # Release tag format for Linux
113+
# Note: This action will create a new release tag if it doesn't exist,
114+
# or upload assets to an existing tag. Be mindful of potential tag name conflicts.
115+
env:
116+
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Use the secret provided by GitHub Actions for authentication

0 commit comments

Comments
 (0)