From 956977fad8668ac350f8699a032ff0fa2c7fb4a6 Mon Sep 17 00:00:00 2001 From: Jinsong Ji Date: Tue, 4 Nov 2025 09:44:25 -0800 Subject: [PATCH 1/4] Remove unused workflow files --- .github/workflows/README.md | 1 - .github/workflows/bazel-checks.yml | 32 -- .../workflows/build-ci-container-windows.yml | 73 ---- .github/workflows/build-ci-container.yml | 117 ----- .github/workflows/build-metrics-container.yml | 78 ---- .github/workflows/check-ci.yml | 40 -- .../workflows/ci-post-commit-analyzer-run.py | 34 -- .github/workflows/ci-post-commit-analyzer.yml | 95 ----- .github/workflows/commit-access-greeter.yml | 40 -- .github/workflows/commit-access-review.py | 402 ------------------ .github/workflows/commit-access-review.yml | 34 -- .../github-action-ci-windows/Dockerfile | 100 ----- .../containers/github-action-ci/Dockerfile | 109 ----- .github/workflows/docs.yml | 215 ---------- .github/workflows/email-check.yaml | 48 --- .github/workflows/get-llvm-version/action.yml | 26 -- .github/workflows/gha-codeql.yml | 35 -- .github/workflows/hlsl-matrix.yaml | 30 -- .github/workflows/hlsl-test-all.yaml | 87 ---- .github/workflows/issue-release-workflow.yml | 69 --- .github/workflows/issue-subscriber.yml | 37 -- .github/workflows/issue-write.yml | 157 ------- .github/workflows/libc-fullbuild-tests.yml | 133 ------ .github/workflows/libc-overlay-tests.yml | 114 ----- .github/workflows/libclang-abi-tests.yml | 171 -------- .github/workflows/libclang-python-tests.yml | 57 --- .github/workflows/libcxx-build-and-test.yaml | 286 ------------- .github/workflows/libcxx-build-containers.yml | 71 ---- .../libcxx-check-generated-files.yml | 24 -- .github/workflows/libcxx-run-benchmarks.yml | 110 ----- .github/workflows/llvm-bugs.yml | 63 --- .github/workflows/llvm-tests.yml | 185 -------- .github/workflows/merged-prs.yml | 41 -- .github/workflows/mlir-spirv-tests.yml | 51 --- .github/workflows/new-issues.yml | 23 - .github/workflows/new-prs.yml | 75 ---- .github/workflows/pr-code-format.yml | 92 ---- .github/workflows/pr-code-lint.yml | 112 ----- .github/workflows/pr-request-release-note.yml | 49 --- .github/workflows/pr-subscriber.yml | 34 -- .github/workflows/premerge.yaml | 181 -------- .github/workflows/release-asset-audit.py | 102 ----- .github/workflows/release-asset-audit.yml | 59 --- .github/workflows/release-binaries-all.yml | 105 ----- .../release-binaries-save-stage/action.yml | 44 -- .../release-binaries-setup-stage/action.yml | 59 --- .github/workflows/release-binaries.yml | 357 ---------------- .github/workflows/release-documentation.yml | 92 ---- .github/workflows/release-doxygen.yml | 73 ---- .github/workflows/release-lit.yml | 79 ---- .github/workflows/release-sources.yml | 108 ----- .github/workflows/release-tasks.yml | 141 ------ .github/workflows/scorecard.yml | 62 --- .../workflows/set-release-binary-outputs.sh | 34 -- .github/workflows/spirv-tests.yml | 46 -- .../unprivileged-download-artifact/action.yml | 81 ---- .github/workflows/ur-build-hw.yml | 182 -------- .github/workflows/ur-build-offload.yml | 54 --- .github/workflows/ur-precommit.yml | 147 ------- .github/workflows/ur-source-checks.yml | 63 --- .github/workflows/version-check.py | 36 -- .github/workflows/version-check.yml | 31 -- 62 files changed, 5686 deletions(-) delete mode 100644 .github/workflows/README.md delete mode 100644 .github/workflows/bazel-checks.yml delete mode 100644 .github/workflows/build-ci-container-windows.yml delete mode 100644 .github/workflows/build-ci-container.yml delete mode 100644 
.github/workflows/build-metrics-container.yml delete mode 100644 .github/workflows/check-ci.yml delete mode 100644 .github/workflows/ci-post-commit-analyzer-run.py delete mode 100644 .github/workflows/ci-post-commit-analyzer.yml delete mode 100644 .github/workflows/commit-access-greeter.yml delete mode 100644 .github/workflows/commit-access-review.py delete mode 100644 .github/workflows/commit-access-review.yml delete mode 100644 .github/workflows/containers/github-action-ci-windows/Dockerfile delete mode 100644 .github/workflows/containers/github-action-ci/Dockerfile delete mode 100644 .github/workflows/docs.yml delete mode 100644 .github/workflows/email-check.yaml delete mode 100644 .github/workflows/get-llvm-version/action.yml delete mode 100644 .github/workflows/gha-codeql.yml delete mode 100644 .github/workflows/hlsl-matrix.yaml delete mode 100644 .github/workflows/hlsl-test-all.yaml delete mode 100644 .github/workflows/issue-release-workflow.yml delete mode 100644 .github/workflows/issue-subscriber.yml delete mode 100644 .github/workflows/issue-write.yml delete mode 100644 .github/workflows/libc-fullbuild-tests.yml delete mode 100644 .github/workflows/libc-overlay-tests.yml delete mode 100644 .github/workflows/libclang-abi-tests.yml delete mode 100644 .github/workflows/libclang-python-tests.yml delete mode 100644 .github/workflows/libcxx-build-and-test.yaml delete mode 100644 .github/workflows/libcxx-build-containers.yml delete mode 100644 .github/workflows/libcxx-check-generated-files.yml delete mode 100644 .github/workflows/libcxx-run-benchmarks.yml delete mode 100644 .github/workflows/llvm-bugs.yml delete mode 100644 .github/workflows/llvm-tests.yml delete mode 100644 .github/workflows/merged-prs.yml delete mode 100644 .github/workflows/mlir-spirv-tests.yml delete mode 100644 .github/workflows/new-issues.yml delete mode 100644 .github/workflows/new-prs.yml delete mode 100644 .github/workflows/pr-code-format.yml delete mode 100644 .github/workflows/pr-code-lint.yml delete mode 100644 .github/workflows/pr-request-release-note.yml delete mode 100644 .github/workflows/pr-subscriber.yml delete mode 100644 .github/workflows/premerge.yaml delete mode 100644 .github/workflows/release-asset-audit.py delete mode 100644 .github/workflows/release-asset-audit.yml delete mode 100644 .github/workflows/release-binaries-all.yml delete mode 100644 .github/workflows/release-binaries-save-stage/action.yml delete mode 100644 .github/workflows/release-binaries-setup-stage/action.yml delete mode 100644 .github/workflows/release-binaries.yml delete mode 100644 .github/workflows/release-documentation.yml delete mode 100644 .github/workflows/release-doxygen.yml delete mode 100644 .github/workflows/release-lit.yml delete mode 100644 .github/workflows/release-sources.yml delete mode 100644 .github/workflows/release-tasks.yml delete mode 100644 .github/workflows/scorecard.yml delete mode 100644 .github/workflows/set-release-binary-outputs.sh delete mode 100644 .github/workflows/spirv-tests.yml delete mode 100644 .github/workflows/unprivileged-download-artifact/action.yml delete mode 100644 .github/workflows/ur-build-hw.yml delete mode 100644 .github/workflows/ur-build-offload.yml delete mode 100644 .github/workflows/ur-precommit.yml delete mode 100644 .github/workflows/ur-source-checks.yml delete mode 100755 .github/workflows/version-check.py delete mode 100644 .github/workflows/version-check.yml diff --git a/.github/workflows/README.md b/.github/workflows/README.md deleted file mode 100644 index 
ce34d2337e9c5..0000000000000 --- a/.github/workflows/README.md +++ /dev/null @@ -1 +0,0 @@ -Github action workflows should be stored in this directory. diff --git a/.github/workflows/bazel-checks.yml b/.github/workflows/bazel-checks.yml deleted file mode 100644 index 45d24fb86a8bc..0000000000000 --- a/.github/workflows/bazel-checks.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Bazel Checks - -permissions: - contents: read - -on: - push: - paths: - - '.github/workflows/bazel-checks.yml' - - 'utils/bazel/**' - branches: - - main - pull_request: - paths: - - '.github/workflows/bazel-checks.yml' - - 'utils/bazel/**' - -jobs: - buildifier: - name: "Buildifier" - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - name: Setup Buildifier - run: | - sudo curl -L https://github.com/bazelbuild/buildtools/releases/download/v8.2.1/buildifier-linux-amd64 -o /usr/bin/buildifier - sudo chmod +x /usr/bin/buildifier - - name: Run Buildifier - run: | - buildifier --mode=check $(find ./utils/bazel -name *BUILD*) diff --git a/.github/workflows/build-ci-container-windows.yml b/.github/workflows/build-ci-container-windows.yml deleted file mode 100644 index 19961d63924ca..0000000000000 --- a/.github/workflows/build-ci-container-windows.yml +++ /dev/null @@ -1,73 +0,0 @@ -name: Build Windows CI Container - -permissions: - contents: read - -on: - push: - branches: - - main - paths: - - .github/workflows/build-ci-container-windows.yml - - '.github/workflows/containers/github-action-ci-windows/**' - pull_request: - paths: - - .github/workflows/build-ci-container-windows.yml - - '.github/workflows/containers/github-action-ci-windows/**' - -jobs: - build-ci-container-windows: - if: github.repository_owner == 'llvm' - runs-on: windows-2022 - outputs: - container-name: ${{ steps.vars.outputs.container-name }} - container-name-tag: ${{ steps.vars.outputs.container-name-tag }} - container-filename: ${{ steps.vars.outputs.container-filename }} - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: .github/workflows/containers/github-action-ci-windows - - name: Write Variables - id: vars - run: | - $tag = [int64](Get-Date -UFormat %s) - $container_name="ghcr.io/$env:GITHUB_REPOSITORY_OWNER/ci-windows-2022" - echo "container-name=${container_name}" >> $env:GITHUB_OUTPUT - echo "container-name-tag=${container_name}:${tag}" >> $env:GITHUB_OUTPUT - echo "container-filename=ci-windows-${tag}.tar" >> $env:GITHUB_OUTPUT - - name: Build Container - working-directory: .github/workflows/containers/github-action-ci-windows - run: | - docker build -t ${{ steps.vars.outputs.container-name-tag }} . 
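- # As in the Linux container workflow, the image is saved to a tarball and - # uploaded as an artifact so that the push job, the only one needing the - # packages: write permission, can load it without rebuilding.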
- - name: Save container image - run: | - docker save ${{ steps.vars.outputs.container-name-tag }} > ${{ steps.vars.outputs.container-filename }} - - name: Upload container image - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: container - path: ${{ steps.vars.outputs.container-filename }} - retention-days: 14 - - push-ci-container: - if: github.event_name == 'push' - needs: - - build-ci-container-windows - permissions: - packages: write - runs-on: windows-2022 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Download container - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 - with: - name: container - - name: Push Container - run: | - docker load -i ${{ needs.build-ci-container-windows.outputs.container-filename }} - docker tag ${{ needs.build-ci-container-windows.outputs.container-name-tag }} ${{ needs.build-ci-container-windows.outputs.container-name }}:latest - docker login -u ${{ github.actor }} -p $env:GITHUB_TOKEN ghcr.io - docker push ${{ needs.build-ci-container-windows.outputs.container-name-tag }} - docker push ${{ needs.build-ci-container-windows.outputs.container-name }}:latest diff --git a/.github/workflows/build-ci-container.yml b/.github/workflows/build-ci-container.yml deleted file mode 100644 index 2231dfc005444..0000000000000 --- a/.github/workflows/build-ci-container.yml +++ /dev/null @@ -1,117 +0,0 @@ -name: Build CI Container - -permissions: - contents: read - -on: - push: - branches: - - main - paths: - - .github/workflows/build-ci-container.yml - - '.github/workflows/containers/github-action-ci/**' - pull_request: - paths: - - .github/workflows/build-ci-container.yml - - '.github/workflows/containers/github-action-ci/**' - -jobs: - build-ci-container: - if: github.repository_owner == 'llvm' - runs-on: ${{ matrix.runs-on }} - strategy: - matrix: - include: - # The arch names should match the names used on dockerhub. - # See https://github.com/docker-library/official-images#architectures-other-than-amd64 - - arch: amd64 - runs-on: depot-ubuntu-24.04-16 - - arch: arm64v8 - runs-on: depot-ubuntu-24.04-arm-16 - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: .github/workflows/containers/github-action-ci/ - # podman is not installed by default on the ARM64 images. - - name: Install Podman - if: runner.arch == 'ARM64' - run: | - sudo apt-get install podman - - name: Write Variables - id: vars - run: | - tag=$(git rev-parse --short=12 HEAD) - container_name="ghcr.io/$GITHUB_REPOSITORY_OWNER/${{ matrix.arch }}/ci-ubuntu-24.04" - echo "container-name=$container_name" >> $GITHUB_OUTPUT - echo "container-name-agent=$container_name-agent" >> $GITHUB_OUTPUT - echo "container-name-tag=$container_name:$tag" >> $GITHUB_OUTPUT - echo "container-name-agent-tag=$container_name-agent:$tag" >> $GITHUB_OUTPUT - echo "container-filename=$(echo $container_name:$tag | sed -e 's/\//-/g' -e 's/:/-/g').tar" >> $GITHUB_OUTPUT - echo "container-agent-filename=$(echo $container_name-agent:$tag | sed -e 's/\//-/g' -e 's/:/-/g').tar" >> $GITHUB_OUTPUT - - name: Build container - working-directory: ./.github/workflows/containers/github-action-ci/ - run: | - podman build --target ci-container -t ${{ steps.vars.outputs.container-name-tag }} . - podman build --target ci-container-agent -t ${{ steps.vars.outputs.container-name-agent-tag }} . - - # Save the container so we have it in case the push fails. 
This also - # allows us to separate the push step into a different job so we can - # maintain minimal permissions while building the container. - - name: Save container image - run: | - podman save ${{ steps.vars.outputs.container-name-tag }} > ${{ steps.vars.outputs.container-filename }} - podman save ${{ steps.vars.outputs.container-name-agent-tag }} > ${{ steps.vars.outputs.container-agent-filename }} - - - name: Upload container image - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: container-${{ matrix.arch }} - path: "*.tar" - retention-days: 14 - - - name: Test Container - run: | - for image in ${{ steps.vars.outputs.container-name-tag }}; do - # Use --pull=never to ensure we are testing the just built image. - podman run --pull=never --rm -it $image /usr/bin/bash -x -c 'cd $HOME && printf '\''#include \nint main(int argc, char **argv) { std::cout << "Hello\\n"; }'\'' | clang++ -x c++ - && ./a.out | grep Hello' - done - - push-ci-container: - if: github.event_name == 'push' - needs: - - build-ci-container - permissions: - packages: write - runs-on: ubuntu-24.04 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Download container - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 - - - name: Push Container - run: | - function push_container { - image_name=$1 - latest_name=$(echo $image_name | sed 's/:[a-f0-9]\+$/:latest/g') - podman tag $image_name $latest_name - echo "Pushing $image_name ..." - podman push $image_name - echo "Pushing $latest_name ..." - podman push $latest_name - } - - podman login -u ${{ github.actor }} -p $GITHUB_TOKEN ghcr.io - for f in $(find . -iname *.tar); do - image_name=$(podman load -q -i $f | sed 's/Loaded image: //g') - push_container $image_name - - if echo $image_name | grep '/amd64/'; then - # For amd64, create an alias with the arch component removed. - # This matches the convention used on dockerhub. 
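- # e.g., with a hypothetical tag shown for illustration: - # ghcr.io/llvm/amd64/ci-ubuntu-24.04:abc123def456 - # becomes ghcr.io/llvm/ci-ubuntu-24.04:abc123def456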
- default_image_name=$(echo $(dirname $(dirname $image_name))/$(basename $image_name)) - podman tag $image_name $default_image_name - push_container $default_image_name - fi - done diff --git a/.github/workflows/build-metrics-container.yml b/.github/workflows/build-metrics-container.yml deleted file mode 100644 index ee86d6a9cfadd..0000000000000 --- a/.github/workflows/build-metrics-container.yml +++ /dev/null @@ -1,78 +0,0 @@ -name: Build Metrics Container - -permissions: - contents: read - -on: - push: - branches: - - main - paths: - - .github/workflows/build-metrics-container.yml - - '.ci/metrics/**' - pull_request: - branches: - - main - paths: - - .github/workflows/build-metrics-container.yml - - '.ci/metrics/**' - -jobs: - build-metrics-container: - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - outputs: - container-name: ${{ steps.vars.outputs.container-name }} - container-name-tag: ${{ steps.vars.outputs.container-name-tag }} - container-filename: ${{ steps.vars.outputs.container-filename }} - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: .ci/metrics/ - - name: Write Variables - id: vars - run: | - tag=`date +%s` - container_name="ghcr.io/$GITHUB_REPOSITORY_OWNER/metrics" - echo "container-name=$container_name" >> $GITHUB_OUTPUT - echo "container-name-tag=$container_name:$tag" >> $GITHUB_OUTPUT - echo "container-filename=$(echo $container_name:$tag | sed -e 's/\//-/g' -e 's/:/-/g').tar" >> $GITHUB_OUTPUT - - name: Build Container - working-directory: ./.ci/metrics - run: | - podman build -t ${{ steps.vars.outputs.container-name-tag }} -f Dockerfile . - # Save the container so we have it in case the push fails. This also - # allows us to separate the push step into a different job so we can - # maintain minimal permissions while building the container. 
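- # The tarball round-trips through upload-artifact here and - # download-artifact plus podman load in push-metrics-container below.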
- - name: Save Container Image - run: | - podman save ${{ steps.vars.outputs.container-name-tag }} > ${{ steps.vars.outputs.container-filename }} - - name: Upload Container Image - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: container - path: ${{ steps.vars.outputs.container-filename }} - retention-days: 14 - - push-metrics-container: - if: github.event_name == 'push' - needs: - - build-metrics-container - permissions: - packages: write - runs-on: ubuntu-24.04 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Download Container - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 - with: - name: container - - name: Push Container - run: | - podman load -i ${{ needs.build-metrics-container.outputs.container-filename }} - podman tag ${{ needs.build-metrics-container.outputs.container-name-tag }} ${{ needs.build-metrics-container.outputs.container-name }}:latest - podman login -u ${{ github.actor }} -p $GITHUB_TOKEN ghcr.io - podman push ${{ needs.build-metrics-container.outputs.container-name-tag }} - podman push ${{ needs.build-metrics-container.outputs.container-name }}:latest diff --git a/.github/workflows/check-ci.yml b/.github/workflows/check-ci.yml deleted file mode 100644 index 1a332aa3600a0..0000000000000 --- a/.github/workflows/check-ci.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Check CI Scripts - -permissions: - contents: read - -on: - push: - branches: - - main - paths: - - '.ci/**' - - '.github/workflows/check-ci.yml' - pull_request: - paths: - - '.ci/**' - - '.github/workflows/check-ci.yml' - -jobs: - test-python: - name: "Check Python Tests" - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: .ci - - name: Setup Python - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - with: - python-version: 3.13 - cache: 'pip' - - name: Install Python Dependencies - run: | - pip3 install -r .ci/all_requirements.txt - pip3 install -r .ci/metrics/requirements.lock.txt - pip3 install pytest==8.4.1 - - name: Run Tests - working-directory: .ci - run: pytest diff --git a/.github/workflows/ci-post-commit-analyzer-run.py b/.github/workflows/ci-post-commit-analyzer-run.py deleted file mode 100644 index e5f52d3b2fa67..0000000000000 --- a/.github/workflows/ci-post-commit-analyzer-run.py +++ /dev/null @@ -1,34 +0,0 @@ -import json -import multiprocessing -import os -import re -import subprocess -import sys - - -def run_analyzer(data): - os.chdir(data["directory"]) - command = ( - data["command"] - + f" --analyze --analyzer-output html -o analyzer-results -Xclang -analyzer-config -Xclang max-nodes=75000" - ) - print(command) - subprocess.run(command, shell=True, check=True) - - -def pool_error(e): - print("Error analyzing file:", e) - - -def main(): - db_path = sys.argv[1] - database = json.load(open(db_path)) - - with multiprocessing.Pool() as pool: - pool.map_async(run_analyzer, [k for k in database], error_callback=pool_error) - pool.close() - pool.join() - - -if __name__ == "__main__": - main() diff --git a/.github/workflows/ci-post-commit-analyzer.yml b/.github/workflows/ci-post-commit-analyzer.yml deleted file mode 100644 index 659ef3ba6f976..0000000000000 --- a/.github/workflows/ci-post-commit-analyzer.yml +++ /dev/null @@ -1,95 +0,0 @@ -name: Post-Commit Static Analyzer - -permissions: - contents: read - -on: - 
push: - branches: - - 'release/**' - paths: - - 'clang/**' - - 'llvm/**' - - '.github/workflows/ci-post-commit-analyzer.yml' - pull_request: - types: - - opened - - synchronize - - reopened - - closed - paths: - - '.github/workflows/ci-post-commit-analyzer.yml' - - '.github/workflows/ci-post-commit-analyzer-run.py' - schedule: - - cron: '30 0 * * *' - -concurrency: - group: >- - llvm-project-${{ github.workflow }}-${{ github.event_name == 'pull_request' && - ( github.event.pull_request.number || github.ref) }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} - -jobs: - post-commit-analyzer: - if: >- - github.repository_owner == 'llvm' && - github.event.action != 'closed' - runs-on: ubuntu-24.04 - container: - image: 'ghcr.io/llvm/ci-ubuntu-24.04:latest' - env: - LLVM_VERSION: 18 - steps: - - name: Checkout Source - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - - name: Setup ccache - uses: hendrikmuhs/ccache-action@bfa03e1de4d7f7c3e80ad9109feedd05c4f5a716 # v1.2.19 - with: - # A full build of llvm, clang, lld, and lldb takes about 250MB - # of ccache space. There's not much reason to have more than this, - # because we usually won't need to save cache entries from older - # builds. Also, there is an overall 10GB cache limit, and each - # run creates a new cache entry so we want to ensure that we have - # enough cache space for all the tests to run at once and still - # fit under the 10 GB limit. - # Default to 2G to workaround: https://github.com/hendrikmuhs/ccache-action/issues/174 - max-size: 2G - key: post-commit-analyzer - variant: sccache - - - name: Configure - run: | - cmake -B build -S llvm -G Ninja \ - -DLLVM_ENABLE_ASSERTIONS=ON \ - -DLLVM_ENABLE_PROJECTS=clang \ - -DLLVM_BUILD_LLVM_DYLIB=ON \ - -DLLVM_LINK_LLVM_DYLIB=ON \ - -DCMAKE_CXX_COMPILER=clang++ \ - -DCMAKE_C_COMPILER=clang \ - -DCMAKE_CXX_COMPILER_LAUNCHER=sccache \ - -DCMAKE_C_COMPILER_LAUNCHER=sccache \ - -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \ - -DLLVM_INCLUDE_TESTS=OFF \ - -DCLANG_INCLUDE_TESTS=OFF \ - -DCMAKE_BUILD_TYPE=Release - - - name: Build - run: | - # FIXME: We need to build all the generated header files in order to be able to run - # the analyzer on every file. Building libLLVM and libclang is probably overkill for - # this, but it's better than building every target. - ninja -v -C build libLLVM.so libclang.so - - # Run the analyzer. 
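- # ci-post-commit-analyzer-run.py (shown above) appends roughly - # '--analyze --analyzer-output html -o analyzer-results -Xclang - # -analyzer-config -Xclang max-nodes=75000' to every command in - # compile_commands.json and runs the commands in a multiprocessing pool.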
- python3 .github/workflows/ci-post-commit-analyzer-run.py build/compile_commands.json - - scan-build --generate-index-only build/analyzer-results - - - name: Upload Results - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 - if: always() - with: - name: analyzer-results - path: 'build/analyzer-results/*' - diff --git a/.github/workflows/commit-access-greeter.yml b/.github/workflows/commit-access-greeter.yml deleted file mode 100644 index 834f802dec861..0000000000000 --- a/.github/workflows/commit-access-greeter.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Commit Access Greeter - -on: - issues: - types: - - labeled - -permissions: - contents: read - -jobs: - commit-access-greeter: - permissions: - issues: write - pull-requests: read - if: >- - github.repository_owner == 'llvm' && - github.event.label.name == 'infra:commit-access-request' - runs-on: ubuntu-24.04 - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: llvm/utils/git/ - - - name: Setup Automation Script - working-directory: ./llvm/utils/git/ - run: | - pip install --require-hashes -r requirements.txt - - - name: Add comments to issue - working-directory: ./llvm/utils/git/ - env: - LABEL_NAME: ${{ github.event.label.name }} - GITHUB_TOKEN: ${{ github.token }} - ISSUE_NUMBER: ${{ github.event.issue.number }} - run: | - python3 ./github-automation.py \ - --token $GITHUB_TOKEN \ - commit-request-greeter \ - --issue-number $ISSUE_NUMBER diff --git a/.github/workflows/commit-access-review.py b/.github/workflows/commit-access-review.py deleted file mode 100644 index 4f539fe98004a..0000000000000 --- a/.github/workflows/commit-access-review.py +++ /dev/null @@ -1,402 +0,0 @@ -#!/usr/bin/env python3 -# ===-- commit-access-review.py --------------------------------------------===# -# -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. 
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ===------------------------------------------------------------------------===# -# -# ===------------------------------------------------------------------------===# - -import datetime -import github -import re -import requests -import time -import sys -import re - - -class User: - THRESHOLD = 5 - - def __init__(self, name, triage_list): - self.name = name - self.authored = 0 - self.merged = 0 - self.reviewed = 0 - self.triage_list = triage_list - - def add_authored(self, val=1): - self.authored += val - if self.meets_threshold(): - print(self.name, "meets the threshold with authored commits") - del self.triage_list[self.name] - - def set_authored(self, val): - self.authored = 0 - self.add_authored(val) - - def add_merged(self, val=1): - self.merged += val - if self.meets_threshold(): - print(self.name, "meets the threshold with merged commits") - del self.triage_list[self.name] - - def add_reviewed(self, val=1): - self.reviewed += val - if self.meets_threshold(): - print(self.name, "meets the threshold with reviewed commits") - del self.triage_list[self.name] - - def get_total(self): - return self.authored + self.merged + self.reviewed - - def meets_threshold(self): - return self.get_total() >= self.THRESHOLD - - def __repr__(self): - return "{} : a: {} m: {} r: {}".format( - self.name, self.authored, self.merged, self.reviewed - ) - - -def check_manual_requests( - gh: github.Github, start_date: datetime.datetime -) -> list[str]: - """ - Return a list of users who have been asked since ``start_date`` if they - want to keep their commit access or if they have applied for commit - access since ``start_date`` - """ - - query = """ - query ($query: String!, $after: String) { - search(query: $query, type: ISSUE, first: 100, after: $after) { - nodes { - ... on Issue { - author { - login - } - body - } - } - pageInfo { - hasNextPage - endCursor - } - } - } - """ - formatted_start_date = start_date.strftime("%Y-%m-%dT%H:%M:%S") - variables = { - "query": f"type:issue created:>{formatted_start_date} org:llvm repo:llvm-project label:infra:commit-access,infra:commit-access-request" - } - - has_next_page = True - users = [] - while has_next_page: - res_header, res_data = gh._Github__requester.graphql_query( - query=query, variables=variables - ) - data = res_data["data"] - for issue in data["search"]["nodes"]: - users.extend([user[1:] for user in re.findall("@[^ ,\n]+", issue["body"])]) - if issue["author"]: - users.append(issue["author"]["login"]) - has_next_page = data["search"]["pageInfo"]["hasNextPage"] - if has_next_page: - variables["after"] = data["search"]["pageInfo"]["endCursor"] - return users - - -def get_num_commits(gh: github.Github, user: str, start_date: datetime.datetime) -> int: - """ - Get number of commits that ``user`` has been made since ``start_date`. - """ - variables = { - "owner": "llvm", - "user": user, - "start_date": start_date.strftime("%Y-%m-%dT%H:%M:%S"), - } - - user_query = """ - query ($user: String!) { - user(login: $user) { - id - } - } - """ - - res_header, res_data = gh._Github__requester.graphql_query( - query=user_query, variables=variables - ) - data = res_data["data"] - variables["user_id"] = data["user"]["id"] - - query = """ - query ($owner: String!, $user_id: ID!, $start_date: GitTimestamp!){ - organization(login: $owner) { - teams(query: "llvm-committers" first:1) { - nodes { - repositories { - nodes { - ref(qualifiedName: "main") { - target { - ... 
on Commit { - history(since: $start_date, author: {id: $user_id }) { - totalCount - } - } - } - } - } - } - } - } - } - } - """ - count = 0 - res_header, res_data = gh._Github__requester.graphql_query( - query=query, variables=variables - ) - data = res_data["data"] - for repo in data["organization"]["teams"]["nodes"][0]["repositories"]["nodes"]: - count += int(repo["ref"]["target"]["history"]["totalCount"]) - if count >= User.THRESHOLD: - break - return count - - -def is_new_committer_query_repo( - gh: github.Github, user: str, start_date: datetime.datetime -) -> bool: - """ - Determine if ``user`` is a new committer. A new committer can keep their - commit access even if they don't meet the criteria. - """ - variables = { - "user": user, - } - - user_query = """ - query ($user: String!) { - user(login: $user) { - id - } - } - """ - - res_header, res_data = gh._Github__requester.graphql_query( - query=user_query, variables=variables - ) - data = res_data["data"] - variables["owner"] = "llvm" - variables["user_id"] = data["user"]["id"] - variables["start_date"] = start_date.strftime("%Y-%m-%dT%H:%M:%S") - - query = """ - query ($owner: String!, $user_id: ID!){ - organization(login: $owner) { - repository(name: "llvm-project") { - ref(qualifiedName: "main") { - target { - ... on Commit { - history(author: {id: $user_id }, first: 5) { - nodes { - committedDate - } - } - } - } - } - } - } - } - """ - - res_header, res_data = gh._Github__requester.graphql_query( - query=query, variables=variables - ) - data = res_data["data"] - repo = data["organization"]["repository"] - commits = repo["ref"]["target"]["history"]["nodes"] - if len(commits) == 0: - return True - committed_date = commits[-1]["committedDate"] - if datetime.datetime.strptime(committed_date, "%Y-%m-%dT%H:%M:%SZ") < start_date: - return False - return True - - -def is_new_committer( - gh: github.Github, user: str, start_date: datetime.datetime -) -> bool: - """ - Wrapper around is_new_commiter_query_repo to handle exceptions. - """ - try: - return is_new_committer_query_repo(gh, user, start_date) - except: - pass - return True - - -def get_review_count( - gh: github.Github, user: str, start_date: datetime.datetime -) -> int: - """ - Return the number of reviews that ``user`` has done since ``start_date``. - """ - query = """ - query ($query: String!) { - search(query: $query, type: ISSUE, first: 5) { - issueCount - } - } - """ - formatted_start_date = start_date.strftime("%Y-%m-%dT%H:%M:%S") - variables = { - "owner": "llvm", - "repo": "llvm-project", - "user": user, - "query": f"type:pr commenter:{user} -author:{user} merged:>{formatted_start_date} org:llvm", - } - - res_header, res_data = gh._Github__requester.graphql_query( - query=query, variables=variables - ) - data = res_data["data"] - return int(data["search"]["issueCount"]) - - -def count_prs(gh: github.Github, triage_list: dict, start_date: datetime.datetime): - """ - Fetch all the merged PRs for the project since ``start_date`` and update - ``triage_list`` with the number of PRs merged for each user. - """ - - query = """ - query ($query: String!, $after: String) { - search(query: $query, type: ISSUE, first: 100, after: $after) { - issueCount, - nodes { - ... 
on PullRequest { - author { - login - } - mergedBy { - login - } - } - } - pageInfo { - hasNextPage - endCursor - } - } - } - """ - date_begin = start_date - date_end = None - while date_begin < datetime.datetime.now(): - date_end = date_begin + datetime.timedelta(days=7) - formatted_date_begin = date_begin.strftime("%Y-%m-%dT%H:%M:%S") - formatted_date_end = date_end.strftime("%Y-%m-%dT%H:%M:%S") - variables = { - "query": f"type:pr is:merged merged:{formatted_date_begin}..{formatted_date_end} org:llvm", - } - has_next_page = True - while has_next_page: - print(variables) - res_header, res_data = gh._Github__requester.graphql_query( - query=query, variables=variables - ) - data = res_data["data"] - for pr in data["search"]["nodes"]: - # Users can be None if the user has been deleted. - if not pr["author"]: - continue - author = pr["author"]["login"] - if author in triage_list: - triage_list[author].add_authored() - - if not pr["mergedBy"]: - continue - merger = pr["mergedBy"]["login"] - if author == merger: - continue - if merger not in triage_list: - continue - triage_list[merger].add_merged() - - has_next_page = data["search"]["pageInfo"]["hasNextPage"] - if has_next_page: - variables["after"] = data["search"]["pageInfo"]["endCursor"] - date_begin = date_end - - -def main(): - token = sys.argv[1] - gh = github.Github(login_or_token=token) - org = gh.get_organization("llvm") - repo = org.get_repo("llvm-project") - one_year_ago = datetime.datetime.now() - datetime.timedelta(days=365) - triage_list = {} - for collaborator in repo.get_collaborators(permission="push"): - triage_list[collaborator.login] = User(collaborator.login, triage_list) - - print("Start:", len(triage_list), "triagers") - # Step 0 Check if users have requested commit access in the last year. - for user in check_manual_requests(gh, one_year_ago): - if user in triage_list: - print(user, "requested commit access in the last year.") - del triage_list[user] - print("After Request Check:", len(triage_list), "triagers") - - # Step 1 count all PRs authored or merged - count_prs(gh, triage_list, one_year_ago) - - print("After PRs:", len(triage_list), "triagers") - - if len(triage_list) == 0: - sys.exit(0) - - # Step 2 check for reviews - for user in list(triage_list.keys()): - review_count = get_review_count(gh, user, one_year_ago) - triage_list[user].add_reviewed(review_count) - - print("After Reviews:", len(triage_list), "triagers") - - if len(triage_list) == 0: - sys.exit(0) - - # Step 3 check for number of commits - for user in list(triage_list.keys()): - num_commits = get_num_commits(gh, user, one_year_ago) - # Override the total number of commits to not double count commits and - # authored PRs. 
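- # set_authored() zeroes the authored count before re-adding and re-checks - # the threshold, so the commit total replaces, rather than adds to, the - # authored-PR count gathered in step 1.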
- triage_list[user].set_authored(num_commits) - - print("After Commits:", len(triage_list), "triagers") - - # Step 4 check for new committers - for user in list(triage_list.keys()): - print("Checking", user) - if is_new_committer(gh, user, one_year_ago): - print("Removing new committer: ", user) - del triage_list[user] - - print("Complete:", len(triage_list), "triagers") - - with open("triagers.log", "w") as triagers_log: - for user in triage_list: - print(triage_list[user].__repr__()) - triagers_log.write(user + "\n") - - -if __name__ == "__main__": - main() diff --git a/.github/workflows/commit-access-review.yml b/.github/workflows/commit-access-review.yml deleted file mode 100644 index b69b4b35dd377..0000000000000 --- a/.github/workflows/commit-access-review.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Commit Access Review - -on: - workflow_dispatch: - schedule: - # * is a special character in YAML so you have to quote this string - - cron: '0 7 1 * *' - -permissions: - contents: read - -jobs: - commit-access-review: - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - - name: Install dependencies - run: | - pip install --require-hashes -r ./llvm/utils/git/requirements.txt - - - name: Run Script - env: - GITHUB_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - run: | - python3 .github/workflows/commit-access-review.py $GITHUB_TOKEN - - - name: Upload Triage List - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 - with: - name: triagers - path: triagers.log diff --git a/.github/workflows/containers/github-action-ci-windows/Dockerfile b/.github/workflows/containers/github-action-ci-windows/Dockerfile deleted file mode 100644 index 640d34da02532..0000000000000 --- a/.github/workflows/containers/github-action-ci-windows/Dockerfile +++ /dev/null @@ -1,100 +0,0 @@ -# Agent image for LLVM org cluster. -# .net 4.8 is required by chocolately package manager. -FROM mcr.microsoft.com/dotnet/framework/sdk:4.8-windowsservercore-ltsc2022 - -# Restore the default Windows shell for correct batch processing. -SHELL ["cmd", "/S", "/C"] - -# Download the Build Tools bootstrapper. -ADD https://aka.ms/vs/16/release/vs_buildtools.exe /TEMP/vs_buildtools.exe - -RUN powershell -Command Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1')) - -# Download channel for fixed install. -ARG CHANNEL_URL=https://aka.ms/vs/16/release/channel -ADD ${CHANNEL_URL} /TEMP/VisualStudio.chman - -# Install Build Tools with C++ workload. 
-# - Documentation for docker installation -# https://docs.microsoft.com/en-us/visualstudio/install/build-tools-container?view=vs-2019 -# - Documentation on workloads -# https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-build-tools?view=vs-2019#c-build-tools -# - Documentation on flags -# https://docs.microsoft.com/en-us/visualstudio/install/use-command-line-parameters-to-install-visual-studio?view=vs-2019 -RUN /TEMP/vs_buildtools.exe --quiet --wait --norestart --nocache \ - --channelUri C:\TEMP\VisualStudio.chman \ - --installChannelUri C:\TEMP\VisualStudio.chman \ - --installPath C:\BuildTools \ - --add Microsoft.VisualStudio.Workload.VCTools \ - --add Microsoft.VisualStudio.Component.VC.ATL \ - --includeRecommended \ - || IF "%ERRORLEVEL%"=="3010" EXIT 0 - -# Register DIA dll (Debug Interface Access) so it can be used to symbolize -# the stack traces. Register dll for 32 and 64 bit. -# see https://developercommunity.visualstudio.com/content/problem/290674/msdia140dll-is-not-registered-on-vs2017-hosts.html - -RUN regsvr32 /S "C:\BuildTools\DIA SDK\bin\amd64\msdia140.dll" & \ - regsvr32 /S "C:\BuildTools\DIA SDK\bin\msdia140.dll" - -# install tools as described in https://llvm.org/docs/GettingStartedVS.html -# and a few more that were not documented... -# Pin an older version of Python; the current Python 3.10 fails when -# doing "pip install" for the other dependencies, as it fails to find libxml -# while compiling some package. -# We version pin the other packages as well to ensure the container build is as -# reproducible as possible to prevent issues when upgrading only part of the -# container. -RUN choco install -y ninja --version 1.13.1 && \ - choco install -y git --version 2.50.1 && \ - choco install -y sccache --version 0.10.0 && \ - choco install -y python3 --version 3.9.7 - -# Testing requires psutil -RUN pip install psutil - -# configure Python encoding -ENV PYTHONIOENCODING=UTF-8 - -# update the path variable -# C:\Program Files\Git\usr\bin contains a usable bash and other unix tools. -# C:\llvm-mingw\bin contains Clang configured for mingw targets and -# corresponding sysroots. Both the 'llvm' package (with Clang defaulting -# to MSVC targets) and this directory contains executables named -# 'clang.exe' - add this last to let the other one have precedence. -# To use these compilers, use the triple prefixed form, e.g. -# x86_64-w64-mingw32-clang. 
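-# For example, a hypothetical mingw cross-compile (illustration only): -# x86_64-w64-mingw32-clang++ hello.cpp -o hello.exe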
-# C:\buildtools and SDK paths are ones that are set by c:\BuildTools\Common7\Tools\VsDevCmd.bat -arch=amd64 -host_arch=amd64 -RUN powershell -Command \ - [System.Environment]::SetEnvironmentVariable('PATH', \ - [System.Environment]::GetEnvironmentVariable('PATH', 'machine') + ';C:\Program Files\Git\usr\bin;C:\llvm-mingw\bin' \ - + ';C:\BuildTools\Common7\IDE\' \ - + ';C:\BuildTools\Common7\IDE\CommonExtensions\Microsoft\TeamFoundation\Team Explorer' \ - + ';C:\BuildTools\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake\bin' \ - + ';C:\BuildTools\Common7\IDE\CommonExtensions\Microsoft\CMake\Ninja' \ - + ';C:\BuildTools\Common7\IDE\CommonExtensions\Microsoft\TeamFoundation\Team Explorer' \ - + ';C:\BuildTools\Common7\IDE\CommonExtensions\Microsoft\TestWindow' \ - + ';C:\BuildTools\Common7\IDE\VC\VCPackages' \ - + ';C:\BuildTools\Common7\Tools\' \ - + ';C:\BuildTools\Common7\Tools\devinit' \ - + ';C:\BuildTools\MSBuild\Current\Bin' \ - + ';C:\BuildTools\MSBuild\Current\bin\Roslyn' \ - + ';C:\BuildTools\VC\Tools\MSVC\14.29.30133\bin\HostX64\x64' \ - + ';C:\Program Files (x86)\Microsoft SDKs\Windows\v10.0A\bin\NETFX 4.8 Tools\x64\' \ - + ';C:\Program Files (x86)\Windows Kits\10\bin\10.0.19041.0\x64' \ - + ';C:\Program Files (x86)\Windows Kits\10\bin\x64' \ - + ';C:\Windows\Microsoft.NET\Framework64\v4.0.30319' \ - ,'machine') - -# support long file names during git checkout -RUN git config --system core.longpaths true & \ - git config --global core.autocrlf false - -ARG RUNNER_VERSION=2.328.0 -ENV RUNNER_VERSION=$RUNNER_VERSION - -RUN powershell -Command \ - Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v${env:RUNNER_VERSION}/actions-runner-win-x64-${env:RUNNER_VERSION}.zip -OutFile actions-runner-win.zip ; \ - Add-Type -AssemblyName System.IO.Compression.FileSystem ; \ - [System.IO.Compression.ZipFile]::ExtractToDirectory('actions-runner-win.zip', $PWD) ;\ - rm actions-runner-win.zip diff --git a/.github/workflows/containers/github-action-ci/Dockerfile b/.github/workflows/containers/github-action-ci/Dockerfile deleted file mode 100644 index 8a888f3a411c0..0000000000000 --- a/.github/workflows/containers/github-action-ci/Dockerfile +++ /dev/null @@ -1,109 +0,0 @@ -FROM docker.io/library/ubuntu:24.04 as base -ENV LLVM_SYSROOT=/opt/llvm - -FROM base as stage1-toolchain -ENV LLVM_VERSION=20.1.8 - -RUN apt-get update && \ - apt-get install -y \ - wget \ - gcc \ - g++ \ - cmake \ - ninja-build \ - python3 \ - git \ - curl \ - zlib1g-dev && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* - -RUN curl -O -L https://github.com/llvm/llvm-project/archive/refs/tags/llvmorg-$LLVM_VERSION.tar.gz && \ - tar -xf llvmorg-$LLVM_VERSION.tar.gz && \ - rm -f llvmorg-$LLVM_VERSION.tar.gz - -WORKDIR /llvm-project-llvmorg-$LLVM_VERSION - -RUN cmake -B ./build -G Ninja ./llvm \ - -C ./clang/cmake/caches/BOLT-PGO.cmake \ - -DBOOTSTRAP_LLVM_ENABLE_LLD=ON \ - -DBOOTSTRAP_BOOTSTRAP_LLVM_ENABLE_LLD=ON \ - -DPGO_INSTRUMENT_LTO=Thin \ - -DLLVM_ENABLE_RUNTIMES="compiler-rt" \ - -DCMAKE_INSTALL_PREFIX="$LLVM_SYSROOT" \ - -DLLVM_ENABLE_PROJECTS="bolt;clang;lld;clang-tools-extra" \ - -DLLVM_DISTRIBUTION_COMPONENTS="lld;compiler-rt;clang-format;scan-build;llvm-symbolizer" \ - -DCLANG_DEFAULT_LINKER="lld" - -RUN ninja -C ./build stage2-clang-bolt stage2-install-distribution && ninja -C ./build install-distribution - -FROM base as ci-container - -COPY --from=stage1-toolchain $LLVM_SYSROOT $LLVM_SYSROOT - -# Need to install curl for hendrikmuhs/ccache-action -# Need nodejs for some of the GitHub 
actions. -# Need perl-modules for clang analyzer tests. -# Need git for SPIRV-Tools tests. -RUN apt-get update && \ - DEBIAN_FRONTEND=noninteractive apt-get install -y \ - binutils \ - cmake \ - curl \ - git \ - libstdc++-11-dev \ - ninja-build \ - nodejs \ - perl-modules \ - python3-psutil \ - sudo \ - # These are needed by the premerge pipeline. Pip is used to install - # dependent python packages. File and tzdata are used for tests. - # Having a symlink from python to python3 enables code sharing between - # the Linux and Windows pipelines. - python3-pip \ - file \ - tzdata \ - python-is-python3 && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* - -# We need sccache for caching. We cannot use the apt repository version because -# it is too old and has bugs related to features we require (particularly GCS -# caching), so we manually install it here. -# TODO(boomanaiden154): We should return to installing this from the apt -# repository once a version containing the necessary bug fixes is available. -RUN curl -L 'https://github.com/mozilla/sccache/releases/download/v0.10.0/sccache-v0.10.0-x86_64-unknown-linux-musl.tar.gz' > /tmp/sccache.tar.gz && \ - echo "1fbb35e135660d04a2d5e42b59c7874d39b3deb17de56330b25b713ec59f849b /tmp/sccache.tar.gz" | sha256sum -c && \ - tar xzf /tmp/sccache.tar.gz -O --wildcards '*/sccache' > '/usr/local/bin/sccache' && \ - rm /tmp/sccache.tar.gz && \ - chmod +x /usr/local/bin/sccache - -ENV LLVM_SYSROOT=$LLVM_SYSROOT -ENV PATH=${LLVM_SYSROOT}/bin:${PATH} -ENV CC=clang -ENV CXX=clang++ - -# Create a new user to avoid test failures related to a lack of expected -# permissions issues in some tests. Set the user id to 1001 as that is the -# user id that Github Actions uses to perform the checkout action. -RUN useradd gha -u 1001 -m -s /bin/bash - -# Also add the user to passwordless sudoers so that we can install software -# later on without having to rebuild the container. -RUN adduser gha sudo -RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers - -USER gha -WORKDIR /home/gha - -FROM ci-container as ci-container-agent - -ENV GITHUB_RUNNER_VERSION=2.328.0 - -RUN mkdir actions-runner && \ - cd actions-runner && \ - curl -O -L https://github.com/actions/runner/releases/download/v$GITHUB_RUNNER_VERSION/actions-runner-linux-x64-$GITHUB_RUNNER_VERSION.tar.gz && \ - tar xzf ./actions-runner-linux-x64-$GITHUB_RUNNER_VERSION.tar.gz && \ - rm ./actions-runner-linux-x64-$GITHUB_RUNNER_VERSION.tar.gz - diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml deleted file mode 100644 index dcba17ff0113e..0000000000000 --- a/.github/workflows/docs.yml +++ /dev/null @@ -1,215 +0,0 @@ -# LLVM Documentation CI -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. 
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -name: "Test documentation build" - -permissions: - contents: read - -on: - push: - branches: - - 'main' - paths: - - 'llvm/docs/**' - - 'clang/docs/**' - - 'clang/include/clang/Basic/AttrDocs.td' - - 'clang/include/clang/Driver/ClangOptionDocs.td' - - 'clang/include/clang/Basic/DiagnosticDocs.td' - - 'clang-tools-extra/docs/**' - - 'lldb/docs/**' - - 'libunwind/docs/**' - - 'libcxx/docs/**' - - 'libc/docs/**' - - 'lld/docs/**' - - 'openmp/docs/**' - - 'polly/docs/**' - - 'flang/docs/**' - - 'flang/include/flang/Optimizer/Dialect/FIROps.td' - - '.github/workflows/docs.yml' - pull_request: - paths: - - 'llvm/docs/**' - - 'clang/docs/**' - - 'clang/include/clang/Basic/AttrDocs.td' - - 'clang/include/clang/Driver/ClangOptionDocs.td' - - 'clang/include/clang/Basic/DiagnosticDocs.td' - - 'clang-tools-extra/docs/**' - - 'lldb/docs/**' - - 'libunwind/docs/**' - - 'libcxx/docs/**' - - 'libc/docs/**' - - 'lld/docs/**' - - 'openmp/docs/**' - - 'polly/docs/**' - - 'flang/docs/**' - - 'flang/include/flang/Optimizer/Dialect/FIROps.td' - - '.github/workflows/docs.yml' - -jobs: - check-docs-build: - name: "Test documentation build" - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - fetch-depth: 2 - - name: Get subprojects that have doc changes - id: docs-changed-subprojects - uses: step-security/changed-files@95b56dadb92a30ca9036f16423fd3c088a71ee94 # v46.0.5 - with: - skip_initial_fetch: true - base_sha: 'HEAD~1' - sha: 'HEAD' - files_yaml: | - llvm: - - 'llvm/docs/**' - clang: - - 'clang/docs/**' - - 'clang/include/clang/Basic/AttrDocs.td' - - 'clang/include/clang/Driver/ClangOptionDocs.td' - - 'clang/include/clang/Basic/DiagnosticDocs.td' - clang-tools-extra: - - 'clang-tools-extra/docs/**' - lldb: - - 'lldb/docs/**' - libunwind: - - 'libunwind/docs/**' - libcxx: - - 'libcxx/docs/**' - libc: - - 'libc/docs/**' - lld: - - 'lld/docs/**' - openmp: - - 'openmp/docs/**' - polly: - - 'polly/docs/**' - flang: - - 'flang/docs/**' - - 'flang/include/flang/Optimizer/Dialect/FIROps.td' - workflow: - - '.github/workflows/docs.yml' - - name: Setup Python env - uses: actions/setup-python@v6.0.0 - with: - python-version: '3.11' - cache: 'pip' - cache-dependency-path: 'llvm/docs/requirements-hashed.txt' - - name: Install python dependencies - run: pip install -r llvm/docs/requirements-hashed.txt - - name: Install system dependencies - run: | - sudo apt-get update - # swig and graphviz are lldb specific dependencies - sudo apt-get install -y cmake ninja-build swig graphviz libhwloc-dev - - name: Setup output folder - run: mkdir built-docs - - name: Build LLVM docs - if: | - steps.docs-changed-subprojects.outputs.llvm_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B llvm-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_SPHINX=ON ./llvm - TZ=UTC ninja -C llvm-build docs-llvm-html docs-llvm-man - mkdir built-docs/llvm - cp -r llvm-build/docs/* built-docs/llvm/ - - name: Build Clang docs - if: | - steps.docs-changed-subprojects.outputs.clang_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B clang-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang" -DLLVM_ENABLE_SPHINX=ON ./llvm - TZ=UTC ninja -C clang-build docs-clang-html docs-clang-man - mkdir built-docs/clang - 
cp -r clang-build/docs/* built-docs/clang/ - - name: Build clang-tools-extra docs - if: | - steps.docs-changed-subprojects.outputs.clang-tools-extra_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B clang-tools-extra-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang;clang-tools-extra" -DLLVM_ENABLE_SPHINX=ON ./llvm - TZ=UTC ninja -C clang-tools-extra-build docs-clang-tools-html docs-clang-tools-man - mkdir built-docs/clang-tools-extra - cp -r clang-tools-extra-build/docs/* built-docs/clang-tools-extra/ - - name: Build LLDB docs - if: | - steps.docs-changed-subprojects.outputs.lldb_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B lldb-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang;lldb" -DLLVM_ENABLE_SPHINX=ON ./llvm - TZ=UTC ninja -C lldb-build docs-lldb-html docs-lldb-man - mkdir built-docs/lldb - cp -r lldb-build/docs/* built-docs/lldb/ - - name: Build libunwind docs - if: | - steps.docs-changed-subprojects.outputs.libunwind_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B libunwind-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_RUNTIMES="libunwind" -DLLVM_ENABLE_SPHINX=ON ./runtimes - TZ=UTC ninja -C libunwind-build docs-libunwind-html - mkdir built-docs/libunwind - cp -r libunwind-build/libunwind/docs/* built-docs/libunwind - - name: Build libcxx docs - if: | - steps.docs-changed-subprojects.outputs.libcxx_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B libcxx-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_RUNTIMES="libcxxabi;libcxx;libunwind" -DLLVM_ENABLE_SPHINX=ON ./runtimes - TZ=UTC ninja -C libcxx-build docs-libcxx-html - mkdir built-docs/libcxx - cp -r libcxx-build/libcxx/docs/* built-docs/libcxx/ - - name: Build libc docs - if: | - steps.docs-changed-subprojects.outputs.libc_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B libc-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_RUNTIMES="libc" -DLLVM_ENABLE_SPHINX=ON ./runtimes - TZ=UTC ninja -C libc-build docs-libc-html - mkdir built-docs/libc - cp -r libc-build/libc/docs/* built-docs/libc/ - - name: Build LLD docs - if: | - steps.docs-changed-subprojects.outputs.lld_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B lld-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="lld" -DLLVM_ENABLE_SPHINX=ON ./llvm - TZ=UTC ninja -C lld-build docs-lld-html - mkdir built-docs/lld - cp -r lld-build/docs/* built-docs/lld/ - - name: Build OpenMP docs - if: | - steps.docs-changed-subprojects.outputs.openmp_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B openmp-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang;openmp" -DLLVM_ENABLE_SPHINX=ON ./llvm - TZ=UTC ninja -C openmp-build docs-openmp-html - mkdir built-docs/openmp - cp -r openmp-build/docs/* built-docs/openmp/ - - name: Build Polly docs - if: | - steps.docs-changed-subprojects.outputs.polly_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B polly-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="polly" -DLLVM_ENABLE_SPHINX=ON ./llvm - TZ=UTC 
ninja -C polly-build docs-polly-html docs-polly-man - mkdir built-docs/polly - cp -r polly-build/docs/* built-docs/polly/ - - name: Build Flang docs - if: | - steps.docs-changed-subprojects.outputs.flang_any_changed == 'true' || - steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' - run: | - cmake -B flang-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang;mlir;flang" -DLLVM_ENABLE_SPHINX=ON ./llvm - TZ=UTC ninja -C flang-build docs-flang-html docs-flang-man - mkdir built-docs/flang - cp -r flang-build/docs/* built-docs/flang/ - - name: Upload docs - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: docs-output - path: built-docs/ diff --git a/.github/workflows/email-check.yaml b/.github/workflows/email-check.yaml deleted file mode 100644 index 817ece9c0b4d5..0000000000000 --- a/.github/workflows/email-check.yaml +++ /dev/null @@ -1,48 +0,0 @@ -name: "Check for private emails used in PRs" - -on: - pull_request: - branches: - - sycl - - sycl-rel-** - -permissions: - contents: read - -jobs: - validate_email: - runs-on: ubuntu-24.04 - if: github.repository == 'intel/llvm' - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - ref: ${{ github.event.pull_request.head.sha }} - - - name: Extract author email - id: author - run: | - git log -1 - echo "EMAIL=$(git show -s --format='%ae' HEAD~0)" >> $GITHUB_OUTPUT - # Create empty comment file - echo "[]" > comments - - - name: Validate author email - if: ${{ endsWith(steps.author.outputs.EMAIL, 'noreply.github.com') }} - env: - COMMENT: >- - ⚠️ We detected that you are using a GitHub private e-mail address to contribute to the repo.
- Please turn off [Keep my email addresses private](https://github.com/settings/emails) setting in your account.<br/>
- See [LLVM Developer Policy](https://llvm.org/docs/DeveloperPolicy.html#email-addresses) and - [LLVM Discourse](https://discourse.llvm.org/t/hidden-emails-on-github-should-we-do-something-about-it) for more information. - run: | - cat << EOF > comments - [{"body" : "$COMMENT"}] - EOF - - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 - if: always() - with: - name: workflow-args - path: | - comments diff --git a/.github/workflows/get-llvm-version/action.yml b/.github/workflows/get-llvm-version/action.yml deleted file mode 100644 index 2218d926fc13d..0000000000000 --- a/.github/workflows/get-llvm-version/action.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Get LLVM Version -description: >- - Get the LLVM version from the llvm-project source tree. This action assumes - the llvm-project sources have already been checked out into GITHUB_WORKSPACE. - -outputs: - major: - description: LLVM major version - value: ${{ steps.version.outputs.major }} - minor: - description: LLVM minor version - value: ${{ steps.version.outputs.minor }} - patch: - description: LLVM patch version - value: ${{ steps.version.outputs.patch }} - -runs: - using: "composite" - steps: - - name: Get Version - shell: bash - id: version - run: | - for v in major minor patch; do - echo "$v=`llvm/utils/release/get-llvm-version.sh --$v`" >> $GITHUB_OUTPUT - done diff --git a/.github/workflows/gha-codeql.yml b/.github/workflows/gha-codeql.yml deleted file mode 100644 index 5a7c79d021ade..0000000000000 --- a/.github/workflows/gha-codeql.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Github Actions CodeQL - -permissions: - contents: read - -on: - pull_request: - branches: - - main - schedule: - - cron: '30 0 * * *' - -concurrency: - group: ${{ github.workflow }} - cancel-in-progress: true - -jobs: - codeql: - name: 'Github Actions CodeQL' - runs-on: ubuntu-24.04 - permissions: - security-events: write - steps: - - name: Checkout LLVM - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - with: - sparse-checkout: | - .github/ - - name: Initialize CodeQL - uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3 - with: - languages: actions - queries: security-extended - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3 diff --git a/.github/workflows/hlsl-matrix.yaml b/.github/workflows/hlsl-matrix.yaml deleted file mode 100644 index c63a32acd2b3e..0000000000000 --- a/.github/workflows/hlsl-matrix.yaml +++ /dev/null @@ -1,30 +0,0 @@ -name: HLSL Tests - -permissions: - contents: read - -on: - workflow_dispatch: - pull_request: - branches: - - main - paths: - - llvm/**/DirectX/** - - .github/workflows/hlsl* - - clang/*HLSL*/**/* - - clang/**/*HLSL* - - llvm/**/Frontend/HLSL/**/* - -jobs: - HLSL-Tests: - strategy: - fail-fast: false - matrix: - runs-on: - - hlsl-macos - - uses: ./.github/workflows/hlsl-test-all.yaml - with: - SKU: hlsl-macos - TestTarget: check-hlsl-clang-mtl # TODO: This target changes based on SKU - LLVM-ref: ${{ github.ref }} diff --git a/.github/workflows/hlsl-test-all.yaml b/.github/workflows/hlsl-test-all.yaml deleted file mode 100644 index 5ab574dfe8dd7..0000000000000 --- a/.github/workflows/hlsl-test-all.yaml +++ /dev/null @@ -1,87 +0,0 @@ -name: HLSL Test - -permissions: - contents: read - -on: - workflow_call: - inputs: - OffloadTest-branch: - description: 'Test Suite Branch' - required: false - default: 'main' - type: string - LLVM-ref: - description: 'LLVM Branch' - 
required: false - default: 'main' - type: string - SKU: - required: true - type: string - TestTarget: - required: false - default: 'check-hlsl' - type: string - -jobs: - build: - runs-on: ${{ inputs.SKU }} - steps: - - name: Checkout DXC - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - repository: Microsoft/DirectXShaderCompiler - ref: main - path: DXC - submodules: true - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - ref: ${{ inputs.LLVM-ref }} - path: llvm-project - - name: Checkout OffloadTest - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - repository: llvm/offload-test-suite - ref: main - path: OffloadTest - - name: Checkout Golden Images - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - repository: llvm/offload-golden-images - ref: main - path: golden-images - - name: Setup Windows - if: runner.os == 'Windows' - uses: llvm/actions/setup-windows@main - with: - arch: amd64 - - name: Build DXC - run: | - cd DXC - mkdir build - cd build - cmake -G Ninja -DCMAKE_BUILD_TYPE=Release -C ${{ github.workspace }}/DXC/cmake/caches/PredefinedParams.cmake -C ${{ github.workspace }}/OffloadTest/cmake/caches/sccache.cmake -DHLSL_DISABLE_SOURCE_GENERATION=On ${{ github.workspace }}/DXC/ - ninja dxv llvm-dis - - name: Build LLVM - run: | - cd llvm-project - mkdir build - cd build - cmake -G Ninja -DDXIL_DIS=${{ github.workspace }}/DXC/build/bin/llvm-dis -DLLVM_INCLUDE_DXIL_TESTS=On -DCMAKE_BUILD_TYPE=Release -C ${{ github.workspace }}/llvm-project/clang/cmake/caches/HLSL.cmake -C ${{ github.workspace }}/OffloadTest/cmake/caches/sccache.cmake -DDXC_DIR=${{ github.workspace }}/DXC/build/bin -DLLVM_EXTERNAL_OFFLOADTEST_SOURCE_DIR=${{ github.workspace }}/OffloadTest -DLLVM_EXTERNAL_PROJECTS="OffloadTest" -DLLVM_LIT_ARGS="--xunit-xml-output=testresults.xunit.xml -v" -DGOLDENIMAGE_DIR=${{ github.workspace }}/golden-images ${{ github.workspace }}/llvm-project/llvm/ - ninja hlsl-test-depends llvm-test-depends clang-test-depends - - name: Run HLSL Tests - run: | - cd llvm-project - cd build - ninja check-llvm - ninja check-clang - ninja check-hlsl-unit - ninja ${{ inputs.TestTarget }} - - name: Publish Test Results - uses: EnricoMi/publish-unit-test-result-action/macos@34d7c956a59aed1bfebf31df77b8de55db9bbaaf # v2 - if: always() && runner.os == 'macOS' - with: - comment_mode: off - files: llvm-project/build/**/testresults.xunit.xml diff --git a/.github/workflows/issue-release-workflow.yml b/.github/workflows/issue-release-workflow.yml deleted file mode 100644 index 8e29947aa0c4d..0000000000000 --- a/.github/workflows/issue-release-workflow.yml +++ /dev/null @@ -1,69 +0,0 @@ -# This contains the workflow definitions that allow users to test backports -# to the release branch using comments on issues. -# -# /cherry-pick <...> -# -# This comment will attempt to cherry-pick the given commits to the latest -# release branch (release/Y.x) and if successful, push the result to a branch -# on github. -# -# /branch <owner>/<repo>/<branch> -# -# This comment will create a pull request from <branch> to the latest release -# branch.
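To make the two triggers concrete, here is what such comments look like in practice (the commit hashes, user, and branch name below are hypothetical, chosen only for illustration):

    /cherry-pick 6f2a3bd9e314 0c81dc4e2f55
    /branch llvmbot/llvm-project/issue-98765

The first asks the bot to cherry-pick the listed commits onto the active release/Y.x branch; the second asks it to open a pull request from the named branch of the named repository against that release branch.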
- -name: Issue Release Workflow - permissions: - contents: read - on: - issue_comment: - types: - - created - - edited - issues: - types: - - opened - -env: - COMMENT_BODY: ${{ github.event.action == 'opened' && github.event.issue.body || github.event.comment.body }} - -jobs: - backport-commits: - name: Backport Commits - runs-on: ubuntu-24.04 - permissions: - issues: write - pull-requests: write - if: >- - (github.repository == 'llvm/llvm-project') && - !startswith(github.event.comment.body, '<!--IGNORE-->') && - contains(github.event.action == 'opened' && github.event.issue.body || github.event.comment.body, '/cherry-pick') - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - repository: llvm/llvm-project - # GitHub stores the token used for checkout and uses it for pushes - # too, but we want to use a different token for pushing, so we need - # to disable persist-credentials here. - persist-credentials: false - fetch-depth: 0 - - - name: Setup Environment - run: | - pip install --require-hashes -r ./llvm/utils/git/requirements.txt - ./llvm/utils/git/github-automation.py --token ${{ github.token }} setup-llvmbot-git - - - name: Backport Commits - run: | - printf "%s" "$COMMENT_BODY" | - ./llvm/utils/git/github-automation.py \ - --repo "$GITHUB_REPOSITORY" \ - --token "${{ secrets.RELEASE_WORKFLOW_PR_CREATE }}" \ - release-workflow \ - --branch-repo-token ${{ secrets.RELEASE_WORKFLOW_PUSH_SECRET }} \ - --issue-number ${{ github.event.issue.number }} \ - --requested-by ${{ (github.event.action == 'opened' && github.event.issue.user.login) || github.event.comment.user.login }} \ - auto diff --git a/.github/workflows/issue-subscriber.yml b/.github/workflows/issue-subscriber.yml deleted file mode 100644 index 37095da11fa37..0000000000000 --- a/.github/workflows/issue-subscriber.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Issue Subscriber - -on: - issues: - types: - - labeled - -permissions: - contents: read - -jobs: - auto-subscribe: - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - steps: - - name: Checkout Automation Script - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: llvm/utils/git/ - ref: main - - - name: Setup Automation Script - working-directory: ./llvm/utils/git/ - run: | - pip install --require-hashes -r requirements.txt - - - name: Update watchers - working-directory: ./llvm/utils/git/ - # https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#using-an-intermediate-environment-variable - env: - LABEL_NAME: ${{ github.event.label.name }} - run: | - python3 ./github-automation.py \ - --token '${{ secrets.ISSUE_SUBSCRIBER_TOKEN }}' \ - issue-subscriber \ - --issue-number '${{ github.event.issue.number }}' \ - --label-name "$LABEL_NAME" diff --git a/.github/workflows/issue-write.yml b/.github/workflows/issue-write.yml deleted file mode 100644 index 8652b70bc4edc..0000000000000 --- a/.github/workflows/issue-write.yml +++ /dev/null @@ -1,157 +0,0 @@ -name: Comment on an issue - -on: - workflow_run: - workflows: - - "Check code formatting" - - "Check for private emails used in PRs" - - "PR Request Release Note" - types: - - completed - -permissions: - contents: read - -jobs: - pr-comment: - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - if: > - github.event.workflow_run.event == 'pull_request' && - ( - github.event.workflow_run.conclusion == 'success' || - github.event.workflow_run.conclusion == 'failure' -
) - steps: - - name: Fetch Sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: | - .github/workflows/unprivileged-download-artifact/action.yml - sparse-checkout-cone-mode: false - - name: 'Download artifact' - uses: ./.github/workflows/unprivileged-download-artifact - id: download-artifact - with: - run-id: ${{ github.event.workflow_run.id }} - artifact-name: workflow-args - - - name: 'Comment on PR' - if: steps.download-artifact.outputs.artifact-id != '' - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - var fs = require('fs'); - const comments = JSON.parse(fs.readFileSync('./comments')); - if (!comments || comments.length == 0) { - return; - } - - let runInfo = await github.rest.actions.getWorkflowRun({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: context.payload.workflow_run.id - }); - - console.log(runInfo); - - - // Query to find the number of the pull request that triggered this job. - // The associated pull requests are based off of the branch name, so if - // you create a pull request for a branch, close it, and then create - // another pull request with the same branch, then this query will return - // two associated pull requests. This is why we have to fetch all the - // associated pull requests and then iterate through them to find the - // one that is open. - const gql_query = ` - query($repo_owner : String!, $repo_name : String!, $branch: String!) { - repository(owner: $repo_owner, name: $repo_name) { - ref (qualifiedName: $branch) { - associatedPullRequests(first: 100) { - nodes { - baseRepository { - owner { - login - } - } - number - state - } - } - } - } - } - ` - const gql_variables = { - repo_owner: runInfo.data.head_repository.owner.login, - repo_name: runInfo.data.head_repository.name, - branch: runInfo.data.head_branch - } - const gql_result = await github.graphql(gql_query, gql_variables); - console.log(gql_result); - // If the branch for the PR was deleted before this job has a chance - // to run, then the ref will be null. This can happen if someone: - // 1. Rebases the PR, which triggers some workflow. - // 2. Immediately merges the PR and deletes the branch. - // 3. The workflow finishes and triggers this job. - if (!gql_result.repository.ref) { - console.log("Ref has been deleted"); - return; - } - console.log(gql_result.repository.ref.associatedPullRequests.nodes); - - var pr_number = 0; - gql_result.repository.ref.associatedPullRequests.nodes.forEach((pr) => { - - // The largest PR number is the one we care about. The only way - // to have more than one associated pull requests is if all the - // old pull requests are in the closed state. - if (pr.baseRepository.owner.login == context.repo.owner && pr.number > pr_number) { - pr_number = pr.number; - } - }); - if (pr_number == 0) { - console.log("Error retrieving pull request number"); - return; - } - - await comments.forEach(function (comment) { - if (comment.id) { - // Security check: Ensure that this comment was created by - // the github-actions bot, so a malicious input won't overwrite - // a user's comment.
- github.rest.issues.getComment({ - owner: context.repo.owner, - repo: context.repo.repo, - comment_id: comment.id - }).then((old_comment) => { - console.log(old_comment); - if (old_comment.data.user.login != "github-actions[bot]") { - console.log("Invalid comment id: " + comment.id); - return; - } - github.rest.issues.updateComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: pr_number, - comment_id: comment.id, - body: comment.body - }); - }); - } else { - github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: pr_number, - body: comment.body - }); - } - }); - - - name: Dump comments file - if: >- - always() && - steps.download-artifact.outputs.artifact-id != '' - run: cat comments diff --git a/.github/workflows/libc-fullbuild-tests.yml b/.github/workflows/libc-fullbuild-tests.yml deleted file mode 100644 index dd259fbec16bc..0000000000000 --- a/.github/workflows/libc-fullbuild-tests.yml +++ /dev/null @@ -1,133 +0,0 @@ -# This workflow is for pre-commit testing of the LLVM-libc project. -name: LLVM-libc Pre-commit Fullbuild Tests -permissions: - contents: read -on: - pull_request: - branches: [ "main" ] - paths: - - 'libc/**' - - '.github/workflows/libc-fullbuild-tests.yml' - -jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - # Build basic linux configuration with Debug/Release/MinSizeRel and all - # other configurations in Debug only. - include: - - os: ubuntu-24.04 - build_type: Debug - c_compiler: clang-22 - cpp_compiler: clang++-22 - target: x86_64-unknown-linux-llvm - include_scudo: ON - - os: ubuntu-24.04 - build_type: Release - c_compiler: clang-22 - cpp_compiler: clang++-22 - target: x86_64-unknown-linux-llvm - include_scudo: ON - - os: ubuntu-24.04 - build_type: MinSizeRel - c_compiler: clang-22 - cpp_compiler: clang++-22 - target: x86_64-unknown-linux-llvm - include_scudo: ON - - os: ubuntu-24.04-arm - build_type: Debug - c_compiler: clang-22 - cpp_compiler: clang++-22 - target: aarch64-unknown-linux-llvm - include_scudo: ON - - os: ubuntu-24.04 - build_type: Debug - c_compiler: clang-22 - cpp_compiler: clang++-22 - target: x86_64-unknown-uefi-llvm - include_scudo: OFF - # TODO: add back gcc build when it is fixed - # - c_compiler: gcc - # cpp_compiler: g++ - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - # Libc's build is relatively small comparing with other components of LLVM. - # A fresh fullbuild takes about 190MiB of uncompressed disk space, which can - # be compressed into ~40MiB. Limiting the cache size to 1G should be enough. - # Prefer sccache as it is more modern. - # Do not use direct GHAC access even though it is supported by sccache. GHAC rejects - # frequent small object writes. - - name: Setup ccache - uses: hendrikmuhs/ccache-action@bfa03e1de4d7f7c3e80ad9109feedd05c4f5a716 # v1.2.19 - with: - max-size: 1G - key: libc_fullbuild_${{ matrix.c_compiler }} - variant: sccache - - # Notice: - # - MPFR is required by some of the mathlib tests. - # - Debian has a multilib setup, so we need to symlink the asm directory. 
- # For more information, see https://wiki.debian.org/Multiarch/LibraryPathOverview - - name: Prepare dependencies (Ubuntu) - run: | - wget https://apt.llvm.org/llvm.sh - chmod +x llvm.sh - sudo ./llvm.sh 22 - sudo apt-get update - sudo apt-get install -y libmpfr-dev libgmp-dev libmpc-dev ninja-build linux-libc-dev - sudo ln -sf /usr/include/$(uname -p)-linux-gnu/asm /usr/include/asm - - - name: Set reusable strings - id: strings - shell: bash - run: | - echo "build-output-dir=${{ github.workspace }}/build" >> "$GITHUB_OUTPUT" - echo "build-install-dir=${{ github.workspace }}/install" >> "$GITHUB_OUTPUT" - - # Configure libc fullbuild with scudo. - # Use MinSizeRel to reduce the size of the build. - - name: Configure CMake - run: | - export RUNTIMES="libc" - - if [[ ${{ matrix.include_scudo}} == "ON" ]]; then - export RUNTIMES="$RUNTIMES;compiler-rt" - export CMAKE_FLAGS=" - -DLLVM_LIBC_INCLUDE_SCUDO=ON - -DCOMPILER_RT_BUILD_SCUDO_STANDALONE_WITH_LLVM_LIBC=ON - -DCOMPILER_RT_BUILD_GWP_ASAN=OFF - -DCOMPILER_RT_SCUDO_STANDALONE_BUILD_SHARED=OFF" - fi - - cmake -B ${{ steps.strings.outputs.build-output-dir }} \ - -DCMAKE_CXX_COMPILER=${{ matrix.cpp_compiler }} \ - -DCMAKE_C_COMPILER=${{ matrix.c_compiler }} \ - -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \ - -DCMAKE_C_COMPILER_LAUNCHER=sccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=sccache \ - -DCMAKE_INSTALL_PREFIX=${{ steps.strings.outputs.build-install-dir }} \ - -DLLVM_RUNTIME_TARGETS=${{ matrix.target }} \ - -DLLVM_ENABLE_RUNTIMES="$RUNTIMES" \ - -DLLVM_LIBC_FULL_BUILD=ON \ - -G Ninja \ - -S ${{ github.workspace }}/runtimes \ - $CMAKE_FLAGS - - - name: Build - run: > - cmake - --build ${{ steps.strings.outputs.build-output-dir }} - --parallel - --target install - - - name: Test - # Skip UEFI tests until we have testing set up. - if: ${{ ! endsWith(matrix.target, '-uefi-llvm') }} - run: > - cmake - --build ${{ steps.strings.outputs.build-output-dir }} - --parallel - --target check-libc diff --git a/.github/workflows/libc-overlay-tests.yml b/.github/workflows/libc-overlay-tests.yml deleted file mode 100644 index 17006733f1373..0000000000000 --- a/.github/workflows/libc-overlay-tests.yml +++ /dev/null @@ -1,114 +0,0 @@ -# This workflow is for pre-commit testing of the LLVM-libc project. -name: LLVM-libc Pre-commit Overlay Tests -permissions: - contents: read -on: - pull_request: - branches: [ "main" ] - paths: - - 'libc/**' - - '.github/workflows/libc-overlay-tests.yml' - -jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - # Set fail-fast to false to ensure that feedback is delivered for all matrix combinations. - fail-fast: false - matrix: - os: [ubuntu-24.04, ubuntu-24.04-arm, windows-2022, windows-2025, macos-14] - include: - # TODO: add linux gcc when it is fixed - - os: ubuntu-24.04 - compiler: - c_compiler: clang - cpp_compiler: clang++ - - os: ubuntu-24.04-arm - compiler: - c_compiler: clang - cpp_compiler: clang++ - - os: windows-2022 - compiler: - c_compiler: clang-cl - cpp_compiler: clang-cl - - os: windows-2025 - compiler: - c_compiler: clang-cl - cpp_compiler: clang-cl - - os: macos-14 - compiler: - c_compiler: clang - cpp_compiler: clang++ - - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - # Libc's build is relatively small comparing with other components of LLVM. - # A fresh linux overlay takes about 180MiB of uncompressed disk space, which can - # be compressed into ~40MiB. MacOS and Windows overlay builds are less than 10MiB - # after compression. 
Limiting the cache size to 1G should be enough. - # Prefer sccache as it is modern and it has a guarantee to work with MSVC. - # Do not use direct GHAC access even though it is supported by sccache. GHAC rejects - # frequent small object writes. - - name: Setup ccache - uses: hendrikmuhs/ccache-action@bfa03e1de4d7f7c3e80ad9109feedd05c4f5a716 # v1.2.19 - with: - max-size: 1G - key: libc_overlay_build_${{ matrix.os }}_${{ matrix.compiler.c_compiler }} - variant: sccache - - # MPFR is required by some of the mathlib tests. - - name: Prepare dependencies (Ubuntu) - if: runner.os == 'Linux' - run: | - sudo apt-get update - sudo apt-get install -y libmpfr-dev libgmp-dev libmpc-dev ninja-build - - # Chocolatey is shipped with Windows runners. Windows Server 2025 recommends WinGet. - # Consider migrating to WinGet when Windows Server 2025 is available. - - name: Prepare dependencies (Windows) - if: runner.os == 'Windows' - run: | - choco install ninja - - - name: Prepare dependencies (macOS) - if: runner.os == 'macOS' - run: | - brew install ninja - - - name: Set reusable strings - id: strings - shell: bash - run: | - echo "build-output-dir=${{ github.workspace }}/build" >> "$GITHUB_OUTPUT" - - # Use MinSizeRel to reduce the size of the build. - # Notice that CMP0141=NEW and MSVC_DEBUG_INFORMATION_FORMAT=Embedded are required - # by the sccache tool. - - name: Configure CMake - run: > - cmake -B ${{ steps.strings.outputs.build-output-dir }} - -DCMAKE_CXX_COMPILER=${{ matrix.compiler.cpp_compiler }} - -DCMAKE_C_COMPILER=${{ matrix.compiler.c_compiler }} - -DCMAKE_BUILD_TYPE=Debug - -DCMAKE_C_COMPILER_LAUNCHER=sccache - -DCMAKE_CXX_COMPILER_LAUNCHER=sccache - -DCMAKE_POLICY_DEFAULT_CMP0141=NEW - -DCMAKE_MSVC_DEBUG_INFORMATION_FORMAT=Embedded - -DLLVM_ENABLE_RUNTIMES=libc - -G Ninja - -S ${{ github.workspace }}/runtimes - - - name: Build - run: > - cmake - --build ${{ steps.strings.outputs.build-output-dir }} - --parallel - --target libc - - - name: Test - run: > - cmake - --build ${{ steps.strings.outputs.build-output-dir }} - --parallel - --target check-libc diff --git a/.github/workflows/libclang-abi-tests.yml b/.github/workflows/libclang-abi-tests.yml deleted file mode 100644 index f8cec4419d56c..0000000000000 --- a/.github/workflows/libclang-abi-tests.yml +++ /dev/null @@ -1,171 +0,0 @@ -name: libclang ABI Tests - -permissions: - contents: read - -on: - workflow_dispatch: - push: - branches: - - 'release/**' - paths: - - 'clang/**' - - '.github/workflows/libclang-abi-tests.yml' - pull_request: - branches: - - 'release/**' - paths: - - 'clang/**' - - '.github/workflows/libclang-abi-tests.yml' - -concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. 
- group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} - -jobs: - abi-dump-setup: - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - outputs: - BASELINE_REF: ${{ steps.vars.outputs.BASELINE_REF }} - ABI_HEADERS: ${{ steps.vars.outputs.ABI_HEADERS }} - ABI_LIBS: ${{ steps.vars.outputs.ABI_LIBS }} - BASELINE_VERSION_MAJOR: ${{ steps.vars.outputs.BASELINE_VERSION_MAJOR }} - LLVM_VERSION_MAJOR: ${{ steps.version.outputs.major }} - LLVM_VERSION_MINOR: ${{ steps.version.outputs.minor }} - LLVM_VERSION_PATCH: ${{ steps.version.outputs.patch }} - steps: - - name: Checkout source - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - fetch-depth: 250 - - - name: Get LLVM version - id: version - uses: ./.github/workflows/get-llvm-version - - - name: Setup Variables - id: vars - run: | - remote_repo='https://github.com/llvm/llvm-project' - if [ ${{ steps.version.outputs.patch }} -eq 0 ]; then - major_version=$(( ${{ steps.version.outputs.major }} - 1)) - baseline_ref="llvmorg-$major_version.1.0" - - # If there is a minor release, we want to use that as the base line. - minor_ref=$(git ls-remote --refs -t "$remote_repo" llvmorg-"$major_version".[1-9].[0-9] | tail -n1 | grep -o 'llvmorg-.\+' || true) - if [ -n "$minor_ref" ]; then - baseline_ref="$minor_ref" - else - # Check if we have a release candidate - rc_ref=$(git ls-remote --refs -t "$remote_repo" llvmorg-"$major_version".[1-9].[0-9]-rc* | tail -n1 | grep -o 'llvmorg-.\+' || true) - if [ -n "$rc_ref" ]; then - baseline_ref="$rc_ref" - fi - fi - { - echo "BASELINE_VERSION_MAJOR=$major_version" - echo "BASELINE_REF=$baseline_ref" - echo "ABI_HEADERS=clang-c" - echo "ABI_LIBS=libclang.so" - } >> "$GITHUB_OUTPUT" - else - { - echo "BASELINE_VERSION_MAJOR=${{ steps.version.outputs.major }}" - echo "BASELINE_REF=llvmorg-${{ steps.version.outputs.major }}.1.0" - echo "ABI_HEADERS=." 
- echo "ABI_LIBS=libclang.so libclang-cpp.so" - } >> "$GITHUB_OUTPUT" - fi - - abi-dump: - if: github.repository_owner == 'llvm' - needs: abi-dump-setup - runs-on: ubuntu-24.04 - strategy: - matrix: - name: - - build-baseline - - build-latest - include: - - name: build-baseline - llvm_version_major: ${{ needs.abi-dump-setup.outputs.BASELINE_VERSION_MAJOR }} - ref: ${{ needs.abi-dump-setup.outputs.BASELINE_REF }} - repo: llvm/llvm-project - - name: build-latest - llvm_version_major: ${{ needs.abi-dump-setup.outputs.LLVM_VERSION_MAJOR }} - ref: ${{ github.sha }} - repo: ${{ github.repository }} - steps: - - name: Install Ninja - uses: llvm/actions/install-ninja@main - - name: Install abi-compliance-checker - run: | - sudo apt-get update - sudo apt-get install -y abi-dumper autoconf pkg-config - - name: Install universal-ctags - run: | - git clone https://github.com/universal-ctags/ctags.git - cd ctags - ./autogen.sh - ./configure - sudo make install - - name: Download source code - uses: llvm/actions/get-llvm-project-src@main - with: - ref: ${{ matrix.ref }} - repo: ${{ matrix.repo }} - - name: Configure - run: | - mkdir install - cmake -B build -S llvm -G Ninja -DLLVM_ENABLE_PROJECTS=clang -DCMAKE_BUILD_TYPE=Debug -DLLVM_TARGETS_TO_BUILD="" -DLLVM_BUILD_LLVM_DYLIB=ON -DLLVM_LINK_LLVM_DYLIB=ON -DCMAKE_C_FLAGS_DEBUG="-g1 -Og" -DCMAKE_CXX_FLAGS_DEBUG="-g1 -Og" -DCMAKE_INSTALL_PREFIX="$(pwd)"/install llvm - - name: Build - run: ninja -C build/ ${{ needs.abi-dump-setup.outputs.ABI_LIBS }} install-clang-headers - - name: Dump ABI - run: | - parallel abi-dumper -lver ${{ matrix.ref }} -skip-cxx -public-headers ./install/include/${{ needs.abi-dump-setup.outputs.ABI_HEADERS }} -o {}-${{ matrix.ref }}.abi ./build/lib/{} ::: ${{ needs.abi-dump-setup.outputs.ABI_LIBS }} - for lib in ${{ needs.abi-dump-setup.outputs.ABI_LIBS }}; do - # Remove symbol versioning from dumps, so we can compare across major versions. 
- sed -i 's/LLVM_[0-9]\+/LLVM_NOVERSION/' $lib-${{ matrix.ref }}.abi - done - - name: Upload ABI file - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 - with: - name: ${{ matrix.name }} - path: '*${{ matrix.ref }}.abi' - - abi-compare: - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - needs: - - abi-dump-setup - - abi-dump - steps: - - name: Download baseline - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 - with: - name: build-baseline - path: build-baseline - - name: Download latest - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 - with: - name: build-latest - path: build-latest - - - name: Install abi-compliance-checker - run: | - sudo apt-get update - sudo apt-get install -y abi-compliance-checker - - name: Compare ABI - run: | - for lib in ${{ needs.abi-dump-setup.outputs.ABI_LIBS }}; do - abi-compliance-checker -lib $lib -old build-baseline/$lib*.abi -new build-latest/$lib*.abi - done - - name: Upload ABI Comparison - if: always() - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 - with: - name: compat-report-${{ github.sha }} - path: compat_reports/ diff --git a/.github/workflows/libclang-python-tests.yml b/.github/workflows/libclang-python-tests.yml deleted file mode 100644 index 0d66f5d595e0e..0000000000000 --- a/.github/workflows/libclang-python-tests.yml +++ /dev/null @@ -1,57 +0,0 @@ -name: Libclang Python Binding Tests - -permissions: - contents: read - -on: - push: - branches: - - 'main' - paths: - - 'clang/bindings/python/**' - - 'clang/tools/libclang/**' - - 'clang/CMakeList.txt' - - '.github/workflows/libclang-python-tests.yml' - pull_request: - paths: - - 'clang/bindings/python/**' - - 'clang/tools/libclang/**' - - 'clang/CMakeList.txt' - - '.github/workflows/libclang-python-tests.yml' - -jobs: - check-clang-python: - # Build libclang and then run the libclang Python binding's unit tests. - # There is an issue running on "windows-2019". - # See https://github.com/llvm/llvm-project/issues/76601#issuecomment-1873049082. - name: Build and run Python unit tests - if: github.repository == 'llvm/llvm-project' - runs-on: ubuntu-24.04 - strategy: - fail-fast: false - matrix: - python-version: ["3.8", "3.13"] - steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - name: Setup Python - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 - with: - python-version: ${{ matrix.python-version }} - - name: Setup ccache - uses: hendrikmuhs/ccache-action@bfa03e1de4d7f7c3e80ad9109feedd05c4f5a716 # v1.2.19 - with: - max-size: 2G - key: spirv-ubuntu-24.04 - variant: sccache - - name: Build and Test - run: | - mkdir build - cmake -GNinja \ - -S llvm \ - -B build \ - -DCMAKE_BUILD_TYPE=Release \ - -DLLVM_ENABLE_ASSERTIONS=ON \ - -DCMAKE_C_COMPILER_LAUNCHER=sccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=sccache \ - -DLLVM_ENABLE_PROJECTS=clang - ninja -C build check-clang-python diff --git a/.github/workflows/libcxx-build-and-test.yaml b/.github/workflows/libcxx-build-and-test.yaml deleted file mode 100644 index dcc625edc400b..0000000000000 --- a/.github/workflows/libcxx-build-and-test.yaml +++ /dev/null @@ -1,286 +0,0 @@ -# This file defines pre-commit CI for libc++, libc++abi, and libunwind (on Github). -# -# We split the configurations in multiple stages with the intent of saving compute time -# when a job fails early in the pipeline. 
This is why the jobs are marked as `continue-on-error: false`. - # We try to run the CI configurations with the most signal in the first stage. - # - # Stages 1 & 2 are meant to be "smoke tests", and are meant to catch most build/test failures quickly and without using - # too many resources. - # Stage 3 is "everything else", and is meant to catch breakages on more niche or unique configurations. - # - # Therefore, we "fail-fast" for any failures during stages 1 & 2, meaning any job failing cancels all other running jobs, - # under the assumption that if the "smoke tests" fail, then the other configurations will likely fail in the same way. - # However, stage 3 does not fail fast, as it's more likely that any one job failing is a flake or a configuration-specific issue. - # -name: Build and Test libc++ -on: - pull_request: - paths: - - 'libcxx/**' - - 'libcxxabi/**' - - 'libunwind/**' - - 'runtimes/**' - - 'cmake/**' - - '.github/workflows/libcxx-build-and-test.yaml' - schedule: - # Run nightly at 08:00 UTC (aka 00:00 Pacific, aka 03:00 Eastern) - - cron: '0 8 * * *' - -permissions: - contents: read # Default everything to read-only - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number }} - cancel-in-progress: true - -jobs: - stage1: - if: github.repository_owner == 'llvm' - runs-on: llvm-premerge-libcxx-next-runners - continue-on-error: false - strategy: - fail-fast: false - matrix: - config: [ - 'frozen-cxx03-headers', - 'generic-cxx03', - 'generic-cxx26', - 'generic-modules' - ] - cc: [ 'clang-22' ] - cxx: [ 'clang++-22' ] - include: - - config: 'generic-gcc' - cc: 'gcc-15' - cxx: 'g++-15' - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - name: ${{ matrix.config }}.${{ matrix.cxx }} - run: libcxx/utils/ci/run-buildbot ${{ matrix.config }} - env: - CC: ${{ matrix.cc }} - CXX: ${{ matrix.cxx }} - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - if: always() - with: - name: ${{ matrix.config }}-${{ matrix.cxx }}-results - path: | - **/test-results.xml - **/*.abilist - **/CMakeConfigureLog.yaml - **/CMakeError.log - **/CMakeOutput.log - **/crash_diagnostics/* - stage2: - if: github.repository_owner == 'llvm' - runs-on: llvm-premerge-libcxx-next-runners - needs: [ stage1 ] - continue-on-error: false - strategy: - fail-fast: false - matrix: - config: [ - 'generic-cxx11', - 'generic-cxx14', - 'generic-cxx17', - 'generic-cxx20', - 'generic-cxx23' - ] - cc: [ 'clang-22' ] - cxx: [ 'clang++-22' ] - include: - - config: 'generic-gcc-cxx11' - cc: 'gcc-15' - cxx: 'g++-15' - - config: 'generic-cxx26' - cc: 'clang-21' - cxx: 'clang++-21' - - config: 'generic-cxx26' - cc: 'clang-20' - cxx: 'clang++-20' - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - name: ${{ matrix.config }} - run: libcxx/utils/ci/run-buildbot ${{ matrix.config }} - env: - CC: ${{ matrix.cc }} - CXX: ${{ matrix.cxx }} - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - if: always() # Upload artifacts even if the build or test suite fails - with: - name: ${{ matrix.config }}-${{ matrix.cxx }}-results - path: | - **/test-results.xml - **/*.abilist - **/CMakeConfigureLog.yaml - **/CMakeError.log - **/CMakeOutput.log - **/crash_diagnostics/* - stage3: - if: github.repository_owner == 'llvm' - needs: [ stage2 ] - continue-on-error: false - strategy: - fail-fast: false - max-parallel: 8 - matrix: - config: [ - 'generic-abi-unstable', - 'generic-hardening-mode-debug', -
'generic-hardening-mode-extensive', - 'generic-hardening-mode-extensive-observe-semantic', - 'generic-hardening-mode-fast', - 'generic-hardening-mode-fast-with-abi-breaks', - 'generic-merged', - 'generic-modules-cxx17-lsv', - 'generic-no-exceptions', - 'generic-no-experimental', - 'generic-no-filesystem', - 'generic-no-localization', - 'generic-no-terminal', - 'generic-no-random_device', - 'generic-no-threads', - 'generic-no-tzdb', - 'generic-no-unicode', - 'generic-no-wide-characters', - 'generic-no-rtti', - 'generic-optimized-speed', - 'generic-static', - 'bootstrapping-build' - ] - machine: [ 'llvm-premerge-libcxx-next-runners' ] - include: - - config: 'generic-cxx26' - machine: llvm-premerge-libcxx-next-runners - - config: 'generic-asan' - machine: llvm-premerge-libcxx-next-runners - - config: 'generic-tsan' - machine: llvm-premerge-libcxx-next-runners - - config: 'generic-ubsan' - machine: llvm-premerge-libcxx-next-runners - # Use a larger machine for MSAN to avoid timeout and memory allocation issues. - - config: 'generic-msan' - machine: llvm-premerge-libcxx-next-runners - runs-on: ${{ matrix.machine }} - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - name: ${{ matrix.config }} - run: libcxx/utils/ci/run-buildbot ${{ matrix.config }} - env: - CC: clang-22 - CXX: clang++-22 - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - if: always() - with: - name: ${{ matrix.config }}-results - path: | - **/test-results.xml - **/*.abilist - **/CMakeConfigureLog.yaml - **/CMakeError.log - **/CMakeOutput.log - **/crash_diagnostics/* - - macos: - needs: [ stage2 ] - strategy: - fail-fast: false - matrix: - include: - - config: generic-cxx03 - os: macos-15 - - config: generic-cxx23 - os: macos-15 - - config: generic-modules - os: macos-15 - - config: apple-configuration - os: macos-15 - # TODO: These jobs are intended to test back-deployment (building against ToT libc++ but running against an - # older system-provided libc++.dylib). Doing this properly would require building the test suite on a - # recent macOS using a recent Clang (hence recent Xcode), and then running the actual test suite on an - # older mac. We could do that by e.g. sharing artifacts between the two jobs. - # - # However, our Lit configuration currently doesn't provide a good way to do that in a batch, so our only - # alternative is to actually build on the same host that we're going to run on. Sadly, that doesn't work - # since older macOSes don't support newer Xcodes. For now, we run the "backdeployment" jobs on recent - # macOS versions as a way to avoid rotting that configuration, but it doesn't provide a lot of additional - # coverage. 
- - config: apple-system - os: macos-15 - - config: apple-system-hardened - os: macos-15 - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - uses: maxim-lobanov/setup-xcode@60606e260d2fc5762a71e64e74b2174e8ea3c8bd # v1.6.0 - with: - # https://github.com/actions/runner-images/blob/main/images/macos/macos-15-Readme.md - xcode-version: '16.3' - - uses: seanmiddleditch/gha-setup-ninja@3b1f8f94a2f8254bd26914c4ab9474d4f0015f67 # v6 - - name: Build and test - run: | - python3 -m venv .venv - source .venv/bin/activate - python -m pip install psutil - bash libcxx/utils/ci/run-buildbot ${{ matrix.config }} - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - if: always() # Upload artifacts even if the build or test suite fails - with: - name: macos-${{ matrix.config }}-results - path: | - **/test-results.xml - **/*.abilist - **/CMakeConfigureLog.yaml - **/CMakeError.log - **/CMakeOutput.log - **/crash_diagnostics/* - - windows: - runs-on: windows-2022 - needs: [ stage2 ] - strategy: - fail-fast: false - matrix: - include: - - { config: clang-cl-dll, mingw: false } - - { config: clang-cl-static, mingw: false } - - { config: clang-cl-no-vcruntime, mingw: false } - - { config: clang-cl-debug, mingw: false } - - { config: clang-cl-static-crt, mingw: false } - - { config: mingw-dll, mingw: true } - - { config: mingw-static, mingw: true } - - { config: mingw-dll-i686, mingw: true } - - { config: mingw-incomplete-sysroot, mingw: true } - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - name: Install dependencies - run: | - choco install -y ninja - pip install psutil - - name: Install a current LLVM - if: ${{ matrix.mingw != true }} - run: | - choco install -y llvm --version=20.1.8 --allow-downgrade - - name: Install llvm-mingw - if: ${{ matrix.mingw == true }} - run: | - curl -LO https://github.com/mstorsjo/llvm-mingw/releases/download/20250709/llvm-mingw-20250709-ucrt-x86_64.zip - powershell Expand-Archive llvm-mingw*.zip -DestinationPath . - del llvm-mingw*.zip - mv llvm-mingw* c:\llvm-mingw - echo "c:\llvm-mingw\bin" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append - - name: Simulate a from-scratch build of llvm-mingw - if: ${{ matrix.config == 'mingw-incomplete-sysroot' }} - run: | - rm -r c:\llvm-mingw\include\c++ - rm -r c:\llvm-mingw\*-w64-mingw32\lib\libc++* - rm -r c:\llvm-mingw\*-w64-mingw32\lib\libunwind* - - name: Add Git Bash to the path - run: | - echo "c:\Program Files\Git\usr\bin" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append - - name: Set up the MSVC dev environment - if: ${{ matrix.mingw != true }} - uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0 - - name: Build and test - run: | - bash libcxx/utils/ci/run-buildbot ${{ matrix.config }} diff --git a/.github/workflows/libcxx-build-containers.yml b/.github/workflows/libcxx-build-containers.yml deleted file mode 100644 index 41d5452599bf9..0000000000000 --- a/.github/workflows/libcxx-build-containers.yml +++ /dev/null @@ -1,71 +0,0 @@ -# This file defines an action that builds the various Docker images used to run -# libc++ CI whenever modifications to those Docker files are pushed to `main`. -# -# The images are pushed to the LLVM package registry at https://github.com/orgs/llvm/packages -# and tagged appropriately. 
The selection of which Docker image version is used by the libc++ -# CI nodes at any given point is controlled from the workflow files themselves. - -name: Build Docker images for libc++ CI - -permissions: - contents: read - -on: - push: - branches: - - main - paths: - - 'libcxx/utils/ci/**' - - '.github/workflows/libcxx-build-containers.yml' - pull_request: - paths: - - 'libcxx/utils/ci/**' - - '.github/workflows/libcxx-build-containers.yml' - -jobs: - build-and-push: - runs-on: ubuntu-24.04 - if: github.repository_owner == 'llvm' - permissions: - packages: write - - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - - name: Build the Linux builder image - working-directory: libcxx/utils/ci - run: | - docker compose build builder-base - docker compose build actions-builder - env: - TAG: ${{ github.sha }} - - # - name: Build the Android builder image - # working-directory: libcxx/utils/ci - # run: docker compose build android-buildkite-builder - # env: - # TAG: ${{ github.sha }} - - - name: Log in to GitHub Container Registry - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Push the Linux builder image - if: github.event_name == 'push' - working-directory: libcxx/utils/ci - run: | - docker compose push builder-base - docker compose push actions-builder - env: - TAG: ${{ github.sha }} - - # - name: Push the Android builder image - # if: github.event_name == 'push' - # working-directory: libcxx/utils/ci - # run: | - # docker compose push android-buildkite-builder - # env: - # TAG: ${{ github.sha }} diff --git a/.github/workflows/libcxx-check-generated-files.yml b/.github/workflows/libcxx-check-generated-files.yml deleted file mode 100644 index 40dcef47e6a6a..0000000000000 --- a/.github/workflows/libcxx-check-generated-files.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: "Check libc++ generated files" -on: - pull_request: - paths: - - 'libcxx/**' - -permissions: - contents: read - -jobs: - check_generated_files: - runs-on: ubuntu-24.04 - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - - name: Install dependencies - uses: aminya/setup-cpp@a276e6e3d1db9160db5edc458e99a30d3b109949 # v1.7.1 - with: - clangformat: 17.0.1 - ninja: true - - - name: Check generated files - run: libcxx/utils/ci/run-buildbot check-generated-output diff --git a/.github/workflows/libcxx-run-benchmarks.yml b/.github/workflows/libcxx-run-benchmarks.yml deleted file mode 100644 index 5714600b63a5e..0000000000000 --- a/.github/workflows/libcxx-run-benchmarks.yml +++ /dev/null @@ -1,110 +0,0 @@ -# This file defines a workflow that runs the libc++ benchmarks when a comment is added to the PR. -# -# The comment is of the form: -# -# /libcxx-bot benchmark -# -# That will cause the specified benchmarks to be run on the PR and on the pull-request target, and -# their results to be compared. 
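As a concrete example of that trigger, a reviewer could leave a PR comment such as the following (the benchmark path is hypothetical, for illustration only):

    /libcxx-bot benchmark libcxx/test/benchmarks/algorithms/for_each.bench.cpp

Everything after `/libcxx-bot benchmark ` is captured by the job below and passed through to the test runner at both the merge-base and the PR head, so any benchmark selection the underlying Lit invocation accepts should work the same way.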
- -name: Benchmark libc++ - -permissions: - contents: read - -on: - issue_comment: - types: - - created - - edited - -env: - CC: clang-22 - CXX: clang++-22 - -jobs: - run-benchmarks: - permissions: - pull-requests: write - - if: >- - github.event.issue.pull_request && - contains(github.event.comment.body, '/libcxx-bot benchmark') - - runs-on: llvm-premerge-libcxx-next-runners # TODO: This should run on a dedicated set of machines - steps: - - uses: actions/setup-python@v6 - with: - python-version: '3.10' - - - name: Extract information from the PR - id: vars - run: | - python3 -m venv .venv - source .venv/bin/activate - python -m pip install pygithub - - cat <<EOF | python3 - >> ${GITHUB_OUTPUT} - import github - repo = github.Github("${{ github.token }}").get_repo("${{ github.repository }}") - pr = repo.get_pull(${{ github.event.issue.number }}) - print(f"pr_base={pr.base.sha}") - print(f"pr_head={pr.head.sha}") - EOF - BENCHMARKS=$(echo "${{ github.event.comment.body }}" | sed -nE 's/\/libcxx-bot benchmark (.+)/\1/p') - echo "benchmarks=${BENCHMARKS}" >> ${GITHUB_OUTPUT} - - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - with: - ref: ${{ steps.vars.outputs.pr_head }} - fetch-depth: 0 - fetch-tags: true # This job requires access to all the Git branches so it can diff against (usually) main - path: repo # Avoid nuking the workspace, where we have the Python virtualenv - - - name: Run baseline - run: | - source .venv/bin/activate && cd repo - python -m pip install -r libcxx/utils/requirements.txt - baseline_commit=$(git merge-base ${{ steps.vars.outputs.pr_base }} ${{ steps.vars.outputs.pr_head }}) - ./libcxx/utils/test-at-commit --commit ${baseline_commit} -B build/baseline -- -sv -j1 --param optimization=speed ${{ steps.vars.outputs.benchmarks }} - ./libcxx/utils/consolidate-benchmarks build/baseline | tee baseline.lnt - - - name: Run candidate - run: | - source .venv/bin/activate && cd repo - ./libcxx/utils/test-at-commit --commit ${{ steps.vars.outputs.pr_head }} -B build/candidate -- -sv -j1 --param optimization=speed ${{ steps.vars.outputs.benchmarks }} - ./libcxx/utils/consolidate-benchmarks build/candidate | tee candidate.lnt - - - name: Compare baseline and candidate runs - run: | - source .venv/bin/activate && cd repo - ./libcxx/utils/compare-benchmarks baseline.lnt candidate.lnt | tee results.txt - - - name: Update comment with results - run: | - source .venv/bin/activate && cd repo - cat <<EOF | python3 - - - Benchmark results: - - - \`\`\` - {benchmark_results} - \`\`\` - - - """ - - comment.edit(new_comment_text) - EOF diff --git a/.github/workflows/llvm-bugs.yml b/.github/workflows/llvm-bugs.yml deleted file mode 100644 index e23b17c2f8bef..0000000000000 --- a/.github/workflows/llvm-bugs.yml +++ /dev/null @@ -1,63 +0,0 @@ -name: LLVM Bugs notifier - -permissions: - contents: read - issues: read - -on: - issues: - types: - - opened - -jobs: - auto-subscribe: - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - steps: - - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 - with: - node-version: 18 - check-latest: true - - run: npm install mailgun.js form-data - - name: Send notification - uses: actions/github-script@v8 - env: - MAILGUN_API_KEY: ${{ secrets.LLVM_BUGS_KEY }} - with: - script: | - const Mailgun = require('mailgun.js'); - const formData = require('form-data'); - - const mailgun = new Mailgun(formData); - const DOMAIN = 'email.llvm.org'; - - const mg = mailgun.client({ username: 'api', key: process.env.MAILGUN_API_KEY }); -
- github.rest.issues.get({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo - }) - .then((issue) => { - const payload = { - author : issue.data.user.login, - issue : issue.data.number, - title : issue.data.title, - url : issue.data.html_url, - labels : issue.data.labels.map((label) => label.name), - assignee : issue.data.assignees.map((assignee) => assignee.login), - body : issue.data.body - }; - - const data = { - from: 'LLVM Bugs ', - to: 'llvm-bugs@lists.llvm.org', - subject: `[Bug ${issue.data.number}] ${issue.data.title}`, - template: 'new-github-issue', - 'o:tracking-clicks': 'no', - 'h:X-Mailgun-Variables': JSON.stringify(payload) - }; - - return mg.messages.create(DOMAIN, data); - }) - .then((msg) => console.log(msg)); diff --git a/.github/workflows/llvm-tests.yml b/.github/workflows/llvm-tests.yml deleted file mode 100644 index b8a43c4e42e75..0000000000000 --- a/.github/workflows/llvm-tests.yml +++ /dev/null @@ -1,185 +0,0 @@ -name: LLVM Tests - -permissions: - contents: read - -on: - workflow_dispatch: - push: - branches: - - 'release/**' - paths: - - 'llvm/**' - - '.github/workflows/llvm-tests.yml' - pull_request: - branches: - - 'release/**' - paths: - - 'llvm/**' - - '.github/workflows/llvm-tests.yml' - -concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} - -jobs: - abi-dump-setup: - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - outputs: - BASELINE_REF: ${{ steps.vars.outputs.BASELINE_REF }} - ABI_HEADERS: ${{ steps.vars.outputs.ABI_HEADERS }} - BASELINE_VERSION_MAJOR: ${{ steps.vars.outputs.BASELINE_VERSION_MAJOR }} - BASELINE_VERSION_MINOR: ${{ steps.vars.outputs.BASELINE_VERSION_MINOR }} - LLVM_VERSION_MAJOR: ${{ steps.version.outputs.major }} - LLVM_VERSION_MINOR: ${{ steps.version.outputs.minor }} - LLVM_VERSION_PATCH: ${{ steps.version.outputs.patch }} - steps: - - name: Checkout source - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - fetch-depth: 250 - - - name: Get LLVM version - id: version - uses: ./.github/workflows/get-llvm-version - - - name: Setup Variables - id: vars - run: | - # C++ ABI: - # 18.1.0 we aren't doing ABI checks. - # 18.1.1 We want to check 18.1.0. - # C ABI: - # 18.1.0 We want to check 17.0.x - # 18.1.1 We want to check 18.1.0 - echo "BASELINE_VERSION_MINOR=1" >> "$GITHUB_OUTPUT" - if [ ${{ steps.version.outputs.patch }} -eq 0 ]; then - { - echo "BASELINE_VERSION_MAJOR=$(( ${{ steps.version.outputs.major }} - 1))" - echo "ABI_HEADERS=llvm-c" - } >> "$GITHUB_OUTPUT" - else - { - echo "BASELINE_VERSION_MAJOR=${{ steps.version.outputs.major }}" - echo "ABI_HEADERS=." 
- } >> "$GITHUB_OUTPUT" - fi - - abi-dump: - if: github.repository_owner == 'llvm' - needs: abi-dump-setup - runs-on: ubuntu-24.04 - strategy: - matrix: - name: - - build-baseline - - build-latest - include: - - name: build-baseline - llvm_version_major: ${{ needs.abi-dump-setup.outputs.BASELINE_VERSION_MAJOR }} - ref: llvmorg-${{ needs.abi-dump-setup.outputs.BASELINE_VERSION_MAJOR }}.${{ needs.abi-dump-setup.outputs.BASELINE_VERSION_MINOR }}.0 - repo: llvm/llvm-project - - name: build-latest - llvm_version_major: ${{ needs.abi-dump-setup.outputs.LLVM_VERSION_MAJOR }} - ref: ${{ github.sha }} - repo: ${{ github.repository }} - steps: - - name: Install Ninja - uses: llvm/actions/install-ninja@main - - name: Install abi-compliance-checker - run: | - sudo apt-get update - sudo apt-get -y install abi-dumper autoconf pkg-config - - name: Install universal-ctags - run: | - git clone https://github.com/universal-ctags/ctags.git - cd ctags - ./autogen.sh - ./configure - sudo make install - - name: Download source code - uses: llvm/actions/get-llvm-project-src@main - with: - ref: ${{ matrix.ref }} - repo: ${{ matrix.repo }} - - name: Configure - run: | - mkdir install - cmake -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug -DLLVM_TARGETS_TO_BUILD="" -DLLVM_BUILD_LLVM_DYLIB=ON -DCMAKE_C_FLAGS_DEBUG="-g1 -Og" -DCMAKE_CXX_FLAGS_DEBUG="-g1 -Og" -DCMAKE_INSTALL_PREFIX="$(pwd)"/install llvm - - name: Build - # Need to run install-LLVM twice to ensure the symlink is installed (this is a bug). - run: | - ninja -C build install-LLVM - ninja -C build install-LLVM - ninja -C build install-llvm-headers - - name: Dump ABI - run: | - if [ "${{ needs.abi-dump-setup.outputs.ABI_HEADERS }}" = "llvm-c" ]; then - nm ./install/lib/libLLVM.so | awk "/T _LLVM/ || /T LLVM/ { print $3 }" | sort -u | sed -e "s/^_//g" | cut -d ' ' -f 3 > llvm.symbols - # Even though the -symbols-list option doesn't seem to filter out the symbols, I believe it speeds up processing, so I'm leaving it in. - export EXTRA_ARGS="-symbols-list llvm.symbols" - else - touch llvm.symbols - fi - abi-dumper $EXTRA_ARGS -lver ${{ matrix.ref }} -skip-cxx -public-headers ./install/include/${{ needs.abi-dump-setup.outputs.ABI_HEADERS }} -o ${{ matrix.ref }}.abi ./install/lib/libLLVM.so - # Remove symbol versioning from dumps, so we can compare across major versions. 
- sed -i 's/LLVM_${{ matrix.llvm_version_major }}/LLVM_NOVERSION/' ${{ matrix.ref }}.abi - - name: Upload ABI file - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 - with: - name: ${{ matrix.name }} - path: ${{ matrix.ref }}.abi - - - name: Upload symbol list file - if: matrix.name == 'build-baseline' - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 - with: - name: symbol-list - path: llvm.symbols - - abi-compare: - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - needs: - - abi-dump-setup - - abi-dump - steps: - - name: Download baseline - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 - with: - name: build-baseline - path: build-baseline - - name: Download latest - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 - with: - name: build-latest - path: build-latest - - name: Download symbol list - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 - with: - name: symbol-list - path: symbol-list - - - name: Install abi-compliance-checker - run: | - sudo apt-get update - sudo apt-get -y install abi-compliance-checker - - name: Compare ABI - run: | - if [ -s symbol-list/llvm.symbols ]; then - # This option doesn't seem to work with the ABI dumper, so passing it here. - export EXTRA_ARGS="-symbols-list symbol-list/llvm.symbols" - fi - # FIXME: Reading of gzip'd abi files on the GitHub runners stop - # working some time in March of 2021, likely due to a change in the - # runner's environment. - abi-compliance-checker $EXTRA_ARGS -l libLLVM.so -old build-baseline/*.abi -new build-latest/*.abi || test "${{ needs.abi-dump-setup.outputs.ABI_HEADERS }}" = "llvm-c" - - name: Upload ABI Comparison - if: always() - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 - with: - name: compat-report-${{ github.sha }} - path: compat_reports/ diff --git a/.github/workflows/merged-prs.yml b/.github/workflows/merged-prs.yml deleted file mode 100644 index bde01b2b8a231..0000000000000 --- a/.github/workflows/merged-prs.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: "Add buildbot information to first PRs from new contributors" - -permissions: - contents: read - -on: - # It's safe to use pull_request_target here, because we aren't checking out - # code from the pull request branch. 
- # See https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ - pull_request_target: - types: - - closed - -jobs: - buildbot_comment: - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - if: >- - (github.repository == 'llvm/llvm-project') && - (github.event.pull_request.merged == true) - steps: - - name: Checkout Automation Script - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: llvm/utils/git/ - ref: main - - - name: Setup Automation Script - working-directory: ./llvm/utils/git/ - run: | - pip install --require-hashes -r requirements.txt - - - name: Add Buildbot information comment - working-directory: ./llvm/utils/git/ - run: | - python3 ./github-automation.py \ - --token '${{ secrets.GITHUB_TOKEN }}' \ - pr-buildbot-information \ - --issue-number "${{ github.event.pull_request.number }}" \ - --author "${{ github.event.pull_request.user.login }}" diff --git a/.github/workflows/mlir-spirv-tests.yml b/.github/workflows/mlir-spirv-tests.yml deleted file mode 100644 index 5bb16c739cdde..0000000000000 --- a/.github/workflows/mlir-spirv-tests.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: MLIR SPIR-V Tests - -permissions: - contents: read - -on: - workflow_dispatch: - pull_request: - paths: - - 'mlir/include/mlir/Dialect/SPIRV/**' - - 'mlir/lib/Dialect/SPIRV/**' - - 'mlir/include/mlir/Target/SPIRV/**' - - 'mlir/lib/Target/SPIRV/**' - - 'mlir/test/Target/SPIRV/**' - - '.github/workflows/mlir-spirv-tests.yml' - -concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} - -jobs: - check_spirv: - if: github.repository_owner == 'llvm' - name: Test MLIR SPIR-V - runs-on: ubuntu-24.04 - container: - image: ghcr.io/llvm/ci-ubuntu-24.04:latest - steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - name: Setup ccache - uses: hendrikmuhs/ccache-action@bfa03e1de4d7f7c3e80ad9109feedd05c4f5a716 # v1.2.19 - with: - max-size: 2G - key: spirv-mlir-ubuntu-24.04 - variant: sccache - - name: Build and Test - run: | - mkdir build - cmake -GNinja \ - -S llvm \ - -B build \ - -DCMAKE_BUILD_TYPE=Release \ - -DLLVM_ENABLE_ASSERTIONS=ON \ - -DCMAKE_C_COMPILER_LAUNCHER=sccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=sccache \ - -DLLVM_TARGETS_TO_BUILD="host" \ - -DLLVM_INCLUDE_SPIRV_TOOLS_TESTS=ON \ - -DLLVM_ENABLE_PROJECTS=mlir - ninja -C build check-mlir diff --git a/.github/workflows/new-issues.yml b/.github/workflows/new-issues.yml deleted file mode 100644 index 8480a657cc717..0000000000000 --- a/.github/workflows/new-issues.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Labeling new issues -on: - issues: - types: ['opened'] - -permissions: - contents: read - -jobs: - automate-issues-labels: - permissions: - issues: write - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - steps: - - uses: llvm/actions/issue-labeler@main - with: - repo-token: ${{ secrets.ISSUE_SUBSCRIBER_TOKEN }} - configuration-path: .github/new-issues-labeler.yml - include-title: 1 - include-body: 0 - sync-labels: 0 - enable-versioned-regex: 0 diff --git a/.github/workflows/new-prs.yml b/.github/workflows/new-prs.yml deleted file mode 100644 index 776c392481048..0000000000000 --- a/.github/workflows/new-prs.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: "Labelling new pull requests" - -permissions: - contents: read - -on: - # It's safe to use 
pull_request_target here, because we aren't checking out - # code from the pull request branch. - # See https://securitylab.github.com/research/github-actions-preventing-pwn-requests/ - pull_request_target: - types: - - opened - - reopened - - ready_for_review - - synchronize - -jobs: - greeter: - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - # Only comment on PRs that have been opened for the first time, by someone - # new to LLVM or to GitHub as a whole. Ideally we'd look for FIRST_TIMER - # or FIRST_TIME_CONTRIBUTOR, but this does not appear to work. Instead check - # that we do not have any of the other author associations. - # See https://docs.github.com/en/webhooks/webhook-events-and-payloads?actionType=opened#pull_request - # for all the possible values. - if: >- - (github.repository == 'llvm/llvm-project') && - (github.event.action == 'opened') && - (github.event.pull_request.author_association != 'COLLABORATOR') && - (github.event.pull_request.author_association != 'CONTRIBUTOR') && - (github.event.pull_request.author_association != 'MANNEQUIN') && - (github.event.pull_request.author_association != 'MEMBER') && - (github.event.pull_request.author_association != 'OWNER') - steps: - - name: Checkout Automation Script - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: llvm/utils/git/ - ref: main - - - name: Setup Automation Script - working-directory: ./llvm/utils/git/ - run: | - pip install --require-hashes -r requirements.txt - - - name: Greet Author - working-directory: ./llvm/utils/git/ - run: | - python3 ./github-automation.py \ - --token '${{ secrets.GITHUB_TOKEN }}' \ - pr-greeter \ - --issue-number "${{ github.event.pull_request.number }}" - - automate-prs-labels: - # Greet first so that only the author gets that notification. - needs: greeter - runs-on: ubuntu-24.04 - # Ignore PRs with more than 10 commits. Pull requests with a lot of - # commits tend to be accidents usually when someone made a mistake while trying - # to rebase. We want to ignore these pull requests to avoid excessive - # notifications. - # always() means that even if greeter is skipped, this job will run. 
- if: > - always() && github.repository == 'llvm/llvm-project' && - github.event.pull_request.draft == false && - github.event.pull_request.commits < 10 - steps: - - uses: actions/labeler@v6 - with: - configuration-path: .github/new-prs-labeler.yml - # workaround for https://github.com/actions/labeler/issues/112 - sync-labels: '' - repo-token: ${{ secrets.ISSUE_SUBSCRIBER_TOKEN }} diff --git a/.github/workflows/pr-code-format.yml b/.github/workflows/pr-code-format.yml deleted file mode 100644 index 8f9a5d48c8fac..0000000000000 --- a/.github/workflows/pr-code-format.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: "Check code formatting" - -permissions: - contents: read - -on: - pull_request: - branches: - - main - - sycl - - sycl-rel-** - - 'users/**' - -jobs: - code_formatter: - runs-on: ubuntu-24.04 - timeout-minutes: 30 - concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number }} - cancel-in-progress: true - if: (github.repository == 'llvm/llvm-project' || github.repository == 'intel/llvm') && !contains(github.event.pull_request.labels.*.name, 'disable-lint') - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - fetch-depth: 2 - - - name: Get changed files - id: changed-files - uses: step-security/changed-files@95b56dadb92a30ca9036f16423fd3c088a71ee94 # v46.0.5 - with: - separator: "," - skip_initial_fetch: true - base_sha: 'HEAD~1' - sha: 'HEAD' - - # We need to pull the script from the main branch, so that we ensure - # we get the latest version of this script. - - name: Fetch code formatting utils - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - repository: ${{ github.repository }} - ref: ${{ github.base_ref }} - sparse-checkout: | - llvm/utils/git/requirements_formatting.txt - llvm/utils/git/code-format-helper.py - sparse-checkout-cone-mode: false - path: code-format-tools - - - name: "Listed files" - env: - CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} - run: | - echo "Formatting files:" - echo "$CHANGED_FILES" - - - name: Install clang-format - uses: aminya/setup-cpp@a276e6e3d1db9160db5edc458e99a30d3b109949 # v1.7.1 - with: - clangformat: 20.1.8 - - - name: Setup Python env - uses: actions/setup-python@v6.0.0 - with: - python-version: '3.11' - cache: 'pip' - cache-dependency-path: 'code-format-tools/llvm/utils/git/requirements_formatting.txt' - - - name: Install python dependencies - run: pip install -r code-format-tools/llvm/utils/git/requirements_formatting.txt - - - name: Run code formatter - env: - GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} - CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} - # Create an empty comments file so the pr-write job doesn't fail. 
- run: | - echo "[]" > comments && - python ./code-format-tools/llvm/utils/git/code-format-helper.py \ - --write-comment-to-file \ - --token ${{ secrets.GITHUB_TOKEN }} \ - --issue-number $GITHUB_PR_NUMBER \ - --start-rev HEAD~1 \ - --end-rev HEAD \ - --changed-files "$CHANGED_FILES" - - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 - if: always() - with: - name: workflow-args - path: | - comments diff --git a/.github/workflows/pr-code-lint.yml b/.github/workflows/pr-code-lint.yml deleted file mode 100644 index bc70933147bd2..0000000000000 --- a/.github/workflows/pr-code-lint.yml +++ /dev/null @@ -1,112 +0,0 @@ -name: "Code lint" - -permissions: - contents: read - -on: - pull_request: - branches: - - main - - 'users/**' - paths: - - 'clang-tools-extra/clang-tidy/**' - - '.github/workflows/pr-code-lint.yml' - -jobs: - code_linter: - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - defaults: - run: - shell: bash - container: - image: 'ghcr.io/llvm/ci-ubuntu-24.04:latest' - timeout-minutes: 60 - concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - steps: - - name: Fetch LLVM sources - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - with: - fetch-depth: 2 - - - name: Get changed files - id: changed-files - uses: step-security/changed-files@3dbe17c78367e7d60f00d78ae6781a35be47b4a1 # v45.0.1 - with: - separator: "," - skip_initial_fetch: true - base_sha: 'HEAD~1' - sha: 'HEAD' - - - name: Listed files - env: - CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} - run: | - echo "Changed files:" - echo "$CHANGED_FILES" - - - name: Install clang-tidy - uses: aminya/setup-cpp@17c11551771948abc5752bbf3183482567c7caf0 # v1.1.1 - with: - clang-tidy: 20.1.8 - - - name: Setup Python env - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 - with: - python-version: '3.12' - - - name: Install Python dependencies - run: python3 -m pip install -r llvm/utils/git/requirements_linting.txt - - # TODO: create special mapping for 'codegen' targets, for now build predefined set - # TODO: add entrypoint in 'compute_projects.py' that only adds a project and its direct dependencies - - name: Configure and CodeGen - run: | - git config --global --add safe.directory '*' - - . 
<(git diff --name-only HEAD~1...HEAD | python3 .ci/compute_projects.py) - - if [[ "${projects_to_build}" == "" ]]; then - echo "No projects to analyze" - exit 0 - fi - - cmake -G Ninja \ - -B build \ - -S llvm \ - -DLLVM_ENABLE_ASSERTIONS=OFF \ - -DLLVM_ENABLE_PROJECTS="${projects_to_build}" \ - -DCMAKE_CXX_COMPILER=clang++ \ - -DCMAKE_C_COMPILER=clang \ - -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \ - -DLLVM_INCLUDE_TESTS=OFF \ - -DCLANG_INCLUDE_TESTS=OFF \ - -DCMAKE_BUILD_TYPE=Release - - ninja -C build \ - clang-tablegen-targets \ - genconfusable # for "ConfusableIdentifierCheck.h" - - - name: Run code linter - env: - GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} - CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} - run: | - echo "[]" > comments && - python3 llvm/utils/git/code-lint-helper.py \ - --token ${{ secrets.GITHUB_TOKEN }} \ - --issue-number $GITHUB_PR_NUMBER \ - --start-rev HEAD~1 \ - --end-rev HEAD \ - --verbose \ - --changed-files "$CHANGED_FILES" - - - name: Upload results - uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 #v4.3.0 - if: always() - with: - name: workflow-args - path: | - comments diff --git a/.github/workflows/pr-request-release-note.yml b/.github/workflows/pr-request-release-note.yml deleted file mode 100644 index a5d47bb982ae0..0000000000000 --- a/.github/workflows/pr-request-release-note.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: PR Request Release Note - -permissions: - contents: read - -on: - pull_request: - types: - - closed - -jobs: - request-release-note: - if: >- - github.repository_owner == 'llvm' && - startsWith(github.ref, 'refs/heads/release') - - runs-on: ubuntu-24.04 - steps: - # We need to pull the script from the main branch, so that we ensure - # we get the latest version of this script. - - name: Checkout Scripts - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: | - llvm/utils/git/requirements.txt - llvm/utils/git/github-automation.py - sparse-checkout-cone-mode: false - - - name: Install Dependencies - run: | - pip install --require-hashes -r llvm/utils/git/requirements.txt - - - name: Request Release Note - env: - # We need to use an llvmbot token here, because we are mentioning a user. 
- GITHUB_TOKEN: ${{ github.token }} - run: | - python3 llvm/utils/git/github-automation.py \ - --repo "$GITHUB_REPOSITORY" \ - --token "$GITHUB_TOKEN" \ - request-release-note \ - --pr-number ${{ github.event.pull_request.number}} - - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 - if: always() - with: - name: workflow-args - path: | - comments diff --git a/.github/workflows/pr-subscriber.yml b/.github/workflows/pr-subscriber.yml deleted file mode 100644 index 1ac35ec967c52..0000000000000 --- a/.github/workflows/pr-subscriber.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: PR Subscriber - -on: - pull_request_target: - types: - - labeled - -permissions: - contents: read - -jobs: - auto-subscribe: - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - steps: - - name: Checkout Automation Script - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: llvm/utils/git/ - ref: main - - - name: Setup Automation Script - working-directory: ./llvm/utils/git/ - run: | - pip install --require-hashes -r requirements.txt - - - name: Update watchers - working-directory: ./llvm/utils/git/ - run: | - python3 ./github-automation.py \ - --token '${{ secrets.ISSUE_SUBSCRIBER_TOKEN }}' \ - pr-subscriber \ - --issue-number "${{ github.event.number }}" \ - --label-name "${{ github.event.label.name }}" diff --git a/.github/workflows/premerge.yaml b/.github/workflows/premerge.yaml deleted file mode 100644 index f6e53750578cb..0000000000000 --- a/.github/workflows/premerge.yaml +++ /dev/null @@ -1,181 +0,0 @@ -name: CI Checks - -permissions: - contents: read - -on: - pull_request: - types: - - opened - - synchronize - - reopened - # When a PR is closed, we still start this workflow, but then skip - # all the jobs, which makes it effectively a no-op. The reason to - # do this is that it allows us to take advantage of concurrency groups - # to cancel in progress CI jobs whenever the PR is closed. - - closed - push: - branches: - - 'release/**' - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} - cancel-in-progress: true - -jobs: - premerge-checks-linux: - name: Build and Test Linux - if: >- - github.repository_owner == 'llvm' && - (github.event_name != 'pull_request' || github.event.action != 'closed') - runs-on: llvm-premerge-linux-runners - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - fetch-depth: 2 - - name: Build and Test - run: | - git config --global --add safe.directory '*' - - source <(git diff --name-only HEAD~1...HEAD | python3 .ci/compute_projects.py) - - if [[ "${projects_to_build}" == "" ]]; then - echo "No projects to build" - exit 0 - fi - - echo "Building projects: ${projects_to_build}" - echo "Running project checks targets: ${project_check_targets}" - echo "Building runtimes: ${runtimes_to_build}" - echo "Running runtimes checks targets: ${runtimes_check_targets}" - echo "Running runtimes checks requiring reconfiguring targets: ${runtimes_check_targets_needs_reconfig}" - - export CC=/opt/llvm/bin/clang - export CXX=/opt/llvm/bin/clang++ - - # This environment variable is passed into the container through the - # runner pod definition. This differs between our two clusters, which is - # why we do not hardcode it. - export SCCACHE_GCS_BUCKET=$CACHE_GCS_BUCKET - export SCCACHE_GCS_RW_MODE=READ_WRITE - - # Set the idle timeout to zero to ensure sccache runs for the
# entire duration of the job. Otherwise it might stop if we run - # several test suites in a row and discard statistics that we want - # to save in the end. - export SCCACHE_IDLE_TIMEOUT=0 - sccache --start-server - - ./.ci/monolithic-linux.sh "${projects_to_build}" "${project_check_targets}" "${runtimes_to_build}" "${runtimes_check_targets}" "${runtimes_check_targets_needs_reconfig}" "${enable_cir}" - - name: Upload Artifacts - # In some cases, GitHub will fail to upload the artifact. We want to - # continue anyway, as a failed artifact upload is an infra failure, not - # a checks failure. - # https://github.com/actions/upload-artifact/issues/569 - continue-on-error: true - if: '!cancelled()' - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: Premerge Artifacts (Linux) - path: artifacts/ - retention-days: 5 - include-hidden-files: 'true' - - premerge-checks-windows: - name: Build and Test Windows - if: >- - github.repository_owner == 'llvm' && - (github.event_name != 'pull_request' || github.event.action != 'closed') - runs-on: llvm-premerge-windows-2022-runners - defaults: - run: - shell: bash - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - fetch-depth: 2 - - name: Compute Projects - id: vars - run: | - source <(git diff --name-only HEAD~1...HEAD | python .ci/compute_projects.py) - - if [[ "${projects_to_build}" == "" ]]; then - echo "No projects to build" - fi - - echo "Building projects: ${projects_to_build}" - echo "Running project checks targets: ${project_check_targets}" - - echo "windows-projects=${projects_to_build}" >> $GITHUB_OUTPUT - echo "windows-check-targets=${project_check_targets}" >> $GITHUB_OUTPUT - - name: Build and Test - if: ${{ steps.vars.outputs.windows-projects != '' }} - shell: cmd - run: | - call C:\\BuildTools\\Common7\\Tools\\VsDevCmd.bat -arch=amd64 -host_arch=amd64 - # See the comments above in the Linux job for why we define each of - # these environment variables. - bash -c "export SCCACHE_GCS_BUCKET=$CACHE_GCS_BUCKET; export SCCACHE_GCS_RW_MODE=READ_WRITE; export SCCACHE_IDLE_TIMEOUT=0; sccache --start-server; .ci/monolithic-windows.sh \"${{ steps.vars.outputs.windows-projects }}\" \"${{ steps.vars.outputs.windows-check-targets }}\"" - name: Upload Artifacts - # In some cases, GitHub will fail to upload the artifact. We want to - # continue anyway, as a failed artifact upload is an infra failure, not - # a checks failure.
- # https://github.com/actions/upload-artifact/issues/569 - continue-on-error: true - if: '!cancelled()' - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: Premerge Artifacts (Windows) - path: artifacts/ - retention-days: 5 - include-hidden-files: 'true' - - premerge-check-macos: - name: MacOS Premerge Checks - runs-on: macos-14 - if: >- - github.repository_owner == 'llvm' && - (startswith(github.ref_name, 'release/') || - startswith(github.base_ref, 'release/')) && - (github.event_name != 'pull_request' || github.event.action != 'closed') - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - fetch-depth: 2 - - name: Setup ccache - uses: hendrikmuhs/ccache-action@bfa03e1de4d7f7c3e80ad9109feedd05c4f5a716 # v1.2.19 - with: - max-size: "2000M" - - name: Install Ninja - uses: llvm/actions/install-ninja@main - - name: Build and Test - run: | - source <(git diff --name-only HEAD~2..HEAD | python3 .ci/compute_projects.py) - - if [[ "${projects_to_build}" == "" ]]; then - echo "No projects to build" - exit 0 - fi - - echo "Building projects: ${projects_to_build}" - echo "Running project checks targets: ${project_check_targets}" - - # -DLLVM_DISABLE_ASSEMBLY_FILES=ON is for - # https://github.com/llvm/llvm-project/issues/81967 - # Disable sharding in lit so that the LIT_XFAIL environment var works. - cmake -G Ninja \ - -B build \ - -S llvm \ - -DLLVM_ENABLE_PROJECTS="${projects_to_build}" \ - -DLLVM_DISABLE_ASSEMBLY_FILES=ON \ - -DCMAKE_BUILD_TYPE=Release \ - -DLLDB_INCLUDE_TESTS=OFF \ - -DLLVM_ENABLE_ASSERTIONS=ON \ - -DCMAKE_C_COMPILER_LAUNCHER=ccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=ccache - - # The libcxx tests fail, so we are skipping the runtime targets. - ninja -C build ${project_check_targets} diff --git a/.github/workflows/release-asset-audit.py b/.github/workflows/release-asset-audit.py deleted file mode 100644 index 23b901a476dc0..0000000000000 --- a/.github/workflows/release-asset-audit.py +++ /dev/null @@ -1,102 +0,0 @@ -import github -import re -import sys - -_SPECIAL_CASE_BINARIES = { - "keith": {"clang+llvm-18.1.8-arm64-apple-macos11.tar.xz"}, -} - - -def _is_valid(uploader_name, valid_uploaders, asset_name): - if uploader_name in valid_uploaders: - return True - - if uploader_name in _SPECIAL_CASE_BINARIES: - return asset_name in _SPECIAL_CASE_BINARIES[uploader_name] - - return False - - -def _get_uploaders(release_version): - # Until llvm 18, assets were uploaded by community members, the release managers - # and the GitHub Actions bot. - if release_version <= 18: - return set( - [ - "DimitryAndric", - "stefanp-synopsys", - "lei137", - "omjavaid", - "nicolerabjohn", - "amy-kwan", - "mandlebug", - "zmodem", - "androm3da", - "tru", - "rovka", - "rorth", - "quinnlp", - "kamaub", - "abrisco", - "jakeegan", - "maryammo", - "tstellar", - "github-actions[bot]", - ] - ) - # llvm 19 and beyond, only the release managers, bot and a much smaller - # number of community members. - elif release_version >= 19: - return set( - [ - "zmodem", - "omjavaid", - "tru", - "tstellar", - "github-actions[bot]", - ] - ) - - -def _get_major_release_version(release_title): - # All release titles are of the form "LLVM X.Y.Z(-rcN)". - match = re.match("LLVM ([0-9]+)\.", release_title) - if match is None: - _write_comment_and_exit_with_error( - f'Could not parse release version from release title "{release_title}".' 
- ) - else: - return int(match.groups()[0]) - - -def _write_comment_and_exit_with_error(comment): - with open("comment", "w") as file: - file.write(comment) - sys.exit(1) - - -def main(): - token = sys.argv[1] - - gh = github.Github(login_or_token=token) - repo = gh.get_repo("llvm/llvm-project") - - for release in repo.get_releases(): - print("Release:", release.title) - uploaders = _get_uploaders(_get_major_release_version(release.title)) - for asset in release.get_assets(): - created_at = asset.created_at - updated_at = ( - "" if asset.created_at == asset.updated_at else asset.updated_at - ) - print( - f"{asset.name} : {asset.uploader.login} [{created_at} {updated_at}] ( {asset.download_count} )" - ) - if not _is_valid(asset.uploader.login, uploaders, asset.name): - _write_comment_and_exit_with_error( - f"@{asset.uploader.login} is not a valid uploader." - ) - - -if __name__ == "__main__": - main() diff --git a/.github/workflows/release-asset-audit.yml b/.github/workflows/release-asset-audit.yml deleted file mode 100644 index 80c09da086039..0000000000000 --- a/.github/workflows/release-asset-audit.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Release Asset Audit - -on: - workflow_dispatch: - release: - schedule: - # * is a special character in YAML so you have to quote this string - # Run once an hour - - cron: '5 * * * *' - - pull_request: - paths: - - ".github/workflows/release-asset-audit.py" - - ".github/workflows/release-asset-audit.yml" - -permissions: - contents: read # Default everything to read-only - -jobs: - audit: - name: "Release Asset Audit" - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: | - .github/workflows/release-asset-audit.py - llvm/utils/git/requirements.txt - - name: "Run Audit Script" - env: - GITHUB_TOKEN: ${{ github.token }} - run: | - pip install --require-hashes -r ./llvm/utils/git/requirements.txt - python3 ./.github/workflows/release-asset-audit.py $GITHUB_TOKEN - - name: "File Issue" - if: >- - github.event_name != 'pull_request' && - failure() - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd #v8.0.0 - with: - github-token: ${{ secrets.ISSUE_SUBSCRIBER_TOKEN }} - script: | - var fs = require('fs'); - var body = '' - if (fs.existsSync('./comment')) { - body = fs.readFileSync('./comment') + "\n\n"; - } - body = body + `\n\nhttps://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}` - - const issue = await github.rest.issues.create({ - owner: context.repo.owner, - repo: context.repo.repo, - title: "Release Asset Audit Failed", - labels: ['infrastructure'], - body: body - }); - console.log(issue); diff --git a/.github/workflows/release-binaries-all.yml b/.github/workflows/release-binaries-all.yml deleted file mode 100644 index 0b52a08202f1a..0000000000000 --- a/.github/workflows/release-binaries-all.yml +++ /dev/null @@ -1,105 +0,0 @@ -name: Release Binaries All - -permissions: - contents: read # Default everything to read-only - -on: - workflow_dispatch: - inputs: - release-version: - description: 'Release Version' - required: true - type: string - upload: - description: 'Upload binaries to the release page' - required: true - default: false - type: boolean - - workflow_call: - inputs: - release-version: - description: 'Release Version' - required: true - type: string - upload: - description: 'Upload binaries to the release page' - required: true - 
default: false - type: boolean - secrets: - RELEASE_TASKS_USER_TOKEN: - description: "Secret used to check user permissions." - required: false - - pull_request: - types: - - opened - - synchronize - - reopened - # When a PR is closed, we still start this workflow, but then skip - # all the jobs, which makes it effectively a no-op. The reason to - # do this is that it allows us to take advantage of concurrency groups - # to cancel in progress CI jobs whenever the PR is closed. - - closed - paths: - - '.github/workflows/release-binaries-all.yml' - - '.github/workflows/release-binaries.yml' - - '.github/workflows/release-binaries-setup-stage/*' - - '.github/workflows/release-binaries-save-stage/*' - - 'clang/cmake/caches/Release.cmake' - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || 'dispatch' }} - cancel-in-progress: True - -jobs: - setup-variables: - if: >- - (github.event_name != 'pull_request' || github.event.action != 'closed') - runs-on: ubuntu-24.04 - outputs: - release-version: ${{ steps.vars.outputs.release-version }} - upload: ${{ steps.vars.outputs.upload }} - steps: - - shell: bash - id: vars - run: | - upload="${{ inputs.upload }}" - release_version="${{ inputs.release-version }}" - if [ "${{ github.event_name }}" = "pull_request" ]; then - upload="false" - release_version="" - fi - echo "release-version=$release_version" >> "$GITHUB_OUTPUT" - echo "upload=$upload" >> "$GITHUB_OUTPUT" - - release-binaries-all: - name: Build Release Binaries - needs: - - setup-variables - permissions: - contents: write # For release uploads - id-token: write # For artifact attestations - attestations: write # For artifact attestations - strategy: - fail-fast: false - matrix: - # We use ubuntu-22.04 rather than the latest version to make the built - # binaries more portable (e.g. functional against older glibc). - runs-on: - - ubuntu-22.04 - - ubuntu-22.04-arm - - macos-13 - - macos-14 - - uses: ./.github/workflows/release-binaries.yml - with: - release-version: "${{ needs.setup-variables.outputs.release-version }}" - upload: ${{ needs.setup-variables.outputs.upload == 'true'}} - runs-on: "${{ matrix.runs-on }}" - secrets: - # This will be empty for pull_request events, but that's fine, because - # the release-binaries workflow does not use this secret for the - # pull_request event. - RELEASE_TASKS_USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} diff --git a/.github/workflows/release-binaries-save-stage/action.yml b/.github/workflows/release-binaries-save-stage/action.yml deleted file mode 100644 index f08088c7bc56f..0000000000000 --- a/.github/workflows/release-binaries-save-stage/action.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Save Stage -description: >- - Upload the source and binary directories from a build stage so that they - can be re-used in the next stage. This action is used to split the release - binaries workflow into multiple stages to avoid the 6-hour timeout on - the GitHub-hosted runners. -inputs: - build-prefix: - description: "Directory containing the build directory." - required: true - type: 'string' - -permissions: - contents: read - -runs: - using: "composite" - steps: - # We need to create an archive of the build directory, because it has too - # many files to upload. - - name: Package Build and Source Directories - shell: bash - run: | - # Remove .git/config to avoid leaking GITHUB_TOKEN stored there.
- # See https://unit42.paloaltonetworks.com/github-repo-artifacts-leak-tokens/ - rm -Rf .git/config - # Windows does not support symlinks, so we need to dereference them. - tar --exclude build/ ${{ (runner.os == 'Windows' && '-h') || '' }} -c . | zstd -T0 -c > ../llvm-project.tar.zst - mv ../llvm-project.tar.zst . - tar -C ${{ inputs.build-prefix }} -c build/ | zstd -T0 -c > build.tar.zst - - - name: Upload Stage 1 Source - uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 #v4.3.0 - with: - name: ${{ runner.os }}-${{ runner.arch }}-${{ github.job }}-source - path: llvm-project.tar.zst - retention-days: 2 - - - name: Upload Stage 1 Build Dir - uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 #v4.3.0 - with: - name: ${{ runner.os}}-${{ runner.arch }}-${{ github.job }}-build - path: build.tar.zst - retention-days: 2 diff --git a/.github/workflows/release-binaries-setup-stage/action.yml b/.github/workflows/release-binaries-setup-stage/action.yml deleted file mode 100644 index f5e5db27e6595..0000000000000 --- a/.github/workflows/release-binaries-setup-stage/action.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Setup Stage -description: >- - Setup the next stage of the release binaries workflow. This sets up the - environment correctly for a new stage of the release binaries workflow - and also restores the source and build directory from the previous stage. - -inputs: - previous-artifact: - description: >- - A unique descriptor for the artifact from the previous stage. This will - be used to construct the final artifact pattern, which is: - $RUNNER_OS-$RUNNER_ARCH-$PREVIOUS_ARTIFACT-* - required: false - type: 'string' - -outputs: - build-prefix: - description: "Directory containing the build directory." - value: ${{ steps.build-prefix.outputs.build-prefix }} - -runs: - using: "composite" - steps: - - name: Install Ninja - uses: llvm/actions/install-ninja@22e9f909d35b50bd1181709564bfe816eaeaae81 # main - - - name: Setup Windows - if: startsWith(runner.os, 'Windows') - uses: llvm/actions/setup-windows@main - with: - arch: amd64 - - - name: Set Build Prefix - id: build-prefix - shell: bash - run: | - build_prefix=`pwd` - if [ "${{ runner.os }}" = "Linux" ]; then - sudo chown $USER:$USER /mnt/ - build_prefix=/mnt/ - fi - echo "build-prefix=$build_prefix" >> $GITHUB_OUTPUT - - - name: Download Previous Stage Artifact - if: ${{ inputs.previous-artifact }} - id: download - uses: actions/download-artifact@6b208ae046db98c579e8a3aa621ab581ff575935 # v4.1.1 - with: - pattern: ${{ runner.os }}-${{ runner.arch }}-${{ inputs.previous-artifact }}-* - merge-multiple: true - - - name: Unpack Artifact - if: ${{ steps.download.outputs.download-path }} - shell: bash - run: | - tar --zstd -xf llvm-project.tar.zst - rm llvm-project.tar.zst - tar --zstd -C ${{ steps.build-prefix.outputs.build-prefix}} -xf build.tar.zst - rm build.tar.zst diff --git a/.github/workflows/release-binaries.yml b/.github/workflows/release-binaries.yml deleted file mode 100644 index 39c836f9e6dbc..0000000000000 --- a/.github/workflows/release-binaries.yml +++ /dev/null @@ -1,357 +0,0 @@ -name: Release Binaries - -on: - workflow_dispatch: - inputs: - release-version: - description: 'Release Version' - required: false - type: string - upload: - description: 'Upload binaries to the release page' - required: true - default: false - type: boolean - runs-on: - description: "Runner to use for the build" - required: true - type: choice - # We use ubuntu-22.04 rather than the latest version to make the built - 
# binaries more portable (e.g. functional against older glibc). - options: - - ubuntu-22.04 - - ubuntu-22.04-arm - - macos-13 - - macos-14 - - workflow_call: - inputs: - release-version: - description: 'Release Version' - required: false - type: string - upload: - description: 'Upload binaries to the release page' - required: true - default: false - type: boolean - runs-on: - description: "Runner to use for the build" - required: true - type: string - secrets: - RELEASE_TASKS_USER_TOKEN: - description: "Secret used to check user permissions." - required: false - - -permissions: - contents: read # Default everything to read-only - -jobs: - prepare: - name: Prepare to build binaries - runs-on: ${{ inputs.runs-on }} - if: github.repository_owner == 'llvm' - outputs: - release-version: ${{ steps.vars.outputs.release-version }} - ref: ${{ steps.vars.outputs.ref }} - upload: ${{ steps.vars.outputs.upload }} - target-cmake-flags: ${{ steps.vars.outputs.target-cmake-flags }} - ccache: ${{ steps.vars.outputs.ccache }} - build-flang: ${{ steps.vars.outputs.build-flang }} - release-binary-basename: ${{ steps.vars.outputs.release-binary-basename }} - release-binary-filename: ${{ steps.vars.outputs.release-binary-filename }} - build-runs-on: ${{ steps.vars.outputs.build-runs-on }} - test-runs-on: ${{ steps.vars.outputs.build-runs-on }} - - steps: - # It's good practice to use setup-python, but this is also required on macos-14 - # due to https://github.com/actions/runner-images/issues/10385 - - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c - with: - python-version: '3.12' - - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - - name: Install Dependencies - shell: bash - run: | - pip install --require-hashes -r ./llvm/utils/git/requirements.txt - - - name: Check Permissions - if: github.event_name != 'pull_request' - env: - GITHUB_TOKEN: ${{ github.token }} - USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - shell: bash - run: | - ./llvm/utils/release/./github-upload-release.py --token "$GITHUB_TOKEN" --user "$GITHUB_ACTOR" --user-token "$USER_TOKEN" check-permissions - - - name: Collect Variables - id: vars - shell: bash - # In order for the test-release.sh script to run correctly, the LLVM - # source needs to be at the following location relative to the build dir: - # | X.Y.Z-rcN | ./rcN/llvm-project - # | X.Y.Z | ./final/llvm-project - # - # We also need to set divergent flags based on the release version: - # | X.Y.Z-rcN | -rc N -test-asserts - # | X.Y.Z | -final - run: | - trimmed=$(echo ${{ inputs.release-version }} | xargs) - if [ -n "$trimmed" ]; then - release_version="$trimmed" - ref="llvmorg-$release_version" - else - release_version="${{ (github.event_name == 'pull_request' && format('PR{0}', github.event.pull_request.number)) || 'CI'}}-$GITHUB_SHA" - ref="$GITHUB_SHA" - fi - if [ -n "${{ inputs.upload }}" ]; then - upload="${{ inputs.upload }}" - else - upload="false" - fi - echo "release-version=$release_version" >> $GITHUB_OUTPUT - echo "ref=$ref" >> $GITHUB_OUTPUT - echo "upload=$upload" >> $GITHUB_OUTPUT - - release_binary_basename="LLVM-$release_version-$RUNNER_OS-$RUNNER_ARCH" - echo "release-binary-basename=$release_binary_basename" >> $GITHUB_OUTPUT - echo "release-binary-filename=$release_binary_basename.tar.xz" >> $GITHUB_OUTPUT - - target="$RUNNER_OS-$RUNNER_ARCH" - # The hendrikmuhs/ccache-action action does not support installing sccache - # on arm64 Linux.
- if [ "$target" = "Linux-ARM64" ]; then - echo ccache=ccache >> $GITHUB_OUTPUT - else - echo ccache=sccache >> $GITHUB_OUTPUT - fi - - # The macOS builds try to cross compile some libraries so we need to - # add extra CMake args to disable them. - # See https://github.com/llvm/llvm-project/issues/99767 - if [ "$RUNNER_OS" = "macOS" ]; then - target_cmake_flags="$target_cmake_flags -DBOOTSTRAP_BOOTSTRAP_COMPILER_RT_ENABLE_IOS=OFF" - if [ "$RUNNER_ARCH" = "ARM64" ]; then - arches=arm64 - else - arches=x86_64 - # Disable Flang builds on macOS x86_64. The FortranLower library takes - # 2-3 hours to build on macOS, much slower than on Linux. - # The long build time causes the release build to time out on x86_64, - # so we need to disable flang there. - target_cmake_flags="$target_cmake_flags -DLLVM_RELEASE_ENABLE_PROJECTS='clang;lld;lldb;clang-tools-extra;polly;mlir'" - fi - target_cmake_flags="$target_cmake_flags -DBOOTSTRAP_BOOTSTRAP_DARWIN_osx_ARCHS=$arches -DBOOTSTRAP_BOOTSTRAP_DARWIN_osx_BUILTIN_ARCHS=$arches" - fi - - build_flang="true" - - if [ "$RUNNER_OS" = "Windows" ]; then - # The build times out on Windows, so we need to disable LTO. - target_cmake_flags="$target_cmake_flags -DLLVM_RELEASE_ENABLE_LTO=OFF" - fi - - echo "target-cmake-flags=$target_cmake_flags" >> $GITHUB_OUTPUT - echo "build-flang=$build_flang" >> $GITHUB_OUTPUT - case "${{ inputs.runs-on }}" in - ubuntu-22.04*) - build_runs_on="depot-${{ inputs.runs-on }}-16" - test_runs_on=$build_runs_on - ;; - macos-13) - if [ "$GITHUB_EVENT_NAME" = "pull_request" ]; then - build_runs_on="${{ inputs.runs-on }}" - else - build_runs_on="macos-13-large" - fi - test_runs_on="${{ inputs.runs-on }}" - ;; - macos-14) - if [ "$GITHUB_EVENT_NAME" = "pull_request" ]; then - build_runs_on="${{ inputs.runs-on }}" - else - build_runs_on="depot-macos-14" - fi - test_runs_on="${{ inputs.runs-on }}" - ;; - *) - test_runs_on="${{ inputs.runs-on }}" - build_runs_on=$test_runs_on - ;; - esac - echo "build-runs-on=$build_runs_on" >> $GITHUB_OUTPUT - echo "test-runs-on=$test_runs_on" >> $GITHUB_OUTPUT - - build-release-package: - name: "Build Release Package" - needs: prepare - if: github.repository_owner == 'llvm' - runs-on: ${{ needs.prepare.outputs.build-runs-on }} - steps: - - - name: Checkout Actions - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - ref: ${{ (github.event_name == 'pull_request' && github.sha) || 'main' }} - sparse-checkout: | - .github/workflows/ - sparse-checkout-cone-mode: false - # Check out outside of working directory so the source checkout doesn't - # remove it. - path: workflows - - # actions/checkout does not support paths outside of the GITHUB_WORKSPACE. - # Also, anything that we put inside of GITHUB_WORKSPACE will be overwritten - # by future actions/checkout steps. Therefore, in order to checkout the - # latest actions from main, we need to first checkout out the actions inside of - # GITHUB_WORKSPACE (see previous step), then use actions/checkout to checkout - # the code being built and the move the actions from main back into GITHUB_WORKSPACE, - # becasue the uses on composite actions only reads workflows from inside GITHUB_WORKSPACE. - - shell: bash - run: mv workflows ../workflows-main - - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - ref: ${{ needs.prepare.outputs.ref }} - - - name: Copy main workflows - shell: bash - run: | - mv ../workflows-main . 
- - name: Setup Stage - id: setup-stage - uses: ./workflows-main/.github/workflows/release-binaries-setup-stage - - - name: Configure - id: build - shell: bash - env: - CCACHE_BIN: ${{ needs.prepare.outputs.ccache }} - run: | - # There were some issues on the ARM64 macOS runners with trying to build x86 objects, - # so we need to set some extra CMake flags to disable this. - cmake -G Ninja -S llvm -B ${{ steps.setup-stage.outputs.build-prefix }}/build \ - ${{ needs.prepare.outputs.target-cmake-flags }} \ - -C clang/cmake/caches/Release.cmake \ - -DBOOTSTRAP_LLVM_PARALLEL_LINK_JOBS=1 \ - -DBOOTSTRAP_BOOTSTRAP_CPACK_PACKAGE_FILE_NAME="${{ needs.prepare.outputs.release-binary-basename }}" - - - name: Build - shell: bash - run: | - ninja -v -C ${{ steps.setup-stage.outputs.build-prefix }}/build stage2-package - release_dir=`find ${{ steps.setup-stage.outputs.build-prefix }}/build -iname 'stage2-bins'` - mv $release_dir/${{ needs.prepare.outputs.release-binary-filename }} . - - - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 - with: - name: ${{ runner.os }}-${{ runner.arch }}-release-binary - # Due to path differences on Windows when running in bash vs running on node, - # we need to search for files in the current workspace. - path: | - ${{ needs.prepare.outputs.release-binary-filename }} - - # Clean up some build files to reduce size of artifact. - - name: Clean Up Build Directory - shell: bash - run: | - find ${{ steps.setup-stage.outputs.build-prefix }}/build -iname ${{ needs.prepare.outputs.release-binary-filename }} -delete - find ${{ steps.setup-stage.outputs.build-prefix }}/build -iname _CPack_Packages -prune -exec rm -r {} + - - - name: Save Stage - uses: ./workflows-main/.github/workflows/release-binaries-save-stage - with: - build-prefix: ${{ steps.setup-stage.outputs.build-prefix }} - - upload-release-binaries: - name: "Upload Release Binaries" - needs: - - prepare - - build-release-package - if: >- - github.event_name != 'pull_request' && - needs.prepare.outputs.upload == 'true' - runs-on: ubuntu-24.04 - permissions: - contents: write # For release uploads - id-token: write # For artifact attestations - attestations: write # For artifact attestations - - steps: - - name: Checkout Release Scripts - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: | - llvm/utils/release/github-upload-release.py - llvm/utils/git/requirements.txt - sparse-checkout-cone-mode: false - - - name: 'Download artifact' - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 - with: - pattern: '*-release-binary' - merge-multiple: true - - - name: Attest Build Provenance - id: provenance - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 - with: - subject-path: ${{ needs.prepare.outputs.release-binary-filename }} - - - name: Rename attestation file - run: - mv ${{ steps.provenance.outputs.bundle-path }} ${{ needs.prepare.outputs.release-binary-filename }}.jsonl - - - name: Upload Build Provenance - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 - with: - name: ${{ needs.prepare.outputs.release-binary-filename }}-attestation - path: ${{ needs.prepare.outputs.release-binary-filename }}.jsonl - - - name: Install Python Requirements - run: | - pip install --require-hashes -r ./llvm/utils/git/requirements.txt - - - name: Upload Release - shell: bash - run: | - ./llvm/utils/release/github-upload-release.py \ - --token ${{ github.token
}} \ - --release ${{ needs.prepare.outputs.release-version }} \ - upload \ - --files ${{ needs.prepare.outputs.release-binary-filename }}* - - test-release: - name: "Test Release" - needs: - - prepare - - build-release-package - if: >- - github.repository_owner == 'llvm' - runs-on: ${{ needs.prepare.outputs.test-runs-on }} - steps: - - name: Checkout Actions - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - ref: ${{ (github.event_name == 'pull_request' && github.sha) || 'main' }} - sparse-checkout: | - .github/workflows/ - sparse-checkout-cone-mode: false - path: workflows - - name: Setup Stage - id: setup-stage - uses: ./workflows/.github/workflows/release-binaries-setup-stage - with: - previous-artifact: build-release-package - - - name: Run Tests - shell: bash - run: | - ninja -C ${{ steps.setup-stage.outputs.build-prefix }}/build stage2-check-all diff --git a/.github/workflows/release-documentation.yml b/.github/workflows/release-documentation.yml deleted file mode 100644 index 53a3eed98062f..0000000000000 --- a/.github/workflows/release-documentation.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Release Documentation - -permissions: - contents: read - -on: - workflow_dispatch: - inputs: - release-version: - description: 'Release Version' - required: true - type: string - upload: - description: 'Upload documentation' - required: false - type: boolean - - workflow_call: - inputs: - release-version: - description: 'Release Version' - required: true - type: string - upload: - description: 'Upload documentation' - required: false - type: boolean - -jobs: - release-documentation: - name: Build and Upload Release Documentation - runs-on: ubuntu-24.04 - env: - upload: ${{ inputs.upload && !contains(inputs.release-version, 'rc') }} - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - - name: Setup Python env - uses: actions/setup-python@v6.0.0 - with: - cache: 'pip' - cache-dependency-path: './llvm/docs/requirements.txt' - - - name: Install Dependencies - run: | - sudo apt-get update - sudo apt-get install -y \ - graphviz \ - python3-github \ - ninja-build \ - texlive-font-utils \ - libhwloc-dev - pip3 install --user -r ./llvm/docs/requirements.txt - - - name: Build Documentation - env: - GITHUB_TOKEN: ${{ github.token }} - run: | - ./llvm/utils/release/build-docs.sh -release "${{ inputs.release-version }}" -no-doxygen - - - name: Create Release Notes Artifact - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 - with: - name: release-notes - path: docs-build/html-export/ - - - name: Clone www-releases - if: env.upload - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - repository: ${{ github.repository_owner }}/www-releases - ref: main - fetch-depth: 0 - path: www-releases - persist-credentials: false - - - name: Upload Release Notes - if: env.upload - env: - GH_TOKEN: ${{ secrets.WWW_RELEASES_TOKEN }} - run: | - mkdir -p www-releases/${{ inputs.release-version }} - mv ./docs-build/html-export/* www-releases/${{ inputs.release-version }} - cd www-releases - git checkout -b ${{ inputs.release-version }} - git add ${{ inputs.release-version }} - git config user.email "llvmbot@llvm.org" - git config user.name "llvmbot" - git commit -a -m "Add ${{ inputs.release-version }} documentation" - git push --force "https://$GH_TOKEN@github.com/llvmbot/www-releases.git" HEAD:refs/heads/${{ inputs.release-version }} - gh pr create -f -B main -H ${{ 
inputs.release-version }} -R llvmbot/www-releases diff --git a/.github/workflows/release-doxygen.yml b/.github/workflows/release-doxygen.yml deleted file mode 100644 index 806a022e991c5..0000000000000 --- a/.github/workflows/release-doxygen.yml +++ /dev/null @@ -1,73 +0,0 @@ -name: Release Doxygen - -permissions: - contents: read - -on: - workflow_dispatch: - inputs: - release-version: - description: 'Release Version' - required: true - type: string - upload: - description: 'Upload documentation' - required: false - type: boolean - - workflow_call: - inputs: - release-version: - description: 'Release Version' - required: true - type: string - upload: - description: 'Upload documentation' - required: false - type: boolean - secrets: - RELEASE_TASKS_USER_TOKEN: - description: "Secret used to check user permissions." - required: false - -jobs: - release-doxygen: - name: Build and Upload Release Doxygen - runs-on: ubuntu-24.04 - permissions: - contents: write - env: - upload: ${{ inputs.upload && !contains(inputs.release-version, 'rc') }} - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - - name: Setup Python env - uses: actions/setup-python@v6.0.0 - with: - cache: 'pip' - cache-dependency-path: './llvm/docs/requirements.txt' - - - name: Install Dependencies - run: | - sudo apt-get update - sudo apt-get install -y \ - doxygen \ - graphviz \ - python3-github \ - ninja-build \ - texlive-font-utils \ - libhwloc-dev - pip3 install --user -r ./llvm/docs/requirements.txt - - - name: Build Doxygen - run: | - ./llvm/utils/release/build-docs.sh -release "${{ inputs.release-version }}" -no-sphinx - - - name: Upload Doxygen - if: env.upload - env: - GITHUB_TOKEN: ${{ github.token }} - USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - run: | - ./llvm/utils/release/github-upload-release.py --token "$GITHUB_TOKEN" --release "${{ inputs.release-version }}" --user "${{ github.actor }}" --user-token "$USER_TOKEN" upload --files ./*doxygen*.tar.xz diff --git a/.github/workflows/release-lit.yml b/.github/workflows/release-lit.yml deleted file mode 100644 index 8362b88834ff6..0000000000000 --- a/.github/workflows/release-lit.yml +++ /dev/null @@ -1,79 +0,0 @@ -name: Release Lit - -permissions: - contents: read - -on: - workflow_dispatch: - inputs: - release-version: - description: 'Release Version' - required: true - type: string - - workflow_call: - inputs: - release-version: - description: 'Release Version' - required: true - type: string - secrets: - RELEASE_TASKS_USER_TOKEN: - description: "Secret used to check user permissions." 
- required: false - -jobs: - release-lit: - name: Release Lit - runs-on: ubuntu-24.04 - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - ref: "llvmorg-${{ inputs.release-version }}" - - - name: Install dependencies - run: | - sudo apt-get update - sudo apt-get install -y python3-setuptools python3-psutil python3-github - - - name: Check Permissions - env: - GITHUB_TOKEN: ${{ github.token }} - USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - run: | - ./llvm/utils/release/./github-upload-release.py --token "$GITHUB_TOKEN" --user ${{ github.actor }} --user-token "$USER_TOKEN" check-permissions - - - name: Setup Cpp - uses: aminya/setup-cpp@a276e6e3d1db9160db5edc458e99a30d3b109949 # v1.7.1 - with: - compiler: llvm-16.0.6 - cmake: true - ninja: true - - - name: Test lit - run: | - mkdir build && cd build - export FILECHECK_OPTS='-dump-input-filter=all -vv -color' - cmake ../llvm -DCMAKE_BUILD_TYPE=Release -G Ninja - ninja -v -j $(nproc) check-lit - - - name: Package lit - run: | - cd llvm/utils/lit - # Remove 'dev' suffix from lit version. - sed -i 's/ + "dev"//g' lit/__init__.py - python3 setup.py sdist bdist_wheel - - - name: Upload lit to test.pypi.org - uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0 - with: - password: ${{ secrets.LLVM_LIT_TEST_PYPI_API_TOKEN }} - repository-url: https://test.pypi.org/legacy/ - packages-dir: llvm/utils/lit/dist/ - - - name: Upload lit to pypi.org - uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0 - with: - password: ${{ secrets.LLVM_LIT_PYPI_API_TOKEN }} - packages-dir: llvm/utils/lit/dist/ diff --git a/.github/workflows/release-sources.yml b/.github/workflows/release-sources.yml deleted file mode 100644 index 88f580abdc28e..0000000000000 --- a/.github/workflows/release-sources.yml +++ /dev/null @@ -1,108 +0,0 @@ -name: Release Sources - -permissions: - contents: read - -on: - workflow_dispatch: - inputs: - release-version: - description: Release Version - required: true - type: string - workflow_call: - inputs: - release-version: - description: Release Version - required: true - type: string - secrets: - RELEASE_TASKS_USER_TOKEN: - description: "Secret used to check user permissions." - required: false - # Run on pull_requests for testing purposes. - pull_request: - paths: - - '.github/workflows/release-sources.yml' - types: - - opened - - synchronize - - reopened - # When a PR is closed, we still start this workflow, but then skip - # all the jobs, which makes it effectively a no-op. The reason to - # do this is that it allows us to take advantage of concurrency groups - # to cancel in progress CI jobs whenever the PR is closed. 
- - closed - -concurrency: - group: ${{ github.workflow }}-${{ inputs.release-version || github.event.pull_request.number }} - cancel-in-progress: True - -jobs: - inputs: - name: Collect Job Inputs - if: >- - github.repository_owner == 'llvm' && - github.event.action != 'closed' - outputs: - ref: ${{ steps.inputs.outputs.ref }} - export-args: ${{ steps.inputs.outputs.export-args }} - runs-on: ubuntu-24.04 - steps: - - id: inputs - run: | - ref=${{ (inputs.release-version && format('llvmorg-{0}', inputs.release-version)) || github.sha }} - if [ -n "${{ inputs.release-version }}" ]; then - export_args="-release ${{ inputs.release-version }} -final" - else - export_args="-git-ref ${{ github.sha }}" - fi - echo "ref=$ref" >> $GITHUB_OUTPUT - echo "export-args=$export_args" >> $GITHUB_OUTPUT - - release-sources: - name: Package Release Sources - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - needs: - - inputs - permissions: - id-token: write - attestations: write - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - ref: ${{ needs.inputs.outputs.ref }} - fetch-tags: true - - name: Install Dependencies - run: | - pip install --require-hashes -r ./llvm/utils/git/requirements.txt - - - name: Check Permissions - if: github.event_name != 'pull_request' - env: - GITHUB_TOKEN: ${{ github.token }} - USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - run: | - ./llvm/utils/release/./github-upload-release.py --token "$GITHUB_TOKEN" --user ${{ github.actor }} --user-token "$USER_TOKEN" check-permissions - - name: Create Tarballs - run: | - ./llvm/utils/release/export.sh ${{ needs.inputs.outputs.export-args }} - - name: Attest Build Provenance - if: github.event_name != 'pull_request' - id: provenance - uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 - with: - subject-path: "*.xz" - - if: github.event_name != 'pull_request' - run: | - mv ${{ steps.provenance.outputs.bundle-path }} . - - name: Create Tarball Artifacts - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 - with: - path: | - *.xz - attestation.jsonl - - diff --git a/.github/workflows/release-tasks.yml b/.github/workflows/release-tasks.yml deleted file mode 100644 index 893e232a89642..0000000000000 --- a/.github/workflows/release-tasks.yml +++ /dev/null @@ -1,141 +0,0 @@ -name: Release Task - -permissions: - contents: read - -on: - push: - tags: - # The regex support here is limited, so just match everything that starts with llvmorg- and filter later. - - 'llvmorg-*' - -jobs: - validate-tag: - name: Validate Tag - runs-on: ubuntu-24.04 - if: github.repository == 'llvm/llvm-project' - outputs: - release-version: ${{ steps.validate-tag.outputs.release-version }} - steps: - - name: Validate Tag - id: validate-tag - run: | - echo "${{ github.ref_name }}" | grep -e '^llvmorg-[0-9]\+\.[0-9]\+\.[0-9]\+\(-rc[0-9]\+\)\?$' - release_version=$(echo "${{ github.ref_name }}" | sed 's/llvmorg-//g') - echo "release-version=$release_version" >> "$GITHUB_OUTPUT" - - release-create: - name: Create a New Release - runs-on: ubuntu-24.04 - permissions: - contents: write # For creating the release. 
- needs: validate-tag - - steps: - - name: Install Dependencies - run: | - sudo apt-get update - sudo apt-get install python3-github - - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - - name: Create Release - env: - GITHUB_TOKEN: ${{ github.token }} - USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - run: | - ./llvm/utils/release/./github-upload-release.py --token "$GITHUB_TOKEN" --release ${{ needs.validate-tag.outputs.release-version }} --user ${{ github.actor }} --user-token "$USER_TOKEN" create - release-documentation: - name: Build and Upload Release Documentation - needs: - - validate-tag - uses: ./.github/workflows/release-documentation.yml - with: - release-version: ${{ needs.validate-tag.outputs.release-version }} - upload: true - - release-doxygen: - name: Build and Upload Release Doxygen - permissions: - contents: write - needs: - - validate-tag - - release-create - uses: ./.github/workflows/release-doxygen.yml - with: - release-version: ${{ needs.validate-tag.outputs.release-version }} - upload: true - # Called workflows don't have access to secrets by default, so we need to explicitly pass secrets that we use. - secrets: - RELEASE_TASKS_USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - - release-lit: - name: Release Lit - needs: validate-tag - uses: ./.github/workflows/release-lit.yml - with: - release-version: ${{ needs.validate-tag.outputs.release-version }} - # Called workflows don't have access to secrets by default, so we need to explicitly pass secrets that we use. - secrets: - RELEASE_TASKS_USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - - release-binaries: - name: Build Release Binaries - permissions: - contents: write - id-token: write - attestations: write - needs: - - validate-tag - - release-create - uses: ./.github/workflows/release-binaries-all.yml - with: - release-version: ${{ needs.validate-tag.outputs.release-version }} - upload: true - # Called workflows don't have access to secrets by default, so we need to explicitly pass secrets that we use. - secrets: - RELEASE_TASKS_USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - - release-sources: - name: Package Release Sources - permissions: - contents: read - id-token: write - attestations: write - needs: - - validate-tag - uses: ./.github/workflows/release-sources.yml - with: - release-version: ${{ needs.validate-tag.outputs.release-version }} - # Called workflows don't have access to secrets by default, so we need to explicitly pass secrets that we use. - secrets: - RELEASE_TASKS_USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} - - uncomment-download-links: - name: Uncomment download links - runs-on: ubuntu-24.04 - permissions: - contents: write # For updating the release message. 
- needs: - - validate-tag - - release-create - - release-binaries - - steps: - - name: Install Dependencies - run: | - sudo apt-get update - sudo apt-get install python3-github - - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - sparse-checkout: llvm/utils/release/github-upload-release.py - sparse-checkout-cone-mode: false - - - name: Uncomment Download Links - env: - GITHUB_TOKEN: ${{ github.token }} - run: | - ./llvm/utils/release/./github-upload-release.py --token "$GITHUB_TOKEN" --release ${{ needs.validate-tag.outputs.release-version }} uncomment_download_links diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml deleted file mode 100644 index 401a48fb56466..0000000000000 --- a/.github/workflows/scorecard.yml +++ /dev/null @@ -1,62 +0,0 @@ -# This workflow uses actions that are not certified by GitHub. They are provided -# by a third-party and are governed by separate terms of service, privacy -# policy, and support documentation. - -# Check current LLVM-Project results here: https://securityscorecards.dev/viewer/?uri=github.com/intel/llvm - -name: Scorecard supply-chain security -on: - # For Branch-Protection check. Only the default branch is supported. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection - branch_protection_rule: - # To guarantee Maintained check is occasionally updated. See - # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained - schedule: - - cron: '30 20 * * *' - -# Declare default permissions as read only. -permissions: - contents: read - -jobs: - analysis: - name: Scorecard analysis - runs-on: ubuntu-24.04 - if: github.repository == 'intel/llvm' - permissions: - # Needed to upload the results to code-scanning dashboard. - security-events: write - # Needed to publish results and get a badge (see publish_results below). - id-token: write - - steps: - - name: "Checkout code" - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - persist-credentials: false - - - name: "Run analysis" - uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 - with: - results_file: results.sarif - results_format: sarif - - # - Publish results to OpenSSF REST API for easy access by consumers - # - Allows the repository to include the Scorecard badge. - # - See https://github.com/ossf/scorecard-action#publishing-results. - publish_results: true - - # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF - # format to the repository Actions tab. - - name: "Upload artifact" - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 - with: - name: SARIF file - path: results.sarif - retention-days: 5 - - # Upload the results to GitHub's code scanning dashboard. - - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@80f993039571a6de66594ecaa432875a6942e8e0 # v2.20.6 - with: - sarif_file: results.sarif diff --git a/.github/workflows/set-release-binary-outputs.sh b/.github/workflows/set-release-binary-outputs.sh deleted file mode 100644 index 14d0798364e91..0000000000000 --- a/.github/workflows/set-release-binary-outputs.sh +++ /dev/null @@ -1,34 +0,0 @@ -# Usage: set-release-binary-outputs.sh - -set -e - -if [ -z "$GITHUB_OUTPUT" ]; then - export GITHUB_OUTPUT=`mktemp` - echo "Warning: Environment variable GITHUB_OUTPUT is not set." 
- echo "Writing output variables to $GITHUB_OUTPUT" -fi - -tag=$1 -upload=$2 - -if echo $tag | grep -e '^[0-9a-f]\+$'; then - # This is a plain commit. - # TODO: Don't hardcode this. - release_version="18" - upload='false' - ref="$tag" - -else - - pattern='^llvmorg-[0-9]\+\.[0-9]\+\.[0-9]\+\(-rc[0-9]\+\)\?$' - echo "$tag" | grep -e $pattern - if [ $? != 0 ]; then - echo "ERROR: Tag '$tag' doesn't match pattern: $pattern" - exit 1 - fi - release_version=`echo "$tag" | sed 's/llvmorg-//g'` - release=`echo "$release_version" | sed 's/-.*//g'` -fi -echo "release-version=$release_version" >> $GITHUB_OUTPUT -echo "upload=$upload" >> $GITHUB_OUTPUT -echo "ref=$tag" >> $GITHUB_OUTPUT diff --git a/.github/workflows/spirv-tests.yml b/.github/workflows/spirv-tests.yml deleted file mode 100644 index 28294114cce2f..0000000000000 --- a/.github/workflows/spirv-tests.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: SPIR-V Tests - -permissions: - contents: read - -on: - pull_request: - paths: - - 'llvm/lib/Target/SPIRV/**' - - 'llvm/test/CodeGen/SPIRV/**' - - '.github/workflows/spirv-tests.yml' - -concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} - -jobs: - check_spirv: - if: github.repository_owner == 'llvm' - name: Test SPIR-V - runs-on: ubuntu-24.04 - container: - image: ghcr.io/llvm/ci-ubuntu-24.04:latest - steps: - - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - - name: Setup ccache - uses: hendrikmuhs/ccache-action@bfa03e1de4d7f7c3e80ad9109feedd05c4f5a716 # v1.2.19 - with: - max-size: 2G - key: spirv-ubuntu-24.04 - variant: sccache - - name: Build and Test - run: | - mkdir build - cmake -GNinja \ - -S llvm \ - -B build \ - -DCMAKE_BUILD_TYPE=Release \ - -DLLVM_ENABLE_ASSERTIONS=ON \ - -DCMAKE_C_COMPILER_LAUNCHER=sccache \ - -DCMAKE_CXX_COMPILER_LAUNCHER=sccache \ - -DLLVM_TARGETS_TO_BUILD="SPIRV" \ - -DLLVM_INCLUDE_SPIRV_TOOLS_TESTS=ON - ninja -C build check-llvm-codegen-spirv diff --git a/.github/workflows/unprivileged-download-artifact/action.yml b/.github/workflows/unprivileged-download-artifact/action.yml deleted file mode 100644 index 9d8fb59a67c0e..0000000000000 --- a/.github/workflows/unprivileged-download-artifact/action.yml +++ /dev/null @@ -1,81 +0,0 @@ -name: Unprivileged Download Artifact -description: >- - Download artifacts from another workflow run without using an access token. -inputs: - run-id: - description: >- - The run-id for the workflow run that you want to download the artifact - from. If ommitted it will download the most recently created artifact - from the repo with the artifact-name. - required: false - artifact-name: - desciption: The name of the artifact to download. - required: true - - -outputs: - filename: - description: >- - The filename of the downloaded artifact or the empty string if the - artifact was not found. - value: ${{ steps.download-artifact.outputs.filename }} - artifact-id: - description: "The id of the artifact being downloaded." 
-    value: ${{ steps.artifact-url.outputs.id }}
-
-
-runs:
-  using: "composite"
-  steps:
-    - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea #v7.0.1
-      id: artifact-url
-      with:
-        script: |
-          var response;
-          if (!"${{ inputs.run-id }}") {
-            response = await github.rest.actions.listArtifactsForRepo({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              name: "${{ inputs.artifact-name }}"
-            })
-          } else {
-            response = await github.rest.actions.listWorkflowRunArtifacts({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              run_id: "${{ inputs.run-id }}",
-              name: "${{ inputs.artifact-name }}"
-            })
-          }
-
-          console.log(response)
-
-          for (const artifact of response.data.artifacts) {
-            console.log(artifact);
-          }
-
-          if (response.data.artifacts.length == 0) {
-            console.log("Could not find artifact ${{ inputs.artifact-name }} for workflow run ${{ inputs.run-id }}")
-            return;
-          }
-
-          const url_response = await github.rest.actions.downloadArtifact({
-            owner: context.repo.owner,
-            repo: context.repo.repo,
-            artifact_id: response.data.artifacts[0].id,
-            archive_format: "zip"
-          })
-
-          core.setOutput("url", url_response.url);
-          core.setOutput("id", response.data.artifacts[0].id);
-
-    - shell: bash
-      if: steps.artifact-url.outputs.url != ''
-      id: download-artifact
-      run: |
-        curl -L -o ${{ inputs.artifact-name }}.zip "${{ steps.artifact-url.outputs.url }}"
-        echo "filename=${{ inputs.artifact-name }}.zip" >> $GITHUB_OUTPUT
-
-    - shell: bash
-      if: steps.download-artifact.outputs.filename != ''
-      run: |
-        unzip ${{ steps.download-artifact.outputs.filename }}
diff --git a/.github/workflows/ur-build-hw.yml b/.github/workflows/ur-build-hw.yml
deleted file mode 100644
index 81f000f6d0867..0000000000000
--- a/.github/workflows/ur-build-hw.yml
+++ /dev/null
@@ -1,182 +0,0 @@
-name: UR - Build adapters, test on HW
-
-on:
-  workflow_call:
-    inputs:
-      adapter_name:
-        required: true
-        type: string
-      other_adapter_name:
-        required: false
-        type: string
-        default: ""
-      runner_name:
-        required: true
-        type: string
-      static_loader:
-        required: false
-        type: string
-        default: OFF
-      static_adapter:
-        required: false
-        type: string
-        default: OFF
-      docker_image:
-        required: true
-        type: string
-        default: ""
-      image_options:
-        required: true
-        type: string
-        default: ""
-  workflow_dispatch:
-    inputs:
-      adapter_name:
-        required: true
-        type: string
-      other_adapter_name:
-        required: false
-        type: string
-        default: ""
-      runner_name:
-        required: true
-        type: string
-      static_loader:
-        required: false
-        type: string
-        default: OFF
-      static_adapter:
-        required: false
-        type: string
-        default: OFF
-      docker_image:
-        required: true
-        type: string
-        default: ""
-      image_options:
-        required: true
-        type: string
-        default: ""
-
-permissions: read-all
-
-env:
-  UR_LOG_CUDA: "level:error;flush:error"
-  UR_LOG_HIP: "level:error;flush:error"
-  UR_LOG_LEVEL_ZERO: "level:error;flush:error"
-  UR_LOG_NATIVE_CPU: "level:error;flush:error"
-  UR_LOG_OPENCL: "level:error;flush:error"
-
-jobs:
-  adapter_build_hw:
-    name: Build & CTS
-    # run only on upstream; forks won't have the HW
-    if: github.repository == 'intel/llvm'
-    strategy:
-      fail-fast: false
-      matrix:
-        adapter: [
-          {
-            name: "${{inputs.adapter_name}}",
-            other_name: "${{inputs.other_adapter_name}}",
-            static_Loader: "${{inputs.static_loader}}",
-            static_adapter: "${{inputs.static_adapter}}"
-          }
-        ]
-        build_type: [Release]
-        compiler: [{c: gcc, cxx: g++}]
-
-    runs-on: ${{inputs.runner_name}}
-    container:
-      image: ${{ inputs.docker_image }}
-      options: ${{ inputs.image_options }}
-
-    steps:
-      # TODO:
-      # - investigate if DUR_CONFORMANCE_AMD_ARCH could be removed
-      # - switch to Ninja generator in CMake
-      # - downloading DPC++ should be integrated somehow; most likely use nightly release.
-      #
-      - name: Checkout LLVM
-        uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4.1.1
-
-      # For some reason it's required to re-configure Python for the venv to work properly.
-      - name: Set up Python 3.12
-        if: ${{ inputs.docker_image == 'ghcr.io/intel/llvm/ubuntu2404_intel_drivers:alldeps' }}
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
-        with:
-          python-version: '3.12'
-
-      - name: Install UR python dependencies in venv
-        working-directory: ./unified-runtime
-        run: |
-          python3 -m venv .venv
-          . .venv/bin/activate
-          echo "$PATH" >> $GITHUB_PATH
-          pip install -r third_party/requirements.txt
-          pip install -r third_party/requirements_testing.txt
-
-      - name: Download DPC++
-        run: |
-          wget -O dpcpp_compiler.tar.gz https://github.com/intel/llvm/releases/download/nightly-2024-12-12/sycl_linux.tar.gz
-          mkdir -p dpcpp_compiler
-          tar -xvf dpcpp_compiler.tar.gz -C dpcpp_compiler
-
-      - name: Install OpenCL
-        if: ${{ inputs.adapter_name == 'OPENCL' }}
-        run: |
-          wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \
-            | gpg --dearmor | sudo tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null
-          echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list
-          sudo apt-get update
-          sudo apt-get install -y intel-oneapi-runtime-opencl
-
-      - name: Configure Unified Runtime project
-        # ">" is used to avoid adding "\" at the end of each line; this command is quite long
-        run: >
-          cmake
-          -S unified-runtime
-          -B build
-          -DCMAKE_C_COMPILER=${{matrix.compiler.c}}
-          -DCMAKE_CXX_COMPILER=${{matrix.compiler.cxx}}
-          -DCMAKE_BUILD_TYPE=${{matrix.build_type}}
-          -DUR_ENABLE_TRACING=ON
-          -DUR_DEVELOPER_MODE=ON
-          -DUR_BUILD_TESTS=ON
-          -DUR_BUILD_ADAPTER_${{matrix.adapter.name}}=ON
-          ${{ matrix.adapter.other_name != '' && format('-DUR_BUILD_ADAPTER_{0}=ON', matrix.adapter.other_name) || '' }}
-          -DUR_STATIC_LOADER=${{matrix.adapter.static_Loader}}
-          -DUR_STATIC_ADAPTER_${{matrix.adapter.name}}=${{matrix.adapter.static_adapter}}
-          -DUR_DPCXX=./dpcpp_compiler/bin/clang++
-          -DUR_SYCL_LIBRARY_DIR=./dpcpp_compiler/lib
-          -DCMAKE_INSTALL_PREFIX=./install
-          ${{ matrix.adapter.name == 'HIP' && '-DUR_CONFORMANCE_AMD_ARCH=gfx1030' || '' }}
-          ${{ matrix.adapter.name == 'HIP' && '-DUR_HIP_PLATFORM=AMD' || '' }}
-
-      - name: Build
-        # This is so that device binaries can find the sycl runtime library
-        run: cmake --build build -j $(nproc)
-
-      - name: Install
-        # This is to check that install command does not fail
-        run: cmake --install build
-
-      - name: Test adapter specific
-        env:
-          ZE_ENABLE_LOADER_DEBUG_TRACE: 1
-          LIT_OPTS: "--timeout 120"
-          # These tests cause timeouts on CI
-          LIT_FILTER_OUT: "(adapters/level_zero/memcheck.test|adapters/level_zero/v2/deferred_kernel_memcheck.test)"
-        run: cmake --build build -j $(nproc) -- check-unified-runtime-adapter
-        # Don't run adapter specific tests when building multiple adapters
-        if: ${{ matrix.adapter.other_name == '' }}
-
-      - name: Test adapters
-        env:
-          ZE_ENABLE_LOADER_DEBUG_TRACE: 1
-          LIT_OPTS: "--timeout 120"
-        run: cmake --build build -j $(nproc) -- check-unified-runtime-conformance
-
-      - name: Get information about platform
-        if: ${{ always() }}
-        run: ./unified-runtime/.github/scripts/get_system_info.sh
diff --git a/.github/workflows/ur-build-offload.yml b/.github/workflows/ur-build-offload.yml
deleted file mode 100644
index 82268c0340639..0000000000000
--- a/.github/workflows/ur-build-offload.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: UR - Build offload adapter
-
-permissions: read-all
-
-on: [ workflow_call, workflow_dispatch ]
-
-jobs:
-  offload_build:
-    name: Build
-    strategy:
-      fail-fast: false
-      matrix:
-        build_type: [Release]
-        compiler: [{c: gcc, cxx: g++}]
-
-    runs-on: [ "Linux", "build" ]
-    container:
-      image: 'ghcr.io/intel/llvm/ubuntu2404_base'
-
-    steps:
-      - name: Checkout LLVM
-        uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4.1.1
-
-      - name: Install liboffload
-        run: sudo apt-get update -qq && sudo apt-get install --no-install-recommends -yqq liboffload-21 liboffload-21-dev
-
-      - name: Configure Unified Runtime project
-        # ">" is used to avoid adding "\" at the end of each line; this command is quite long
-        run: >
-          cmake
-          -S unified-runtime
-          -B $GITHUB_WORKSPACE/build
-          -DCMAKE_C_COMPILER=${{matrix.compiler.c}}
-          -DCMAKE_CXX_COMPILER=${{matrix.compiler.cxx}}
-          -DCMAKE_BUILD_TYPE=${{matrix.build_type}}
-          -DUR_ENABLE_TRACING=ON
-          -DUR_DEVELOPER_MODE=ON
-          -DUR_BUILD_TESTS=OFF
-          -DUR_BUILD_ADAPTER_OFFLOAD=ON
-          -DUR_OFFLOAD_INSTALL_DIR="/usr/lib/llvm-21"
-          -DUR_OFFLOAD_INCLUDE_DIR="/usr/lib/llvm-21/include"
-          -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install
-
-      - name: Build
-        # This is so that device binaries can find the sycl runtime library
-        run: cmake --build $GITHUB_WORKSPACE/build -j $(nproc)
-
-      - name: Install
-        # This is to check that install command does not fail
-        run: cmake --install $GITHUB_WORKSPACE/build
-
-      - name: Get information about platform
-        if: ${{ always() }}
-        run: ./unified-runtime/.github/scripts/get_system_info.sh
diff --git a/.github/workflows/ur-precommit.yml b/.github/workflows/ur-precommit.yml
deleted file mode 100644
index 1bc37adc61dfc..0000000000000
--- a/.github/workflows/ur-precommit.yml
+++ /dev/null
@@ -1,147 +0,0 @@
-name: Unified Runtime Pre Commit
-# Note: this is the very first version of the UR workflow.
-# It was pretty much copy-pasted from the UR repository.
-# Over time it will most likely be integrated more into the existing workflows.

-# Note: the trigger is copy-pasted from sycl-linux-precommit.yml - probably to be fine-tuned.
-on:
-  # We rely on "Fork pull request workflows from outside collaborators" -
-  # "Require approval for all outside collaborators" at
-  # https://github.com/intel/llvm/settings/actions for security.
-  pull_request:
-    branches:
-      - sycl
-      - sycl-rel-**
-    # Do not run builds if changes are only in the following locations
-    paths-ignore:
-      - '.github/ISSUE_TEMPLATE/**'
-      - '.github/CODEOWNERS'
-      - 'sycl/doc/**'
-      - 'sycl/gdb/**'
-      - 'clang/docs/**'
-      - '**.md'
-      - '**.rst'
-      - '.github/workflows/sycl-windows-*.yml'
-      - '.github/workflows/sycl-macos-*.yml'
-      - '.github/workflows/sycl-nightly.yml'
-      - '.github/workflows/sycl-rel-nightly.yml'
-      - 'devops/containers/**'
-      - 'devops/actions/build_container/**'
-
-concurrency:
-  # Cancel a currently running workflow from the same PR, branch or tag.
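-  # Illustrative note on the group expression below (standard GitHub Actions
-  # semantics): for a pull request it combines the workflow name with the PR
-  # number, e.g. "Unified Runtime Pre Commit-12345"; for a push it falls back
-  # to the ref instead, so only runs of the same PR or branch cancel each other.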
- group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true - -permissions: read-all - -jobs: - detect_changes: - name: Detect Changes - uses: ./.github/workflows/sycl-detect-changes.yml - - source_checks: - name: Source Checks - needs: [detect_changes] - if: ${{ always() && !cancelled() && contains(needs.detect_changes.outputs.filters, 'ur') }} - uses: ./.github/workflows/ur-source-checks.yml - - adapters: - name: Adapters - needs: [detect_changes, source_checks] - if: ${{ always() && !cancelled() && contains(needs.detect_changes.outputs.filters, 'ur') }} - strategy: - fail-fast: false - matrix: - # Extra native CPU jobs are here to force the loader to be used. - # UR will not use the loader if there is only one target. - include: - - name: L0 - runner: UR_L0 - image_options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN - - name: L0_V2 - runner: UR_L0 - image_options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN - - name: L0 - runner: UR_L0 - static: ON - image_options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN - - name: L0 - runner: UR_L0 - other_adapter: NATIVE_CPU - image_options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN - - name: HIP - runner: UR_HIP - image_options: -u 1001 --device=/dev/dri --device=/dev/kfd --cap-add=SYS_ADMIN - - name: CUDA - runner: UR_CUDA - image_options: -u 1001 --privileged --cap-add SYS_ADMIN --gpus all - - name: OPENCL - runner: UR_OPENCL - docker_image: "ghcr.io/intel/llvm/ubuntu2204_build:latest" - image_options: -u 1001 --device=/dev/dri --device=/dev/kfd --privileged --cap-add SYS_ADMIN - - name: OPENCL - runner: UR_OPENCL - other_adapter: NATIVE_CPU - docker_image: "ghcr.io/intel/llvm/ubuntu2204_build:latest" - image_options: -u 1001 --device=/dev/dri --device=/dev/kfd --privileged --cap-add SYS_ADMIN - - name: NATIVE_CPU - runner: UR_NATIVE_CPU - docker_image: "ghcr.io/intel/llvm/ubuntu2204_build:latest" - image_options: -u 1001 --device=/dev/dri --privileged --cap-add SYS_ADMIN - uses: ./.github/workflows/ur-build-hw.yml - with: - adapter_name: ${{ matrix.name }} - runner_name: ${{ matrix.runner }} - static_loader: ${{ matrix.static || 'OFF' }} - static_adapter: ${{ matrix.static || 'OFF' }} - other_adapter_name: ${{ matrix.other_adapter || '' }} - docker_image: ${{ matrix.docker_image || 'ghcr.io/intel/llvm/ubuntu2404_intel_drivers:alldeps'}} - image_options: ${{ matrix.image_options || '' }} - -# TODO: Enable once the apt package at https://apt.llvm.org/noble/pool/main/l/llvm-toolchain-snapshot/ is updated -# offload_build: -# name: Adapters (Offload) -# needs: [detect_changes, source_checks] -# if: ${{ always() && !cancelled() && contains(needs.detect_changes.outputs.filters, 'ur_offload_adapter') }} -# uses: ./.github/workflows/ur-build-offload.yml - - macos: - name: MacOS build only - needs: [detect_changes, source_checks] - if: ${{ always() && !cancelled() && contains(needs.detect_changes.outputs.filters, 'ur') }} - strategy: - matrix: - os: ['macos-latest'] - runs-on: ${{matrix.os}} - - steps: - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4.1.1 - - - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v5.0.0 - with: - python-version: "3.10" - - - name: Install prerequisites - working-directory: 
${{github.workspace}}/unified-runtime - run: | - python3 -m pip install -r third_party/requirements.txt - python3 -m pip install -r third_party/requirements_testing.txt - - - name: Install hwloc - run: brew install hwloc - - - name: Configure Unified Runtime project - working-directory: ${{github.workspace}}/unified-runtime - run: > - cmake - -B${{github.workspace}}/build - -DUR_ENABLE_TRACING=ON - -DUR_DEVELOPER_MODE=ON - -DCMAKE_BUILD_TYPE=Release - -DUR_BUILD_TESTS=ON - -DUR_FORMAT_CPP_STYLE=ON - - - name: Build - run: cmake --build ${{github.workspace}}/build -j $(sysctl -n hw.logicalcpu) diff --git a/.github/workflows/ur-source-checks.yml b/.github/workflows/ur-source-checks.yml deleted file mode 100644 index b444e3252d41c..0000000000000 --- a/.github/workflows/ur-source-checks.yml +++ /dev/null @@ -1,63 +0,0 @@ -name: UR - Check generated sources - -on: - workflow_call: - -permissions: - contents: read - -jobs: - source_checks: - strategy: - matrix: - os: ['ubuntu-22.04', 'windows-2022'] - - runs-on: ${{matrix.os}} - - steps: - # TODO: - # - split into separate jobs for each OS - # - - name: Checkout LLVM - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4.1.1 - - - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v5.0.0 - with: - python-version: 3.9 - - - name: Install UR python dependencies - working-directory: ${{github.workspace}}/unified-runtime - run: pip install -r third_party/requirements.txt - - - name: "[Lin] Install doxygen" - if: matrix.os == 'ubuntu-22.04' - run: | - sudo apt-get update - sudo apt-get install -y doxygen libhwloc-dev - - - name: "[Win] Install doxygen" - if: matrix.os == 'windows-2022' - run: | - $WorkingDir = $PWD.Path - Invoke-WebRequest -Uri https://github.com/doxygen/doxygen/releases/download/Release_1_9_8/doxygen-1.9.8.windows.x64.bin.zip -OutFile "$WorkingDir\doxygen.zip" - Expand-Archive -Path "$WorkingDir\doxygen.zip" - Add-Content $env:GITHUB_PATH "$WorkingDir\doxygen" - - - name: Configure Unified Runtime project - working-directory: ${{github.workspace}}/unified-runtime - run: > - cmake - -B${{github.workspace}}/build - -DCMAKE_PREFIX_PATH="${{env.VCPKG_PATH}}" - -DUR_ENABLE_TRACING=OFF - -DCMAKE_BUILD_TYPE=Debug - -DUR_BUILD_TESTS=OFF - -DUR_FORMAT_CPP_STYLE=ON - - # Verifying license should be enough on a single OS - - name: Verify that each source file contains a license - if: matrix.os == 'ubuntu-22.04' - run: cmake --build ${{github.workspace}}/build --target verify-licenses - - - name: Generate source from spec, check for uncommitted diff - run: cmake --build ${{github.workspace}}/build --target check-generated diff --git a/.github/workflows/version-check.py b/.github/workflows/version-check.py deleted file mode 100755 index f75fd50300881..0000000000000 --- a/.github/workflows/version-check.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/python3 - -from git import Repo -import re -import sys - - -def get_version_from_tag(tag): - m = re.match("llvmorg-([0-9]+)\.([0-9]+)\.([0-9]+)(-rc[0-9]+)?$", tag) - if m: - if m.lastindex == 4: - # We have an rc tag. - return m.group(1, 2, 3) - # We have a final release tag. 
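-        # e.g. for the final release tag "llvmorg-18.1.3" this returns
-        # ("18", "1", "4"): sources on a release branch are taken to be one
-        # patch level past the most recent tag. (Illustrative example tag.)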
- return (m.group(1), m.group(2), str(int(m.group(3)) + 1)) - - m = re.match("llvmorg-([0-9]+)-init", tag) - if m: - return (m.group(1), "1", "0") - - raise Exception(f"error: Tag is not valid: {tag}") - - -version = sys.argv[1] - -repo = Repo() - -tag = repo.git.describe(tags=True, abbrev=0) -expected_version = ".".join(get_version_from_tag(tag)) - -if version != expected_version: - print("error: Expected version", expected_version, "but found version", version) - sys.exit(1) - -print("Versions match:", version, expected_version) -sys.exit(0) diff --git a/.github/workflows/version-check.yml b/.github/workflows/version-check.yml deleted file mode 100644 index 990591f4cea15..0000000000000 --- a/.github/workflows/version-check.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: LLVM Project Version Check - -on: - push: - branches: - - 'release/**' - pull_request: - branches: - - 'release/**' - -permissions: - contents: read - -jobs: - version_check: - if: github.repository_owner == 'llvm' - runs-on: ubuntu-24.04 - steps: - - name: Fetch LLVM sources - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 - with: - fetch-depth: 0 - - - name: Install dependencies - run: | - pip install --require-hashes -r ./llvm/utils/git/requirements.txt - - - name: Version Check - run: | - version=$(grep -o 'LLVM_VERSION_\(MAJOR\|MINOR\|PATCH\) [0-9]\+' cmake/Modules/LLVMVersion.cmake | cut -d ' ' -f 2 | tr "\n" "." | sed 's/.$//g') - .github/workflows/version-check.py "$version" From 0c5de9d27317300fcadeeb0754f6b335691c3f3d Mon Sep 17 00:00:00 2001 From: Jinsong Ji Date: Tue, 4 Nov 2025 09:45:28 -0800 Subject: [PATCH 2/4] Remove unused .ci folder --- .ci/all_requirements.txt | 187 ------- .ci/cache_lit_timing_files.py | 80 --- .ci/compute_projects.py | 338 ------------ .ci/compute_projects_test.py | 418 -------------- .ci/generate_test_report_github.py | 28 - .ci/generate_test_report_lib.py | 269 --------- .ci/generate_test_report_lib_test.py | 778 --------------------------- .ci/metrics/Dockerfile | 7 - .ci/metrics/metrics.py | 327 ----------- .ci/metrics/metrics_test.py | 75 --- .ci/metrics/requirements.lock.txt | 347 ------------ .ci/metrics/requirements.txt | 2 - .ci/monolithic-linux.sh | 102 ---- .ci/monolithic-windows.sh | 54 -- .ci/requirements.txt | 1 - .ci/utils.sh | 54 -- 16 files changed, 3067 deletions(-) delete mode 100644 .ci/all_requirements.txt delete mode 100644 .ci/cache_lit_timing_files.py delete mode 100644 .ci/compute_projects.py delete mode 100644 .ci/compute_projects_test.py delete mode 100644 .ci/generate_test_report_github.py delete mode 100644 .ci/generate_test_report_lib.py delete mode 100644 .ci/generate_test_report_lib_test.py delete mode 100644 .ci/metrics/Dockerfile delete mode 100644 .ci/metrics/metrics.py delete mode 100644 .ci/metrics/metrics_test.py delete mode 100644 .ci/metrics/requirements.lock.txt delete mode 100644 .ci/metrics/requirements.txt delete mode 100755 .ci/monolithic-linux.sh delete mode 100755 .ci/monolithic-windows.sh delete mode 100644 .ci/requirements.txt delete mode 100644 .ci/utils.sh diff --git a/.ci/all_requirements.txt b/.ci/all_requirements.txt deleted file mode 100644 index f73500efdc7e0..0000000000000 --- a/.ci/all_requirements.txt +++ /dev/null @@ -1,187 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --generate-hashes --output-file=./.ci/all_requirements.txt ./.ci/requirements.txt ./lldb/test/requirements.txt ./mlir/python/requirements.txt -# -junitparser==3.2.0 \ 
- --hash=sha256:b05e89c27e7b74b3c563a078d6e055d95cf397444f8f689b0ca616ebda0b3c65 \ - --hash=sha256:e14fdc0a999edfc15889b637390e8ef6ca09a49532416d3bd562857d42d4b96d - # via -r ./.ci/requirements.txt -ml-dtypes==0.5.1 ; python_version < "3.13" \ - --hash=sha256:023ce2f502efd4d6c1e0472cc58ce3640d051d40e71e27386bed33901e201327 \ - --hash=sha256:05f23447a1c20ddf4dc7c2c661aa9ed93fcb2658f1017c204d1e758714dc28a8 \ - --hash=sha256:12651420130ee7cc13059fc56dac6ad300c3af3848b802d475148c9defd27c23 \ - --hash=sha256:141b2ea2f20bb10802ddca55d91fe21231ef49715cfc971998e8f2a9838f3dbe \ - --hash=sha256:15ad0f3b0323ce96c24637a88a6f44f6713c64032f27277b069f285c3cf66478 \ - --hash=sha256:1b7fbe5571fdf28fd3aaab3ef4aafc847de9ebf263be959958c1ca58ec8eadf5 \ - --hash=sha256:26ebcc69d7b779c8f129393e99732961b5cc33fcff84090451f448c89b0e01b4 \ - --hash=sha256:6f462f5eca22fb66d7ff9c4744a3db4463af06c49816c4b6ac89b16bfcdc592e \ - --hash=sha256:6f76232163b5b9c34291b54621ee60417601e2e4802a188a0ea7157cd9b323f4 \ - --hash=sha256:7000b6e4d8ef07542c05044ec5d8bbae1df083b3f56822c3da63993a113e716f \ - --hash=sha256:810512e2eccdfc3b41eefa3a27402371a3411453a1efc7e9c000318196140fed \ - --hash=sha256:8f2c028954f16ede77902b223a8da2d9cbb3892375b85809a5c3cfb1587960c4 \ - --hash=sha256:9626d0bca1fb387d5791ca36bacbba298c5ef554747b7ebeafefb4564fc83566 \ - --hash=sha256:ac5b58559bb84a95848ed6984eb8013249f90b6bab62aa5acbad876e256002c9 \ - --hash=sha256:ad4953c5eb9c25a56d11a913c2011d7e580a435ef5145f804d98efa14477d390 \ - --hash=sha256:aefedc579ece2f8fb38f876aa7698204ee4c372d0e54f1c1ffa8ca580b54cc60 \ - --hash=sha256:afb2009ac98da274e893e03162f6269398b2b00d947e7057ee2469a921d58135 \ - --hash=sha256:b8a9d46b4df5ae2135a8e8e72b465448ebbc1559997f4f9304a9ecc3413efb5b \ - --hash=sha256:bd73f51957949069573ff783563486339a9285d72e2f36c18e0c1aa9ca7eb190 \ - --hash=sha256:bf9975bda82a99dc935f2ae4c83846d86df8fd6ba179614acac8e686910851da \ - --hash=sha256:c09526488c3a9e8b7a23a388d4974b670a9a3dd40c5c8a61db5593ce9b725bab \ - --hash=sha256:c9945669d3dadf8acb40ec2e57d38c985d8c285ea73af57fc5b09872c516106d \ - --hash=sha256:d13755f8e8445b3870114e5b6240facaa7cb0c3361e54beba3e07fa912a6e12b \ - --hash=sha256:fd918d4e6a4e0c110e2e05be7a7814d10dc1b95872accbf6512b80a109b71ae1 - # via -r ./mlir/python/requirements.txt -nanobind==2.7.0 \ - --hash=sha256:73b12d0e751d140d6c1bf4b215e18818a8debfdb374f08dc3776ad208d808e74 \ - --hash=sha256:f9f1b160580c50dcf37b6495a0fd5ec61dc0d95dae5f8004f87dd9ad7eb46b34 - # via -r ./mlir/python/requirements.txt -numpy==2.0.2 \ - --hash=sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a \ - --hash=sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195 \ - --hash=sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951 \ - --hash=sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1 \ - --hash=sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c \ - --hash=sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc \ - --hash=sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b \ - --hash=sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd \ - --hash=sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4 \ - --hash=sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd \ - --hash=sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318 \ - --hash=sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448 \ - 
--hash=sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece \ - --hash=sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d \ - --hash=sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5 \ - --hash=sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8 \ - --hash=sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57 \ - --hash=sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78 \ - --hash=sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66 \ - --hash=sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a \ - --hash=sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e \ - --hash=sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c \ - --hash=sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa \ - --hash=sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d \ - --hash=sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c \ - --hash=sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729 \ - --hash=sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97 \ - --hash=sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c \ - --hash=sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9 \ - --hash=sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669 \ - --hash=sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4 \ - --hash=sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73 \ - --hash=sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385 \ - --hash=sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8 \ - --hash=sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c \ - --hash=sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b \ - --hash=sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692 \ - --hash=sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15 \ - --hash=sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131 \ - --hash=sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a \ - --hash=sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326 \ - --hash=sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b \ - --hash=sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded \ - --hash=sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04 \ - --hash=sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd - # via - # -r ./mlir/python/requirements.txt - # ml-dtypes -packaging==25.0 \ - --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ - --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f - # via -r ./lldb/test/requirements.txt -pexpect==4.9.0 ; sys_platform != "win32" \ - --hash=sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523 \ - --hash=sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f - # via -r ./lldb/test/requirements.txt -psutil==7.0.0 \ - --hash=sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25 \ - --hash=sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e \ - --hash=sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91 \ - 
--hash=sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da \ - --hash=sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34 \ - --hash=sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553 \ - --hash=sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456 \ - --hash=sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17 \ - --hash=sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993 \ - --hash=sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99 - # via -r ./lldb/test/requirements.txt -ptyprocess==0.7.0 \ - --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \ - --hash=sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220 - # via pexpect -pybind11==2.13.6 \ - --hash=sha256:237c41e29157b962835d356b370ededd57594a26d5894a795960f0047cb5caf5 \ - --hash=sha256:ba6af10348c12b24e92fa086b39cfba0eff619b61ac77c406167d813b096d39a - # via -r ./mlir/python/requirements.txt -pyyaml==6.0.1 \ - --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \ - --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ - --hash=sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df \ - --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \ - --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \ - --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \ - --hash=sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595 \ - --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \ - --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \ - --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \ - --hash=sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290 \ - --hash=sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9 \ - --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \ - --hash=sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6 \ - --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \ - --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \ - --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \ - --hash=sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6 \ - --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \ - --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \ - --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \ - --hash=sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0 \ - --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \ - --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \ - --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \ - --hash=sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28 \ - --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \ - --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \ - --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \ - --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \ - 
--hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \ - --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \ - --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \ - --hash=sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0 \ - --hash=sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515 \ - --hash=sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c \ - --hash=sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c \ - --hash=sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924 \ - --hash=sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34 \ - --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \ - --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \ - --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \ - --hash=sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54 \ - --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \ - --hash=sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b \ - --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \ - --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \ - --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \ - --hash=sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585 \ - --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \ - --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f - # via -r ./mlir/python/requirements.txt -swig==4.3.1 \ - --hash=sha256:07082c2f8693f83ba136136e54e92a5af014488ca4f2a3de4b471337c00d92aa \ - --hash=sha256:0743063399e373b17d658481f4cd327245ef58a1d17a3e2071de88dec60082fc \ - --hash=sha256:19936cb924e7c86f207bf4e12e00c930342068fcb7073dcc9c8b49bd7a2c7389 \ - --hash=sha256:3d34c3fd96c5c288881a25418df06d814aa09e734bc32af5cd92e9217841b5f6 \ - --hash=sha256:444b11d8ee27aa64ac83e827dbeb724aa0cfb1062c20ecbb88180bffa39d5dc3 \ - --hash=sha256:54fd71196e1999fd0e204c8fa5ab39d472eb8831f30a951d7115c82d07bdf5f3 \ - --hash=sha256:5f31b7e815b76b42cc503322ad88e6eb3ebb0bb0b91044445c8a31b5b4aa4664 \ - --hash=sha256:7687a1c1b6c3033f75b753d638cac967e3f6011c04fb25ab405cf9086ecf8d4c \ - --hash=sha256:86600ddde81e24f6fa989920784d72c3ca7ca6a7583fe74b4c5c80076dddd0a5 \ - --hash=sha256:bbb43485d120d3fd2c979f258f81eae78274f83ba3767d5b3fe376ac70504934 \ - --hash=sha256:c5391080a3785b0505eb211af1cbb1f1e3838e5bb1e54f740a9d7ba2e385c879 \ - --hash=sha256:d4ffde3e87cd2a764495a516751c2c3c301f8b237aba2ac3963f786ff59b7f68 \ - --hash=sha256:d84b3e31d943d81b28bd4144dcf5271909ad2313f0f2afbd7f2fb37ef2a6d8bb \ - --hash=sha256:efec16327029f682f649a26da726bb0305be8800bd0f1fa3e81bf0769cf5b476 \ - --hash=sha256:fc496c0d600cf1bb2d91e28d3d6eae9c4301e5ea7a0dec5a4281b5efed4245a8 - # via -r ./lldb/test/requirements.txt diff --git a/.ci/cache_lit_timing_files.py b/.ci/cache_lit_timing_files.py deleted file mode 100644 index 2f43e46fc0e56..0000000000000 --- a/.ci/cache_lit_timing_files.py +++ /dev/null @@ -1,80 +0,0 @@ -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -"""Caches .lit_test_times.txt files between premerge invocations. 
-
-.lit_test_times.txt files are used by lit to order tests to best take advantage
-of parallelism. Having them around and up to date can result in a ~15%
-improvement in test times. This script downloads cached test time files and
-uploads new versions to the GCS buckets used for caching.
-"""
-
-import sys
-import os
-import logging
-import multiprocessing.pool
-import pathlib
-import glob
-
-from google.cloud import storage
-
-GCS_PARALLELISM = 100
-
-
-def _maybe_upload_timing_file(bucket, timing_file_path):
-    if os.path.exists(timing_file_path):
-        timing_file_blob = bucket.blob("lit_timing/" + timing_file_path)
-        timing_file_blob.upload_from_filename(timing_file_path)
-
-
-def upload_timing_files(storage_client, bucket_name: str):
-    bucket = storage_client.bucket(bucket_name)
-    with multiprocessing.pool.ThreadPool(GCS_PARALLELISM) as thread_pool:
-        futures = []
-        for timing_file_path in glob.glob("**/.lit_test_times.txt", recursive=True):
-            futures.append(
-                thread_pool.apply_async(
-                    _maybe_upload_timing_file, (bucket, timing_file_path)
-                )
-            )
-        for future in futures:
-            future.get()
-    print("Done uploading")
-
-
-def _maybe_download_timing_file(blob):
-    file_name = blob.name.removeprefix("lit_timing/")
-    pathlib.Path(os.path.dirname(file_name)).mkdir(parents=True, exist_ok=True)
-    blob.download_to_filename(file_name)
-
-
-def download_timing_files(storage_client, bucket_name: str):
-    bucket = storage_client.bucket(bucket_name)
-    blobs = bucket.list_blobs(prefix="lit_timing")
-    with multiprocessing.pool.ThreadPool(GCS_PARALLELISM) as thread_pool:
-        futures = []
-        for timing_file_blob in blobs:
-            futures.append(
-                thread_pool.apply_async(
-                    _maybe_download_timing_file, (timing_file_blob,)
-                )
-            )
-        for future in futures:
-            future.get()
-    print("Done downloading")
-
-
-if __name__ == "__main__":
-    if len(sys.argv) != 2:
-        logging.fatal("Expected usage is cache_lit_timing_files.py <upload/download>")
-        sys.exit(1)
-    action = sys.argv[1]
-    storage_client = storage.Client()
-    bucket_name = os.environ["CACHE_GCS_BUCKET"]
-    if action == "download":
-        download_timing_files(storage_client, bucket_name)
-    elif action == "upload":
-        upload_timing_files(storage_client, bucket_name)
-    else:
-        logging.fatal("Expected usage is cache_lit_timing_files.py <upload/download>")
-        sys.exit(1)
diff --git a/.ci/compute_projects.py b/.ci/compute_projects.py
deleted file mode 100644
index 8567552fa25a6..0000000000000
--- a/.ci/compute_projects.py
+++ /dev/null
@@ -1,338 +0,0 @@
-# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
-# See https://llvm.org/LICENSE.txt for license information.
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
-"""Computes the list of projects that need to be tested from a diff.
-
-Given the list of modified files, works out which projects, runtimes, and
-check targets premerge CI needs to build and test, and emits them as
-environment-variable assignments.
-"""
-
-from collections.abc import Set
-import pathlib
-import platform
-import sys
-
-# This mapping lists out the dependencies for each project. These should be
-# direct dependencies. The code will handle transitive dependencies. Some
-# projects might have optional dependencies depending upon how they are built.
-# The dependencies listed here should be the dependencies required for the
-# configuration built/tested in the premerge CI.
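-# As an illustrative example of the transitive expansion: a change to flang
-# starts from its direct dependencies below ({"llvm", "clang"});
-# _add_dependencies then follows clang's own entry ({"llvm"}) until a fixed
-# point is reached, so the build set becomes {flang, clang, llvm}.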
-PROJECT_DEPENDENCIES = { - "llvm": set(), - "clang": {"llvm"}, - "CIR": {"clang", "mlir"}, - "bolt": {"clang", "lld", "llvm"}, - "clang-tools-extra": {"clang", "llvm"}, - "compiler-rt": {"clang", "lld"}, - "libc": {"clang", "lld"}, - "openmp": {"clang", "lld"}, - "flang": {"llvm", "clang"}, - "lldb": {"llvm", "clang"}, - "libclc": {"llvm", "clang"}, - "lld": {"llvm"}, - "mlir": {"llvm"}, - "polly": {"llvm"}, -} - -# This mapping describes the additional projects that should be tested when a -# specific project is touched. We enumerate them specifically rather than -# just invert the dependencies list to give more control over what exactly is -# tested. -DEPENDENTS_TO_TEST = { - "llvm": { - "bolt", - "clang", - "clang-tools-extra", - "lld", - "lldb", - "mlir", - "polly", - "flang", - }, - "lld": {"bolt", "cross-project-tests"}, - "clang": {"clang-tools-extra", "cross-project-tests", "lldb"}, - "mlir": {"flang"}, - # Test everything if ci scripts are changed. - ".ci": { - "llvm", - "clang", - "CIR", - "lld", - "lldb", - "bolt", - "clang-tools-extra", - "mlir", - "polly", - "flang", - "libclc", - "openmp", - }, -} - -# This mapping describes runtimes that should be enabled for a specific project, -# but not necessarily run for testing. The only case of this currently is lldb -# which needs some runtimes enabled for tests. -DEPENDENT_RUNTIMES_TO_BUILD = {"lldb": {"libcxx", "libcxxabi", "libunwind"}} - -# This mapping describes runtimes that should be tested when the key project is -# touched. -DEPENDENT_RUNTIMES_TO_TEST = { - "clang": {"compiler-rt"}, - "clang-tools-extra": {"libc"}, - "libc": {"libc"}, - "compiler-rt": {"compiler-rt"}, - ".ci": {"compiler-rt", "libc"}, -} -DEPENDENT_RUNTIMES_TO_TEST_NEEDS_RECONFIG = { - "llvm": {"libcxx", "libcxxabi", "libunwind"}, - "clang": {"libcxx", "libcxxabi", "libunwind"}, - ".ci": {"libcxx", "libcxxabi", "libunwind"}, -} - -EXCLUDE_LINUX = { - "cross-project-tests", # TODO(issues/132796): Tests are failing. - "openmp", # https://github.com/google/llvm-premerge-checks/issues/410 -} - -EXCLUDE_WINDOWS = { - "cross-project-tests", # TODO(issues/132797): Tests are failing. - "compiler-rt", # TODO(issues/132798): Tests take excessive time. - "openmp", # TODO(issues/132799): Does not detect perl installation. - "libc", # No Windows Support. - "lldb", # TODO(issues/132800): Needs environment setup. - "bolt", # No Windows Support. - "libcxx", - "libcxxabi", - "libunwind", -} - -# These are projects that we should test if the project itself is changed but -# where testing is not yet stable enough for it to be enabled on changes to -# dependencies. -EXCLUDE_DEPENDENTS_WINDOWS = { - "flang", # TODO(issues/132803): Flang is not stable. 
-}
-
-EXCLUDE_MAC = {
-    "bolt",
-    "compiler-rt",
-    "cross-project-tests",
-    "flang",
-    "libc",
-    "lldb",
-    "openmp",
-    "polly",
-    "libcxx",
-    "libcxxabi",
-    "libunwind",
-}
-
-PROJECT_CHECK_TARGETS = {
-    "clang-tools-extra": "check-clang-tools",
-    "compiler-rt": "check-compiler-rt",
-    "cross-project-tests": "check-cross-project",
-    "libcxx": "check-cxx",
-    "libcxxabi": "check-cxxabi",
-    "libunwind": "check-unwind",
-    "llvm": "check-llvm",
-    "clang": "check-clang",
-    "CIR": "check-clang-cir",
-    "bolt": "check-bolt",
-    "lld": "check-lld",
-    "flang": "check-flang",
-    "libc": "check-libc",
-    "lldb": "check-lldb",
-    "mlir": "check-mlir",
-    "openmp": "check-openmp",
-    "polly": "check-polly",
-}
-
-RUNTIMES = {"libcxx", "libcxxabi", "libunwind", "compiler-rt", "libc"}
-
-# Meta projects are projects that need explicit handling but do not reside
-# in their own top level folder. To add a meta project, the start of the path
-# for the metaproject should be mapped to the name of the project below.
-# Multiple paths can map to the same metaproject.
-META_PROJECTS = {
-    ("clang", "lib", "CIR"): "CIR",
-    ("clang", "test", "CIR"): "CIR",
-    ("clang", "include", "clang", "CIR"): "CIR",
-    ("*", "docs"): "docs",
-    ("llvm", "utils", "gn"): "gn",
-    (".github", "workflows", "premerge.yaml"): ".ci",
-    ("third-party",): ".ci",
-}
-
-# Projects that should not run any tests. These need to be metaprojects.
-SKIP_PROJECTS = ["docs", "gn"]
-
-
-def _add_dependencies(projects: Set[str], runtimes: Set[str]) -> Set[str]:
-    projects_with_dependents = set(projects)
-    current_projects_count = 0
-    while current_projects_count != len(projects_with_dependents):
-        current_projects_count = len(projects_with_dependents)
-        for project in list(projects_with_dependents):
-            if project in PROJECT_DEPENDENCIES:
-                projects_with_dependents.update(PROJECT_DEPENDENCIES[project])
-    for runtime in runtimes:
-        if runtime in PROJECT_DEPENDENCIES:
-            projects_with_dependents.update(PROJECT_DEPENDENCIES[runtime])
-    return projects_with_dependents
-
-
-def _exclude_projects(current_projects: Set[str], platform: str) -> Set[str]:
-    if platform == "Linux":
-        to_exclude = EXCLUDE_LINUX
-    elif platform == "Windows":
-        to_exclude = EXCLUDE_WINDOWS
-    elif platform == "Darwin":
-        to_exclude = EXCLUDE_MAC
-    else:
-        raise ValueError(f"Unexpected platform: {platform}")
-    return current_projects.difference(to_exclude)
-
-
-def _compute_projects_to_test(modified_projects: Set[str], platform: str) -> Set[str]:
-    projects_to_test = set()
-    for modified_project in modified_projects:
-        if modified_project in RUNTIMES:
-            continue
-        # Skip all projects where we cannot run tests.
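-        # (e.g. libclc has no PROJECT_CHECK_TARGETS entry, so it is built
-        # when needed but never added to the test set.)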
- if modified_project in PROJECT_CHECK_TARGETS: - projects_to_test.add(modified_project) - if modified_project not in DEPENDENTS_TO_TEST: - continue - for dependent_project in DEPENDENTS_TO_TEST[modified_project]: - if ( - platform == "Windows" - and dependent_project in EXCLUDE_DEPENDENTS_WINDOWS - ): - continue - projects_to_test.add(dependent_project) - projects_to_test = _exclude_projects(projects_to_test, platform) - return projects_to_test - - -def _compute_projects_to_build( - projects_to_test: Set[str], runtimes: Set[str] -) -> Set[str]: - return _add_dependencies(projects_to_test, runtimes) - - -def _compute_project_check_targets(projects_to_test: Set[str]) -> Set[str]: - check_targets = set() - for project_to_test in projects_to_test: - if project_to_test in PROJECT_CHECK_TARGETS: - check_targets.add(PROJECT_CHECK_TARGETS[project_to_test]) - return check_targets - - -def _compute_runtimes_to_test(modified_projects: Set[str], platform: str) -> Set[str]: - runtimes_to_test = set() - for modified_project in modified_projects: - if modified_project in DEPENDENT_RUNTIMES_TO_TEST: - runtimes_to_test.update(DEPENDENT_RUNTIMES_TO_TEST[modified_project]) - return _exclude_projects(runtimes_to_test, platform) - - -def _compute_runtimes_to_test_needs_reconfig( - modified_projects: Set[str], platform: str -) -> Set[str]: - runtimes_to_test = set() - for modified_project in modified_projects: - if modified_project in DEPENDENT_RUNTIMES_TO_TEST_NEEDS_RECONFIG: - runtimes_to_test.update( - DEPENDENT_RUNTIMES_TO_TEST_NEEDS_RECONFIG[modified_project] - ) - return _exclude_projects(runtimes_to_test, platform) - - -def _compute_runtimes_to_build( - runtimes_to_test: Set[str], modified_projects: Set[str], platform: str -) -> Set[str]: - runtimes_to_build = set(runtimes_to_test) - for modified_project in modified_projects: - if modified_project in DEPENDENT_RUNTIMES_TO_BUILD: - runtimes_to_build.update(DEPENDENT_RUNTIMES_TO_BUILD[modified_project]) - return _exclude_projects(runtimes_to_build, platform) - - -def _path_matches(matcher: tuple[str], file_path: tuple[str]) -> bool: - if len(file_path) < len(matcher): - return False - for match_part, file_part in zip(matcher, file_path): - if match_part == "*" or file_part == "*": - continue - if match_part != file_part: - return False - return True - - -def _get_modified_projects_for_file(modified_file: str) -> Set[str]: - modified_projects = set() - path_parts = pathlib.Path(modified_file).parts - for meta_project_files in META_PROJECTS.keys(): - if _path_matches(meta_project_files, path_parts): - meta_project = META_PROJECTS[meta_project_files] - if meta_project in SKIP_PROJECTS: - return set() - modified_projects.add(meta_project) - modified_projects.add(pathlib.Path(modified_file).parts[0]) - return modified_projects - - -def _get_modified_projects(modified_files: list[str]) -> Set[str]: - modified_projects = set() - for modified_file in modified_files: - modified_projects.update(_get_modified_projects_for_file(modified_file)) - return modified_projects - - -def get_env_variables(modified_files: list[str], platform: str) -> Set[str]: - modified_projects = _get_modified_projects(modified_files) - projects_to_test = _compute_projects_to_test(modified_projects, platform) - runtimes_to_test = _compute_runtimes_to_test(modified_projects, platform) - runtimes_to_test_needs_reconfig = _compute_runtimes_to_test_needs_reconfig( - modified_projects, platform - ) - runtimes_to_build = _compute_runtimes_to_build( - runtimes_to_test | 
runtimes_to_test_needs_reconfig, modified_projects, platform - ) - projects_to_build = _compute_projects_to_build(projects_to_test, runtimes_to_build) - projects_check_targets = _compute_project_check_targets(projects_to_test) - runtimes_check_targets = _compute_project_check_targets(runtimes_to_test) - runtimes_check_targets_needs_reconfig = _compute_project_check_targets( - runtimes_to_test_needs_reconfig - ) - - # CIR is used as a pseudo-project in this script. It is built as part of the - # clang build, but it requires an explicit option to enable. We set that - # option here, and remove it from the projects_to_build list. - enable_cir = "ON" if "CIR" in projects_to_build else "OFF" - projects_to_build.discard("CIR") - - # We use a semicolon to separate the projects/runtimes as they get passed - # to the CMake invocation and thus we need to use the CMake list separator - # (;). We use spaces to separate the check targets as they end up getting - # passed to ninja. - return { - "projects_to_build": ";".join(sorted(projects_to_build)), - "project_check_targets": " ".join(sorted(projects_check_targets)), - "runtimes_to_build": ";".join(sorted(runtimes_to_build)), - "runtimes_check_targets": " ".join(sorted(runtimes_check_targets)), - "runtimes_check_targets_needs_reconfig": " ".join( - sorted(runtimes_check_targets_needs_reconfig) - ), - "enable_cir": enable_cir, - } - - -if __name__ == "__main__": - current_platform = platform.system() - if len(sys.argv) == 2: - current_platform = sys.argv[1] - env_variables = get_env_variables(sys.stdin.readlines(), current_platform) - for env_variable in env_variables: - print(f"{env_variable}='{env_variables[env_variable]}'") diff --git a/.ci/compute_projects_test.py b/.ci/compute_projects_test.py deleted file mode 100644 index 7d780b51ca5d1..0000000000000 --- a/.ci/compute_projects_test.py +++ /dev/null @@ -1,418 +0,0 @@ -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. 
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -"""Tests for compute_projects.py""" - -import unittest - -import compute_projects - - -class TestComputeProjects(unittest.TestCase): - def test_llvm(self): - env_variables = compute_projects.get_env_variables( - ["llvm/CMakeLists.txt"], "Linux" - ) - self.assertEqual( - env_variables["projects_to_build"], - "bolt;clang;clang-tools-extra;flang;lld;lldb;llvm;mlir;polly", - ) - self.assertEqual( - env_variables["project_check_targets"], - "check-bolt check-clang check-clang-tools check-flang check-lld check-lldb check-llvm check-mlir check-polly", - ) - self.assertEqual( - env_variables["runtimes_to_build"], "libcxx;libcxxabi;libunwind" - ) - self.assertEqual( - env_variables["runtimes_check_targets"], - "", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "check-cxx check-cxxabi check-unwind", - ) - - def test_llvm_windows(self): - env_variables = compute_projects.get_env_variables( - ["llvm/CMakeLists.txt"], "Windows" - ) - self.assertEqual( - env_variables["projects_to_build"], - "clang;clang-tools-extra;lld;llvm;mlir;polly", - ) - self.assertEqual( - env_variables["project_check_targets"], - "check-clang check-clang-tools check-lld check-llvm check-mlir check-polly", - ) - self.assertEqual(env_variables["runtimes_to_build"], "") - self.assertEqual( - env_variables["runtimes_check_targets"], - "", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "", - ) - - def test_llvm_mac(self): - env_variables = compute_projects.get_env_variables( - ["llvm/CMakeLists.txt"], "Darwin" - ) - self.assertEqual( - env_variables["projects_to_build"], - "clang;clang-tools-extra;lld;llvm;mlir", - ) - self.assertEqual( - env_variables["project_check_targets"], - "check-clang check-clang-tools check-lld check-llvm check-mlir", - ) - self.assertEqual(env_variables["runtimes_to_build"], "") - self.assertEqual( - env_variables["runtimes_check_targets"], - "", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "", - ) - - def test_clang(self): - env_variables = compute_projects.get_env_variables( - ["clang/CMakeLists.txt"], "Linux" - ) - self.assertEqual( - env_variables["projects_to_build"], - "clang;clang-tools-extra;lld;lldb;llvm", - ) - self.assertEqual( - env_variables["project_check_targets"], - "check-clang check-clang-tools check-lldb", - ) - self.assertEqual( - env_variables["runtimes_to_build"], "compiler-rt;libcxx;libcxxabi;libunwind" - ) - self.assertEqual( - env_variables["runtimes_check_targets"], - "check-compiler-rt", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "check-cxx check-cxxabi check-unwind", - ) - self.assertEqual( - env_variables["enable_cir"], - "OFF", - ) - - def test_clang_windows(self): - env_variables = compute_projects.get_env_variables( - ["clang/CMakeLists.txt"], "Windows" - ) - self.assertEqual( - env_variables["projects_to_build"], "clang;clang-tools-extra;llvm" - ) - self.assertEqual( - env_variables["project_check_targets"], "check-clang check-clang-tools" - ) - self.assertEqual(env_variables["runtimes_to_build"], "") - self.assertEqual( - env_variables["runtimes_check_targets"], - "", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "", - ) - self.assertEqual(env_variables["enable_cir"], "OFF") - - def test_compiler_rt(self): - env_variables = compute_projects.get_env_variables( - ["compiler-rt/lib/asan/asan_allocator.cpp"], "Linux" - ) - 
self.assertEqual(
-            env_variables["projects_to_build"],
-            "clang;lld",
-        )
-        self.assertEqual(
-            env_variables["project_check_targets"],
-            "",
-        )
-        self.assertEqual(env_variables["runtimes_to_build"], "compiler-rt")
-        self.assertEqual(
-            env_variables["runtimes_check_targets"],
-            "check-compiler-rt",
-        )
-        self.assertEqual(
-            env_variables["runtimes_check_targets_needs_reconfig"],
-            "",
-        )
-        self.assertEqual(
-            env_variables["enable_cir"],
-            "OFF",
-        )
-
-    def test_cir(self):
-        env_variables = compute_projects.get_env_variables(
-            ["clang/lib/CIR/CMakeLists.txt"], "Linux"
-        )
-        self.assertEqual(
-            env_variables["projects_to_build"],
-            "clang;clang-tools-extra;lld;lldb;llvm;mlir",
-        )
-        self.assertEqual(
-            env_variables["project_check_targets"],
-            "check-clang check-clang-cir check-clang-tools check-lldb",
-        )
-        self.assertEqual(
-            env_variables["runtimes_to_build"], "compiler-rt;libcxx;libcxxabi;libunwind"
-        )
-        self.assertEqual(
-            env_variables["runtimes_check_targets"],
-            "check-compiler-rt",
-        )
-        self.assertEqual(
-            env_variables["runtimes_check_targets_needs_reconfig"],
-            "check-cxx check-cxxabi check-unwind",
-        )
-        self.assertEqual(env_variables["enable_cir"], "ON")
-
-    def test_bolt(self):
-        env_variables = compute_projects.get_env_variables(
-            ["bolt/CMakeLists.txt"], "Linux"
-        )
-        self.assertEqual(env_variables["projects_to_build"], "bolt;clang;lld;llvm")
-        self.assertEqual(env_variables["project_check_targets"], "check-bolt")
-        self.assertEqual(env_variables["runtimes_to_build"], "")
-        self.assertEqual(env_variables["runtimes_check_targets"], "")
-        self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "")
-
-    def test_mlir(self):
-        env_variables = compute_projects.get_env_variables(
-            ["mlir/CMakeLists.txt"], "Linux"
-        )
-        self.assertEqual(env_variables["projects_to_build"], "clang;flang;llvm;mlir")
-        self.assertEqual(
-            env_variables["project_check_targets"], "check-flang check-mlir"
-        )
-        self.assertEqual(env_variables["runtimes_to_build"], "")
-        self.assertEqual(env_variables["runtimes_check_targets"], "")
-        self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "")
-        self.assertEqual(env_variables["enable_cir"], "OFF")
-
-    def test_flang(self):
-        env_variables = compute_projects.get_env_variables(
-            ["flang/CMakeLists.txt"], "Linux"
-        )
-        self.assertEqual(env_variables["projects_to_build"], "clang;flang;llvm")
-        self.assertEqual(env_variables["project_check_targets"], "check-flang")
-        self.assertEqual(env_variables["runtimes_to_build"], "")
-        self.assertEqual(env_variables["runtimes_check_targets"], "")
-        self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "")
-        self.assertEqual(env_variables["enable_cir"], "OFF")
-
-    def test_invalid_subproject(self):
-        env_variables = compute_projects.get_env_variables(
-            ["llvm-libgcc/CMakeLists.txt"], "Linux"
-        )
-        self.assertEqual(env_variables["projects_to_build"], "")
-        self.assertEqual(env_variables["project_check_targets"], "")
-        self.assertEqual(env_variables["runtimes_to_build"], "")
-
self.assertEqual(env_variables["runtimes_check_targets"], "") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_top_level_file(self): - env_variables = compute_projects.get_env_variables(["README.md"], "Linux") - self.assertEqual(env_variables["projects_to_build"], "") - self.assertEqual(env_variables["project_check_targets"], "") - self.assertEqual(env_variables["runtimes_to_build"], "") - self.assertEqual(env_variables["runtimes_check_targets"], "") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_exclude_libcxx_in_projects(self): - env_variables = compute_projects.get_env_variables( - ["libcxx/CMakeLists.txt"], "Linux" - ) - self.assertEqual(env_variables["projects_to_build"], "") - self.assertEqual(env_variables["project_check_targets"], "") - self.assertEqual(env_variables["runtimes_to_build"], "") - self.assertEqual(env_variables["runtimes_check_targets"], "") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_include_libc_in_runtimes(self): - env_variables = compute_projects.get_env_variables( - ["libc/CMakeLists.txt"], "Linux" - ) - self.assertEqual(env_variables["projects_to_build"], "clang;lld") - self.assertEqual(env_variables["project_check_targets"], "") - self.assertEqual(env_variables["runtimes_to_build"], "libc") - self.assertEqual(env_variables["runtimes_check_targets"], "check-libc") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_exclude_docs(self): - env_variables = compute_projects.get_env_variables( - ["llvm/docs/CIBestPractices.rst"], "Linux" - ) - self.assertEqual(env_variables["projects_to_build"], "") - self.assertEqual(env_variables["project_check_targets"], "") - self.assertEqual(env_variables["runtimes_to_build"], "") - self.assertEqual(env_variables["runtimes_check_targets"], "") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_exclude_gn(self): - env_variables = compute_projects.get_env_variables( - ["llvm/utils/gn/build/BUILD.gn"], "Linux" - ) - self.assertEqual(env_variables["projects_to_build"], "") - self.assertEqual(env_variables["project_check_targets"], "") - self.assertEqual(env_variables["runtimes_to_build"], "") - self.assertEqual(env_variables["runtimes_check_targets"], "") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_ci(self): - env_variables = compute_projects.get_env_variables( - [".ci/compute_projects.py"], "Linux" - ) - self.assertEqual( - env_variables["projects_to_build"], - "bolt;clang;clang-tools-extra;flang;libclc;lld;lldb;llvm;mlir;polly", - ) - self.assertEqual( - env_variables["project_check_targets"], - "check-bolt check-clang check-clang-cir check-clang-tools check-flang check-lld check-lldb check-llvm check-mlir check-polly", - ) - self.assertEqual( - env_variables["runtimes_to_build"], - "compiler-rt;libc;libcxx;libcxxabi;libunwind", - ) - self.assertEqual( - env_variables["runtimes_check_targets"], - "check-compiler-rt check-libc", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "check-cxx check-cxxabi check-unwind", - ) - - def test_windows_ci(self): - env_variables = compute_projects.get_env_variables( - [".ci/compute_projects.py"], "Windows" - ) - self.assertEqual( - env_variables["projects_to_build"], - "clang;clang-tools-extra;libclc;lld;llvm;mlir;polly", - ) - self.assertEqual( - env_variables["project_check_targets"], - "check-clang 
check-clang-cir check-clang-tools check-lld check-llvm check-mlir check-polly", - ) - self.assertEqual( - env_variables["runtimes_to_build"], - "", - ) - self.assertEqual( - env_variables["runtimes_check_targets"], - "", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "", - ) - - def test_lldb(self): - env_variables = compute_projects.get_env_variables( - ["lldb/CMakeLists.txt"], "Linux" - ) - self.assertEqual(env_variables["projects_to_build"], "clang;lldb;llvm") - self.assertEqual(env_variables["project_check_targets"], "check-lldb") - self.assertEqual( - env_variables["runtimes_to_build"], "libcxx;libcxxabi;libunwind" - ) - self.assertEqual(env_variables["runtimes_check_targets"], "") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_clang_tools_extra(self): - env_variables = compute_projects.get_env_variables( - ["clang-tools-extra/CMakeLists.txt"], "Linux" - ) - self.assertEqual( - env_variables["projects_to_build"], "clang;clang-tools-extra;lld;llvm" - ) - self.assertEqual(env_variables["project_check_targets"], "check-clang-tools") - self.assertEqual(env_variables["runtimes_to_build"], "libc") - self.assertEqual(env_variables["runtimes_check_targets"], "check-libc") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_premerge_workflow(self): - env_variables = compute_projects.get_env_variables( - [".github/workflows/premerge.yaml"], "Linux" - ) - self.assertEqual( - env_variables["projects_to_build"], - "bolt;clang;clang-tools-extra;flang;libclc;lld;lldb;llvm;mlir;polly", - ) - self.assertEqual( - env_variables["project_check_targets"], - "check-bolt check-clang check-clang-cir check-clang-tools check-flang check-lld check-lldb check-llvm check-mlir check-polly", - ) - self.assertEqual( - env_variables["runtimes_to_build"], - "compiler-rt;libc;libcxx;libcxxabi;libunwind", - ) - self.assertEqual( - env_variables["runtimes_check_targets"], - "check-compiler-rt check-libc", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "check-cxx check-cxxabi check-unwind", - ) - - def test_other_github_workflow(self): - env_variables = compute_projects.get_env_variables( - [".github/workflows/docs.yml"], "Linux" - ) - self.assertEqual(env_variables["projects_to_build"], "") - self.assertEqual(env_variables["project_check_targets"], "") - self.assertEqual(env_variables["runtimes_to_build"], "") - self.assertEqual(env_variables["runtimes_check_targets"], "") - self.assertEqual(env_variables["runtimes_check_targets_needs_reconfig"], "") - - def test_third_party_benchmark(self): - env_variables = compute_projects.get_env_variables( - ["third-party/benchmark/CMakeLists.txt"], "Linux" - ) - self.assertEqual( - env_variables["projects_to_build"], - "bolt;clang;clang-tools-extra;flang;libclc;lld;lldb;llvm;mlir;polly", - ) - self.assertEqual( - env_variables["project_check_targets"], - "check-bolt check-clang check-clang-cir check-clang-tools check-flang check-lld check-lldb check-llvm check-mlir check-polly", - ) - self.assertEqual( - env_variables["runtimes_to_build"], - "compiler-rt;libc;libcxx;libcxxabi;libunwind", - ) - self.assertEqual( - env_variables["runtimes_check_targets"], - "check-compiler-rt check-libc", - ) - self.assertEqual( - env_variables["runtimes_check_targets_needs_reconfig"], - "check-cxx check-cxxabi check-unwind", - ) - - -if __name__ == "__main__": - unittest.main() diff --git a/.ci/generate_test_report_github.py 
b/.ci/generate_test_report_github.py deleted file mode 100644 index 7242264723cbf..0000000000000 --- a/.ci/generate_test_report_github.py +++ /dev/null @@ -1,28 +0,0 @@ -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -"""Script to generate a build report for Github.""" - -import argparse -import platform - -import generate_test_report_lib - -PLATFORM_TITLES = { - "Windows": ":window: Windows x64 Test Results", - "Linux": ":penguin: Linux x64 Test Results", -} - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("return_code", help="The build's return code.", type=int) - parser.add_argument( - "build_test_logs", help="Paths to JUnit report files and ninja logs.", nargs="*" - ) - args = parser.parse_args() - - report = generate_test_report_lib.generate_report_from_files( - PLATFORM_TITLES[platform.system()], args.return_code, args.build_test_logs - ) - - print(report) diff --git a/.ci/generate_test_report_lib.py b/.ci/generate_test_report_lib.py deleted file mode 100644 index d868c08ab69ef..0000000000000 --- a/.ci/generate_test_report_lib.py +++ /dev/null @@ -1,269 +0,0 @@ -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -"""Library to parse JUnit XML files and return a markdown report.""" - -from junitparser import JUnitXml, Failure - -SEE_BUILD_FILE_STR = "Download the build's log file to see the details." -UNRELATED_FAILURES_STR = ( - "If these failures are unrelated to your changes (for example " - "tests are broken or flaky at HEAD), please open an issue at " - "https://github.com/llvm/llvm-project/issues and add the " - "`infrastructure` label." -) -# The maximum number of lines to pull from a ninja failure. -NINJA_LOG_SIZE_THRESHOLD = 500 - - -def _parse_ninja_log(ninja_log: list[str]) -> list[tuple[str, str]]: - """Parses an individual ninja log.""" - failures = [] - index = 0 - while index < len(ninja_log): - while index < len(ninja_log) and not ninja_log[index].startswith("FAILED:"): - index += 1 - if index == len(ninja_log): - # We hit the end of the log without finding a build failure, go to - # the next log. - return failures - # We are trying to parse cases like the following: - # - # [4/5] test/4.stamp - # FAILED: touch test/4.stamp - # touch test/4.stamp - # - # index will point to the line that starts with Failed:. The progress - # indicator is the line before this ([4/5] test/4.stamp) and contains a pretty - # printed version of the target being built (test/4.stamp). We use this line - # and remove the progress information to get a succinct name for the target. - failing_action = ninja_log[index - 1].split("] ")[1] - failure_log = [] - while ( - index < len(ninja_log) - and not ninja_log[index].startswith("[") - and not ninja_log[index].startswith("ninja: build stopped:") - and len(failure_log) < NINJA_LOG_SIZE_THRESHOLD - ): - failure_log.append(ninja_log[index]) - index += 1 - failures.append((failing_action, "\n".join(failure_log))) - return failures - - -def find_failure_in_ninja_logs(ninja_logs: list[list[str]]) -> list[tuple[str, str]]: - """Extracts failure messages from ninja output. - - This function takes stdout/stderr from ninja in the form of a list of files - represented as a list of lines. 
This function then returns tuples containing - the name of the target and the error message. - - Args: - ninja_logs: A list of files in the form of a list of lines representing the log - files captured from ninja. - - Returns: - A list of tuples. The first string is the name of the target that failed. The - second string is the error message. - """ - failures = [] - for ninja_log in ninja_logs: - log_failures = _parse_ninja_log(ninja_log) - failures.extend(log_failures) - return failures - - -def _format_ninja_failures(ninja_failures: list[tuple[str, str]]) -> list[str]: - """Formats ninja failures into summary views for the report.""" - output = [] - for build_failure in ninja_failures: - failed_action, failure_message = build_failure - output.extend( - [ - "
", - f"{failed_action}", - "", - "```", - failure_message, - "```", - "
", - ] - ) - return output - - -# Set size_limit to limit the byte size of the report. The default is 1MB as this -# is the most that can be put into an annotation. If the generated report exceeds -# this limit and failures are listed, it will be generated again without failures -# listed. This minimal report will always fit into an annotation. -# If include failures is False, total number of test will be reported but their names -# and output will not be. -def generate_report( - title, - return_code, - junit_objects, - ninja_logs: list[list[str]], - size_limit=1024 * 1024, - list_failures=True, -): - failures = {} - tests_run = 0 - tests_skipped = 0 - tests_failed = 0 - - for results in junit_objects: - for testsuite in results: - tests_run += testsuite.tests - tests_skipped += testsuite.skipped - tests_failed += testsuite.failures - - for test in testsuite: - if ( - not test.is_passed - and test.result - and isinstance(test.result[0], Failure) - ): - if failures.get(testsuite.name) is None: - failures[testsuite.name] = [] - failures[testsuite.name].append( - (test.classname + "/" + test.name, test.result[0].text) - ) - - report = [f"# {title}", ""] - - if tests_run == 0: - if return_code == 0: - report.extend( - [ - "The build succeeded and no tests ran. This is expected in some " - "build configurations." - ] - ) - else: - ninja_failures = find_failure_in_ninja_logs(ninja_logs) - if not ninja_failures: - report.extend( - [ - "The build failed before running any tests. Detailed " - "information about the build failure could not be " - "automatically obtained.", - "", - SEE_BUILD_FILE_STR, - "", - UNRELATED_FAILURES_STR, - ] - ) - else: - report.extend( - [ - "The build failed before running any tests. Click on a " - "failure below to see the details.", - "", - ] - ) - report.extend(_format_ninja_failures(ninja_failures)) - report.extend( - [ - "", - UNRELATED_FAILURES_STR, - ] - ) - return "\n".join(report) - - tests_passed = tests_run - tests_skipped - tests_failed - - def plural(num_tests): - return "test" if num_tests == 1 else "tests" - - if tests_passed: - report.append(f"* {tests_passed} {plural(tests_passed)} passed") - if tests_skipped: - report.append(f"* {tests_skipped} {plural(tests_skipped)} skipped") - if tests_failed: - report.append(f"* {tests_failed} {plural(tests_failed)} failed") - - if not list_failures: - report.extend( - [ - "", - "Failed tests and their output was too large to report. " - + SEE_BUILD_FILE_STR, - ] - ) - elif failures: - report.extend( - ["", "## Failed Tests", "(click on a test name to see its output)"] - ) - - for testsuite_name, failures in failures.items(): - report.extend(["", f"### {testsuite_name}"]) - for name, output in failures: - report.extend( - [ - "
", - f"{name}", - "", - "```", - output, - "```", - "
", - ] - ) - elif return_code != 0: - # No tests failed but the build was in a failed state. Bring this to the user's - # attention. - ninja_failures = find_failure_in_ninja_logs(ninja_logs) - if not ninja_failures: - report.extend( - [ - "", - "All tests passed but another part of the build **failed**. " - "Information about the build failure could not be automatically " - "obtained.", - "", - SEE_BUILD_FILE_STR, - ] - ) - else: - report.extend( - [ - "", - "All tests passed but another part of the build **failed**. Click on " - "a failure below to see the details.", - "", - ] - ) - report.extend(_format_ninja_failures(ninja_failures)) - - if failures or return_code != 0: - report.extend(["", UNRELATED_FAILURES_STR]) - - report = "\n".join(report) - if len(report.encode("utf-8")) > size_limit: - return generate_report( - title, - return_code, - junit_objects, - size_limit, - list_failures=False, - ) - - return report - - -def generate_report_from_files(title, return_code, build_log_files): - junit_files = [ - junit_file for junit_file in build_log_files if junit_file.endswith(".xml") - ] - ninja_log_files = [ - ninja_log for ninja_log in build_log_files if ninja_log.endswith(".log") - ] - ninja_logs = [] - for ninja_log_file in ninja_log_files: - with open(ninja_log_file, "r") as ninja_log_file_handle: - ninja_logs.append( - [log_line.strip() for log_line in ninja_log_file_handle.readlines()] - ) - return generate_report( - title, return_code, [JUnitXml.fromfile(p) for p in junit_files], ninja_logs - ) diff --git a/.ci/generate_test_report_lib_test.py b/.ci/generate_test_report_lib_test.py deleted file mode 100644 index 466a8234776dc..0000000000000 --- a/.ci/generate_test_report_lib_test.py +++ /dev/null @@ -1,778 +0,0 @@ -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# To run these tests: -# python -m unittest generate_test_report_lib_test.py - -import unittest -from io import StringIO -from textwrap import dedent -import tempfile -import os - -from junitparser import JUnitXml - -import generate_test_report_lib - - -def junit_from_xml(xml): - return JUnitXml.fromfile(StringIO(xml)) - - -class TestReports(unittest.TestCase): - def test_find_failure_ninja_logs(self): - failures = generate_test_report_lib.find_failure_in_ninja_logs( - [ - [ - "[1/5] test/1.stamp", - "[2/5] test/2.stamp", - "[3/5] test/3.stamp", - "[4/5] test/4.stamp", - "FAILED: touch test/4.stamp", - "Wow! This system is really broken!", - "[5/5] test/5.stamp", - ], - ] - ) - self.assertEqual(len(failures), 1) - self.assertEqual( - failures[0], - ( - "test/4.stamp", - dedent( - """\ - FAILED: touch test/4.stamp - Wow! This system is really broken!""" - ), - ), - ) - - def test_no_failure_ninja_log(self): - failures = generate_test_report_lib.find_failure_in_ninja_logs( - [ - [ - "[1/3] test/1.stamp", - "[2/3] test/2.stamp", - "[3/3] test/3.stamp", - ] - ] - ) - self.assertEqual(failures, []) - - def test_ninja_log_end(self): - failures = generate_test_report_lib.find_failure_in_ninja_logs( - [ - [ - "[1/3] test/1.stamp", - "[2/3] test/2.stamp", - "[3/3] test/3.stamp", - "FAILED: touch test/3.stamp", - "Wow! This system is really broken!", - "ninja: build stopped: subcommand failed.", - ] - ] - ) - self.assertEqual(len(failures), 1) - self.assertEqual( - failures[0], - ( - "test/3.stamp", - dedent( - """\ - FAILED: touch test/3.stamp - Wow! 
This system is really broken!""" - ), - ), - ) - - def test_ninja_log_multiple_failures(self): - failures = generate_test_report_lib.find_failure_in_ninja_logs( - [ - [ - "[1/5] test/1.stamp", - "[2/5] test/2.stamp", - "FAILED: touch test/2.stamp", - "Wow! This system is really broken!", - "[3/5] test/3.stamp", - "[4/5] test/4.stamp", - "FAILED: touch test/4.stamp", - "Wow! This system is maybe broken!", - "[5/5] test/5.stamp", - ] - ] - ) - self.assertEqual(len(failures), 2) - self.assertEqual( - failures[0], - ( - "test/2.stamp", - dedent( - """\ - FAILED: touch test/2.stamp - Wow! This system is really broken!""" - ), - ), - ) - self.assertEqual( - failures[1], - ( - "test/4.stamp", - dedent( - """\ - FAILED: touch test/4.stamp - Wow! This system is maybe broken!""" - ), - ), - ) - - def test_title_only(self): - self.assertEqual( - generate_test_report_lib.generate_report("Foo", 0, [], []), - dedent( - """\ - # Foo - - The build succeeded and no tests ran. This is expected in some build configurations.""" - ), - ) - - def test_title_only_failure(self): - self.assertEqual( - generate_test_report_lib.generate_report("Foo", 1, [], []), - dedent( - """\ - # Foo - - The build failed before running any tests. Detailed information about the build failure could not be automatically obtained. - - Download the build's log file to see the details. - - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ), - ) - - def test_title_only_failure_ninja_log(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [], - [ - [ - "[1/5] test/1.stamp", - "[2/5] test/2.stamp", - "[3/5] test/3.stamp", - "[4/5] test/4.stamp", - "FAILED: test/4.stamp", - "touch test/4.stamp", - "Wow! Risk!", - "[5/5] test/5.stamp", - ] - ], - ), - dedent( - """\ - # Foo - - The build failed before running any tests. Click on a failure below to see the details. - -
<details> - <summary>test/4.stamp</summary> - - ``` - FAILED: test/4.stamp - touch test/4.stamp - Wow! Risk! - ``` - </details>
- - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ), - ) - - def test_no_tests_in_testsuite(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - """ - ) - ) - ], - [], - ), - dedent( - """\ - # Foo - - The build failed before running any tests. Detailed information about the build failure could not be automatically obtained. - - Download the build's log file to see the details. - - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ), - ) - - def test_no_failures(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 0, - [ - junit_from_xml( - dedent( - """\ - - - - - - """ - ) - ) - ], - [], - ), - ( - dedent( - """\ - # Foo - - * 1 test passed""" - ) - ), - ) - - def test_no_failures_build_failed(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - """ - ) - ) - ], - [], - ), - ( - dedent( - """\ - # Foo - - * 1 test passed - - All tests passed but another part of the build **failed**. Information about the build failure could not be automatically obtained. - - Download the build's log file to see the details. - - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ) - ), - ) - - def test_no_failures_build_failed_ninja_log(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - """ - ) - ) - ], - [ - [ - "[1/5] test/1.stamp", - "[2/5] test/2.stamp", - "[3/5] test/3.stamp", - "[4/5] test/4.stamp", - "FAILED: test/4.stamp", - "touch test/4.stamp", - "Wow! Close To You!", - "[5/5] test/5.stamp", - ] - ], - ), - ( - dedent( - """\ - # Foo - - * 1 test passed - - All tests passed but another part of the build **failed**. Click on a failure below to see the details. - -
<details> - <summary>test/4.stamp</summary> - - ``` - FAILED: test/4.stamp - touch test/4.stamp - Wow! Close To You! - ``` - </details>
- - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ) - ), - ) - - def test_no_failures_multiple_build_failed_ninja_log(self): - test = generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - """ - ) - ) - ], - [ - [ - "[1/5] test/1.stamp", - "[2/5] test/2.stamp", - "FAILED: touch test/2.stamp", - "Wow! Be Kind!", - "[3/5] test/3.stamp", - "[4/5] test/4.stamp", - "FAILED: touch test/4.stamp", - "Wow! I Dare You!", - "[5/5] test/5.stamp", - ] - ], - ) - print(test) - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - """ - ) - ) - ], - [ - [ - "[1/5] test/1.stamp", - "[2/5] test/2.stamp", - "FAILED: touch test/2.stamp", - "Wow! Be Kind!", - "[3/5] test/3.stamp", - "[4/5] test/4.stamp", - "FAILED: touch test/4.stamp", - "Wow! I Dare You!", - "[5/5] test/5.stamp", - ] - ], - ), - ( - dedent( - """\ - # Foo - - * 1 test passed - - All tests passed but another part of the build **failed**. Click on a failure below to see the details. - -
<details> - <summary>test/2.stamp</summary> - - ``` - FAILED: touch test/2.stamp - Wow! Be Kind! - ``` - </details>
- <details>
- <summary>test/4.stamp</summary> - - ``` - FAILED: touch test/4.stamp - Wow! I Dare You! - ``` - </details>
- - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ) - ), - ) - - def test_report_single_file_single_testsuite(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - - - - - - - - - - """ - ) - ) - ], - [], - ), - ( - dedent( - """\ - # Foo - - * 1 test passed - * 1 test skipped - * 2 tests failed - - ## Failed Tests - (click on a test name to see its output) - - ### Bar -
<details> - <summary>Bar/test_3/test_3</summary> - - ``` - Output goes here - ``` - </details>
- <details>
- <summary>Bar/test_4/test_4</summary> - - ``` - Other output goes here - ``` - </details>
- - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ) - ), - ) - - MULTI_SUITE_OUTPUT = dedent( - """\ - # ABC and DEF - - * 1 test passed - * 1 test skipped - * 2 tests failed - - ## Failed Tests - (click on a test name to see its output) - - ### ABC -
<details> - <summary>ABC/test_2/test_2</summary> - - ``` - ABC/test_2 output goes here - ``` - </details>
- - ### DEF -
<details> - <summary>DEF/test_2/test_2</summary> - - ``` - DEF/test_2 output goes here - ``` - </details>
- - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ) - - def test_report_single_file_multiple_testsuites(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "ABC and DEF", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - - - - - - - - - - - - """ - ) - ) - ], - [], - ), - self.MULTI_SUITE_OUTPUT, - ) - - def test_report_multiple_files_multiple_testsuites(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "ABC and DEF", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - - - - """ - ) - ), - junit_from_xml( - dedent( - """\ - - - - - - - - - - - """ - ) - ), - ], - [], - ), - self.MULTI_SUITE_OUTPUT, - ) - - def test_report_dont_list_failures(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - - - """ - ) - ) - ], - [], - list_failures=False, - ), - ( - dedent( - """\ - # Foo - - * 1 test failed - - Failed tests and their output was too large to report. Download the build's log file to see the details. - - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ) - ), - ) - - def test_report_dont_list_failures_link_to_log(self): - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - - - """ - ) - ) - ], - [], - list_failures=False, - ), - ( - dedent( - """\ - # Foo - - * 1 test failed - - Failed tests and their output was too large to report. Download the build's log file to see the details. - - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ) - ), - ) - - def test_report_size_limit(self): - test_output = "f" * 1000 - self.assertEqual( - generate_test_report_lib.generate_report( - "Foo", - 1, - [ - junit_from_xml( - dedent( - """\ - - - - - - - - """.format( - output=test_output - ) - ) - ) - ], - [], - size_limit=512, - ), - ( - dedent( - """\ - # Foo - - * 1 test failed - - Failed tests and their output was too large to report. Download the build's log file to see the details. - - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ) - ), - ) - - def test_generate_report_end_to_end(self): - with tempfile.TemporaryDirectory() as temp_dir: - junit_xml_file = os.path.join(temp_dir, "junit.xml") - with open(junit_xml_file, "w") as junit_xml_handle: - junit_xml_handle.write( - dedent( - """\ - - - - - - """ - ) - ) - ninja_log_file = os.path.join(temp_dir, "ninja.log") - with open(ninja_log_file, "w") as ninja_log_handle: - ninja_log_handle.write( - dedent( - """\ - [1/5] test/1.stamp - [2/5] test/2.stamp - [3/5] test/3.stamp - [4/5] test/4.stamp - FAILED: test/4.stamp - touch test/4.stamp - Wow! That's so True! - [5/5] test/5.stamp""" - ) - ) - self.assertEqual( - generate_test_report_lib.generate_report_from_files( - "Foo", 1, [junit_xml_file, ninja_log_file] - ), - dedent( - """\ - # Foo - - * 1 test passed - - All tests passed but another part of the build **failed**. 
Click on a failure below to see the details. - -
<details> - <summary>test/4.stamp</summary> - - ``` - FAILED: test/4.stamp - touch test/4.stamp - Wow! That's so True! - ``` - </details>
- - If these failures are unrelated to your changes (for example tests are broken or flaky at HEAD), please open an issue at https://github.com/llvm/llvm-project/issues and add the `infrastructure` label.""" - ), - ) diff --git a/.ci/metrics/Dockerfile b/.ci/metrics/Dockerfile deleted file mode 100644 index 80f1c64e3e61b..0000000000000 --- a/.ci/metrics/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -FROM docker.io/python:3.12 - -COPY requirements.lock.txt ./ -RUN pip3 install --no-cache-dir -r requirements.lock.txt -COPY metrics.py ./ - -CMD ["python3", "metrics.py"] diff --git a/.ci/metrics/metrics.py b/.ci/metrics/metrics.py deleted file mode 100644 index 26fdeef1913ab..0000000000000 --- a/.ci/metrics/metrics.py +++ /dev/null @@ -1,327 +0,0 @@ -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -"""Collects Github metrics and uploads them to Grafana. - -This script contains machinery that will pull metrics periodically from Github -about workflow runs. It will upload the collected metrics to the specified -Grafana instance. -""" - -import collections -import datetime -import github -import logging -import os -import requests -import time - -from dataclasses import dataclass -from github import Auth -from github import Github - -GRAFANA_URL = ( - "https://influx-prod-13-prod-us-east-0.grafana.net/api/v1/push/influx/write" ) -SCRAPE_INTERVAL_SECONDS = 5 * 60 - -# Lists the Github workflows we want to track. Maps the Github workflow name to -# the metric name prefix in grafana. -# This metric name is also used as a key in the job->name map. -GITHUB_WORKFLOW_TO_TRACK = {"CI Checks": "github_llvm_premerge_checks"} - -# Lists the Github jobs to track for a given workflow. The key is the stable -# name (metric name) of the workflow (see GITHUB_WORKFLOW_TO_TRACK). -# Each value is a map to link the github job name to the corresponding metric -# name. -GITHUB_JOB_TO_TRACK = { - "github_llvm_premerge_checks": { - "Build and Test Linux": "premerge_linux", - "Build and Test Windows": "premerge_windows", - } -} - -# The number of workflows to pull when sampling Github workflows. -# - Github API filtering is broken: we cannot apply any filtering: -# - See https://github.com/orgs/community/discussions/86766 -# - A workflow can complete before another workflow, even when starting later. -# - We don't want to sample the same workflow twice. -# -# This means we essentially have a list of workflows sorted by creation date, -# and that's all we can deduce from it. So for each iteration, we'll blindly -# process the last N workflows. -GITHUB_WORKFLOWS_MAX_PROCESS_COUNT = 2000 -# Second reason for the cut: reaching a workflow older than X. -# This means we will miss long-tails (exceptional jobs running for more than -# X hours), but that's also the case with the count cutoff above. -# The only solution to avoid missing any workflow would be to process the -# complete list, which is not possible. -GITHUB_WORKFLOW_MAX_CREATED_AGE_HOURS = 8 - -# Grafana will fail to insert any metric older than ~2 hours (value determined -# by trial and error).
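The cutoff this comment describes reduces to a single age comparison before a metric is sent; a minimal sketch of that check (the helper name is illustrative, not part of the patch — the same computation appears inline in github_get_metrics below):

```python
import datetime

# Mirrors GRAFANA_METRIC_MAX_AGE_MN below: Grafana rejects points much older
# than ~2 hours, so anything older is dropped rather than uploaded.
MAX_AGE_MN = 120


def is_fresh_enough(completed_at: datetime.datetime) -> bool:
    # Age of the metric in minutes, measured against the current UTC time.
    age_mn = (
        datetime.datetime.now(datetime.timezone.utc) - completed_at
    ).total_seconds() / 60
    return age_mn <= MAX_AGE_MN
```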
-GRAFANA_METRIC_MAX_AGE_MN = 120 - - -@dataclass -class JobMetrics: - job_name: str - queue_time: int - run_time: int - status: int - completed_at_ns: int - workflow_id: int - workflow_name: str - - -@dataclass -class GaugeMetric: - name: str - value: int - time_ns: int - - -def github_get_metrics( - github_repo: github.Repository, last_workflows_seen_as_completed: set[int] -) -> tuple[list[JobMetrics], set[int]]: - """Gets the metrics for specified Github workflows. - - This function takes in the set of workflow IDs that were already seen as - completed during the previous iteration. It grabs the relevant data - from Github, returning it to the caller. - Workflows already seen as completed are skipped so that no job is - reported twice. - - Args: - github_repo: A github repo object to use to query the relevant information. - last_workflows_seen_as_completed: the set of workflow IDs already seen - as completed by the previous iteration. - - Returns: - Returns a tuple with 2 elements: - - a list of JobMetrics objects, one per processed job. - - the set of workflow IDs seen as completed in this iteration. - """ - workflow_metrics = [] - queued_count = collections.Counter() - running_count = collections.Counter() - - # Initialize all the counters to 0 so we report 0 when no job is queued - # or running. - for wf_name, wf_metric_name in GITHUB_WORKFLOW_TO_TRACK.items(): - for job_name, job_metric_name in GITHUB_JOB_TO_TRACK[wf_metric_name].items(): - queued_count[wf_metric_name + "_" + job_metric_name] = 0 - running_count[wf_metric_name + "_" + job_metric_name] = 0 - - # The set of workflow IDs this iteration has seen as completed. - # MaxSize = GITHUB_WORKFLOWS_MAX_PROCESS_COUNT - workflow_seen_as_completed = set() - - # Since we process a fixed count of workflows, we want to know when - # the depth is too small and if we miss workflows. - # E.g.: if there were more than N workflows in the last 2 hours. - # To monitor this, we'll log the age of the oldest workflow processed, - # and set up alerting in Grafana to help us adjust this depth. - oldest_seen_workflow_age_mn = None - - # Do not apply any filters to this query. - # See https://github.com/orgs/community/discussions/86766 - # Applying filters like `status=completed` will break pagination, and - # return a non-sorted and incomplete list of workflows. - i = 0 - for task in iter(github_repo.get_workflow_runs()): - # Max depth reached, stopping. - if i >= GITHUB_WORKFLOWS_MAX_PROCESS_COUNT: - break - i += 1 - - workflow_age_mn = ( - datetime.datetime.now(datetime.timezone.utc) - task.created_at - ).total_seconds() / 60 - oldest_seen_workflow_age_mn = workflow_age_mn - # If we reach a workflow older than X, stop. - if workflow_age_mn > GITHUB_WORKFLOW_MAX_CREATED_AGE_HOURS * 60: - break - - # This workflow is not interesting to us. - if task.name not in GITHUB_WORKFLOW_TO_TRACK: - continue - - if task.status == "completed": - workflow_seen_as_completed.add(task.id) - - # This workflow has already been seen completed in the previous run. - if task.id in last_workflows_seen_as_completed: - continue - - name_prefix = GITHUB_WORKFLOW_TO_TRACK[task.name] - for job in task.jobs(): - # This job is not interesting to us.
- if job.name not in GITHUB_JOB_TO_TRACK[name_prefix]: - continue - - name_suffix = GITHUB_JOB_TO_TRACK[name_prefix][job.name] - metric_name = name_prefix + "_" + name_suffix - - if task.status != "completed": - if job.status == "queued": - queued_count[metric_name] += 1 - elif job.status == "in_progress": - running_count[metric_name] += 1 - continue - - job_result = int(job.conclusion == "success" or job.conclusion == "skipped") - - created_at = job.created_at - started_at = job.started_at - completed_at = job.completed_at - - # GitHub API can return results where the started_at is slightly - # later than the created_at (or completed earlier than started). - # This would cause a -23h59mn delta, which will show up as +24h - # queue/run time on grafana. - if started_at < created_at: - logging.info( - "Workflow {} started before being created.".format(task.id) - ) - queue_time = datetime.timedelta(seconds=0) - else: - queue_time = started_at - created_at - if completed_at < started_at: - logging.info("Workflow {} finished before starting.".format(task.id)) - run_time = datetime.timedelta(seconds=0) - else: - run_time = completed_at - started_at - - if run_time.seconds == 0: - continue - - # Grafana will refuse to ingest metrics older than ~2 hours, so we - # should avoid sending historical data. - metric_age_mn = ( - datetime.datetime.now(datetime.timezone.utc) - completed_at - ).total_seconds() / 60 - if metric_age_mn > GRAFANA_METRIC_MAX_AGE_MN: - logging.warning( - f"Job {job.id} from workflow {task.id} dropped due" - + f" to staleness: {metric_age_mn}mn old." - ) - continue - - logging.info(f"Adding a job metric for job {job.id} in workflow {task.id}") - # The timestamp associated with the event is expected by Grafana to be - # in nanoseconds. - completed_at_ns = int(completed_at.timestamp()) * 10**9 - workflow_metrics.append( - JobMetrics( - metric_name, - queue_time.seconds, - run_time.seconds, - job_result, - completed_at_ns, - task.id, - task.name, - ) - ) - - for name, value in queued_count.items(): - workflow_metrics.append( - GaugeMetric(f"workflow_queue_size_{name}", value, time.time_ns()) - ) - for name, value in running_count.items(): - workflow_metrics.append( - GaugeMetric(f"running_workflow_count_{name}", value, time.time_ns()) - ) - - # Always send a heartbeat metric so we can monitor whether this container is - # still able to log to Grafana. - workflow_metrics.append( - GaugeMetric("metrics_container_heartbeat", 1, time.time_ns()) - ) - - # Log the oldest workflow we saw, allowing us to monitor if the processing - # depth is correctly set up. - if oldest_seen_workflow_age_mn is not None: - workflow_metrics.append( - GaugeMetric( - "github_oldest_processed_workflow_mn", - oldest_seen_workflow_age_mn, - time.time_ns(), - ) - ) - return workflow_metrics, workflow_seen_as_completed - - -def upload_metrics(workflow_metrics, metrics_userid, api_key): - """Upload metrics to Grafana. - - Takes in a list of workflow metrics and then uploads them to Grafana - through a REST request. - - Args: - workflow_metrics: A list of metrics to upload to Grafana. - metrics_userid: The userid to use for the upload. - api_key: The API key to use for the upload.
- """ - - if len(workflow_metrics) == 0: - logging.info("No metrics found to upload.") - return - - metrics_batch = [] - for workflow_metric in workflow_metrics: - if isinstance(workflow_metric, GaugeMetric): - name = workflow_metric.name.lower().replace(" ", "_") - metrics_batch.append( - f"{name} value={workflow_metric.value} {workflow_metric.time_ns}" - ) - elif isinstance(workflow_metric, JobMetrics): - name = workflow_metric.job_name.lower().replace(" ", "_") - metrics_batch.append( - f"{name} queue_time={workflow_metric.queue_time},run_time={workflow_metric.run_time},status={workflow_metric.status} {workflow_metric.completed_at_ns}" - ) - else: - raise ValueError( - f"Unsupported object type {type(workflow_metric)}: {str(workflow_metric)}" - ) - - request_data = "\n".join(metrics_batch) - response = requests.post( - GRAFANA_URL, - headers={"Content-Type": "text/plain"}, - data=request_data, - auth=(metrics_userid, api_key), - ) - - if response.status_code < 200 or response.status_code >= 300: - logging.info(f"Failed to submit data to Grafana: {response.status_code}") - - -def main(): - # Authenticate with Github - github_auth = Auth.Token(os.environ["GITHUB_TOKEN"]) - grafana_api_key = os.environ["GRAFANA_API_KEY"] - grafana_metrics_userid = os.environ["GRAFANA_METRICS_USERID"] - - # The last workflow this script processed. - # Because the Github queries are broken, we'll simply log a 'processed' - # bit for the last COUNT_TO_PROCESS workflows. - gh_last_workflows_seen_as_completed = set() - - # Enter the main loop. Every five minutes we wake up and dump metrics for - # the relevant jobs. - while True: - github_object = Github(auth=github_auth) - github_repo = github_object.get_repo("llvm/llvm-project") - - gh_metrics, gh_last_workflows_seen_as_completed = github_get_metrics( - github_repo, gh_last_workflows_seen_as_completed - ) - - upload_metrics(gh_metrics, grafana_metrics_userid, grafana_api_key) - logging.info(f"Uploaded {len(gh_metrics)} metrics") - - time.sleep(SCRAPE_INTERVAL_SECONDS) - - -if __name__ == "__main__": - logging.basicConfig(level=logging.INFO) - main() diff --git a/.ci/metrics/metrics_test.py b/.ci/metrics/metrics_test.py deleted file mode 100644 index 259e55f817939..0000000000000 --- a/.ci/metrics/metrics_test.py +++ /dev/null @@ -1,75 +0,0 @@ -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -"""Tests for metrics.py""" - -from dataclasses import dataclass -import requests -import unittest -import unittest.mock - -import metrics - - -class TestMetrics(unittest.TestCase): - def test_upload_gauge_metric(self): - """Test that we can upload a gauge metric correctly. - - Also verify that we pass around parameters like API keys and user IDs - correctly to the HTTP POST request. 
- """ - test_metrics = [metrics.GaugeMetric("gauge_test", 5, 1000)] - return_value = requests.Response() - return_value.status_code = 204 - with unittest.mock.patch( - "requests.post", return_value=return_value - ) as post_mock: - metrics.upload_metrics(test_metrics, "test_userid", "test_api_key") - self.assertSequenceEqual(post_mock.call_args.args, [metrics.GRAFANA_URL]) - self.assertEqual( - post_mock.call_args.kwargs["data"], "gauge_test value=5 1000" - ) - self.assertEqual( - post_mock.call_args.kwargs["auth"], ("test_userid", "test_api_key") - ) - - def test_upload_job_metric(self): - """Test that we can upload a job metric correctly.""" - test_metrics = [ - metrics.JobMetrics("test_job", 5, 10, 1, 1000, 7, "test_workflow") - ] - return_value = requests.Response() - return_value.status_code = 204 - with unittest.mock.patch( - "requests.post", return_value=return_value - ) as post_mock: - metrics.upload_metrics(test_metrics, "test_userid", "test_aoi_key") - self.assertEqual( - post_mock.call_args.kwargs["data"], - "test_job queue_time=5,run_time=10,status=1 1000", - ) - - def test_upload_unknown_metric(self): - """Test we report an error if we encounter an unknown metric type.""" - - @dataclass - class FakeMetric: - fake_data: str - - test_metrics = [FakeMetric("test")] - - with self.assertRaises(ValueError): - metrics.upload_metrics(test_metrics, "test_userid", "test_api_key") - - def test_bad_response_code(self): - """Test that we gracefully handle HTTP response errors.""" - test_metrics = [metrics.GaugeMetric("gauge_test", 5, 1000)] - return_value = requests.Response() - return_value.status_code = 403 - # Just assert that we continue running here and do not raise anything. - with unittest.mock.patch("requests.post", return_value=return_value) as _: - metrics.upload_metrics(test_metrics, "test_userid", "test_api_key") - - -if __name__ == "__main__": - unittest.main() diff --git a/.ci/metrics/requirements.lock.txt b/.ci/metrics/requirements.lock.txt deleted file mode 100644 index 0cb2b446d9fd3..0000000000000 --- a/.ci/metrics/requirements.lock.txt +++ /dev/null @@ -1,347 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# pip-compile --generate-hashes --output-file=./requirements.lock.txt ./requirements.txt -# -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -cffi==1.17.1 \ - --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ - --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ - --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ - --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ - --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ - --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ - --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ - --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ - --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ - --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ - --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ - --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ - 
--hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ - --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ - --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ - --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ - --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ - --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ - --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ - --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ - --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ - --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ - --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ - --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ - --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ - --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ - --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ - --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ - --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ - --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ - --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ - --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ - --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ - --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ - --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ - --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ - --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ - --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ - --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ - --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ - --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ - --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ - --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ - --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ - --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ - --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ - --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ - --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ - --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ - --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ - --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ - --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ - --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ - --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ - 
--hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ - --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ - --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ - --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ - --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ - --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ - --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ - --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ - --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ - --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ - --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ - --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ - --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b - # via - # cryptography - # pynacl -charset-normalizer==3.4.0 \ - --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ - --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ - --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ - --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ - --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ - --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ - --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ - --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ - --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ - --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ - --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ - --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ - --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ - --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ - --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ - --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ - --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ - --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ - --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ - --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ - --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ - --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ - --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ - --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ - --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ - --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ - --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ - --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ - --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ - 
--hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ - --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ - --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ - --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ - --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ - --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ - --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ - --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ - --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ - --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ - --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ - --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ - --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ - --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ - --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ - --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ - --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ - --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ - --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ - --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ - --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ - --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ - --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ - --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ - --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ - --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ - --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ - --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ - --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ - --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ - --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ - --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ - --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ - --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ - --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ - --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ - --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ - --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ - --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ - --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ - --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ - --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ - 
--hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ - --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ - --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ - --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ - --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ - --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ - --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ - --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ - --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ - --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ - --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ - --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ - --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ - --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ - --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ - --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ - --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ - --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ - --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ - --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ - --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ - --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ - --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ - --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ - --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ - --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ - --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ - --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ - --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ - --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ - --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ - --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ - --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ - --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 - # via requests -cryptography==44.0.1 \ - --hash=sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7 \ - --hash=sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3 \ - --hash=sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183 \ - --hash=sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69 \ - --hash=sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a \ - --hash=sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62 \ - --hash=sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911 \ - --hash=sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7 \ - 
--hash=sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a \ - --hash=sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41 \ - --hash=sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83 \ - --hash=sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12 \ - --hash=sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864 \ - --hash=sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf \ - --hash=sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c \ - --hash=sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2 \ - --hash=sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b \ - --hash=sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0 \ - --hash=sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4 \ - --hash=sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9 \ - --hash=sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008 \ - --hash=sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862 \ - --hash=sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009 \ - --hash=sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7 \ - --hash=sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f \ - --hash=sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026 \ - --hash=sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f \ - --hash=sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd \ - --hash=sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420 \ - --hash=sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14 \ - --hash=sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00 - # via pyjwt -deprecated==1.2.15 \ - --hash=sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320 \ - --hash=sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d - # via pygithub -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygithub==2.5.0 \ - --hash=sha256:b0b635999a658ab8e08720bdd3318893ff20e2275f6446fcf35bf3f44f2c0fd2 \ - --hash=sha256:e1613ac508a9be710920d26eb18b1905ebd9926aa49398e88151c1b526aad3cf - # via -r ./requirements.txt -pyjwt[crypto]==2.10.1 \ - --hash=sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953 \ - --hash=sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb - # via pygithub -pynacl==1.5.0 \ - --hash=sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 \ - --hash=sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d \ - --hash=sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93 \ - --hash=sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1 \ - --hash=sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92 \ - --hash=sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff \ - --hash=sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba \ - 
--hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \ - --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \ - --hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543 - # via pygithub -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via -r ./requirements.txt -requests==2.32.4 \ - --hash=sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c \ - --hash=sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422 - # via pygithub -six==1.17.0 \ - --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ - --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 - # via python-dateutil -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via pygithub -urllib3==2.5.0 \ - --hash=sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760 \ - --hash=sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc - # via - # pygithub - # requests -wrapt==1.16.0 \ - --hash=sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc \ - --hash=sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81 \ - --hash=sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09 \ - --hash=sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e \ - --hash=sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca \ - --hash=sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0 \ - --hash=sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb \ - --hash=sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487 \ - --hash=sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40 \ - --hash=sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c \ - --hash=sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060 \ - --hash=sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202 \ - --hash=sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41 \ - --hash=sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9 \ - --hash=sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b \ - --hash=sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664 \ - --hash=sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d \ - --hash=sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362 \ - --hash=sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00 \ - --hash=sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc \ - --hash=sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1 \ - --hash=sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267 \ - --hash=sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956 \ - --hash=sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966 \ - --hash=sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1 \ - --hash=sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228 \ - 
--hash=sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72 \ - --hash=sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d \ - --hash=sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292 \ - --hash=sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0 \ - --hash=sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0 \ - --hash=sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36 \ - --hash=sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c \ - --hash=sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5 \ - --hash=sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f \ - --hash=sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73 \ - --hash=sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b \ - --hash=sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2 \ - --hash=sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593 \ - --hash=sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39 \ - --hash=sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389 \ - --hash=sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf \ - --hash=sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf \ - --hash=sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89 \ - --hash=sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c \ - --hash=sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c \ - --hash=sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f \ - --hash=sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440 \ - --hash=sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465 \ - --hash=sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136 \ - --hash=sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b \ - --hash=sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8 \ - --hash=sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3 \ - --hash=sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8 \ - --hash=sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6 \ - --hash=sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e \ - --hash=sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f \ - --hash=sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c \ - --hash=sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e \ - --hash=sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8 \ - --hash=sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2 \ - --hash=sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020 \ - --hash=sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35 \ - --hash=sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d \ - --hash=sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3 \ - --hash=sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537 \ - --hash=sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809 \ - --hash=sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d \ - 
--hash=sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a \ - --hash=sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4 - # via deprecated diff --git a/.ci/metrics/requirements.txt b/.ci/metrics/requirements.txt deleted file mode 100644 index 91c9c317a7e46..0000000000000 --- a/.ci/metrics/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -pygithub==2.5.0 -python-dateutil==2.9.0.post0 diff --git a/.ci/monolithic-linux.sh b/.ci/monolithic-linux.sh deleted file mode 100755 index 75729b3fd5f6a..0000000000000 --- a/.ci/monolithic-linux.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env bash -#===----------------------------------------------------------------------===## -# -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -#===----------------------------------------------------------------------===## - -# -# This script performs a monolithic build of the monorepo and runs the tests of -# most projects on Linux. This should be replaced by per-project scripts that -# run only the relevant tests. -# - -source .ci/utils.sh - -INSTALL_DIR="${BUILD_DIR}/install" - -mkdir -p artifacts/reproducers - -# Make sure any clang reproducers will end up as artifacts -export CLANG_CRASH_DIAGNOSTICS_DIR=`realpath artifacts/reproducers` - -projects="${1}" -targets="${2}" -runtimes="${3}" -runtime_targets="${4}" -runtime_targets_needs_reconfig="${5}" -enable_cir="${6}" - -lit_args="-v --xunit-xml-output ${BUILD_DIR}/test-results.xml --use-unique-output-file-name --timeout=1200 --time-tests --succinct" - -start-group "CMake" -export PIP_BREAK_SYSTEM_PACKAGES=1 -pip install -q -r "${MONOREPO_ROOT}"/.ci/all_requirements.txt - -# Set the system llvm-symbolizer as preferred. -export LLVM_SYMBOLIZER_PATH=`which llvm-symbolizer` -[[ ! -f "${LLVM_SYMBOLIZER_PATH}" ]] && echo "llvm-symbolizer not found!" - -# Set up all runtimes either way. libcxx is a dependency of LLDB. -# It will not be built unless it is used. -cmake -S "${MONOREPO_ROOT}"/llvm -B "${BUILD_DIR}" \ - -D LLVM_ENABLE_PROJECTS="${projects}" \ - -D LLVM_ENABLE_RUNTIMES="${runtimes}" \ - -G Ninja \ - -D CMAKE_PREFIX_PATH="${HOME}/.local" \ - -D CMAKE_BUILD_TYPE=Release \ - -D CLANG_ENABLE_CIR=${enable_cir} \ - -D LLVM_ENABLE_ASSERTIONS=ON \ - -D LLVM_BUILD_EXAMPLES=ON \ - -D COMPILER_RT_BUILD_LIBFUZZER=OFF \ - -D LLVM_LIT_ARGS="${lit_args}" \ - -D LLVM_ENABLE_LLD=ON \ - -D CMAKE_CXX_FLAGS=-gmlt \ - -D CMAKE_C_COMPILER_LAUNCHER=sccache \ - -D CMAKE_CXX_COMPILER_LAUNCHER=sccache \ - -D LIBCXX_CXX_ABI=libcxxabi \ - -D MLIR_ENABLE_BINDINGS_PYTHON=ON \ - -D LLDB_ENABLE_PYTHON=ON \ - -D LLDB_ENFORCE_STRICT_TEST_REQUIREMENTS=ON \ - -D CMAKE_INSTALL_PREFIX="${INSTALL_DIR}" - -start-group "ninja" - -# Targets are not escaped as they are passed as separate arguments. -ninja -C "${BUILD_DIR}" -k 0 ${targets} |& tee ninja.log - -if [[ "${runtime_targets}" != "" ]]; then - start-group "ninja Runtimes" - - ninja -C "${BUILD_DIR}" ${runtime_targets} |& tee ninja_runtimes.log -fi - -# Compiling runtimes with just-built Clang and running their tests -# as an additional testing for Clang. 
-if [[ "${runtime_targets_needs_reconfig}" != "" ]]; then - start-group "CMake Runtimes C++26" - - cmake \ - -D LIBCXX_TEST_PARAMS="std=c++26" \ - -D LIBCXXABI_TEST_PARAMS="std=c++26" \ - "${BUILD_DIR}" - - start-group "ninja Runtimes C++26" - - ninja -C "${BUILD_DIR}" ${runtime_targets_needs_reconfig} \ - |& tee ninja_runtimes_needs_reconfig1.log - - start-group "CMake Runtimes Clang Modules" - - cmake \ - -D LIBCXX_TEST_PARAMS="enable_modules=clang" \ - -D LIBCXXABI_TEST_PARAMS="enable_modules=clang" \ - "${BUILD_DIR}" - - start-group "ninja Runtimes Clang Modules" - - ninja -C "${BUILD_DIR}" ${runtime_targets_needs_reconfig} \ - |& tee ninja_runtimes_needs_reconfig2.log -fi diff --git a/.ci/monolithic-windows.sh b/.ci/monolithic-windows.sh deleted file mode 100755 index 0f3a1994a0497..0000000000000 --- a/.ci/monolithic-windows.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash -#===----------------------------------------------------------------------===## -# -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -#===----------------------------------------------------------------------===## - -# -# This script performs a monolithic build of the monorepo and runs the tests of -# most projects on Windows. This should be replaced by per-project scripts that -# run only the relevant tests. -# - -source .ci/utils.sh - -projects="${1}" -targets="${2}" - -start-group "CMake" -pip install -q -r "${MONOREPO_ROOT}"/.ci/all_requirements.txt - -export CC=cl -export CXX=cl -export LD=link - -# The CMAKE_*_LINKER_FLAGS to disable the manifest come from research -# on fixing a build reliability issue on the build server, please -# see https://github.com/llvm/llvm-project/pull/82393 and -# https://discourse.llvm.org/t/rfc-future-of-windows-pre-commit-ci/76840/40 -# for further information. -# We limit the number of parallel compile jobs to 24 to control memory -# consumption and improve build reliability. -cmake -S "${MONOREPO_ROOT}"/llvm -B "${BUILD_DIR}" \ - -D LLVM_ENABLE_PROJECTS="${projects}" \ - -G Ninja \ - -D CMAKE_BUILD_TYPE=Release \ - -D LLVM_ENABLE_ASSERTIONS=ON \ - -D LLVM_BUILD_EXAMPLES=ON \ - -D COMPILER_RT_BUILD_LIBFUZZER=OFF \ - -D LLVM_LIT_ARGS="-v --xunit-xml-output ${BUILD_DIR}/test-results.xml --use-unique-output-file-name --timeout=1200 --time-tests --succinct" \ - -D COMPILER_RT_BUILD_ORC=OFF \ - -D CMAKE_C_COMPILER_LAUNCHER=sccache \ - -D CMAKE_CXX_COMPILER_LAUNCHER=sccache \ - -D MLIR_ENABLE_BINDINGS_PYTHON=ON \ - -D CMAKE_EXE_LINKER_FLAGS="/MANIFEST:NO" \ - -D CMAKE_MODULE_LINKER_FLAGS="/MANIFEST:NO" \ - -D CMAKE_SHARED_LINKER_FLAGS="/MANIFEST:NO" - -start-group "ninja" - -# Targets are not escaped as they are passed as separate arguments. -ninja -C "${BUILD_DIR}" -k 0 ${targets} |& tee ninja.log diff --git a/.ci/requirements.txt b/.ci/requirements.txt deleted file mode 100644 index ad63858c9fdc2..0000000000000 --- a/.ci/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -junitparser==3.2.0 diff --git a/.ci/utils.sh b/.ci/utils.sh deleted file mode 100644 index 2a3d2426b630a..0000000000000 --- a/.ci/utils.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash -#===----------------------------------------------------------------------===## -# -# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information.
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -#===----------------------------------------------------------------------===## - -# This script performs some setup and contains some utilities used in the -# monolithic-linux.sh and monolithic-windows.sh scripts. - -set -ex -set -o pipefail - -MONOREPO_ROOT="${MONOREPO_ROOT:="$(git rev-parse --show-toplevel)"}" -BUILD_DIR="${BUILD_DIR:=${MONOREPO_ROOT}/build}" - -rm -rf "${BUILD_DIR}" - -sccache --zero-stats - -function at-exit { - retcode=$? - - mkdir -p artifacts - sccache --show-stats - sccache --show-stats >> artifacts/sccache_stats.txt - cp "${BUILD_DIR}"/.ninja_log artifacts/.ninja_log - cp "${MONOREPO_ROOT}"/*.log artifacts/ || : - cp "${BUILD_DIR}"/test-results.*.xml artifacts/ || : - - # If building fails there will be no results files. - shopt -s nullglob - - if [[ "$GITHUB_STEP_SUMMARY" != "" ]]; then - python "${MONOREPO_ROOT}"/.ci/generate_test_report_github.py \ - $retcode "${BUILD_DIR}"/test-results.*.xml "${MONOREPO_ROOT}"/ninja*.log \ - >> $GITHUB_STEP_SUMMARY - fi -} -trap at-exit EXIT - -function start-group { - groupname=$1 - if [[ "$GITHUB_ACTIONS" != "" ]]; then - echo "::endgroup::" - echo "::group::$groupname" - elif [[ "$POSTCOMMIT_CI" != "" ]]; then - echo "@@@$STEP@@@" - else - echo "Starting $groupname" - fi -} From 5ad7d25d3bb7873d1d9491dbdb2365ba74be3a59 Mon Sep 17 00:00:00 2001 From: Jinsong Ji Date: Tue, 4 Nov 2025 10:24:55 -0800 Subject: [PATCH 3/4] restore used workflows --- .github/workflows/email-check.yaml | 48 +++++++++++++++ .github/workflows/pr-code-format.yml | 92 ++++++++++++++++++++++++++++ 2 files changed, 140 insertions(+) create mode 100644 .github/workflows/email-check.yaml create mode 100644 .github/workflows/pr-code-format.yml diff --git a/.github/workflows/email-check.yaml b/.github/workflows/email-check.yaml new file mode 100644 index 0000000000000..817ece9c0b4d5 --- /dev/null +++ b/.github/workflows/email-check.yaml @@ -0,0 +1,48 @@ +name: "Check for private emails used in PRs" + +on: + pull_request: + branches: + - sycl + - sycl-rel-** + +permissions: + contents: read + +jobs: + validate_email: + runs-on: ubuntu-24.04 + if: github.repository == 'intel/llvm' + steps: + - name: Fetch LLVM sources + uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Extract author email + id: author + run: | + git log -1 + echo "EMAIL=$(git show -s --format='%ae' HEAD~0)" >> $GITHUB_OUTPUT + # Create empty comment file + echo "[]" > comments + + - name: Validate author email + if: ${{ endsWith(steps.author.outputs.EMAIL, 'noreply.github.com') }} + env: + COMMENT: >- + ⚠️ We detected that you are using a GitHub private e-mail address to contribute to the repo.
+ Please turn off the [Keep my email addresses private](https://github.com/settings/emails) setting in your account.
+ See [LLVM Developer Policy](https://llvm.org/docs/DeveloperPolicy.html#email-addresses) and + [LLVM Discourse](https://discourse.llvm.org/t/hidden-emails-on-github-should-we-do-something-about-it) for more information. + run: | + cat << EOF > comments + [{"body" : "$COMMENT"}] + EOF + + - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 + if: always() + with: + name: workflow-args + path: | + comments diff --git a/.github/workflows/pr-code-format.yml b/.github/workflows/pr-code-format.yml new file mode 100644 index 0000000000000..8f9a5d48c8fac --- /dev/null +++ b/.github/workflows/pr-code-format.yml @@ -0,0 +1,92 @@ +name: "Check code formatting" + +permissions: + contents: read + +on: + pull_request: + branches: + - main + - sycl + - sycl-rel-** + - 'users/**' + +jobs: + code_formatter: + runs-on: ubuntu-24.04 + timeout-minutes: 30 + concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + if: (github.repository == 'llvm/llvm-project' || github.repository == 'intel/llvm') && !contains(github.event.pull_request.labels.*.name, 'disable-lint') + steps: + - name: Fetch LLVM sources + uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 + with: + fetch-depth: 2 + + - name: Get changed files + id: changed-files + uses: step-security/changed-files@95b56dadb92a30ca9036f16423fd3c088a71ee94 # v46.0.5 + with: + separator: "," + skip_initial_fetch: true + base_sha: 'HEAD~1' + sha: 'HEAD' + + # We need to pull the script from the main branch, so that we ensure + # we get the latest version of this script. + - name: Fetch code formatting utils + uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 + with: + repository: ${{ github.repository }} + ref: ${{ github.base_ref }} + sparse-checkout: | + llvm/utils/git/requirements_formatting.txt + llvm/utils/git/code-format-helper.py + sparse-checkout-cone-mode: false + path: code-format-tools + + - name: "Listed files" + env: + CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} + run: | + echo "Formatting files:" + echo "$CHANGED_FILES" + + - name: Install clang-format + uses: aminya/setup-cpp@a276e6e3d1db9160db5edc458e99a30d3b109949 # v1.7.1 + with: + clangformat: 20.1.8 + + - name: Setup Python env + uses: actions/setup-python@v6.0.0 + with: + python-version: '3.11' + cache: 'pip' + cache-dependency-path: 'code-format-tools/llvm/utils/git/requirements_formatting.txt' + + - name: Install python dependencies + run: pip install -r code-format-tools/llvm/utils/git/requirements_formatting.txt + + - name: Run code formatter + env: + GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} + CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} + # Create an empty comments file so the pr-write job doesn't fail. 
+ run: | + echo "[]" > comments && + python ./code-format-tools/llvm/utils/git/code-format-helper.py \ + --write-comment-to-file \ + --token ${{ secrets.GITHUB_TOKEN }} \ + --issue-number $GITHUB_PR_NUMBER \ + --start-rev HEAD~1 \ + --end-rev HEAD \ + --changed-files "$CHANGED_FILES" + + - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 #v5.0.0 + if: always() + with: + name: workflow-args + path: | + comments From d43ac6be2f23fbc20cae135631df835e02f00960 Mon Sep 17 00:00:00 2001 From: Jinsong Ji Date: Tue, 4 Nov 2025 10:28:19 -0800 Subject: [PATCH 4/4] restore ur workflows --- .github/workflows/ur-build-hw.yml | 182 +++++++++++++++++++++++++ .github/workflows/ur-build-offload.yml | 54 ++++++++ .github/workflows/ur-precommit.yml | 147 ++++++++++++++++++++ .github/workflows/ur-source-checks.yml | 63 +++++++++ 4 files changed, 446 insertions(+) create mode 100644 .github/workflows/ur-build-hw.yml create mode 100644 .github/workflows/ur-build-offload.yml create mode 100644 .github/workflows/ur-precommit.yml create mode 100644 .github/workflows/ur-source-checks.yml diff --git a/.github/workflows/ur-build-hw.yml b/.github/workflows/ur-build-hw.yml new file mode 100644 index 0000000000000..81f000f6d0867 --- /dev/null +++ b/.github/workflows/ur-build-hw.yml @@ -0,0 +1,182 @@ +name: UR - Build adapters, test on HW + +on: + workflow_call: + inputs: + adapter_name: + required: true + type: string + other_adapter_name: + required: false + type: string + default: "" + runner_name: + required: true + type: string + static_loader: + required: false + type: string + default: OFF + static_adapter: + required: false + type: string + default: OFF + docker_image: + required: true + type: string + default: "" + image_options: + required: true + type: string + default: "" + workflow_dispatch: + inputs: + adapter_name: + required: true + type: string + other_adapter_name: + required: false + type: string + default: "" + runner_name: + required: true + type: string + static_loader: + required: false + type: string + default: OFF + static_adapter: + required: false + type: string + default: OFF + docker_image: + required: true + type: string + default: "" + image_options: + required: true + type: string + default: "" + +permissions: read-all + +env: + UR_LOG_CUDA: "level:error;flush:error" + UR_LOG_HIP: "level:error;flush:error" + UR_LOG_LEVEL_ZERO: "level:error;flush:error" + UR_LOG_NATIVE_CPU: "level:error;flush:error" + UR_LOG_OPENCL: "level:error;flush:error" + +jobs: + adapter_build_hw: + name: Build & CTS + # run only on upstream; forks won't have the HW + if: github.repository == 'intel/llvm' + strategy: + fail-fast: false + matrix: + adapter: [ + { + name: "${{inputs.adapter_name}}", + other_name: "${{inputs.other_adapter_name}}", + static_Loader: "${{inputs.static_loader}}", + static_adapter: "${{inputs.static_adapter}}" + } + ] + build_type: [Release] + compiler: [{c: gcc, cxx: g++}] + + runs-on: ${{inputs.runner_name}} + container: + image: ${{ inputs.docker_image }} + options: ${{ inputs.image_options }} + + steps: + # TODO: + # - investigate if DUR_CONFORMANCE_AMD_ARCH could be removed + # - switch to Ninja generator in CMake + # - downloading DPC++ should be integrated somehow; most likely use nightly release. + # + - name: Checkout LLVM + uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 + + # For some reason, Python must be re-configured for the venv to work properly.
+ - name: Set up Python 3.12 + if: ${{ inputs.docker_image == 'ghcr.io/intel/llvm/ubuntu2404_intel_drivers:alldeps' }} + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + with: + python-version: '3.12' + + - name: Install UR python dependencies in venv + working-directory: ./unified-runtime + run: | + python3 -m venv .venv + . .venv/bin/activate + echo "$PATH" >> $GITHUB_PATH + pip install -r third_party/requirements.txt + pip install -r third_party/requirements_testing.txt + + - name: Download DPC++ + run: | + wget -O dpcpp_compiler.tar.gz https://github.com/intel/llvm/releases/download/nightly-2024-12-12/sycl_linux.tar.gz + mkdir -p dpcpp_compiler + tar -xvf dpcpp_compiler.tar.gz -C dpcpp_compiler + + - name: Install OpenCL + if: ${{ inputs.adapter_name == 'OPENCL' }} + run: | + wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB \ + | gpg --dearmor | sudo tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null + echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list + sudo apt-get update + sudo apt-get install -y intel-oneapi-runtime-opencl + + - name: Configure Unified Runtime project + # ">" is used to avoid adding "\" at the end of each line; this command is quite long + run: > + cmake + -S unified-runtime + -B build + -DCMAKE_C_COMPILER=${{matrix.compiler.c}} + -DCMAKE_CXX_COMPILER=${{matrix.compiler.cxx}} + -DCMAKE_BUILD_TYPE=${{matrix.build_type}} + -DUR_ENABLE_TRACING=ON + -DUR_DEVELOPER_MODE=ON + -DUR_BUILD_TESTS=ON + -DUR_BUILD_ADAPTER_${{matrix.adapter.name}}=ON + ${{ matrix.adapter.other_name != '' && format('-DUR_BUILD_ADAPTER_{0}=ON', matrix.adapter.other_name) || '' }} + -DUR_STATIC_LOADER=${{matrix.adapter.static_Loader}} + -DUR_STATIC_ADAPTER_${{matrix.adapter.name}}=${{matrix.adapter.static_adapter}} + -DUR_DPCXX=./dpcpp_compiler/bin/clang++ + -DUR_SYCL_LIBRARY_DIR=./dpcpp_compiler/lib + -DCMAKE_INSTALL_PREFIX=./install + ${{ matrix.adapter.name == 'HIP' && '-DUR_CONFORMANCE_AMD_ARCH=gfx1030' || '' }} + ${{ matrix.adapter.name == 'HIP' && '-DUR_HIP_PLATFORM=AMD' || '' }} + + - name: Build + # This is so that device binaries can find the sycl runtime library + run: cmake --build build -j $(nproc) + + - name: Install + # This is to check that the install command does not fail + run: cmake --install build + + - name: Test adapter specific + env: + ZE_ENABLE_LOADER_DEBUG_TRACE: 1 + LIT_OPTS: "--timeout 120" + # These tests cause timeouts on CI + LIT_FILTER_OUT: "(adapters/level_zero/memcheck.test|adapters/level_zero/v2/deferred_kernel_memcheck.test)" + run: cmake --build build -j $(nproc) -- check-unified-runtime-adapter + # Don't run adapter-specific tests when building multiple adapters + if: ${{ matrix.adapter.other_name == '' }} + + - name: Test adapters + env: + ZE_ENABLE_LOADER_DEBUG_TRACE: 1 + LIT_OPTS: "--timeout 120" + run: cmake --build build -j $(nproc) -- check-unified-runtime-conformance + + - name: Get information about platform + if: ${{ always() }} + run: ./unified-runtime/.github/scripts/get_system_info.sh diff --git a/.github/workflows/ur-build-offload.yml b/.github/workflows/ur-build-offload.yml new file mode 100644 index 0000000000000..82268c0340639 --- /dev/null +++ b/.github/workflows/ur-build-offload.yml @@ -0,0 +1,54 @@ +name: UR - Build offload adapter + +permissions: read-all + +on: [ workflow_call, workflow_dispatch ] + +jobs: + offload_build: + name: Build + strategy: +
fail-fast: false + matrix: + build_type: [Release] + compiler: [{c: gcc, cxx: g++}] + + runs-on: [ "Linux", "build" ] + container: + image: 'ghcr.io/intel/llvm/ubuntu2404_base' + + steps: + - name: Checkout LLVM + uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 + + - name: Install liboffload + run: sudo apt-get update -qq && sudo apt-get install --no-install-recommends -yqq liboffload-21 liboffload-21-dev + + - name: Configure Unified Runtime project + # ">" is used to avoid adding "\" at the end of each line; this command is quite long + run: > + cmake + -S unified-runtime + -B $GITHUB_WORKSPACE/build + -DCMAKE_C_COMPILER=${{matrix.compiler.c}} + -DCMAKE_CXX_COMPILER=${{matrix.compiler.cxx}} + -DCMAKE_BUILD_TYPE=${{matrix.build_type}} + -DUR_ENABLE_TRACING=ON + -DUR_DEVELOPER_MODE=ON + -DUR_BUILD_TESTS=OFF + -DUR_BUILD_ADAPTER_OFFLOAD=ON + -DUR_OFFLOAD_INSTALL_DIR="/usr/lib/llvm-21" + -DUR_OFFLOAD_INCLUDE_DIR="/usr/lib/llvm-21/include" + -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install + + - name: Build + # This is so that device binaries can find the sycl runtime library + run: cmake --build $GITHUB_WORKSPACE/build -j $(nproc) + + - name: Install + # This is to check that the install command does not fail + run: cmake --install $GITHUB_WORKSPACE/build + + - name: Get information about platform + if: ${{ always() }} + run: ./unified-runtime/.github/scripts/get_system_info.sh diff --git a/.github/workflows/ur-precommit.yml b/.github/workflows/ur-precommit.yml new file mode 100644 index 0000000000000..1bc37adc61dfc --- /dev/null +++ b/.github/workflows/ur-precommit.yml @@ -0,0 +1,147 @@ +name: Unified Runtime Pre Commit +# Note: this is the very first version of the UR workflow. +# It was pretty much copy-pasted from the UR repository. +# Over time it will most likely be integrated more into existing workflows. + +# Note: the trigger is copy-pasted from sycl-linux-precommit.yml - probably to be fine-tuned. +on: + # We rely on "Fork pull request workflows from outside collaborators" - + # "Require approval for all outside collaborators" at + # https://github.com/intel/llvm/settings/actions for security. + pull_request: + branches: + - sycl + - sycl-rel-** + # Do not run builds if changes are only in the following locations + paths-ignore: + - '.github/ISSUE_TEMPLATE/**' + - '.github/CODEOWNERS' + - 'sycl/doc/**' + - 'sycl/gdb/**' + - 'clang/docs/**' + - '**.md' + - '**.rst' + - '.github/workflows/sycl-windows-*.yml' + - '.github/workflows/sycl-macos-*.yml' + - '.github/workflows/sycl-nightly.yml' + - '.github/workflows/sycl-rel-nightly.yml' + - 'devops/containers/**' + - 'devops/actions/build_container/**' + +concurrency: + # Cancel a currently running workflow from the same PR, branch or tag. + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +permissions: read-all + +jobs: + detect_changes: + name: Detect Changes + uses: ./.github/workflows/sycl-detect-changes.yml + + source_checks: + name: Source Checks + needs: [detect_changes] + if: ${{ always() && !cancelled() && contains(needs.detect_changes.outputs.filters, 'ur') }} + uses: ./.github/workflows/ur-source-checks.yml + + adapters: + name: Adapters + needs: [detect_changes, source_checks] + if: ${{ always() && !cancelled() && contains(needs.detect_changes.outputs.filters, 'ur') }} + strategy: + fail-fast: false + matrix: + # Extra native CPU jobs are here to force the loader to be used. + # UR will not use the loader if there is only one target.
+ include: + - name: L0 + runner: UR_L0 + image_options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN + - name: L0_V2 + runner: UR_L0 + image_options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN + - name: L0 + runner: UR_L0 + static: ON + image_options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN + - name: L0 + runner: UR_L0 + other_adapter: NATIVE_CPU + image_options: -u 1001 --device=/dev/dri -v /dev/dri/by-path:/dev/dri/by-path --privileged --cap-add SYS_ADMIN + - name: HIP + runner: UR_HIP + image_options: -u 1001 --device=/dev/dri --device=/dev/kfd --cap-add=SYS_ADMIN + - name: CUDA + runner: UR_CUDA + image_options: -u 1001 --privileged --cap-add SYS_ADMIN --gpus all + - name: OPENCL + runner: UR_OPENCL + docker_image: "ghcr.io/intel/llvm/ubuntu2204_build:latest" + image_options: -u 1001 --device=/dev/dri --device=/dev/kfd --privileged --cap-add SYS_ADMIN + - name: OPENCL + runner: UR_OPENCL + other_adapter: NATIVE_CPU + docker_image: "ghcr.io/intel/llvm/ubuntu2204_build:latest" + image_options: -u 1001 --device=/dev/dri --device=/dev/kfd --privileged --cap-add SYS_ADMIN + - name: NATIVE_CPU + runner: UR_NATIVE_CPU + docker_image: "ghcr.io/intel/llvm/ubuntu2204_build:latest" + image_options: -u 1001 --device=/dev/dri --privileged --cap-add SYS_ADMIN + uses: ./.github/workflows/ur-build-hw.yml + with: + adapter_name: ${{ matrix.name }} + runner_name: ${{ matrix.runner }} + static_loader: ${{ matrix.static || 'OFF' }} + static_adapter: ${{ matrix.static || 'OFF' }} + other_adapter_name: ${{ matrix.other_adapter || '' }} + docker_image: ${{ matrix.docker_image || 'ghcr.io/intel/llvm/ubuntu2404_intel_drivers:alldeps'}} + image_options: ${{ matrix.image_options || '' }} + +# TODO: Enable once the apt package at https://apt.llvm.org/noble/pool/main/l/llvm-toolchain-snapshot/ is updated +# offload_build: +# name: Adapters (Offload) +# needs: [detect_changes, source_checks] +# if: ${{ always() && !cancelled() && contains(needs.detect_changes.outputs.filters, 'ur_offload_adapter') }} +# uses: ./.github/workflows/ur-build-offload.yml + + macos: + name: macOS build only + needs: [detect_changes, source_checks] + if: ${{ always() && !cancelled() && contains(needs.detect_changes.outputs.filters, 'ur') }} + strategy: + matrix: + os: ['macos-latest'] + runs-on: ${{matrix.os}} + + steps: + - name: Checkout LLVM + uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 + + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + with: + python-version: "3.10" + + - name: Install prerequisites + working-directory: ${{github.workspace}}/unified-runtime + run: | + python3 -m pip install -r third_party/requirements.txt + python3 -m pip install -r third_party/requirements_testing.txt + + - name: Install hwloc + run: brew install hwloc + + - name: Configure Unified Runtime project + working-directory: ${{github.workspace}}/unified-runtime + run: > + cmake + -B${{github.workspace}}/build + -DUR_ENABLE_TRACING=ON + -DUR_DEVELOPER_MODE=ON + -DCMAKE_BUILD_TYPE=Release + -DUR_BUILD_TESTS=ON + -DUR_FORMAT_CPP_STYLE=ON + + - name: Build + run: cmake --build ${{github.workspace}}/build -j $(sysctl -n hw.logicalcpu) diff --git a/.github/workflows/ur-source-checks.yml b/.github/workflows/ur-source-checks.yml new file mode 100644 index 0000000000000..b444e3252d41c --- /dev/null +++ b/.github/workflows/ur-source-checks.yml @@ 
-0,0 +1,63 @@ +name: UR - Check generated sources + +on: + workflow_call: + +permissions: + contents: read + +jobs: + source_checks: + strategy: + matrix: + os: ['ubuntu-22.04', 'windows-2022'] + + runs-on: ${{matrix.os}} + + steps: + # TODO: + # - split into separate jobs for each OS + # + - name: Checkout LLVM + uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v5.0.0 + + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 + with: + python-version: 3.9 + + - name: Install UR python dependencies + working-directory: ${{github.workspace}}/unified-runtime + run: pip install -r third_party/requirements.txt + + - name: "[Lin] Install doxygen" + if: matrix.os == 'ubuntu-22.04' + run: | + sudo apt-get update + sudo apt-get install -y doxygen libhwloc-dev + + - name: "[Win] Install doxygen" + if: matrix.os == 'windows-2022' + run: | + $WorkingDir = $PWD.Path + Invoke-WebRequest -Uri https://github.com/doxygen/doxygen/releases/download/Release_1_9_8/doxygen-1.9.8.windows.x64.bin.zip -OutFile "$WorkingDir\doxygen.zip" + Expand-Archive -Path "$WorkingDir\doxygen.zip" + Add-Content $env:GITHUB_PATH "$WorkingDir\doxygen" + + - name: Configure Unified Runtime project + working-directory: ${{github.workspace}}/unified-runtime + run: > + cmake + -B${{github.workspace}}/build + -DCMAKE_PREFIX_PATH="${{env.VCPKG_PATH}}" + -DUR_ENABLE_TRACING=OFF + -DCMAKE_BUILD_TYPE=Debug + -DUR_BUILD_TESTS=OFF + -DUR_FORMAT_CPP_STYLE=ON + + # Verifying licenses on a single OS should be enough + - name: Verify that each source file contains a license + if: matrix.os == 'ubuntu-22.04' + run: cmake --build ${{github.workspace}}/build --target verify-licenses + + - name: Generate source from spec, check for uncommitted diff + run: cmake --build ${{github.workspace}}/build --target check-generated
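---

Note: the restored ur-build-hw.yml also declares a workflow_dispatch trigger, so the hardware build can be started by hand once this lands. Below is a minimal sketch using the GitHub CLI; the input values are illustrative (borrowed from the ur-precommit matrix) and assume an authenticated gh with access to intel/llvm and the workflow present on the default branch:

    # Dispatch the UR hardware build for the Level Zero adapter.
    # adapter_name, runner_name, docker_image and image_options are the
    # required inputs declared under workflow_dispatch in ur-build-hw.yml.
    gh workflow run ur-build-hw.yml \
      --repo intel/llvm \
      -f adapter_name=L0 \
      -f runner_name=UR_L0 \
      -f docker_image=ghcr.io/intel/llvm/ubuntu2404_intel_drivers:alldeps \
      -f image_options='-u 1001 --device=/dev/dri --privileged --cap-add SYS_ADMIN'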