From e4d259c28a951169f3d9352181c577ab4ba7b125 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 10 Feb 2025 05:01:39 +0000 Subject: [PATCH 01/43] Update dependencies from https://github.com/dotnet/arcade build 20250206.4 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25106.4 --- eng/Version.Details.xml | 4 +- eng/common/build.sh | 1 - .../core-templates/job/source-build.yml | 5 +- .../core-templates/post-build/post-build.yml | 6 + .../steps/install-microbuild.yml | 44 ++- .../core-templates/steps/source-build.yml | 8 +- eng/common/cross/build-android-rootfs.sh | 49 ++- eng/common/cross/build-rootfs.sh | 129 +++++-- eng/common/cross/install-debs.py | 334 ++++++++++++++++++ eng/common/cross/toolchain.cmake | 149 ++++---- eng/common/internal/Tools.csproj | 10 - eng/common/native/install-dependencies.sh | 61 ++++ eng/common/post-build/publish-using-darc.ps1 | 7 +- eng/common/sdk-task.ps1 | 2 +- eng/common/sdk-task.sh | 104 ++++++ eng/common/tools.ps1 | 27 +- eng/common/tools.sh | 28 +- global.json | 4 +- 18 files changed, 783 insertions(+), 189 deletions(-) create mode 100644 eng/common/cross/install-debs.py create mode 100644 eng/common/native/install-dependencies.sh create mode 100644 eng/common/sdk-task.sh diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 65892b40e..49b41b196 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 380002a14775d7f68f098c7e6b7d1c3638bd4c5d + 91630b31ce859c28f637b62b566ea8829b982f2c https://github.com/dotnet/wpf diff --git a/eng/common/build.sh b/eng/common/build.sh index ac1ee8620..483647daf 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -231,7 +231,6 @@ function Build { /p:Restore=$restore \ /p:Build=$build \ /p:DotNetBuildRepo=$product_build \ - /p:ArcadeBuildFromSource=$source_build \ /p:DotNetBuildSourceOnly=$source_build \ /p:Rebuild=$rebuild \ /p:Test=$test \ diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml index c4713c8b6..05f7ad6ef 100644 --- a/eng/common/core-templates/job/source-build.yml +++ b/eng/common/core-templates/job/source-build.yml @@ -12,9 +12,10 @@ parameters: # The name of the job. This is included in the job ID. # targetRID: '' # The name of the target RID to use, instead of the one auto-detected by Arcade. - # nonPortable: false + # portableBuild: false # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than - # linux-x64), and compiling against distro-provided packages rather than portable ones. + # linux-x64), and compiling against distro-provided packages rather than portable ones. The + # default is portable mode. # skipPublishValidation: false # Disables publishing validation. By default, a check is performed to ensure no packages are # published by source-build. 
diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml index 454fd75c7..a8c0bd3b9 100644 --- a/eng/common/core-templates/post-build/post-build.yml +++ b/eng/common/core-templates/post-build/post-build.yml @@ -44,6 +44,11 @@ parameters: displayName: Publish installers and checksums type: boolean default: true + + - name: requireDefaultChannels + displayName: Fail the build if there are no default channel(s) registrations for the current build + type: boolean + default: false - name: SDLValidationParameters type: object @@ -312,5 +317,6 @@ stages: -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} -AzdoToken '$(System.AccessToken)' -WaitPublishingFinish true + -RequireDefaultChannels ${{ parameters.requireDefaultChannels }} -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index 9abe726e5..2a6a52948 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -1,19 +1,49 @@ parameters: - # Enable cleanup tasks for MicroBuild + # Enable install tasks for MicroBuild enableMicrobuild: false - # Enable cleanup tasks for MicroBuild on Mac and Linux + # Enable install tasks for MicroBuild on Mac and Linux # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' enableMicrobuildForMacAndLinux: false + # Location of the MicroBuild output folder + microBuildOutputFolder: '$(Agent.TempDirectory)' continueOnError: false steps: - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - # Remove Python downgrade with https://github.com/dotnet/arcade/issues/15151 - - ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}: + - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: + # Install Python 3.12.x on when Python > 3.12.x is installed - https://github.com/dotnet/source-build/issues/4802 + - script: | + version=$(python3 --version | awk '{print $2}') + major=$(echo $version | cut -d. -f1) + minor=$(echo $version | cut -d. -f2) + + installPython=false + if [ "$major" -gt 3 ] || { [ "$major" -eq 3 ] && [ "$minor" -gt 12 ]; }; then + installPython=true + fi + + echo "Python version: $version." + echo "Install Python 3.12.x: $installPython." 
+ echo "##vso[task.setvariable variable=installPython;isOutput=true]$installPython" + name: InstallPython + displayName: 'Determine Python installation' + condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) + - task: UsePythonVersion@0 - displayName: 'Use Python 3.11.x' inputs: - versionSpec: '3.11.x' + versionSpec: '3.12.x' + displayName: 'Use Python 3.12.x' + condition: and(succeeded(), eq(variables['InstallPython.installPython'], 'true'), ne(variables['Agent.Os'], 'Windows_NT')) + + # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable + - task: UseDotNet@2 + displayName: Install .NET 8.0 SDK for MicroBuild Plugin + inputs: + packageType: sdk + version: 8.0.x + installationPath: ${{ parameters.microBuildOutputFolder }}/dotnet + workingDirectory: ${{ parameters.microBuildOutputFolder }} + condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) - task: MicroBuildSigningPlugin@4 displayName: Install MicroBuild plugin @@ -25,7 +55,7 @@ steps: azureSubscription: 'MicroBuild Signing Task (DevDiv)' env: TeamName: $(_TeamName) - MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)' + MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) continueOnError: ${{ parameters.continueOnError }} condition: and( diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index 2915d29bb..f9ba1625c 100644 --- a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -76,6 +76,11 @@ steps: assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml fi + portableBuildArgs= + if [ '${{ parameters.platform.portableBuild }}' != '' ]; then + portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}' + fi + ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ --configuration $buildConfig \ --restore --build --pack $publishArgs -bl \ @@ -85,8 +90,7 @@ steps: $targetRidArgs \ $runtimeOsArgs \ $baseOsArgs \ - /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \ - /p:ArcadeBuildFromSource=true \ + $portableBuildArgs \ /p:DotNetBuildSourceOnly=true \ /p:DotNetBuildRepo=true \ /p:AssetManifestFileName=$assetManifestFileName diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh index 7e9ba2b75..fbd8d8084 100644 --- a/eng/common/cross/build-android-rootfs.sh +++ b/eng/common/cross/build-android-rootfs.sh @@ -6,10 +6,11 @@ usage() { echo "Creates a toolchain and sysroot used for cross-compiling for Android." echo - echo "Usage: $0 [BuildArch] [ApiLevel]" + echo "Usage: $0 [BuildArch] [ApiLevel] [--ndk NDKVersion]" echo echo "BuildArch is the target architecture of Android. Currently only arm64 is supported." echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html" + echo "NDKVersion is the version of Android NDK. The default is r21. See https://developer.android.com/ndk/downloads/revision_history" echo echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. 
You can change this behavior" echo "by setting the TOOLCHAIN_DIR environment variable" @@ -25,10 +26,15 @@ __BuildArch=arm64 __AndroidArch=aarch64 __AndroidToolchain=aarch64-linux-android -for i in "$@" - do - lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")" - case $lowerI in +while :; do + if [[ "$#" -le 0 ]]; then + break + fi + + i=$1 + + lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")" + case $lowerI in -?|-h|--help) usage exit 1 @@ -43,6 +49,10 @@ for i in "$@" __AndroidArch=arm __AndroidToolchain=arm-linux-androideabi ;; + --ndk) + shift + __NDK_Version=$1 + ;; *[0-9]) __ApiLevel=$i ;; @@ -50,8 +60,17 @@ for i in "$@" __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i" ;; esac + shift done +if [[ "$__NDK_Version" == "r21" ]] || [[ "$__NDK_Version" == "r22" ]]; then + __NDK_File_Arch_Spec=-x86_64 + __SysRoot=sysroot +else + __NDK_File_Arch_Spec= + __SysRoot=toolchains/llvm/prebuilt/linux-x86_64/sysroot +fi + # Obtain the location of the bash script to figure out where the root of the repo is. __ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" @@ -78,6 +97,7 @@ fi echo "Target API level: $__ApiLevel" echo "Target architecture: $__BuildArch" +echo "NDK version: $__NDK_Version" echo "NDK location: $__NDK_Dir" echo "Target Toolchain location: $__ToolchainDir" @@ -85,8 +105,8 @@ echo "Target Toolchain location: $__ToolchainDir" if [ ! -d $__NDK_Dir ]; then echo Downloading the NDK into $__NDK_Dir mkdir -p $__NDK_Dir - wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip - unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir + wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux$__NDK_File_Arch_Spec.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux.zip + unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux.zip -d $__CrossDir fi if [ ! -d $__lldb_Dir ]; then @@ -116,16 +136,11 @@ for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/ fi done -cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/" +cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/$__SysRoot/usr/" # Generate platform file for build.sh script to assign to __DistroRid echo "Generating platform file..." 
-echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform - -echo "Now to build coreclr, libraries and installers; run:" -echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ - --subsetCategory coreclr -echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ - --subsetCategory libraries -echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \ - --subsetCategory installer +echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/$__SysRoot/android_platform + +echo "Now to build coreclr, libraries and host; run:" +echo ROOTFS_DIR=$(realpath $__ToolchainDir/$__SysRoot) ./build.sh clr+libs+host --cross --arch $__BuildArch diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 20ae8c286..74f399716 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -5,7 +5,7 @@ set -e usage() { echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]" - echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86" + echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86" echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine" echo " for alpine can be specified with version: alpineX.YY or alpineedge" echo " for FreeBSD can be: freebsd13, freebsd14" @@ -15,6 +15,7 @@ usage() echo "llvmx[.y] - optional, LLVM version for LLVM related packages." echo "--skipunmount - optional, will skip the unmount of rootfs folder." echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)." + echo "--skipemulation - optional, will skip qemu and debootstrap requirement when building environment for debian based systems." echo "--use-mirror - optional, use mirror URL to fetch resources, when available." echo "--jobs N - optional, restrict to N jobs." 
exit 1 @@ -52,14 +53,12 @@ __UbuntuPackages+=" symlinks" __UbuntuPackages+=" libicu-dev" __UbuntuPackages+=" liblttng-ust-dev" __UbuntuPackages+=" libunwind8-dev" -__UbuntuPackages+=" libnuma-dev" __AlpinePackages+=" gettext-dev" __AlpinePackages+=" icu-dev" __AlpinePackages+=" libunwind-dev" __AlpinePackages+=" lttng-ust-dev" __AlpinePackages+=" compiler-rt" -__AlpinePackages+=" numactl-dev" # runtime libraries' dependencies __UbuntuPackages+=" libcurl4-openssl-dev" @@ -73,8 +72,8 @@ __AlpinePackages+=" krb5-dev" __AlpinePackages+=" openssl-dev" __AlpinePackages+=" zlib-dev" -__FreeBSDBase="13.3-RELEASE" -__FreeBSDPkg="1.17.0" +__FreeBSDBase="13.4-RELEASE" +__FreeBSDPkg="1.21.3" __FreeBSDABI="13" __FreeBSDPackages="libunwind" __FreeBSDPackages+=" icu" @@ -129,10 +128,12 @@ __AlpineKeys=' 616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ== 616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ== 616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== 
+66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ== ' __Keyring= __KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg" __SkipSigCheck=0 +__SkipEmulation=0 __UseMirror=0 __UnprocessedBuildArgs= @@ -181,6 +182,18 @@ while :; do __Keyring="--keyring $__KeyringFile" fi ;; + loongarch64) + __BuildArch=loongarch64 + __AlpineArch=loongarch64 + __QEMUArch=loongarch64 + __UbuntuArch=loong64 + __UbuntuSuites=unreleased + __LLDB_Package="liblldb-19-dev" + + if [[ "$__CodeName" == "sid" ]]; then + __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/" + fi + ;; riscv64) __BuildArch=riscv64 __AlpineArch=riscv64 @@ -341,10 +354,28 @@ while :; do ;; sid) # Debian sid __CodeName=sid - __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + __UbuntuSuites= - if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" + # Debian-Ports architectures need different values + case "$__UbuntuArch" in + amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x) + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + *) + __KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/" + fi + ;; + esac + + if [[ -e "$__KeyringFile" ]]; then + __Keyring="--keyring $__KeyringFile" fi ;; tizen) @@ -371,7 +402,7 @@ while :; do ;; freebsd14) __CodeName=freebsd - __FreeBSDBase="14.0-RELEASE" + __FreeBSDBase="14.2-RELEASE" __FreeBSDABI="14" __SkipUnmount=1 ;; @@ -389,6 +420,9 @@ while :; do --skipsigcheck) __SkipSigCheck=1 ;; + --skipemulation) + __SkipEmulation=1 + ;; --rootfsdir|-rootfsdir) shift __RootfsDir="$1" @@ -421,16 +455,15 @@ case "$__AlpineVersion" in elif [[ "$__AlpineArch" == "x86" ]]; then __AlpineVersion=3.17 # minimum version that supports lldb-dev __AlpinePackages+=" llvm15-libs" - elif [[ "$__AlpineArch" == "riscv64" ]]; then + elif [[ "$__AlpineArch" == "riscv64" || "$__AlpineArch" == "loongarch64" ]]; then + __AlpineVersion=3.21 # minimum version that supports lldb-dev + __AlpinePackages+=" llvm19-libs" + elif [[ -n "$__AlpineMajorVersion" ]]; then + # use whichever alpine version is provided and select the latest toolchain libs __AlpineLlvmLibsLookup=1 - __AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive) else __AlpineVersion=3.13 # 3.13 to maximize compatibility __AlpinePackages+=" llvm10-libs" - - if [[ "$__AlpineArch" == "armv7" ]]; then - __AlpinePackages="${__AlpinePackages//numactl-dev/}" - fi fi esac @@ -444,11 +477,6 @@ if [[ "$__BuildArch" == "armel" ]]; then __LLDB_Package="lldb-3.5-dev" fi -if [[ "$__CodeName" == "xenial" && "$__UbuntuArch" == "armhf" ]]; then - # libnuma-dev is not available on armhf for xenial - 
__UbuntuPackages="${__UbuntuPackages//libnuma-dev/}" -fi - __UbuntuPackages+=" ${__LLDB_Package:-}" if [[ -z "$__UbuntuRepo" ]]; then @@ -513,11 +541,6 @@ if [[ "$__CodeName" == "alpine" ]]; then echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c chmod +x "$__ApkToolsDir/apk.static" - if [[ -f "/usr/bin/qemu-$__QEMUArch-static" ]]; then - mkdir -p "$__RootfsDir"/usr/bin - cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin" - fi - if [[ "$__AlpineVersion" == "edge" ]]; then version=edge else @@ -537,6 +560,10 @@ if [[ "$__CodeName" == "alpine" ]]; then __ApkSignatureArg="--keys-dir $__ApkKeysDir" fi + if [[ "$__SkipEmulation" == "1" ]]; then + __NoEmulationArg="--no-scripts" + fi + # initialize DB # shellcheck disable=SC2086 "$__ApkToolsDir/apk.static" \ @@ -558,7 +585,7 @@ if [[ "$__CodeName" == "alpine" ]]; then "$__ApkToolsDir/apk.static" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ - -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \ add $__AlpinePackages rm -r "$__ApkToolsDir" @@ -574,7 +601,7 @@ elif [[ "$__CodeName" == "freebsd" ]]; then curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version fi echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf - echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf + echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf mkdir -p "$__RootfsDir"/tmp # get and build package manager if [[ "$__hasWget" == 1 ]]; then @@ -753,25 +780,67 @@ elif [[ "$__CodeName" == "haiku" ]]; then popd rm -rf "$__RootfsDir/tmp" elif [[ -n "$__CodeName" ]]; then + __Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)" + + if [[ "$__SkipEmulation" == "1" ]]; then + if [[ -z "$AR" ]]; then + if command -v ar &>/dev/null; then + AR="$(command -v ar)" + elif command -v llvm-ar &>/dev/null; then + AR="$(command -v llvm-ar)" + else + echo "Unable to find ar or llvm-ar on PATH, add them to PATH or set AR environment variable pointing to the available AR tool" + exit 1 + fi + fi + + PYTHON=${PYTHON_EXECUTABLE:-python3} + + # shellcheck disable=SC2086,SC2046 + echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ + $(for suite in $__Suites; do echo -n "--suite $suite "; done) \ + $__UbuntuPackages + + # shellcheck disable=SC2086,SC2046 + "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ + $(for suite in $__Suites; do echo -n "--suite $suite "; done) \ + $__UbuntuPackages + + exit 0 + fi + __UpdateOptions= if [[ "$__SkipSigCheck" == "0" ]]; then 
__Keyring="$__Keyring --force-check-gpg" + else + __Keyring= + __UpdateOptions="--allow-unauthenticated --allow-insecure-repositories" fi # shellcheck disable=SC2086 echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" - debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" + # shellcheck disable=SC2086 + if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then + echo "debootstrap failed! dumping debootstrap.log" + cat "$__RootfsDir/debootstrap/debootstrap.log" + exit 1 + fi + + rm -rf "$__RootfsDir"/etc/apt/*.{sources,list} "$__RootfsDir"/etc/apt/sources.list.d mkdir -p "$__RootfsDir/etc/apt/sources.list.d/" + + # shellcheck disable=SC2086 cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" < token2) - (token1 < token2) + else: + return -1 if isinstance(token1, str) else 1 + + return len(tokens1) - len(tokens2) + +def compare_debian_versions(version1, version2): + """Compare two Debian package versions.""" + epoch1, upstream1, revision1 = parse_debian_version(version1) + epoch2, upstream2, revision2 = parse_debian_version(version2) + + if epoch1 != epoch2: + return epoch1 - epoch2 + + result = compare_upstream_version(upstream1, upstream2) + if result != 0: + return result + + return compare_upstream_version(revision1, revision2) + +def resolve_dependencies(packages, aliases, desired_packages): + """Recursively resolves dependencies for the desired packages.""" + resolved = [] + to_process = deque(desired_packages) + + while to_process: + current = to_process.popleft() + resolved_package = current if current in packages else aliases.get(current, [None])[0] + + if not resolved_package: + print(f"Error: Package '{current}' was not found in the available packages.") + sys.exit(1) + + if resolved_package not in resolved: + resolved.append(resolved_package) + + deps = packages.get(resolved_package, {}).get("Depends", "") + if deps: + deps = [dep.split(' ')[0] for dep in deps.split(', ') if dep] + for dep in deps: + if dep not in resolved and dep not in to_process and dep in packages: + to_process.append(dep) + + return resolved + +def parse_package_index(content): + """Parses the Packages.gz file and returns package information.""" + packages = {} + aliases = {} + entries = re.split(r'\n\n+', content) + + for entry in entries: + fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE)) + if "Package" in fields: + package_name = fields["Package"] + version = fields.get("Version") + filename = fields.get("Filename") + depends = fields.get("Depends") + provides = fields.get("Provides", None) + + # Only update if package_name is not in packages or if the new version is higher + if package_name not in packages or compare_debian_versions(version, packages[package_name]["Version"]) > 0: + packages[package_name] = { + "Version": version, + "Filename": filename, + "Depends": depends + } + + # Update aliases if package provides any alternatives + if provides: + provides_list = [x.strip() for x in provides.split(",")] + for alias in provides_list: + # Strip version specifiers + alias_name = re.sub(r'\s*\(=.*\)', '', alias) + if alias_name not in aliases: + aliases[alias_name] = [] + if package_name not in aliases[alias_name]: + aliases[alias_name].append(package_name) + + return packages, aliases + +def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, 
desired_packages): + """Downloads .deb files and extracts them.""" + resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages) + print(f"Resolved packages (including dependencies): {resolved_packages}") + + packages_to_download = {} + + for pkg in resolved_packages: + if pkg in packages_info: + packages_to_download[pkg] = packages_info[pkg] + + if pkg in aliases: + for alias in aliases[pkg]: + if alias in packages_info: + packages_to_download[alias] = packages_info[alias] + + asyncio.run(download_deb_files_parallel(mirror, packages_to_download, tmp_dir)) + + package_to_deb_file_map = {} + for pkg in resolved_packages: + pkg_info = packages_info.get(pkg) + if pkg_info: + deb_filename = pkg_info.get("Filename") + if deb_filename: + deb_file_path = os.path.join(tmp_dir, os.path.basename(deb_filename)) + package_to_deb_file_map[pkg] = deb_file_path + + for pkg in reversed(resolved_packages): + deb_file = package_to_deb_file_map.get(pkg) + if deb_file and os.path.exists(deb_file): + extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool) + + print("All done!") + +def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool): + """Extract .deb file contents""" + + os.makedirs(extract_dir, exist_ok=True) + + with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir: + result = subprocess.run(f"{ar_tool} t {os.path.abspath(deb_file)}", cwd=tmp_subdir, check=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + tar_filename = None + for line in result.stdout.decode().splitlines(): + if line.startswith("data.tar"): + tar_filename = line.strip() + break + + if not tar_filename: + raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.") + + tar_file_path = os.path.join(tmp_subdir, tar_filename) + print(f"Extracting {tar_filename} from {deb_file}..") + + subprocess.run(f"{ar_tool} p {os.path.abspath(deb_file)} {tar_filename} > {tar_file_path}", check=True, shell=True) + + file_extension = os.path.splitext(tar_file_path)[1].lower() + + if file_extension == ".xz": + mode = "r:xz" + elif file_extension == ".gz": + mode = "r:gz" + elif file_extension == ".zst": + # zstd is not supported by standard library yet + decompressed_tar_path = tar_file_path.replace(".zst", "") + with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file: + dctx = zstandard.ZstdDecompressor() + dctx.copy_stream(zst_file, decompressed_file) + + tar_file_path = decompressed_tar_path + mode = "r" + else: + raise ValueError(f"Unsupported compression format: {file_extension}") + + with tarfile.open(tar_file_path, mode) as tar: + tar.extractall(path=extract_dir, filter='fully_trusted') + +def finalize_setup(rootfsdir): + lib_dir = os.path.join(rootfsdir, 'lib') + usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib') + + if os.path.exists(lib_dir): + if os.path.islink(lib_dir): + os.remove(lib_dir) + else: + os.makedirs(usr_lib_dir, exist_ok=True) + + for item in os.listdir(lib_dir): + src = os.path.join(lib_dir, item) + dest = os.path.join(usr_lib_dir, item) + + if os.path.isdir(src): + shutil.copytree(src, dest, dirs_exist_ok=True) + else: + shutil.copy2(src, dest) + + shutil.rmtree(lib_dir) + + os.symlink(usr_lib_dir, lib_dir) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS") + parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)") + parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, 
etc.)") + parser.add_argument("--rootfsdir", required=True, help="Destination directory.") + parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.') + parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)") + parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)") + parser.add_argument("packages", nargs="+", help="List of package names to be installed.") + + args = parser.parse_args() + + if args.mirror is None: + if args.distro == "ubuntu": + args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports" + elif args.distro == "debian": + args.mirror = "http://ftp.debian.org/debian-ports" + else: + raise Exception("Unsupported distro") + + DESIRED_PACKAGES = args.packages + [ # base packages + "dpkg", + "busybox", + "libc-bin", + "base-files", + "base-passwd", + "debianutils" + ] + + print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}") + + package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite)) + + packages_info, aliases = parse_package_index(package_index_content) + + with tempfile.TemporaryDirectory() as tmp_dir: + install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES) + + finalize_setup(args.rootfsdir) diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake index 9a4e285a5..0ff85cf03 100644 --- a/eng/common/cross/toolchain.cmake +++ b/eng/common/cross/toolchain.cmake @@ -40,7 +40,7 @@ if(TARGET_ARCH_NAME STREQUAL "arm") set(TOOLCHAIN "arm-linux-gnueabihf") endif() if(TIZEN) - set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0") + set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf") endif() elseif(TARGET_ARCH_NAME STREQUAL "arm64") set(CMAKE_SYSTEM_PROCESSOR aarch64) @@ -49,7 +49,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "arm64") elseif(LINUX) set(TOOLCHAIN "aarch64-linux-gnu") if(TIZEN) - set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0") + set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu") endif() elseif(FREEBSD) set(triple "aarch64-unknown-freebsd12") @@ -58,7 +58,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "armel") set(CMAKE_SYSTEM_PROCESSOR armv7l) set(TOOLCHAIN "arm-linux-gnueabi") if(TIZEN) - set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0") + set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi") endif() elseif(TARGET_ARCH_NAME STREQUAL "armv6") set(CMAKE_SYSTEM_PROCESSOR armv6l) @@ -67,6 +67,13 @@ elseif(TARGET_ARCH_NAME STREQUAL "armv6") else() set(TOOLCHAIN "arm-linux-gnueabihf") endif() +elseif(TARGET_ARCH_NAME STREQUAL "loongarch64") + set(CMAKE_SYSTEM_PROCESSOR "loongarch64") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/loongarch64-alpine-linux-musl) + set(TOOLCHAIN "loongarch64-alpine-linux-musl") + else() + set(TOOLCHAIN "loongarch64-linux-gnu") + endif() elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") set(CMAKE_SYSTEM_PROCESSOR ppc64le) if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl) @@ -81,7 +88,7 @@ elseif(TARGET_ARCH_NAME STREQUAL "riscv64") else() set(TOOLCHAIN "riscv64-linux-gnu") if(TIZEN) - set(TIZEN_TOOLCHAIN "riscv64-tizen-linux-gnu/13.1.0") + set(TIZEN_TOOLCHAIN "riscv64-tizen-linux-gnu") endif() endif() elseif(TARGET_ARCH_NAME STREQUAL "s390x") @@ -98,7 +105,7 @@ elseif(TARGET_ARCH_NAME 
STREQUAL "x64") elseif(LINUX) set(TOOLCHAIN "x86_64-linux-gnu") if(TIZEN) - set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu/9.2.0") + set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu") endif() elseif(FREEBSD) set(triple "x86_64-unknown-freebsd12") @@ -115,10 +122,10 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") set(TOOLCHAIN "i686-linux-gnu") endif() if(TIZEN) - set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu/9.2.0") + set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu") endif() else() - message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!") + message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64 and x86 are supported!") endif() if(DEFINED ENV{TOOLCHAIN}) @@ -127,32 +134,46 @@ endif() # Specify include paths if(TIZEN) - if(TARGET_ARCH_NAME STREQUAL "arm") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7hl-tizen-linux-gnueabihf) - endif() - if(TARGET_ARCH_NAME STREQUAL "armel") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi) - endif() - if(TARGET_ARCH_NAME STREQUAL "arm64") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu) - endif() - if(TARGET_ARCH_NAME STREQUAL "x86") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/i586-tizen-linux-gnu) - endif() - if(TARGET_ARCH_NAME STREQUAL "x64") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/x86_64-tizen-linux-gnu) - endif() - if(TARGET_ARCH_NAME STREQUAL "riscv64") - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/) - include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/riscv64-tizen-linux-gnu) + function(find_toolchain_dir prefix) + # Dynamically find the version subdirectory + file(GLOB DIRECTORIES "${prefix}/*") + list(GET DIRECTORIES 0 FIRST_MATCH) + get_filename_component(TOOLCHAIN_VERSION ${FIRST_MATCH} NAME) + + set(TIZEN_TOOLCHAIN_PATH "${prefix}/${TOOLCHAIN_VERSION}" PARENT_SCOPE) + endfunction() + + if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") + find_toolchain_dir("${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + else() + find_toolchain_dir("${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") endif() + + message(STATUS "TIZEN_TOOLCHAIN_PATH set to: ${TIZEN_TOOLCHAIN_PATH}") + + include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++) + include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN}) endif() +function(locate_toolchain_exec exec var) + set(TOOLSET_PREFIX ${TOOLCHAIN}-) + string(TOUPPER ${exec} EXEC_UPPERCASE) + if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") + set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) + return() + endif() + + find_program(EXEC_LOCATION_${exec} + NAMES + "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" + "${TOOLSET_PREFIX}${exec}") + + if 
(EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") + message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") + endif() + set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) +endfunction() + if(ANDROID) if(TARGET_ARCH_NAME STREQUAL "arm") set(ANDROID_ABI armeabi-v7a) @@ -183,66 +204,24 @@ elseif(FREEBSD) set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld") elseif(ILLUMOS) set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") include_directories(SYSTEM ${CROSS_ROOTFS}/include) - set(TOOLSET_PREFIX ${TOOLCHAIN}-) - function(locate_toolchain_exec exec var) - string(TOUPPER ${exec} EXEC_UPPERCASE) - if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") - set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) - return() - endif() - - find_program(EXEC_LOCATION_${exec} - NAMES - "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" - "${TOOLSET_PREFIX}${exec}") - - if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") - message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") - endif() - set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) - endfunction() - - set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") - locate_toolchain_exec(gcc CMAKE_C_COMPILER) locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) - - set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") - set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") elseif(HAIKU) set(CMAKE_SYSROOT "${CROSS_ROOTFS}") set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin") - - set(TOOLSET_PREFIX ${TOOLCHAIN}-) - function(locate_toolchain_exec exec var) - string(TOUPPER ${exec} EXEC_UPPERCASE) - if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") - set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) - return() - endif() - - find_program(EXEC_LOCATION_${exec} - NAMES - "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" - "${TOOLSET_PREFIX}${exec}") - - if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") - message(FATAL_ERROR "Unable to find toolchain executable. 
Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") - endif() - set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) - endfunction() - set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") locate_toolchain_exec(gcc CMAKE_C_COMPILER) locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) - set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") - set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") - # let CMake set up the correct search paths include(Platform/Haiku) else() @@ -272,21 +251,21 @@ endif() if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") if(TIZEN) - add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") endif() elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64|riscv64)$") if(TIZEN) - add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64") add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64") - add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${TIZEN_TOOLCHAIN_PATH}") endif() elseif(TARGET_ARCH_NAME STREQUAL "s390x") add_toolchain_linker_flag("--target=${TOOLCHAIN}") @@ -297,10 +276,10 @@ elseif(TARGET_ARCH_NAME STREQUAL "x86") endif() add_toolchain_linker_flag(-m32) if(TIZEN) - add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") - add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") endif() elseif(ILLUMOS) add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64") @@ -312,7 +291,7 @@ endif() # Specify compile options -if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) +if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj index 32f79dfb3..feaa6d208 100644 --- a/eng/common/internal/Tools.csproj +++ b/eng/common/internal/Tools.csproj @@ -15,16 +15,6 @@ - - - - https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json; - - - $(RestoreSources); - https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json; - - diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh new file 
mode 100644 index 000000000..ce661e9e5 --- /dev/null +++ b/eng/common/native/install-dependencies.sh @@ -0,0 +1,61 @@ +#!/bin/sh + +set -e + +# This is a simple script primarily used for CI to install necessary dependencies +# +# Usage: +# +# ./install-dependencies.sh + +os="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + +if [ -z "$os" ]; then + . "$(dirname "$0")"/init-os-and-arch.sh +fi + +case "$os" in + linux) + if [ -e /etc/os-release ]; then + . /etc/os-release + fi + + if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then + apt update + + apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \ + libssl-dev libkrb5-dev pigz cpio + + localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 + elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ]; then + dnf install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio + elif [ "$ID" = "alpine" ]; then + apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio + else + echo "Unsupported distro. distro: $ID" + exit 1 + fi + ;; + + osx|maccatalyst|ios|iossimulator|tvos|tvossimulator) + echo "Installed xcode version: $(xcode-select -p)" + + export HOMEBREW_NO_INSTALL_CLEANUP=1 + export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 + # Skip brew update for now, see https://github.com/actions/setup-python/issues/577 + # brew update --preinstall + brew bundle --no-upgrade --no-lock --file=- < Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)" + echo " --restore Restore dependencies" + echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]" + echo " --help Print help and exit" + echo "" + echo "Command line arguments not listed above are passed thru to msbuild." +} + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +Build() { + local target=$1 + local log_suffix="" + [[ "$target" != "Execute" ]] && log_suffix=".$target" + local log="$log_dir/$task$log_suffix.binlog" + local output_path="$toolset_dir/$task/" + + MSBuild "$taskProject" \ + /bl:"$log" \ + /t:"$target" \ + /p:Configuration="$configuration" \ + /p:RepoRoot="$repo_root" \ + /p:BaseIntermediateOutputPath="$output_path" \ + /v:"$verbosity" \ + $properties +} + +configuration="Debug" +verbosity="minimal" +restore=false +help=false +properties='' + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --task) + task=$2 + shift 2 + ;; + --restore) + restore=true + shift 1 + ;; + --verbosity) + verbosity=$2 + shift 2 + ;; + --help) + help=true + shift 1 + ;; + *) + properties="$properties $1" + shift 1 + ;; + esac +done + +ci=true +binaryLog=true +warnAsError=true + +if $help; then + show_usage + exit 0 +fi + +. "$scriptroot/tools.sh" +InitializeToolset + +if [[ -z "$task" ]]; then + Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '-task '" + ExitWithExitCode 1 +fi + +taskProject=$(GetSdkTaskProject "$task") +if [[ ! 
-e "$taskProject" ]]; then + Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task" + ExitWithExitCode 1 +fi + +if $restore; then + Build "Restore" +fi + +Build "Execute" + + +ExitWithExitCode 0 diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 22954477a..80f9130b1 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -42,7 +42,7 @@ [bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true } # Enable repos to use a particular version of the on-line dotnet-install scripts. -# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1 +# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1 [string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' } # True to use global NuGet cache instead of restoring packages to repository-local directory. @@ -262,7 +262,7 @@ function GetDotNetInstallScript([string] $dotnetRoot) { if (!(Test-Path $installScript)) { Create-Directory $dotnetRoot $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit - $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" + $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1" Retry({ Write-Host "GET $uri" @@ -320,7 +320,7 @@ function InstallDotNet([string] $dotnetRoot, $variations += @($installParameters) $dotnetBuilds = $installParameters.Clone() - $dotnetbuilds.AzureFeed = "https://dotnetbuilds.azureedge.net/public" + $dotnetbuilds.AzureFeed = "https://ci.dot.net/public" $variations += @($dotnetBuilds) if ($runtimeSourceFeed) { @@ -383,8 +383,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = # If the version of msbuild is going to be xcopied, # use this version. 
Version matches a package here: - # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.10.0-pre.4.0 - $defaultXCopyMSBuildVersion = '17.10.0-pre.4.0' + # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.12.0 + $defaultXCopyMSBuildVersion = '17.12.0' if (!$vsRequirements) { if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { @@ -604,14 +604,7 @@ function InitializeBuildTool() { } $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet') - # Use override if it exists - commonly set by source-build - if ($null -eq $env:_OverrideArcadeInitializeBuildToolFramework) { - $initializeBuildToolFramework="net9.0" - } else { - $initializeBuildToolFramework=$env:_OverrideArcadeInitializeBuildToolFramework - } - - $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = $initializeBuildToolFramework } + $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net' } } elseif ($msbuildEngine -eq "vs") { try { $msbuildPath = InitializeVisualStudioMSBuild -install:$restore @@ -620,7 +613,7 @@ function InitializeBuildTool() { ExitWithExitCode 1 } - $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS } + $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "netframework"; ExcludePrereleaseVS = $excludePrereleaseVS } } else { Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." ExitWithExitCode 1 @@ -779,8 +772,10 @@ function MSBuild() { # new scripts need to work with old packages, so we need to look for the old names/versions (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')), (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')), - (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.ArcadeLogging.dll')), - (Join-Path $basePath (Join-Path net7.0 'Microsoft.DotNet.Arcade.Sdk.dll')), + + # This list doesn't need to be updated anymore and can eventually be removed. + (Join-Path $basePath (Join-Path net9.0 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path net9.0 'Microsoft.DotNet.Arcade.Sdk.dll')), (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.ArcadeLogging.dll')), (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.Arcade.Sdk.dll')) ) diff --git a/eng/common/tools.sh b/eng/common/tools.sh index 00473c9f9..4a5fa9947 100644 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -54,7 +54,7 @@ warn_as_error=${warn_as_error:-true} use_installed_dotnet_cli=${use_installed_dotnet_cli:-true} # Enable repos to use a particular version of the on-line dotnet-install scripts. -# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh +# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} # True to use global NuGet cache instead of restoring packages to repository-local directory. 
@@ -232,7 +232,7 @@ function InstallDotNet { local public_location=("${installParameters[@]}") variations+=(public_location) - local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://dotnetbuilds.azureedge.net/public") + local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://ci.dot.net/public") variations+=(dotnetbuilds) if [[ -n "${6:-}" ]]; then @@ -295,7 +295,7 @@ function with_retries { function GetDotNetInstallScript { local root=$1 local install_script="$root/dotnet-install.sh" - local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" + local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh" if [[ ! -a "$install_script" ]]; then mkdir -p "$root" @@ -339,12 +339,6 @@ function InitializeBuildTool { # return values _InitializeBuildTool="$_InitializeDotNetCli/dotnet" _InitializeBuildToolCommand="msbuild" - # use override if it exists - commonly set by source-build - if [[ "${_OverrideArcadeInitializeBuildToolFramework:-x}" == "x" ]]; then - _InitializeBuildToolFramework="net9.0" - else - _InitializeBuildToolFramework="${_OverrideArcadeInitializeBuildToolFramework}" - fi } # Set RestoreNoHttpCache as a workaround for https://github.com/NuGet/Home/issues/3116 @@ -454,10 +448,12 @@ function MSBuild { # new scripts need to work with old packages, so we need to look for the old names/versions local selectedPath= local possiblePaths=() - possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" ) - possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" ) - possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.ArcadeLogging.dll" ) - possiblePaths+=( "$toolset_dir/net7.0/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/net/Microsoft.DotNet.Arcade.Sdk.dll" ) + + # This list doesn't need to be updated anymore and can eventually be removed. + possiblePaths+=( "$toolset_dir/net9.0/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/net9.0/Microsoft.DotNet.Arcade.Sdk.dll" ) possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" ) possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" ) for path in "${possiblePaths[@]}"; do @@ -532,6 +528,12 @@ function GetDarc { "$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version } +# Returns a full path to an Arcade SDK task project file. 
+function GetSdkTaskProject { + taskName=$1 + echo "$(dirname $_InitializeToolset)/SdkTasks/$taskName.proj" +} + ResolvePath "${BASH_SOURCE[0]}" _script_dir=`dirname "$_ResolvePath"` diff --git a/global.json b/global.json index 8bf14690b..c82bd50a8 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.0-alpha.1.24414.6", + "dotnet": "10.0.100-alpha.1.25077.2", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24510.1", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25106.4", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From a36d55307dd47cbcd973fd60341f63e2e8335fad Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 17 Feb 2025 05:01:58 +0000 Subject: [PATCH 02/43] Update dependencies from https://github.com/dotnet/arcade build 20250213.2 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25113.2 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/steps/source-build.yml | 2 +- global.json | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 49b41b196..a06275364 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 91630b31ce859c28f637b62b566ea8829b982f2c + ea0a0a28cccd4b63a9ec40df53ef2df260ffa5b1 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index f9ba1625c..2341706b0 100644 --- a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -37,7 +37,7 @@ steps: # in the default public locations. 
internalRuntimeDownloadArgs= if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then - internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' fi buildConfig=Release diff --git a/global.json b/global.json index c82bd50a8..eaaec12a5 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25106.4", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25113.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 155acd6f0c3f3603706660180c85ec62cb63698d Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 24 Feb 2025 05:01:47 +0000 Subject: [PATCH 03/43] Update dependencies from https://github.com/dotnet/arcade build 20250220.6 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25120.6 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/job/publish-build-assets.yml | 9 ++++----- eng/common/sdk-task.ps1 | 2 +- eng/common/tools.ps1 | 4 ++-- global.json | 4 ++-- 5 files changed, 11 insertions(+), 12 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index a06275364..eb236514c 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - ea0a0a28cccd4b63a9ec40df53ef2df260ffa5b1 + ecdb2f499cb5f5c99b58fba1a14fdf977c56e1ac https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml index 3d3356e31..c7d59dcbf 100644 --- a/eng/common/core-templates/job/publish-build-assets.yml +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -76,12 +76,11 @@ jobs: fetchDepth: 3 clean: true - - task: DownloadBuildArtifacts@0 - displayName: Download artifact + - task: DownloadPipelineArtifact@2 + displayName: Download Asset Manifests inputs: artifactName: AssetManifests - downloadPath: '$(Build.StagingDirectory)/Download' - checkDownloadedFiles: true + targetPath: '$(Build.StagingDirectory)/AssetManifests' condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} @@ -95,7 +94,7 @@ jobs: scriptLocation: scriptPath scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1 arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet - /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests' + /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests' /p:MaestroApiEndpoint=https://maestro.dot.net /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} /p:OfficialBuildId=$(Build.BuildNumber) diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index 4f0546dce..ab2b13f63 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -64,7 +64,7 @@ try { $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json 
"{ `"version`": `"16.5`" }") -MemberType NoteProperty } if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { - $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.12.0" -MemberType NoteProperty + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.13.0" -MemberType NoteProperty } if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 80f9130b1..95ccdf82e 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -383,8 +383,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = # If the version of msbuild is going to be xcopied, # use this version. Version matches a package here: - # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.12.0 - $defaultXCopyMSBuildVersion = '17.12.0' + # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.13.0 + $defaultXCopyMSBuildVersion = '17.13.0' if (!$vsRequirements) { if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { diff --git a/global.json b/global.json index eaaec12a5..6b839a0cd 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-alpha.1.25077.2", + "dotnet": "10.0.100-preview.2.25118.3", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25113.2", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25120.6", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 4a287c1d2d970834f0ce365c54f22e941b3284b9 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Tue, 25 Feb 2025 20:54:35 +0000 Subject: [PATCH 04/43] Update dependencies from https://github.com/dotnet/arcade build 20250225.2 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25125.2 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/steps/generate-sbom.yml | 2 +- global.json | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index eb236514c..16bfe7c2a 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - ecdb2f499cb5f5c99b58fba1a14fdf977c56e1ac + 5b1e2d133be215e729cf90016ae12b520950ce7a https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml index d938b60e1..b8e6dea83 100644 --- a/eng/common/core-templates/steps/generate-sbom.yml +++ b/eng/common/core-templates/steps/generate-sbom.yml @@ -5,7 +5,7 @@ # IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
parameters: - PackageVersion: 9.0.0 + PackageVersion: 10.0.0 BuildDropPath: '$(Build.SourcesDirectory)/artifacts' PackageName: '.NET' ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom diff --git a/global.json b/global.json index 6b839a0cd..fe0d96487 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.2.25118.3", + "dotnet": "10.0.100-preview.3.25125.5", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25120.6", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25125.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 5596b5f25025c7efa6f8a46a75aa863192950d42 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 3 Mar 2025 05:01:46 +0000 Subject: [PATCH 05/43] Update dependencies from https://github.com/dotnet/arcade build 20250226.4 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25126.4 --- eng/Version.Details.xml | 4 ++-- .../core-templates/steps/generate-sbom.yml | 2 +- eng/common/generate-sbom-prep.ps1 | 20 +++++++++++++------ eng/common/generate-sbom-prep.sh | 17 ++++++++++------ eng/common/templates-official/job/job.yml | 1 + global.json | 2 +- 6 files changed, 30 insertions(+), 16 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 16bfe7c2a..e04345352 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 5b1e2d133be215e729cf90016ae12b520950ce7a + 6d4b01bce3a29c172faff5abc0bfe2ae3d1fef3d https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml index b8e6dea83..44a9636cd 100644 --- a/eng/common/core-templates/steps/generate-sbom.yml +++ b/eng/common/core-templates/steps/generate-sbom.yml @@ -38,7 +38,7 @@ steps: PackageName: ${{ parameters.packageName }} BuildDropPath: ${{ parameters.buildDropPath }} PackageVersion: ${{ parameters.packageVersion }} - ManifestDirPath: ${{ parameters.manifestDirPath }} + ManifestDirPath: ${{ parameters.manifestDirPath }}/$(ARTIFACT_NAME) ${{ if ne(parameters.IgnoreDirectories, '') }}: AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' diff --git a/eng/common/generate-sbom-prep.ps1 b/eng/common/generate-sbom-prep.ps1 index 3e5c1c74a..a0c7d792a 100644 --- a/eng/common/generate-sbom-prep.ps1 +++ b/eng/common/generate-sbom-prep.ps1 @@ -4,18 +4,26 @@ Param( . $PSScriptRoot\pipeline-logging-functions.ps1 +# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly +# with their own overwriting ours. So we create it as a sub directory of the requested manifest path. 
+$ArtifactName = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" +$SafeArtifactName = $ArtifactName -replace '["/:<>\\|?@*"() ]', '_' +$SbomGenerationDir = Join-Path $ManifestDirPath $SafeArtifactName + +Write-Host "Artifact name before : $ArtifactName" +Write-Host "Artifact name after : $SafeArtifactName" + Write-Host "Creating dir $ManifestDirPath" + # create directory for sbom manifest to be placed -if (!(Test-Path -path $ManifestDirPath)) +if (!(Test-Path -path $SbomGenerationDir)) { - New-Item -ItemType Directory -path $ManifestDirPath - Write-Host "Successfully created directory $ManifestDirPath" + New-Item -ItemType Directory -path $SbomGenerationDir + Write-Host "Successfully created directory $SbomGenerationDir" } else{ Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." } Write-Host "Updating artifact name" -$artifact_name = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -replace '["/:<>\\|?@*"() ]', '_' -Write-Host "Artifact name $artifact_name" -Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$artifact_name" +Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$SafeArtifactName" diff --git a/eng/common/generate-sbom-prep.sh b/eng/common/generate-sbom-prep.sh index d5c76dc82..b8ecca72b 100644 --- a/eng/common/generate-sbom-prep.sh +++ b/eng/common/generate-sbom-prep.sh @@ -14,19 +14,24 @@ done scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" . $scriptroot/pipeline-logging-functions.sh + +# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts. +artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM" +safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}" manifest_dir=$1 -if [ ! -d "$manifest_dir" ] ; then - mkdir -p "$manifest_dir" - echo "Sbom directory created." $manifest_dir +# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly +# with their own overwriting ours. So we create it as a sub directory of the requested manifest path. +sbom_generation_dir="$manifest_dir/$safe_artifact_name" + +if [ ! -d "$sbom_generation_dir" ] ; then + mkdir -p "$sbom_generation_dir" + echo "Sbom directory created." $sbom_generation_dir else Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." fi -artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM" echo "Artifact name before : "$artifact_name -# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts. 
-safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}" echo "Artifact name after : "$safe_artifact_name export ARTIFACT_NAME=$safe_artifact_name echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name" diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml index 605692d2f..817555505 100644 --- a/eng/common/templates-official/job/job.yml +++ b/eng/common/templates-official/job/job.yml @@ -16,6 +16,7 @@ jobs: parameters: PackageVersion: ${{ parameters.packageVersion }} BuildDropPath: ${{ parameters.buildDropPath }} + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom publishArtifacts: false # publish artifacts diff --git a/global.json b/global.json index fe0d96487..0fd109a20 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25125.2", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25126.4", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 40a37f838c8d1f8a10d12a689d8338a180307976 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 10 Mar 2025 05:01:35 +0000 Subject: [PATCH 06/43] Update dependencies from https://github.com/dotnet/arcade build 20250307.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25157.1 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/job/job.yml | 1 - eng/common/core-templates/job/publish-build-assets.yml | 4 ---- eng/common/core-templates/jobs/codeql-build.yml | 1 - eng/common/core-templates/jobs/jobs.yml | 4 ---- eng/common/native/install-dependencies.sh | 5 +++-- global.json | 2 +- 7 files changed, 6 insertions(+), 15 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index e04345352..a8d0e09c0 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 6d4b01bce3a29c172faff5abc0bfe2ae3d1fef3d + 1ec6078c26e4a60b77d8fe64881491cd28335a08 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml index 295c9a231..4dfb3d868 100644 --- a/eng/common/core-templates/job/job.yml +++ b/eng/common/core-templates/job/job.yml @@ -23,7 +23,6 @@ parameters: enablePublishBuildArtifacts: false enablePublishBuildAssets: false enablePublishTestResults: false - enablePublishUsingPipelines: false enableBuildRetry: false mergeTestResults: false testRunTitle: '' diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml index c7d59dcbf..5da3c2a2a 100644 --- a/eng/common/core-templates/job/publish-build-assets.yml +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -20,9 +20,6 @@ parameters: # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
runAsPublic: false - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing - publishUsingPipelines: false - # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing publishAssetsImmediately: false @@ -96,7 +93,6 @@ jobs: arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests' /p:MaestroApiEndpoint=https://maestro.dot.net - /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }} /p:OfficialBuildId=$(Build.BuildNumber) condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml index f2144252c..693b00b37 100644 --- a/eng/common/core-templates/jobs/codeql-build.yml +++ b/eng/common/core-templates/jobs/codeql-build.yml @@ -15,7 +15,6 @@ jobs: enablePublishBuildArtifacts: false enablePublishTestResults: false enablePublishBuildAssets: false - enablePublishUsingPipelines: false enableTelemetry: true variables: diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml index ea69be434..3d2deaa4a 100644 --- a/eng/common/core-templates/jobs/jobs.yml +++ b/eng/common/core-templates/jobs/jobs.yml @@ -5,9 +5,6 @@ parameters: # Optional: Include PublishBuildArtifacts task enablePublishBuildArtifacts: false - # Optional: Enable publishing using release pipelines - enablePublishUsingPipelines: false - # Optional: Enable running the source-build jobs to build repo from source enableSourceBuild: false @@ -112,7 +109,6 @@ jobs: - Source_Build_Complete runAsPublic: ${{ parameters.runAsPublic }} - publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }} publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh index ce661e9e5..13d849ae9 100644 --- a/eng/common/native/install-dependencies.sh +++ b/eng/common/native/install-dependencies.sh @@ -27,8 +27,9 @@ case "$os" in libssl-dev libkrb5-dev pigz cpio localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 - elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ]; then - dnf install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio + elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then + pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)" + $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio elif [ "$ID" = "alpine" ]; then apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio else diff --git a/global.json b/global.json index 0fd109a20..c560ef59a 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25126.4", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25157.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From e50b4352ff73f67853b8641492548d72b0ffdb5c Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 17 Mar 2025 
05:02:31 +0000 Subject: [PATCH 07/43] Update dependencies from https://github.com/dotnet/arcade build 20250314.6 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25164.6 --- eng/Version.Details.xml | 4 +- eng/common/cross/armel/armel.jessie.patch | 43 --------------- eng/common/cross/build-rootfs.sh | 53 +++++-------------- eng/common/native/install-dependencies.sh | 2 +- eng/common/templates-official/job/job.yml | 4 +- .../steps/publish-build-artifacts.yml | 7 ++- eng/common/templates/job/job.yml | 4 +- .../steps/publish-build-artifacts.yml | 8 ++- eng/common/tools.ps1 | 2 +- eng/common/tools.sh | 2 +- global.json | 2 +- 11 files changed, 37 insertions(+), 94 deletions(-) delete mode 100644 eng/common/cross/armel/armel.jessie.patch diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index a8d0e09c0..0ebe23445 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 1ec6078c26e4a60b77d8fe64881491cd28335a08 + 1912d9f4fc410d421a01b5a09131aae234b603fa https://github.com/dotnet/wpf diff --git a/eng/common/cross/armel/armel.jessie.patch b/eng/common/cross/armel/armel.jessie.patch deleted file mode 100644 index 2d2615619..000000000 --- a/eng/common/cross/armel/armel.jessie.patch +++ /dev/null @@ -1,43 +0,0 @@ -diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h ---- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700 -+++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700 -@@ -69,10 +69,10 @@ - #endif - #ifdef UATOMIC_HAS_ATOMIC_SHORT - case 2: -- return __sync_val_compare_and_swap_2(addr, old, _new); -+ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new); - #endif - case 4: -- return __sync_val_compare_and_swap_4(addr, old, _new); -+ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new); - #if (CAA_BITS_PER_LONG == 64) - case 8: - return __sync_val_compare_and_swap_8(addr, old, _new); -@@ -109,7 +109,7 @@ - return; - #endif - case 4: -- __sync_and_and_fetch_4(addr, val); -+ __sync_and_and_fetch_4((uint32_t*) addr, val); - return; - #if (CAA_BITS_PER_LONG == 64) - case 8: -@@ -148,7 +148,7 @@ - return; - #endif - case 4: -- __sync_or_and_fetch_4(addr, val); -+ __sync_or_and_fetch_4((uint32_t*) addr, val); - return; - #if (CAA_BITS_PER_LONG == 64) - case 8: -@@ -187,7 +187,7 @@ - return __sync_add_and_fetch_2(addr, val); - #endif - case 4: -- return __sync_add_and_fetch_4(addr, val); -+ return __sync_add_and_fetch_4((uint32_t*) addr, val); - #if (CAA_BITS_PER_LONG == 64) - case 8: - return __sync_add_and_fetch_8(addr, val); diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 74f399716..d6f005b5d 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -164,9 +164,13 @@ while :; do armel) __BuildArch=armel __UbuntuArch=armel - __UbuntuRepo="http://ftp.debian.org/debian/" - __CodeName=jessie + __UbuntuRepo="http://archive.debian.org/debian/" + __CodeName=buster __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + __LLDB_Package="liblldb-6.0-dev" + __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp5/}" + __UbuntuSuites= ;; armv6) __BuildArch=armv6 @@ -278,46 +282,23 @@ while :; do ;; xenial) # Ubuntu 16.04 - if [[ "$__CodeName" != "jessie" ]]; then - __CodeName=xenial - fi - ;; - zesty) # Ubuntu 17.04 - if [[ "$__CodeName" != "jessie" ]]; then - __CodeName=zesty 
- fi + __CodeName=xenial ;; bionic) # Ubuntu 18.04 - if [[ "$__CodeName" != "jessie" ]]; then - __CodeName=bionic - fi + __CodeName=bionic ;; focal) # Ubuntu 20.04 - if [[ "$__CodeName" != "jessie" ]]; then - __CodeName=focal - fi + __CodeName=focal ;; jammy) # Ubuntu 22.04 - if [[ "$__CodeName" != "jessie" ]]; then - __CodeName=jammy - fi + __CodeName=jammy ;; noble) # Ubuntu 24.04 - if [[ "$__CodeName" != "jessie" ]]; then - __CodeName=noble - fi + __CodeName=noble if [[ -n "$__LLDB_Package" ]]; then __LLDB_Package="liblldb-18-dev" fi ;; - jessie) # Debian 8 - __CodeName=jessie - __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" - - if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" - fi - ;; stretch) # Debian 9 __CodeName=stretch __LLDB_Package="liblldb-6.0-dev" @@ -333,7 +314,7 @@ while :; do __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" + __UbuntuRepo="http://archive.debian.org/debian/" fi ;; bullseye) # Debian 11 @@ -473,10 +454,6 @@ if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then __AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}" fi -if [[ "$__BuildArch" == "armel" ]]; then - __LLDB_Package="lldb-3.5-dev" -fi - __UbuntuPackages+=" ${__LLDB_Package:-}" if [[ -z "$__UbuntuRepo" ]]; then @@ -850,12 +827,6 @@ EOF if [[ "$__SkipUnmount" == "0" ]]; then umount "$__RootfsDir"/* || true fi - - if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then - pushd "$__RootfsDir" - patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch" - popd - fi elif [[ "$__Tizen" == "tizen" ]]; then ROOTFS_DIR="$__RootfsDir" "$__CrossDir/tizen-build-rootfs.sh" "$__BuildArch" else diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh index 13d849ae9..477a44f33 100644 --- a/eng/common/native/install-dependencies.sh +++ b/eng/common/native/install-dependencies.sh @@ -45,7 +45,7 @@ case "$os" in export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 # Skip brew update for now, see https://github.com/actions/setup-python/issues/577 # brew update --preinstall - brew bundle --no-upgrade --no-lock --file=- < Date: Mon, 24 Mar 2025 05:02:30 +0000 Subject: [PATCH 08/43] Update dependencies from https://github.com/dotnet/arcade build 20250321.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25171.1 --- eng/Version.Details.xml | 4 +-- eng/common/CIBuild.cmd | 2 +- eng/common/build.ps1 | 7 ++++- eng/common/build.sh | 13 +++++++- eng/common/cibuild.sh | 2 +- .../steps/install-microbuild.yml | 30 +++++++++++++++++-- global.json | 4 +-- 7 files changed, 52 insertions(+), 10 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 0ebe23445..91d268317 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 1912d9f4fc410d421a01b5a09131aae234b603fa + 235443a5c1136571cacdfd40576f263f26bf5b9b https://github.com/dotnet/wpf diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd index 56c2f25ac..ac1f72bf9 100644 --- a/eng/common/CIBuild.cmd +++ b/eng/common/CIBuild.cmd @@ -1,2 +1,2 @@ @echo off -powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*" \ No newline at end of file +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*" diff --git 
a/eng/common/build.ps1 b/eng/common/build.ps1 index 438f9920c..d419c3a2e 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -7,6 +7,7 @@ Param( [string] $msbuildEngine = $null, [bool] $warnAsError = $true, [bool] $nodeReuse = $true, + [switch] $buildCheck = $false, [switch][Alias('r')]$restore, [switch] $deployDeps, [switch][Alias('b')]$build, @@ -71,6 +72,8 @@ function Print-Usage() { Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio" Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)" + Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" + Write-Host " -buildCheck Sets /check msbuild parameter" Write-Host "" Write-Host "Command line arguments not listed above are passed thru to msbuild." @@ -97,6 +100,7 @@ function Build { $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' } $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' } + $check = if ($buildCheck) { '/check' } else { '' } if ($projects) { # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons. @@ -113,6 +117,7 @@ function Build { MSBuild $toolsetBuildProj ` $bl ` $platformArg ` + $check ` /p:Configuration=$configuration ` /p:RepoRoot=$RepoRoot ` /p:Restore=$restore ` @@ -166,4 +171,4 @@ catch { ExitWithExitCode 1 } -ExitWithExitCode 0 +ExitWithExitCode 0 \ No newline at end of file diff --git a/eng/common/build.sh b/eng/common/build.sh index 483647daf..e24bb68f4 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -42,6 +42,7 @@ usage() echo " --prepareMachine Prepare machine for CI run, clean up processes after build" echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" + echo " --buildCheck Sets /check msbuild parameter" echo "" echo "Command line arguments not listed above are passed thru to msbuild." echo "Arguments can also be passed in with a single hyphen." @@ -76,6 +77,7 @@ clean=false warn_as_error=true node_reuse=true +build_check=false binary_log=false exclude_ci_binary_log=false pipelines_log=false @@ -173,6 +175,9 @@ while [[ $# > 0 ]]; do node_reuse=$2 shift ;; + -buildcheck) + build_check=true + ;; -runtimesourcefeed) runtime_source_feed=$2 shift @@ -224,8 +229,14 @@ function Build { bl="/bl:\"$log_dir/Build.binlog\"" fi + local check="" + if [[ "$build_check" == true ]]; then + check="/check" + fi + MSBuild $_InitializeToolset \ $bl \ + $check \ /p:Configuration=$configuration \ /p:RepoRoot="$repo_root" \ /p:Restore=$restore \ @@ -256,4 +267,4 @@ if [[ "$restore" == true ]]; then InitializeNativeTools fi -Build +Build \ No newline at end of file diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh index 1a02c0dec..66e3b0ac6 100644 --- a/eng/common/cibuild.sh +++ b/eng/common/cibuild.sh @@ -13,4 +13,4 @@ while [[ -h $source ]]; do done scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" -. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@ \ No newline at end of file +. 
"$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@ diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index 2a6a52948..dba506e74 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -5,7 +5,7 @@ parameters: # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' enableMicrobuildForMacAndLinux: false # Location of the MicroBuild output folder - microBuildOutputFolder: '$(Agent.TempDirectory)' + microBuildOutputFolder: '$(Build.SourcesDirectory)' continueOnError: false steps: @@ -41,7 +41,7 @@ steps: inputs: packageType: sdk version: 8.0.x - installationPath: ${{ parameters.microBuildOutputFolder }}/dotnet + installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet workingDirectory: ${{ parameters.microBuildOutputFolder }} condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) @@ -53,6 +53,7 @@ steps: feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}: azureSubscription: 'MicroBuild Signing Task (DevDiv)' + useEsrpCli: true env: TeamName: $(_TeamName) MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} @@ -71,3 +72,28 @@ steps: eq(variables['_SignType'], 'real') ) )) + + # Workaround for ESRP CLI on Linux - https://github.com/dotnet/source-build/issues/4964 + - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: + - task: UseDotNet@2 + displayName: Install .NET 9.0 SDK for ESRP CLI Workaround + inputs: + packageType: sdk + version: 9.0.x + installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet + workingDirectory: ${{ parameters.microBuildOutputFolder }} + condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) + + - task: PowerShell@2 + displayName: Workaround for ESRP CLI on Linux + inputs: + targetType: 'inline' + script: | + Write-Host "Copying Linux Path" + $MBSIGN_APPFOLDER = '$(MBSIGN_APPFOLDER)' + $MBSIGN_APPFOLDER = $MBSIGN_APPFOLDER -replace '/build', '' + $MBSIGN_APPFOLDER = $MBSIGN_APPFOLDER + '/1.1.1032' + '/build' + $MBSIGN_APPFOLDER | Write-Host + $SignConfigPath = $MBSIGN_APPFOLDER + '/signconfig.xml' + Copy-Item -Path "$(MBSIGN_APPFOLDER)/signconfig.xml" -Destination $SignConfigPath -Force + condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) diff --git a/global.json b/global.json index 56d029c34..cc0c9b258 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.3.25125.5", + "dotnet": "10.0.100-preview.3.25167.3", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25164.6", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25171.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From cfcdf259ec10bc431c60f6b7938aa4d804d81971 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 31 Mar 2025 05:02:12 +0000 Subject: [PATCH 09/43] Update dependencies from https://github.com/dotnet/arcade build 20250328.2 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25178.2 --- eng/Version.Details.xml | 4 ++-- eng/common/build.ps1 | 3 ++- eng/common/build.sh | 3 ++- global.json | 2 +- 4 files changed, 7 insertions(+), 5 
deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 91d268317..ac5caa525 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 235443a5c1136571cacdfd40576f263f26bf5b9b + 7d1967403f5b12406763c666f41e3358bb542ced https://github.com/dotnet/wpf diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 index d419c3a2e..6b3be1916 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -132,6 +132,7 @@ function Build { /p:PerformanceTest=$performanceTest ` /p:Sign=$sign ` /p:Publish=$publish ` + /p:RestoreStaticGraphEnableBinaryLogger=$binaryLog ` @properties } @@ -171,4 +172,4 @@ catch { ExitWithExitCode 1 } -ExitWithExitCode 0 \ No newline at end of file +ExitWithExitCode 0 diff --git a/eng/common/build.sh b/eng/common/build.sh index e24bb68f4..36fba82a3 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -250,6 +250,7 @@ function Build { /p:PerformanceTest=$performance_test \ /p:Sign=$sign \ /p:Publish=$publish \ + /p:RestoreStaticGraphEnableBinaryLogger=$binary_log \ $properties ExitWithExitCode 0 @@ -267,4 +268,4 @@ if [[ "$restore" == true ]]; then InitializeNativeTools fi -Build \ No newline at end of file +Build diff --git a/global.json b/global.json index cc0c9b258..fd8206028 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25171.1", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25178.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From da502c8883b31d77415dd4dad9d73f32b3eb23e4 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 7 Apr 2025 05:01:53 +0000 Subject: [PATCH 10/43] Update dependencies from https://github.com/dotnet/arcade build 20250406.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25206.1 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/job/job.yml | 13 ------------- global.json | 2 +- 3 files changed, 3 insertions(+), 16 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index ac5caa525..ac55f6127 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 7d1967403f5b12406763c666f41e3358bb542ced + 37f732fbfa006386f89a16be417278ea4fee375e https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml index 4dfb3d868..6badecba7 100644 --- a/eng/common/core-templates/job/job.yml +++ b/eng/common/core-templates/job/job.yml @@ -73,9 +73,6 @@ jobs: - ${{ if ne(parameters.enableTelemetry, 'false') }}: - name: DOTNET_CLI_TELEMETRY_PROFILE value: '$(Build.Repository.Uri)' - - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}: - - name: EnableRichCodeNavigation - value: 'true' # Retry signature validation up to three times, waiting 2 seconds between attempts. 
# See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY @@ -147,16 +144,6 @@ jobs: - ${{ each step in parameters.steps }}: - ${{ step }} - - ${{ if eq(parameters.enableRichCodeNavigation, true) }}: - - task: RichCodeNavIndexer@0 - displayName: RichCodeNav Upload - inputs: - languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }} - environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'internal') }} - richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin - uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }} - continueOnError: true - - ${{ each step in parameters.componentGovernanceSteps }}: - ${{ step }} diff --git a/global.json b/global.json index fd8206028..1b6cd781e 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25178.2", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25206.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 6380dc4936390dcc9e3d42644979b69fb14551e9 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 14 Apr 2025 05:01:35 +0000 Subject: [PATCH 11/43] Update dependencies from https://github.com/dotnet/arcade build 20250412.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25212.1 --- eng/Version.Details.xml | 4 ++-- .../job/publish-build-assets.yml | 24 ++++++++++++------- .../core-templates/post-build/post-build.yml | 6 +++++ .../steps/install-microbuild.yml | 14 +++++++++-- eng/common/post-build/publish-using-darc.ps1 | 7 +++++- eng/common/sdl/packages.config | 2 +- global.json | 4 ++-- 7 files changed, 44 insertions(+), 17 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index ac55f6127..c3cc615bd 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 37f732fbfa006386f89a16be417278ea4fee375e + 87401be5731aa537bbf4cb71d7800d1c74d5e429 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml index 5da3c2a2a..4f1dc42e0 100644 --- a/eng/common/core-templates/job/publish-build-assets.yml +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -29,6 +29,9 @@ parameters: is1ESPipeline: '' + # Optional: whether or not the build has assets it wants to publish to BAR + isAssetlessBuild: false + jobs: - job: Asset_Registry_Publish @@ -72,14 +75,15 @@ jobs: - checkout: self fetchDepth: 3 clean: true - - - task: DownloadPipelineArtifact@2 - displayName: Download Asset Manifests - inputs: - artifactName: AssetManifests - targetPath: '$(Build.StagingDirectory)/AssetManifests' - condition: ${{ parameters.condition }} - continueOnError: ${{ parameters.continueOnError }} + + - ${{ if eq(parameters.isAssetlessBuild, 'false') }}: + - task: DownloadPipelineArtifact@2 + displayName: Download Asset Manifests + inputs: + artifactName: AssetManifests + targetPath: '$(Build.StagingDirectory)/AssetManifests' + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} - task: NuGetAuthenticate@1 @@ -92,6 +96,7 @@ jobs: scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1 arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
/p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests' + /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }} /p:MaestroApiEndpoint=https://maestro.dot.net /p:OfficialBuildId=$(Build.BuildNumber) condition: ${{ parameters.condition }} @@ -124,7 +129,7 @@ jobs: publishLocation: Container artifactName: ReleaseConfigs - - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + - ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}: - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml parameters: BARBuildId: ${{ parameters.BARBuildId }} @@ -145,6 +150,7 @@ jobs: -WaitPublishingFinish true -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - template: /eng/common/core-templates/steps/publish-logs.yml diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml index a8c0bd3b9..5757915ed 100644 --- a/eng/common/core-templates/post-build/post-build.yml +++ b/eng/common/core-templates/post-build/post-build.yml @@ -60,6 +60,11 @@ parameters: artifactNames: '' downloadArtifacts: true + - name: isAssetlessBuild + type: boolean + displayName: Is Assetless Build + default: false + # These parameters let the user customize the call to sdk-task.ps1 for publishing # symbols & general artifacts as well as for signing validation - name: symbolPublishingAdditionalParameters @@ -320,3 +325,4 @@ stages: -RequireDefaultChannels ${{ parameters.requireDefaultChannels }} -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index dba506e74..d0422085c 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -91,8 +91,18 @@ steps: script: | Write-Host "Copying Linux Path" $MBSIGN_APPFOLDER = '$(MBSIGN_APPFOLDER)' - $MBSIGN_APPFOLDER = $MBSIGN_APPFOLDER -replace '/build', '' - $MBSIGN_APPFOLDER = $MBSIGN_APPFOLDER + '/1.1.1032' + '/build' + $MBSIGN_APPFOLDER = ($MBSIGN_APPFOLDER -replace '/build', '') + + $versionRegex = '\d+\.\d+\.\d+' + $package = Get-ChildItem -Path $MBSIGN_APPFOLDER -Directory | + Where-Object { $_.Name -match $versionRegex } + + if ($package.Count -ne 1) { + Write-Host "There should be exactly one matching subfolder, but found $($package.Count)." 
+ exit 1 + } + + $MBSIGN_APPFOLDER = $package[0].FullName + '/build' $MBSIGN_APPFOLDER | Write-Host $SignConfigPath = $MBSIGN_APPFOLDER + '/signconfig.xml' Copy-Item -Path "$(MBSIGN_APPFOLDER)/signconfig.xml" -Destination $SignConfigPath -Force diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 index a261517ef..1eda208a3 100644 --- a/eng/common/post-build/publish-using-darc.ps1 +++ b/eng/common/post-build/publish-using-darc.ps1 @@ -6,7 +6,8 @@ param( [Parameter(Mandatory=$true)][string] $WaitPublishingFinish, [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters, - [Parameter(Mandatory=$false)][string] $RequireDefaultChannels + [Parameter(Mandatory=$false)][string] $RequireDefaultChannels, + [Parameter(Mandatory=$false)][string] $SkipAssetsPublishing ) try { @@ -39,6 +40,10 @@ try { $optionalParams.Add("--default-channels-required") | Out-Null } + if ("true" -eq $SkipAssetsPublishing) { + $optionalParams.Add("--skip-assets-publishing") | Out-Null + } + & $darc add-build-to-channel ` --id $buildId ` --publishing-infra-version $PublishingInfraVersion ` diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config index 4585cfd6b..e5f543ea6 100644 --- a/eng/common/sdl/packages.config +++ b/eng/common/sdl/packages.config @@ -1,4 +1,4 @@ - + diff --git a/global.json b/global.json index 1b6cd781e..d0404bcb8 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.3.25167.3", + "dotnet": "10.0.100-preview.3.25201.16", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25206.1", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25212.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 898ba161ca36138e001d305c1d1d652d71ca9094 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Thu, 17 Apr 2025 12:02:03 +0000 Subject: [PATCH 12/43] Update dependencies from https://github.com/dotnet/arcade build 20250416.2 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25216.2 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/jobs/jobs.yml | 4 ++++ .../steps/install-microbuild.yml | 24 ------------------- eng/common/sdk-task.ps1 | 7 ++++-- eng/common/sdk-task.sh | 16 +++++++++++-- global.json | 2 +- 6 files changed, 26 insertions(+), 31 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index c3cc615bd..bc8be60c4 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 87401be5731aa537bbf4cb71d7800d1c74d5e429 + 85aaca76f75b109aa45b9e949f4a1c9047725d09 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml index 3d2deaa4a..e653d1997 100644 --- a/eng/common/core-templates/jobs/jobs.yml +++ b/eng/common/core-templates/jobs/jobs.yml @@ -27,6 +27,9 @@ parameters: # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather doing so in a separate stage. 
publishAssetsImmediately: false + # Optional: whether or not the build has assets it wants to publish to BAR + isAssetlessBuild: false + # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml) artifactsPublishingAdditionalParameters: '' signingValidationAdditionalParameters: '' @@ -110,6 +113,7 @@ jobs: runAsPublic: ${{ parameters.runAsPublic }} publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + isAssetlessBuild: ${{ parameters.isAssetlessBuild }} enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index d0422085c..2bcf974ee 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -11,30 +11,6 @@ parameters: steps: - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: - # Install Python 3.12.x on when Python > 3.12.x is installed - https://github.com/dotnet/source-build/issues/4802 - - script: | - version=$(python3 --version | awk '{print $2}') - major=$(echo $version | cut -d. -f1) - minor=$(echo $version | cut -d. -f2) - - installPython=false - if [ "$major" -gt 3 ] || { [ "$major" -eq 3 ] && [ "$minor" -gt 12 ]; }; then - installPython=true - fi - - echo "Python version: $version." - echo "Install Python 3.12.x: $installPython." - echo "##vso[task.setvariable variable=installPython;isOutput=true]$installPython" - name: InstallPython - displayName: 'Determine Python installation' - condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) - - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12.x' - displayName: 'Use Python 3.12.x' - condition: and(succeeded(), eq(variables['InstallPython.installPython'], 'true'), ne(variables['Agent.Os'], 'Windows_NT')) - # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable - task: UseDotNet@2 displayName: Install .NET 8.0 SDK for MicroBuild Plugin diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index ab2b13f63..a9d2a2d26 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -6,12 +6,13 @@ Param( [string] $msbuildEngine = $null, [switch] $restore, [switch] $prepareMachine, + [switch][Alias('nobl')]$excludeCIBinaryLog, [switch] $help, [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties ) $ci = $true -$binaryLog = $true +$binaryLog = if ($excludeCIBinaryLog) { $false } else { $true } $warnAsError = $true . $PSScriptRoot\tools.ps1 @@ -27,6 +28,7 @@ function Print-Usage() { Write-Host "Advanced settings:" Write-Host " -prepareMachine Prepare machine for CI run" Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." + Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)" Write-Host "" Write-Host "Command line arguments not listed above are passed thru to msbuild."
} @@ -34,10 +36,11 @@ function Print-Usage() { function Build([string]$target) { $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" } $log = Join-Path $LogDir "$task$logSuffix.binlog" + $binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" } $outputPath = Join-Path $ToolsetDir "$task\" MSBuild $taskProject ` - /bl:$log ` + $binaryLogArg ` /t:$target ` /p:Configuration=$configuration ` /p:RepoRoot=$RepoRoot ` diff --git a/eng/common/sdk-task.sh b/eng/common/sdk-task.sh index b9b9e58db..2f83adc02 100644 --- a/eng/common/sdk-task.sh +++ b/eng/common/sdk-task.sh @@ -7,6 +7,10 @@ show_usage() { echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]" echo " --help Print help and exit" echo "" + + echo "Advanced settings:" + echo " --excludeCIBinarylog Don't output binary log (short: -nobl)" + echo "" echo "Command line arguments not listed above are passed thru to msbuild." } @@ -27,10 +31,12 @@ Build() { local log_suffix="" [[ "$target" != "Execute" ]] && log_suffix=".$target" local log="$log_dir/$task$log_suffix.binlog" + local binaryLogArg="" + [[ $binary_log == true ]] && binaryLogArg="/bl:$log" local output_path="$toolset_dir/$task/" MSBuild "$taskProject" \ - /bl:"$log" \ + $binaryLogArg \ /t:"$target" \ /p:Configuration="$configuration" \ /p:RepoRoot="$repo_root" \ @@ -39,8 +45,10 @@ Build() { $properties } +binary_log=true configuration="Debug" verbosity="minimal" +exclude_ci_binary_log=false restore=false help=false properties='' @@ -60,6 +68,11 @@ while (($# > 0)); do verbosity=$2 shift 2 ;; + --excludecibinarylog|--nobl) + binary_log=false + exclude_ci_binary_log=true + shift 1 + ;; --help) help=true shift 1 @@ -72,7 +85,6 @@ while (($# > 0)); do done ci=true -binaryLog=true warnAsError=true if $help; then diff --git a/global.json b/global.json index d0404bcb8..ad1ff1a38 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25212.1", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25216.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 14eec39e542035088dbb15f952ad685c165ce16f Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 21 Apr 2025 05:02:15 +0000 Subject: [PATCH 13/43] Update dependencies from https://github.com/dotnet/arcade build 20250417.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25217.1 --- eng/Version.Details.xml | 4 ++-- global.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index bc8be60c4..8a58a5f6f 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 85aaca76f75b109aa45b9e949f4a1c9047725d09 + 76dd1b4eb3b15881da350de93805ea6ab936364c https://github.com/dotnet/wpf diff --git a/global.json b/global.json index ad1ff1a38..4f402a9b6 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25216.2", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25217.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From bbfa125d7cd6b9be314a632c649423418391a42e Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 28 Apr 2025 05:17:09 +0000 Subject: [PATCH 14/43] Update dependencies from https://github.com/dotnet/arcade 
build 20250425.4 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25225.4 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/job/source-build.yml | 2 ++ eng/common/core-templates/jobs/jobs.yml | 4 ++-- eng/common/core-templates/steps/source-build.yml | 1 + .../core-templates/steps/source-index-stage1-publish.yml | 4 ++-- eng/common/cross/arm64/tizen/tizen.patch | 2 +- eng/common/cross/tizen-fetch.sh | 9 ++------- global.json | 2 +- 8 files changed, 13 insertions(+), 15 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 8a58a5f6f..0c1f18cd9 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 76dd1b4eb3b15881da350de93805ea6ab936364c + 5fb72aaffeff9c6f2ce46d3b226a84772fb72f55 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml index 05f7ad6ef..d805d5fae 100644 --- a/eng/common/core-templates/job/source-build.yml +++ b/eng/common/core-templates/job/source-build.yml @@ -27,6 +27,8 @@ parameters: # Specifies the build script to invoke to perform the build in the repo. The default # './build.sh' should work for typical Arcade repositories, but this is customizable for # difficult situations. + # buildArguments: '' + # Specifies additional build arguments to pass to the build script. # jobProperties: {} # A list of job properties to inject at the top level, for potential extensibility beyond # container and pool. diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml index e653d1997..bf35b78fa 100644 --- a/eng/common/core-templates/jobs/jobs.yml +++ b/eng/common/core-templates/jobs/jobs.yml @@ -96,7 +96,7 @@ jobs: ${{ parameter.key }}: ${{ parameter.value }} - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}: + - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, ''), eq(parameters.isAssetlessBuild, true)) }}: - template: ../job/publish-build-assets.yml parameters: is1ESPipeline: ${{ parameters.is1ESPipeline }} @@ -112,7 +112,7 @@ jobs: - Source_Build_Complete runAsPublic: ${{ parameters.runAsPublic }} - publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }} + publishAssetsImmediately: ${{ or(parameters.publishAssetsImmediately, parameters.isAssetlessBuild) }} isAssetlessBuild: ${{ parameters.isAssetlessBuild }} enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index 2341706b0..8c88ccd7b 100644 --- a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -84,6 +84,7 @@ steps: ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ --configuration $buildConfig \ --restore --build --pack $publishArgs -bl \ + ${{ parameters.platform.buildArguments }} \ $officialBuildArgs \ $internalRuntimeDownloadArgs \ $internalRestoreArgs \ diff --git 
a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml index 473a22c47..99c2326fc 100644 --- a/eng/common/core-templates/steps/source-index-stage1-publish.yml +++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml @@ -1,6 +1,6 @@ parameters: - sourceIndexUploadPackageVersion: 2.0.0-20240522.1 - sourceIndexProcessBinlogPackageVersion: 1.0.1-20240522.1 + sourceIndexUploadPackageVersion: 2.0.0-20250425.2 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20250425.2 sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json binlogPath: artifacts/log/Debug/Build.binlog diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch index af7c8be05..2cebc5473 100644 --- a/eng/common/cross/arm64/tizen/tizen.patch +++ b/eng/common/cross/arm64/tizen/tizen.patch @@ -5,5 +5,5 @@ diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so Use the shared library, but some functions are only in the static library, so try that secondarily. */ OUTPUT_FORMAT(elf64-littleaarch64) --GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) ) +-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) ) +GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) ) diff --git a/eng/common/cross/tizen-fetch.sh b/eng/common/cross/tizen-fetch.sh index 28936ceef..37c3a61f1 100644 --- a/eng/common/cross/tizen-fetch.sh +++ b/eng/common/cross/tizen-fetch.sh @@ -156,13 +156,8 @@ fetch_tizen_pkgs() done } -if [ "$TIZEN_ARCH" == "riscv64" ]; then - BASE="Tizen-Base-RISCV" - UNIFIED="Tizen-Unified-RISCV" -else - BASE="Tizen-Base" - UNIFIED="Tizen-Unified" -fi +BASE="Tizen-Base" +UNIFIED="Tizen-Unified" Inform "Initialize ${TIZEN_ARCH} base" fetch_tizen_pkgs_init standard $BASE diff --git a/global.json b/global.json index 4f402a9b6..c1090f44d 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25217.1", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25225.4", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From e4960e616f174be015df47c4f8e5e37b6c0d2314 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 5 May 2025 05:02:02 +0000 Subject: [PATCH 15/43] Update dependencies from https://github.com/dotnet/arcade build 20250503.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25253.1 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/jobs/source-build.yml | 2 +- eng/common/core-templates/steps/source-build.yml | 11 ----------- global.json | 2 +- 4 files changed, 4 insertions(+), 15 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 0c1f18cd9..fad400a94 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 5fb72aaffeff9c6f2ce46d3b226a84772fb72f55 + cdf9c563205c673b7df205e24564aa48dbc341c3 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml index a10ccfbee..df24c948b 100644 --- a/eng/common/core-templates/jobs/source-build.yml +++ b/eng/common/core-templates/jobs/source-build.yml @@ -14,7 +14,7 @@ parameters: # This is the default platform provided by Arcade, intended for use by a 
managed-only repo. defaultManagedPlatform: name: 'Managed' - container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9' + container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream-10-amd64' # Defines the platforms on which to run build jobs. One job is created for each platform, and the # object in this array is sent to the job template as 'platform'. If no platforms are specified, diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index 8c88ccd7b..c6b9ef51a 100644 --- a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -121,14 +121,3 @@ steps: continueOnError: true condition: succeededOrFailed() sbomEnabled: false # we don't need SBOM for logs - -# Manually inject component detection so that we can ignore the source build upstream cache, which contains -# a nupkg cache of input packages (a local feed). -# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir' -# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets -- template: /eng/common/core-templates/steps/component-governance.yml - parameters: - displayName: Component Detection (Exclude upstream cache) - is1ESPipeline: ${{ parameters.is1ESPipeline }} - componentGovernanceIgnoreDirectories: '$(Build.SourcesDirectory)/artifacts/sb/src/artifacts/obj/source-built-upstream-cache' - disableComponentGovernance: ${{ eq(variables['System.TeamProject'], 'public') }} diff --git a/global.json b/global.json index c1090f44d..b0352909e 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25225.4", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25253.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From b3a8fd83eb2f0192d1862bcaa4ce9d7d26f17aaa Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 12 May 2025 05:02:06 +0000 Subject: [PATCH 16/43] Update dependencies from https://github.com/dotnet/arcade build 20250509.2 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25259.2 --- eng/Version.Details.xml | 4 +- eng/common/build.sh | 2 +- .../core-templates/steps/source-build.yml | 49 +- eng/common/darc-init.sh | 2 +- global.json | 2 +- .../build-configuration.json | 4 + src/arcade/eng/common/CIBuild.cmd | 2 + .../eng/common/PSScriptAnalyzerSettings.psd1 | 11 + src/arcade/eng/common/README.md | 28 + src/arcade/eng/common/SetupNugetSources.ps1 | 171 ++++ src/arcade/eng/common/SetupNugetSources.sh | 167 +++ src/arcade/eng/common/build.cmd | 3 + src/arcade/eng/common/build.ps1 | 175 ++++ src/arcade/eng/common/cibuild.sh | 16 + .../eng/common/core-templates/job/job.yml | 225 ++++ .../common/core-templates/job/onelocbuild.yml | 121 +++ .../job/publish-build-assets.yml | 159 +++ .../job/source-index-stage1.yml | 44 + .../core-templates/jobs/codeql-build.yml | 32 + .../post-build/common-variables.yml | 22 + .../core-templates/post-build/post-build.yml | 328 ++++++ .../post-build/setup-maestro-vars.yml | 74 ++ .../steps/cleanup-microbuild.yml | 28 + .../steps/component-governance.yml | 16 + .../steps/enable-internal-runtimes.yml | 32 + .../steps/enable-internal-sources.yml | 47 + .../core-templates/steps/generate-sbom.yml | 54 + .../steps/get-delegation-sas.yml | 46 + .../steps/get-federated-access-token.yml | 42 + .../steps/install-microbuild.yml | 85 ++ 
.../steps/publish-build-artifacts.yml | 20 + .../core-templates/steps/publish-logs.yml | 61 ++ .../steps/publish-pipeline-artifacts.yml | 20 + .../core-templates/steps/retain-build.yml | 28 + .../core-templates/steps/send-to-helix.yml | 93 ++ .../variables/pool-providers.yml | 8 + .../eng/common/cross/arm/tizen/tizen.patch | 9 + .../eng/common/cross/arm64/tizen/tizen.patch | 9 + .../eng/common/cross/armel/tizen/tizen.patch | 9 + .../eng/common/cross/build-android-rootfs.sh | 146 +++ src/arcade/eng/common/cross/build-rootfs.sh | 835 +++++++++++++++ src/arcade/eng/common/cross/install-debs.py | 334 ++++++ .../common/cross/riscv64/tizen/tizen.patch | 9 + .../eng/common/cross/tizen-build-rootfs.sh | 82 ++ src/arcade/eng/common/cross/tizen-fetch.sh | 178 ++++ src/arcade/eng/common/cross/toolchain.cmake | 387 +++++++ src/arcade/eng/common/darc-init.ps1 | 47 + src/arcade/eng/common/dotnet-install.cmd | 2 + src/arcade/eng/common/dotnet-install.ps1 | 28 + src/arcade/eng/common/dotnet-install.sh | 94 ++ .../common/enable-cross-org-publishing.ps1 | 13 + src/arcade/eng/common/generate-locproject.ps1 | 189 ++++ src/arcade/eng/common/generate-sbom-prep.ps1 | 29 + src/arcade/eng/common/generate-sbom-prep.sh | 39 + src/arcade/eng/common/helixpublish.proj | 27 + src/arcade/eng/common/init-tools-native.cmd | 3 + src/arcade/eng/common/init-tools-native.ps1 | 203 ++++ src/arcade/eng/common/init-tools-native.sh | 238 +++++ .../eng/common/internal-feed-operations.ps1 | 132 +++ .../eng/common/internal-feed-operations.sh | 141 +++ .../eng/common/internal/Directory.Build.props | 11 + src/arcade/eng/common/internal/NuGet.config | 7 + src/arcade/eng/common/internal/Tools.csproj | 22 + .../common/loc/P22DotNetHtmlLocalization.lss | 29 + src/arcade/eng/common/msbuild.ps1 | 28 + src/arcade/eng/common/msbuild.sh | 58 ++ .../eng/common/native/CommonLibrary.psm1 | 401 ++++++++ .../eng/common/native/common-library.sh | 172 ++++ src/arcade/eng/common/native/init-compiler.sh | 146 +++ .../eng/common/native/init-distro-rid.sh | 110 ++ .../eng/common/native/init-os-and-arch.sh | 85 ++ .../eng/common/native/install-cmake-test.sh | 117 +++ src/arcade/eng/common/native/install-cmake.sh | 117 +++ .../eng/common/native/install-dependencies.sh | 62 ++ src/arcade/eng/common/native/install-tool.ps1 | 132 +++ .../eng/common/pipeline-logging-functions.ps1 | 260 +++++ .../eng/common/pipeline-logging-functions.sh | 206 ++++ .../post-build/check-channel-consistency.ps1 | 48 + .../common/post-build/nuget-validation.ps1 | 22 + .../common/post-build/nuget-verification.ps1 | 121 +++ .../common/post-build/publish-using-darc.ps1 | 69 ++ .../eng/common/post-build/redact-logs.ps1 | 89 ++ .../post-build/sourcelink-validation.ps1 | 327 ++++++ .../common/post-build/symbols-validation.ps1 | 337 ++++++ src/arcade/eng/common/retain-build.ps1 | 45 + src/arcade/eng/common/sdk-task.ps1 | 100 ++ src/arcade/eng/common/sdk-task.sh | 116 +++ src/arcade/eng/common/sdl/NuGet.config | 18 + .../eng/common/sdl/configure-sdl-tool.ps1 | 130 +++ .../eng/common/sdl/execute-all-sdl-tools.ps1 | 167 +++ .../common/sdl/extract-artifact-archives.ps1 | 63 ++ .../common/sdl/extract-artifact-packages.ps1 | 82 ++ src/arcade/eng/common/sdl/init-sdl.ps1 | 55 + src/arcade/eng/common/sdl/packages.config | 4 + src/arcade/eng/common/sdl/run-sdl.ps1 | 49 + src/arcade/eng/common/sdl/sdl.ps1 | 38 + .../eng/common/sdl/trim-assets-version.ps1 | 75 ++ src/arcade/eng/common/template-guidance.md | 133 +++ .../eng/common/templates-official/job/job.yml | 83 ++ 
.../templates-official/job/onelocbuild.yml | 7 + .../job/publish-build-assets.yml | 7 + .../templates-official/job/source-build.yml | 7 + .../job/source-index-stage1.yml | 7 + .../templates-official/jobs/codeql-build.yml | 7 + .../common/templates-official/jobs/jobs.yml | 7 + .../templates-official/jobs/source-build.yml | 7 + .../post-build/common-variables.yml | 8 + .../post-build/post-build.yml | 8 + .../post-build/setup-maestro-vars.yml | 8 + .../steps/component-governance.yml | 7 + .../steps/enable-internal-runtimes.yml | 9 + .../steps/enable-internal-sources.yml | 7 + .../steps/generate-sbom.yml | 7 + .../steps/get-delegation-sas.yml | 7 + .../steps/get-federated-access-token.yml | 7 + .../steps/publish-build-artifacts.yml | 46 + .../templates-official/steps/publish-logs.yml | 7 + .../steps/publish-pipeline-artifacts.yml | 28 + .../templates-official/steps/retain-build.yml | 7 + .../steps/send-to-helix.yml | 7 + .../templates-official/steps/source-build.yml | 7 + .../steps/source-index-stage1-publish.yml | 7 + .../variables/pool-providers.yml | 45 + .../variables/sdl-variables.yml | 7 + src/arcade/eng/common/templates/job/job.yml | 84 ++ .../eng/common/templates/job/onelocbuild.yml | 7 + .../templates/job/publish-build-assets.yml | 7 + .../eng/common/templates/job/source-build.yml | 7 + .../templates/job/source-index-stage1.yml | 7 + .../common/templates/jobs/codeql-build.yml | 7 + src/arcade/eng/common/templates/jobs/jobs.yml | 7 + .../common/templates/jobs/source-build.yml | 7 + .../templates/post-build/common-variables.yml | 8 + .../templates/post-build/post-build.yml | 8 + .../post-build/setup-maestro-vars.yml | 8 + .../templates/steps/component-governance.yml | 7 + .../steps/enable-internal-runtimes.yml | 10 + .../steps/enable-internal-sources.yml | 7 + .../common/templates/steps/generate-sbom.yml | 7 + .../templates/steps/get-delegation-sas.yml | 7 + .../steps/get-federated-access-token.yml | 7 + .../steps/publish-build-artifacts.yml | 46 + .../common/templates/steps/publish-logs.yml | 7 + .../steps/publish-pipeline-artifacts.yml | 34 + .../common/templates/steps/retain-build.yml | 7 + .../common/templates/steps/send-to-helix.yml | 7 + .../common/templates/steps/source-build.yml | 7 + .../steps/source-index-stage1-publish.yml | 7 + .../templates/variables/pool-providers.yml | 59 ++ src/arcade/eng/common/tools.ps1 | 960 ++++++++++++++++++ src/arcade/eng/common/tools.sh | 591 +++++++++++ 151 files changed, 11691 insertions(+), 52 deletions(-) create mode 100644 src/arcade/eng/common/BuildConfiguration/build-configuration.json create mode 100644 src/arcade/eng/common/CIBuild.cmd create mode 100644 src/arcade/eng/common/PSScriptAnalyzerSettings.psd1 create mode 100644 src/arcade/eng/common/README.md create mode 100644 src/arcade/eng/common/SetupNugetSources.ps1 create mode 100644 src/arcade/eng/common/SetupNugetSources.sh create mode 100644 src/arcade/eng/common/build.cmd create mode 100644 src/arcade/eng/common/build.ps1 create mode 100644 src/arcade/eng/common/cibuild.sh create mode 100644 src/arcade/eng/common/core-templates/job/job.yml create mode 100644 src/arcade/eng/common/core-templates/job/onelocbuild.yml create mode 100644 src/arcade/eng/common/core-templates/job/publish-build-assets.yml create mode 100644 src/arcade/eng/common/core-templates/job/source-index-stage1.yml create mode 100644 src/arcade/eng/common/core-templates/jobs/codeql-build.yml create mode 100644 src/arcade/eng/common/core-templates/post-build/common-variables.yml create mode 100644 
src/arcade/eng/common/core-templates/post-build/post-build.yml create mode 100644 src/arcade/eng/common/core-templates/post-build/setup-maestro-vars.yml create mode 100644 src/arcade/eng/common/core-templates/steps/cleanup-microbuild.yml create mode 100644 src/arcade/eng/common/core-templates/steps/component-governance.yml create mode 100644 src/arcade/eng/common/core-templates/steps/enable-internal-runtimes.yml create mode 100644 src/arcade/eng/common/core-templates/steps/enable-internal-sources.yml create mode 100644 src/arcade/eng/common/core-templates/steps/generate-sbom.yml create mode 100644 src/arcade/eng/common/core-templates/steps/get-delegation-sas.yml create mode 100644 src/arcade/eng/common/core-templates/steps/get-federated-access-token.yml create mode 100644 src/arcade/eng/common/core-templates/steps/install-microbuild.yml create mode 100644 src/arcade/eng/common/core-templates/steps/publish-build-artifacts.yml create mode 100644 src/arcade/eng/common/core-templates/steps/publish-logs.yml create mode 100644 src/arcade/eng/common/core-templates/steps/publish-pipeline-artifacts.yml create mode 100644 src/arcade/eng/common/core-templates/steps/retain-build.yml create mode 100644 src/arcade/eng/common/core-templates/steps/send-to-helix.yml create mode 100644 src/arcade/eng/common/core-templates/variables/pool-providers.yml create mode 100644 src/arcade/eng/common/cross/arm/tizen/tizen.patch create mode 100644 src/arcade/eng/common/cross/arm64/tizen/tizen.patch create mode 100644 src/arcade/eng/common/cross/armel/tizen/tizen.patch create mode 100644 src/arcade/eng/common/cross/build-android-rootfs.sh create mode 100644 src/arcade/eng/common/cross/build-rootfs.sh create mode 100644 src/arcade/eng/common/cross/install-debs.py create mode 100644 src/arcade/eng/common/cross/riscv64/tizen/tizen.patch create mode 100644 src/arcade/eng/common/cross/tizen-build-rootfs.sh create mode 100644 src/arcade/eng/common/cross/tizen-fetch.sh create mode 100644 src/arcade/eng/common/cross/toolchain.cmake create mode 100644 src/arcade/eng/common/darc-init.ps1 create mode 100644 src/arcade/eng/common/dotnet-install.cmd create mode 100644 src/arcade/eng/common/dotnet-install.ps1 create mode 100644 src/arcade/eng/common/dotnet-install.sh create mode 100644 src/arcade/eng/common/enable-cross-org-publishing.ps1 create mode 100644 src/arcade/eng/common/generate-locproject.ps1 create mode 100644 src/arcade/eng/common/generate-sbom-prep.ps1 create mode 100644 src/arcade/eng/common/generate-sbom-prep.sh create mode 100644 src/arcade/eng/common/helixpublish.proj create mode 100644 src/arcade/eng/common/init-tools-native.cmd create mode 100644 src/arcade/eng/common/init-tools-native.ps1 create mode 100644 src/arcade/eng/common/init-tools-native.sh create mode 100644 src/arcade/eng/common/internal-feed-operations.ps1 create mode 100644 src/arcade/eng/common/internal-feed-operations.sh create mode 100644 src/arcade/eng/common/internal/Directory.Build.props create mode 100644 src/arcade/eng/common/internal/NuGet.config create mode 100644 src/arcade/eng/common/internal/Tools.csproj create mode 100644 src/arcade/eng/common/loc/P22DotNetHtmlLocalization.lss create mode 100644 src/arcade/eng/common/msbuild.ps1 create mode 100644 src/arcade/eng/common/msbuild.sh create mode 100644 src/arcade/eng/common/native/CommonLibrary.psm1 create mode 100644 src/arcade/eng/common/native/common-library.sh create mode 100644 src/arcade/eng/common/native/init-compiler.sh create mode 100644 
src/arcade/eng/common/native/init-distro-rid.sh create mode 100644 src/arcade/eng/common/native/init-os-and-arch.sh create mode 100644 src/arcade/eng/common/native/install-cmake-test.sh create mode 100644 src/arcade/eng/common/native/install-cmake.sh create mode 100644 src/arcade/eng/common/native/install-dependencies.sh create mode 100644 src/arcade/eng/common/native/install-tool.ps1 create mode 100644 src/arcade/eng/common/pipeline-logging-functions.ps1 create mode 100644 src/arcade/eng/common/pipeline-logging-functions.sh create mode 100644 src/arcade/eng/common/post-build/check-channel-consistency.ps1 create mode 100644 src/arcade/eng/common/post-build/nuget-validation.ps1 create mode 100644 src/arcade/eng/common/post-build/nuget-verification.ps1 create mode 100644 src/arcade/eng/common/post-build/publish-using-darc.ps1 create mode 100644 src/arcade/eng/common/post-build/redact-logs.ps1 create mode 100644 src/arcade/eng/common/post-build/sourcelink-validation.ps1 create mode 100644 src/arcade/eng/common/post-build/symbols-validation.ps1 create mode 100644 src/arcade/eng/common/retain-build.ps1 create mode 100644 src/arcade/eng/common/sdk-task.ps1 create mode 100644 src/arcade/eng/common/sdk-task.sh create mode 100644 src/arcade/eng/common/sdl/NuGet.config create mode 100644 src/arcade/eng/common/sdl/configure-sdl-tool.ps1 create mode 100644 src/arcade/eng/common/sdl/execute-all-sdl-tools.ps1 create mode 100644 src/arcade/eng/common/sdl/extract-artifact-archives.ps1 create mode 100644 src/arcade/eng/common/sdl/extract-artifact-packages.ps1 create mode 100644 src/arcade/eng/common/sdl/init-sdl.ps1 create mode 100644 src/arcade/eng/common/sdl/packages.config create mode 100644 src/arcade/eng/common/sdl/run-sdl.ps1 create mode 100644 src/arcade/eng/common/sdl/sdl.ps1 create mode 100644 src/arcade/eng/common/sdl/trim-assets-version.ps1 create mode 100644 src/arcade/eng/common/template-guidance.md create mode 100644 src/arcade/eng/common/templates-official/job/job.yml create mode 100644 src/arcade/eng/common/templates-official/job/onelocbuild.yml create mode 100644 src/arcade/eng/common/templates-official/job/publish-build-assets.yml create mode 100644 src/arcade/eng/common/templates-official/job/source-build.yml create mode 100644 src/arcade/eng/common/templates-official/job/source-index-stage1.yml create mode 100644 src/arcade/eng/common/templates-official/jobs/codeql-build.yml create mode 100644 src/arcade/eng/common/templates-official/jobs/jobs.yml create mode 100644 src/arcade/eng/common/templates-official/jobs/source-build.yml create mode 100644 src/arcade/eng/common/templates-official/post-build/common-variables.yml create mode 100644 src/arcade/eng/common/templates-official/post-build/post-build.yml create mode 100644 src/arcade/eng/common/templates-official/post-build/setup-maestro-vars.yml create mode 100644 src/arcade/eng/common/templates-official/steps/component-governance.yml create mode 100644 src/arcade/eng/common/templates-official/steps/enable-internal-runtimes.yml create mode 100644 src/arcade/eng/common/templates-official/steps/enable-internal-sources.yml create mode 100644 src/arcade/eng/common/templates-official/steps/generate-sbom.yml create mode 100644 src/arcade/eng/common/templates-official/steps/get-delegation-sas.yml create mode 100644 src/arcade/eng/common/templates-official/steps/get-federated-access-token.yml create mode 100644 src/arcade/eng/common/templates-official/steps/publish-build-artifacts.yml create mode 100644 
src/arcade/eng/common/templates-official/steps/publish-logs.yml create mode 100644 src/arcade/eng/common/templates-official/steps/publish-pipeline-artifacts.yml create mode 100644 src/arcade/eng/common/templates-official/steps/retain-build.yml create mode 100644 src/arcade/eng/common/templates-official/steps/send-to-helix.yml create mode 100644 src/arcade/eng/common/templates-official/steps/source-build.yml create mode 100644 src/arcade/eng/common/templates-official/steps/source-index-stage1-publish.yml create mode 100644 src/arcade/eng/common/templates-official/variables/pool-providers.yml create mode 100644 src/arcade/eng/common/templates-official/variables/sdl-variables.yml create mode 100644 src/arcade/eng/common/templates/job/job.yml create mode 100644 src/arcade/eng/common/templates/job/onelocbuild.yml create mode 100644 src/arcade/eng/common/templates/job/publish-build-assets.yml create mode 100644 src/arcade/eng/common/templates/job/source-build.yml create mode 100644 src/arcade/eng/common/templates/job/source-index-stage1.yml create mode 100644 src/arcade/eng/common/templates/jobs/codeql-build.yml create mode 100644 src/arcade/eng/common/templates/jobs/jobs.yml create mode 100644 src/arcade/eng/common/templates/jobs/source-build.yml create mode 100644 src/arcade/eng/common/templates/post-build/common-variables.yml create mode 100644 src/arcade/eng/common/templates/post-build/post-build.yml create mode 100644 src/arcade/eng/common/templates/post-build/setup-maestro-vars.yml create mode 100644 src/arcade/eng/common/templates/steps/component-governance.yml create mode 100644 src/arcade/eng/common/templates/steps/enable-internal-runtimes.yml create mode 100644 src/arcade/eng/common/templates/steps/enable-internal-sources.yml create mode 100644 src/arcade/eng/common/templates/steps/generate-sbom.yml create mode 100644 src/arcade/eng/common/templates/steps/get-delegation-sas.yml create mode 100644 src/arcade/eng/common/templates/steps/get-federated-access-token.yml create mode 100644 src/arcade/eng/common/templates/steps/publish-build-artifacts.yml create mode 100644 src/arcade/eng/common/templates/steps/publish-logs.yml create mode 100644 src/arcade/eng/common/templates/steps/publish-pipeline-artifacts.yml create mode 100644 src/arcade/eng/common/templates/steps/retain-build.yml create mode 100644 src/arcade/eng/common/templates/steps/send-to-helix.yml create mode 100644 src/arcade/eng/common/templates/steps/source-build.yml create mode 100644 src/arcade/eng/common/templates/steps/source-index-stage1-publish.yml create mode 100644 src/arcade/eng/common/templates/variables/pool-providers.yml create mode 100644 src/arcade/eng/common/tools.ps1 create mode 100644 src/arcade/eng/common/tools.sh diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index fad400a94..39441f99c 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - cdf9c563205c673b7df205e24564aa48dbc341c3 + 80c4e4d26cb85c86f7e1be77d2d9eceeef0f3493 https://github.com/dotnet/wpf diff --git a/eng/common/build.sh b/eng/common/build.sh index 36fba82a3..27ae2c856 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -136,7 +136,7 @@ while [[ $# > 0 ]]; do restore=true pack=true ;; - -productBuild|-pb) + -productbuild|-pb) build=true product_build=true restore=true diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index c6b9ef51a..c7c062e88 100644 --- 
a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -19,19 +19,6 @@ steps: set -x df -h - # If file changes are detected, set CopyWipIntoInnerSourceBuildRepo to copy the WIP changes into the inner source build repo. - internalRestoreArgs= - if ! git diff --quiet; then - internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true' - # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo. - # This only works if there is a username/email configured, which won't be the case in most CI runs. - git config --get user.email - if [ $? -ne 0 ]; then - git config user.email dn-bot@microsoft.com - git config user.name dn-bot - fi - fi - # If building on the internal project, the internal storage variable may be available (usually only if needed) # In that case, add variables to allow the download of internal runtimes if the specified versions are not found # in the default public locations. @@ -46,11 +33,6 @@ steps: buildConfig='$(_BuildConfig)' fi - officialBuildArgs= - if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then - officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)' - fi - targetRidArgs= if [ '${{ parameters.platform.targetRID }}' != '' ]; then targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' @@ -66,16 +48,6 @@ steps: baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' fi - publishArgs= - if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then - publishArgs='--publish' - fi - - assetManifestFileName=SourceBuild_RidSpecific.xml - if [ '${{ parameters.platform.name }}' != '' ]; then - assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml - fi - portableBuildArgs= if [ '${{ parameters.platform.portableBuild }}' != '' ]; then portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}' @@ -83,40 +55,23 @@ steps: ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ --configuration $buildConfig \ - --restore --build --pack $publishArgs -bl \ + --restore --build --pack -bl \ ${{ parameters.platform.buildArguments }} \ - $officialBuildArgs \ $internalRuntimeDownloadArgs \ - $internalRestoreArgs \ $targetRidArgs \ $runtimeOsArgs \ $baseOsArgs \ $portableBuildArgs \ /p:DotNetBuildSourceOnly=true \ /p:DotNetBuildRepo=true \ - /p:AssetManifestFileName=$assetManifestFileName displayName: Build -# Upload build logs for diagnosis. 
-- task: CopyFiles@2 - displayName: Prepare BuildLogs staging directory - inputs: - SourceFolder: '$(Build.SourcesDirectory)' - Contents: | - **/*.log - **/*.binlog - artifacts/sb/prebuilt-report/** - TargetFolder: '$(Build.StagingDirectory)/BuildLogs' - CleanTargetFolder: true - continueOnError: true - condition: succeededOrFailed() - - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml parameters: is1ESPipeline: ${{ parameters.is1ESPipeline }} args: displayName: Publish BuildLogs - targetPath: '$(Build.StagingDirectory)/BuildLogs' + targetPath: artifacts/log/${{ coalesce(variables._BuildConfig, 'Release') }} artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt) continueOnError: true condition: succeededOrFailed() diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh index 36dbd45e1..e889f439b 100644 --- a/eng/common/darc-init.sh +++ b/eng/common/darc-init.sh @@ -68,7 +68,7 @@ function InstallDarcCli { fi fi - local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json" + local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" echo "Installing Darc CLI version $darcVersion..." echo "You may need to restart your command shell if this is the first dotnet tool you have installed." diff --git a/global.json b/global.json index b0352909e..2a9d40f3a 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25253.1", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25259.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, diff --git a/src/arcade/eng/common/BuildConfiguration/build-configuration.json b/src/arcade/eng/common/BuildConfiguration/build-configuration.json new file mode 100644 index 000000000..3d1cc8989 --- /dev/null +++ b/src/arcade/eng/common/BuildConfiguration/build-configuration.json @@ -0,0 +1,4 @@ +{ + "RetryCountLimit": 1, + "RetryByAnyError": false +} diff --git a/src/arcade/eng/common/CIBuild.cmd b/src/arcade/eng/common/CIBuild.cmd new file mode 100644 index 000000000..ac1f72bf9 --- /dev/null +++ b/src/arcade/eng/common/CIBuild.cmd @@ -0,0 +1,2 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*" diff --git a/src/arcade/eng/common/PSScriptAnalyzerSettings.psd1 b/src/arcade/eng/common/PSScriptAnalyzerSettings.psd1 new file mode 100644 index 000000000..4c1ea7c98 --- /dev/null +++ b/src/arcade/eng/common/PSScriptAnalyzerSettings.psd1 @@ -0,0 +1,11 @@ +@{ + IncludeRules=@('PSAvoidUsingCmdletAliases', + 'PSAvoidUsingWMICmdlet', + 'PSAvoidUsingPositionalParameters', + 'PSAvoidUsingInvokeExpression', + 'PSUseDeclaredVarsMoreThanAssignments', + 'PSUseCmdletCorrectly', + 'PSStandardDSCFunctionsInResource', + 'PSUseIdenticalMandatoryParametersForDSC', + 'PSUseIdenticalParametersForDSC') +} \ No newline at end of file diff --git a/src/arcade/eng/common/README.md b/src/arcade/eng/common/README.md new file mode 100644 index 000000000..ff49c3715 --- /dev/null +++ b/src/arcade/eng/common/README.md @@ -0,0 +1,28 @@ +# Don't touch this folder + + uuuuuuuuuuuuuuuuuuuu + u" uuuuuuuuuuuuuuuuuu "u + u" u$$$$$$$$$$$$$$$$$$$$u "u + u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u + u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u + u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u + u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u 
"u + $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ + $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ + $ $$$" ... "$... ...$" ... "$$$ ... "$$$ $ + $ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $ + $ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $ + $ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $ + $ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $ + $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $ + "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" + "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" + "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u" + "u "$$$$$$$$$$$$$$$$$$$$$$$$" u" + "u "$$$$$$$$$$$$$$$$$$$$" u" + "u """""""""""""""""" u" + """""""""""""""""""" + +!!! Changes made in this directory are subject to being overwritten by automation !!! + +The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first. diff --git a/src/arcade/eng/common/SetupNugetSources.ps1 b/src/arcade/eng/common/SetupNugetSources.ps1 new file mode 100644 index 000000000..5db4ad71e --- /dev/null +++ b/src/arcade/eng/common/SetupNugetSources.ps1 @@ -0,0 +1,171 @@ +# This script adds internal feeds required to build commits that depend on internal package sources. For instance, +# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables +# disabled internal Maestro (darc-int*) feeds. +# +# Optionally, this script also adds a credential entry for each of the internal feeds if supplied. +# +# See example call for this script below. +# +# - task: PowerShell@2 +# displayName: Setup Private Feeds Credentials +# condition: eq(variables['Agent.OS'], 'Windows_NT') +# inputs: +# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 +# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token +# env: +# Token: $(dn-bot-dnceng-artifact-feeds-rw) +# +# Note that the NuGetAuthenticate task should be called after SetupNugetSources. +# This ensures that: +# - Appropriate creds are set for the added internal feeds (if not supplied to the scrupt) +# - The credential provider is installed. +# +# This logic is also abstracted into enable-internal-sources.yml. + +[CmdletBinding()] +param ( + [Parameter(Mandatory = $true)][string]$ConfigFile, + $Password +) + +$ErrorActionPreference = "Stop" +Set-StrictMode -Version 2.0 +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + +. $PSScriptRoot\tools.ps1 + +# Add source entry to PackageSources +function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) { + $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']") + + if ($packageSource -eq $null) + { + $packageSource = $doc.CreateElement("add") + $packageSource.SetAttribute("key", $SourceName) + $packageSource.SetAttribute("value", $SourceEndPoint) + $sources.AppendChild($packageSource) | Out-Null + } + else { + Write-Host "Package source $SourceName already present." + } + + AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd +} + +# Add a credential node for the specified source +function AddCredential($creds, $source, $username, $pwd) { + # If no cred supplied, don't do anything. + if (!$pwd) { + return; + } + + # Looks for credential configuration for the given SourceName. Create it if none is found. 
+ $sourceElement = $creds.SelectSingleNode($Source) + if ($sourceElement -eq $null) + { + $sourceElement = $doc.CreateElement($Source) + $creds.AppendChild($sourceElement) | Out-Null + } + + # Add the node to the credential if none is found. + $usernameElement = $sourceElement.SelectSingleNode("add[@key='Username']") + if ($usernameElement -eq $null) + { + $usernameElement = $doc.CreateElement("add") + $usernameElement.SetAttribute("key", "Username") + $sourceElement.AppendChild($usernameElement) | Out-Null + } + $usernameElement.SetAttribute("value", $Username) + + # Add the to the credential if none is found. + # Add it as a clear text because there is no support for encrypted ones in non-windows .Net SDKs. + # -> https://github.com/NuGet/Home/issues/5526 + $passwordElement = $sourceElement.SelectSingleNode("add[@key='ClearTextPassword']") + if ($passwordElement -eq $null) + { + $passwordElement = $doc.CreateElement("add") + $passwordElement.SetAttribute("key", "ClearTextPassword") + $sourceElement.AppendChild($passwordElement) | Out-Null + } + + $passwordElement.SetAttribute("value", $pwd) +} + +function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) { + $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]") + + Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds." + + ForEach ($PackageSource in $maestroPrivateSources) { + Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key + AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd + } +} + +function EnablePrivatePackageSources($DisabledPackageSources) { + $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]") + ForEach ($DisabledPackageSource in $maestroPrivateSources) { + Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource" + # Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries + $DisabledPackageSources.RemoveChild($DisabledPackageSource) + } +} + +if (!(Test-Path $ConfigFile -PathType Leaf)) { + Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile" + ExitWithExitCode 1 +} + +# Load NuGet.config +$doc = New-Object System.Xml.XmlDocument +$filename = (Get-Item $ConfigFile).FullName +$doc.Load($filename) + +# Get reference to or create one if none exist already +$sources = $doc.DocumentElement.SelectSingleNode("packageSources") +if ($sources -eq $null) { + $sources = $doc.CreateElement("packageSources") + $doc.DocumentElement.AppendChild($sources) | Out-Null +} + +$creds = $null +if ($Password) { + # Looks for a node. Create it if none is found. 
+ $creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials") + if ($creds -eq $null) { + $creds = $doc.CreateElement("packageSourceCredentials") + $doc.DocumentElement.AppendChild($creds) | Out-Null + } +} + +# Check for disabledPackageSources; we'll enable any darc-int ones we find there +$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources") +if ($disabledSources -ne $null) { + Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node" + EnablePrivatePackageSources -DisabledPackageSources $disabledSources +} + +$userName = "dn-bot" + +# Insert credential nodes for Maestro's private feeds +InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password + +# 3.1 uses a different feed url format so it's handled differently here +$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']") +if ($dotnet31Source -ne $null) { + AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password + AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password +} + +$dotnetVersions = @('5','6','7','8','9') + +foreach ($dotnetVersion in $dotnetVersions) { + $feedPrefix = "dotnet" + $dotnetVersion; + $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']") + if ($dotnetSource -ne $null) { + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password + AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password + } +} + +$doc.Save($filename) diff --git a/src/arcade/eng/common/SetupNugetSources.sh b/src/arcade/eng/common/SetupNugetSources.sh new file mode 100644 index 000000000..4604b61b0 --- /dev/null +++ b/src/arcade/eng/common/SetupNugetSources.sh @@ -0,0 +1,167 @@ +#!/usr/bin/env bash + +# This script adds internal feeds required to build commits that depend on internal package sources. For instance, +# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables +# disabled internal Maestro (darc-int*) feeds. +# +# Optionally, this script also adds a credential entry for each of the internal feeds if supplied. +# +# See example call for this script below. +# +# - task: Bash@3 +# displayName: Setup Internal Feeds +# inputs: +# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh +# arguments: $(Build.SourcesDirectory)/NuGet.config +# condition: ne(variables['Agent.OS'], 'Windows_NT') +# - task: NuGetAuthenticate@1 +# +# Note that the NuGetAuthenticate task should be called after SetupNugetSources. +# This ensures that: +# - Appropriate creds are set for the added internal feeds (if not supplied to the scrupt) +# - The credential provider is installed. +# +# This logic is also abstracted into enable-internal-sources.yml. 
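[Editor's note, not part of the upstream patch: like the PowerShell variant above, this shell script takes the path to a NuGet.config as its first argument and an optional credential token as its second, and edits the file in place with grep/sed rather than an XML parser. A minimal local sketch for the copy added here, assuming a scratch copy of the config and a placeholder token (both illustrative, not taken from the patch):

    cp NuGet.config /tmp/NuGet.config.test                                          # work on a throwaway copy, never the real config
    ./src/arcade/eng/common/SetupNugetSources.sh /tmp/NuGet.config.test "$FEED_TOKEN"   # FEED_TOKEN is a placeholder, not a real secret
    grep -i "darc-int" /tmp/NuGet.config.test                                       # inspect which internal feeds were enabled or credentialed
]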
+ +ConfigFile=$1 +CredToken=$2 +NL='\n' +TB=' ' + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. "$scriptroot/tools.sh" + +if [ ! -f "$ConfigFile" ]; then + Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile" + ExitWithExitCode 1 +fi + +if [[ `uname -s` == "Darwin" ]]; then + NL=$'\\\n' + TB='' +fi + +# Ensure there is a ... section. +grep -i "" $ConfigFile +if [ "$?" != "0" ]; then + echo "Adding ... section." + ConfigNodeHeader="" + PackageSourcesTemplate="${TB}${NL}${TB}" + + sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile +fi + +# Ensure there is a ... section. +grep -i "" $ConfigFile +if [ "$?" != "0" ]; then + echo "Adding ... section." + + PackageSourcesNodeFooter="" + PackageSourceCredentialsTemplate="${TB}${NL}${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile +fi + +PackageSources=() + +# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present +grep -i "" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=('dotnet3.1-internal') + + grep -i "" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding dotnet3.1-internal-transport to the packageSources." + PackageSourcesNodeFooter="" + PackageSourceTemplate="${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=('dotnet3.1-internal-transport') +fi + +DotNetVersions=('5' '6' '7' '8' '9') + +for DotNetVersion in ${DotNetVersions[@]} ; do + FeedPrefix="dotnet${DotNetVersion}"; + grep -i "" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=("$FeedPrefix-internal") + + grep -i "" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding $FeedPrefix-internal-transport to the packageSources." + PackageSourcesNodeFooter="" + PackageSourceTemplate="${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + fi + PackageSources+=("$FeedPrefix-internal-transport") + fi +done + +# I want things split line by line +PrevIFS=$IFS +IFS=$'\n' +PackageSources+="$IFS" +PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"') +IFS=$PrevIFS + +if [ "$CredToken" ]; then + for FeedName in ${PackageSources[@]} ; do + # Check if there is no existing credential for this FeedName + grep -i "<$FeedName>" $ConfigFile + if [ "$?" != "0" ]; then + echo "Adding credentials for $FeedName." + + PackageSourceCredentialsNodeFooter="" + NewCredential="${TB}${TB}<$FeedName>${NL}${NL}${NL}" + + sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile + fi + done +fi + +# Re-enable any entries in disabledPackageSources where the feed name contains darc-int +grep -i "" $ConfigFile +if [ "$?" 
== "0" ]; then + DisabledDarcIntSources=() + echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile" + DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"') + for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do + if [[ $DisabledSourceName == darc-int* ]] + then + OldDisableValue="" + NewDisableValue="" + sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile + echo "Neutralized disablePackageSources entry for '$DisabledSourceName'" + fi + done +fi diff --git a/src/arcade/eng/common/build.cmd b/src/arcade/eng/common/build.cmd new file mode 100644 index 000000000..99daf368a --- /dev/null +++ b/src/arcade/eng/common/build.cmd @@ -0,0 +1,3 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0build.ps1""" %*" +exit /b %ErrorLevel% diff --git a/src/arcade/eng/common/build.ps1 b/src/arcade/eng/common/build.ps1 new file mode 100644 index 000000000..6b3be1916 --- /dev/null +++ b/src/arcade/eng/common/build.ps1 @@ -0,0 +1,175 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string][Alias('c')]$configuration = "Debug", + [string]$platform = $null, + [string] $projects, + [string][Alias('v')]$verbosity = "minimal", + [string] $msbuildEngine = $null, + [bool] $warnAsError = $true, + [bool] $nodeReuse = $true, + [switch] $buildCheck = $false, + [switch][Alias('r')]$restore, + [switch] $deployDeps, + [switch][Alias('b')]$build, + [switch] $rebuild, + [switch] $deploy, + [switch][Alias('t')]$test, + [switch] $integrationTest, + [switch] $performanceTest, + [switch] $sign, + [switch] $pack, + [switch] $publish, + [switch] $clean, + [switch][Alias('pb')]$productBuild, + [switch][Alias('bl')]$binaryLog, + [switch][Alias('nobl')]$excludeCIBinarylog, + [switch] $ci, + [switch] $prepareMachine, + [string] $runtimeSourceFeed = '', + [string] $runtimeSourceFeedKey = '', + [switch] $excludePrereleaseVS, + [switch] $nativeToolsOnMachine, + [switch] $help, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties +) + +# Unset 'Platform' environment variable to avoid unwanted collision in InstallDotNetCore.targets file +# some computer has this env var defined (e.g. Some HP) +if($env:Platform) { + $env:Platform="" +} +function Print-Usage() { + Write-Host "Common settings:" + Write-Host " -configuration Build configuration: 'Debug' or 'Release' (short: -c)" + Write-Host " -platform Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild" + Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)" + Write-Host " -binaryLog Output binary log (short: -bl)" + Write-Host " -help Print help and exit" + Write-Host "" + + Write-Host "Actions:" + Write-Host " -restore Restore dependencies (short: -r)" + Write-Host " -build Build solution (short: -b)" + Write-Host " -rebuild Rebuild solution" + Write-Host " -deploy Deploy built VSIXes" + Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)" + Write-Host " -test Run all unit tests in the solution (short: -t)" + Write-Host " -integrationTest Run all integration tests in the solution" + Write-Host " -performanceTest Run all performance tests in the solution" + Write-Host " -pack Package build outputs into NuGet packages and Willow components" + Write-Host " -sign Sign build outputs" + Write-Host " -publish Publish artifacts (e.g. 
symbols)" + Write-Host " -clean Clean the solution" + Write-Host " -productBuild Build the solution in the way it will be built in the full .NET product (VMR) build (short: -pb)" + Write-Host "" + + Write-Host "Advanced settings:" + Write-Host " -projects Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)" + Write-Host " -ci Set when running on CI server" + Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)" + Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build" + Write-Host " -warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" + Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." + Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio" + Write-Host " -nativeToolsOnMachine Sets the native tools on machine environment variable (indicating that the script should use native tools on machine)" + Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" + Write-Host " -buildCheck Sets /check msbuild parameter" + Write-Host "" + + Write-Host "Command line arguments not listed above are passed thru to msbuild." + Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)." +} + +. $PSScriptRoot\tools.ps1 + +function InitializeCustomToolset { + if (-not $restore) { + return + } + + $script = Join-Path $EngRoot 'restore-toolset.ps1' + + if (Test-Path $script) { + . $script + } +} + +function Build { + $toolsetBuildProj = InitializeToolset + InitializeCustomToolset + + $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' } + $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' } + $check = if ($buildCheck) { '/check' } else { '' } + + if ($projects) { + # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons. + # Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty. + [string[]] $msbuildArgs = $properties + + # Resolve relative project paths into full paths + $projects = ($projects.Split(';').ForEach({Resolve-Path $_}) -join ';') + + $msbuildArgs += "/p:Projects=$projects" + $properties = $msbuildArgs + } + + MSBuild $toolsetBuildProj ` + $bl ` + $platformArg ` + $check ` + /p:Configuration=$configuration ` + /p:RepoRoot=$RepoRoot ` + /p:Restore=$restore ` + /p:DeployDeps=$deployDeps ` + /p:Build=$build ` + /p:Rebuild=$rebuild ` + /p:Deploy=$deploy ` + /p:Test=$test ` + /p:Pack=$pack ` + /p:DotNetBuildRepo=$productBuild ` + /p:IntegrationTest=$integrationTest ` + /p:PerformanceTest=$performanceTest ` + /p:Sign=$sign ` + /p:Publish=$publish ` + /p:RestoreStaticGraphEnableBinaryLogger=$binaryLog ` + @properties +} + +try { + if ($clean) { + if (Test-Path $ArtifactsDir) { + Remove-Item -Recurse -Force $ArtifactsDir + Write-Host 'Artifacts directory deleted.' 
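[Editor's note, not part of the upstream patch: the switches parsed above are forwarded to a single MSBuild invocation of the Arcade toolset project as boolean properties (/p:Restore, /p:Build, /p:Test, and so on), and, per the help text, any unrecognized arguments fall through to MSBuild via $properties. A hedged usage sketch for the copy added here, assuming PowerShell 7 (pwsh) is on PATH; the /p:MyExtraProperty value is purely illustrative:

    pwsh ./src/arcade/eng/common/build.ps1 -restore -build -bl /p:MyExtraProperty=true
]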
+ } + exit 0 + } + + if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) { + Print-Usage + exit 0 + } + + if ($ci) { + if (-not $excludeCIBinarylog) { + $binaryLog = $true + } + $nodeReuse = $false + } + + if ($nativeToolsOnMachine) { + $env:NativeToolsOnMachine = $true + } + if ($restore) { + InitializeNativeTools + } + + Build +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + ExitWithExitCode 1 +} + +ExitWithExitCode 0 diff --git a/src/arcade/eng/common/cibuild.sh b/src/arcade/eng/common/cibuild.sh new file mode 100644 index 000000000..66e3b0ac6 --- /dev/null +++ b/src/arcade/eng/common/cibuild.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where + # the symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@ diff --git a/src/arcade/eng/common/core-templates/job/job.yml b/src/arcade/eng/common/core-templates/job/job.yml new file mode 100644 index 000000000..6badecba7 --- /dev/null +++ b/src/arcade/eng/common/core-templates/job/job.yml @@ -0,0 +1,225 @@ +parameters: +# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + cancelTimeoutInMinutes: '' + condition: '' + container: '' + continueOnError: false + dependsOn: '' + displayName: '' + pool: '' + steps: [] + strategy: '' + timeoutInMinutes: '' + variables: [] + workspace: '' + templateContext: {} + +# Job base template specific parameters + # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md + # publishing defaults + artifacts: '' + enableMicrobuild: false + enableMicrobuildForMacAndLinux: false + enablePublishBuildArtifacts: false + enablePublishBuildAssets: false + enablePublishTestResults: false + enableBuildRetry: false + mergeTestResults: false + testRunTitle: '' + testResultsFormat: '' + name: '' + componentGovernanceSteps: [] + preSteps: [] + artifactPublishSteps: [] + runAsPublic: false + +# 1es specific parameters + is1ESPipeline: '' + +jobs: +- job: ${{ parameters.name }} + + ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}: + cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }} + + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + + ${{ if ne(parameters.container, '') }}: + container: ${{ parameters.container }} + + ${{ if ne(parameters.continueOnError, '') }}: + continueOnError: ${{ parameters.continueOnError }} + + ${{ if ne(parameters.dependsOn, '') }}: + dependsOn: ${{ parameters.dependsOn }} + + ${{ if ne(parameters.displayName, '') }}: + displayName: ${{ parameters.displayName }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + + ${{ if ne(parameters.strategy, '') }}: + strategy: ${{ parameters.strategy }} + + ${{ if ne(parameters.timeoutInMinutes, '') }}: + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + ${{ if ne(parameters.templateContext, '') }}: + templateContext: ${{ parameters.templateContext }} + + variables: + - ${{ if 
ne(parameters.enableTelemetry, 'false') }}: + - name: DOTNET_CLI_TELEMETRY_PROFILE + value: '$(Build.Repository.Uri)' + # Retry signature validation up to three times, waiting 2 seconds between attempts. + # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures + - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY + value: 3,2000 + - ${{ each variable in parameters.variables }}: + # handle name-value variable syntax + # example: + # - name: [key] + # value: [value] + - ${{ if ne(variable.name, '') }}: + - name: ${{ variable.name }} + value: ${{ variable.value }} + + # handle variable groups + - ${{ if ne(variable.group, '') }}: + - group: ${{ variable.group }} + + # handle template variable syntax + # example: + # - template: path/to/template.yml + # parameters: + # [key]: [value] + - ${{ if ne(variable.template, '') }}: + - template: ${{ variable.template }} + ${{ if ne(variable.parameters, '') }}: + parameters: ${{ variable.parameters }} + + # handle key-value variable syntax. + # example: + # - [key]: [value] + - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}: + - ${{ each pair in variable }}: + - name: ${{ pair.key }} + value: ${{ pair.value }} + + # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds + - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: DotNet-HelixApi-Access + + ${{ if ne(parameters.workspace, '') }}: + workspace: ${{ parameters.workspace }} + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if ne(parameters.preSteps, '') }}: + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - template: /eng/common/core-templates/steps/install-microbuild.yml + parameters: + enableMicrobuild: ${{ parameters.enableMicrobuild }} + enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} + continueOnError: ${{ parameters.continueOnError }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: + - task: NuGetAuthenticate@1 + + - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}: + - task: DownloadPipelineArtifact@2 + inputs: + buildType: current + artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }} + targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }} + itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }} + + - ${{ each step in parameters.steps }}: + - ${{ step }} + + - ${{ each step in parameters.componentGovernanceSteps }}: + - ${{ step }} + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - template: /eng/common/core-templates/steps/cleanup-microbuild.yml + parameters: + enableMicrobuild: ${{ parameters.enableMicrobuild }} + enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} + continueOnError: ${{ parameters.continueOnError }} + + # 
Publish test results + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}: + - task: PublishTestResults@2 + displayName: Publish XUnit Test Results + inputs: + testResultsFormat: 'xUnit' + testResultsFiles: '*.xml' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}: + - task: PublishTestResults@2 + displayName: Publish TRX Test Results + inputs: + testResultsFormat: 'VSTest' + testResultsFiles: '*.trx' + searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx + mergeTestResults: ${{ parameters.mergeTestResults }} + continueOnError: true + condition: always() + + # gather artifacts + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - task: CopyFiles@2 + displayName: Gather binaries for publish to artifacts + inputs: + SourceFolder: 'artifacts/bin' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin' + - task: CopyFiles@2 + displayName: Gather packages for publish to artifacts + inputs: + SourceFolder: 'artifacts/packages' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages' + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - task: CopyFiles@2 + displayName: Gather logs for publish to artifacts + inputs: + SourceFolder: 'artifacts/log' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log' + continueOnError: true + condition: always() + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - task: CopyFiles@2 + displayName: Gather logs for publish to artifacts + inputs: + SourceFolder: 'artifacts/log/$(_BuildConfig)' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + continueOnError: true + condition: always() + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - task: CopyFiles@2 + displayName: Gather buildconfiguration for build retry + inputs: + SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration' + continueOnError: true + condition: always() + - ${{ each step in parameters.artifactPublishSteps }}: + - ${{ step }} diff --git a/src/arcade/eng/common/core-templates/job/onelocbuild.yml b/src/arcade/eng/common/core-templates/job/onelocbuild.yml new file mode 100644 index 000000000..00feec8eb --- /dev/null +++ b/src/arcade/eng/common/core-templates/job/onelocbuild.yml @@ -0,0 +1,121 @@ +parameters: + # Optional: dependencies of the job + dependsOn: '' + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: '' + + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex + GithubPat: $(BotAccount-dotnet-bot-repo-PAT) + 
+ SourcesDirectory: $(Build.SourcesDirectory) + CreatePr: true + AutoCompletePr: false + ReusePr: true + UseLfLineEndings: true + UseCheckedInLocProjectJson: false + SkipLocProjectJsonGeneration: false + LanguageSet: VS_Main_Languages + LclSource: lclFilesInRepo + LclPackageId: '' + RepoType: gitHub + GitHubOrg: dotnet + MirrorRepo: '' + MirrorBranch: main + condition: '' + JobNameSuffix: '' + is1ESPipeline: '' +jobs: +- job: OneLocBuild${{ parameters.JobNameSuffix }} + + dependsOn: ${{ parameters.dependsOn }} + + displayName: OneLocBuild${{ parameters.JobNameSuffix }} + + variables: + - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat + - name: _GenerateLocProjectArguments + value: -SourcesDirectory ${{ parameters.SourcesDirectory }} + -LanguageSet "${{ parameters.LanguageSet }}" + -CreateNeutralXlfs + - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}: + - name: _GenerateLocProjectArguments + value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: + - task: Powershell@2 + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + arguments: $(_GenerateLocProjectArguments) + displayName: Generate LocProject.json + condition: ${{ parameters.condition }} + + - task: OneLocBuild@2 + displayName: OneLocBuild + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + inputs: + locProj: eng/Localize/LocProject.json + outDir: $(Build.ArtifactStagingDirectory) + lclSource: ${{ parameters.LclSource }} + lclPackageId: ${{ parameters.LclPackageId }} + isCreatePrSelected: ${{ parameters.CreatePr }} + isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} + ${{ if eq(parameters.CreatePr, true) }}: + isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + isShouldReusePrSelected: ${{ parameters.ReusePr }} + packageSourceAuth: patAuth + patVariable: ${{ parameters.CeapexPat }} + ${{ if eq(parameters.RepoType, 'gitHub') }}: + repoType: ${{ parameters.RepoType }} + gitHubPatVariable: "${{ parameters.GithubPat }}" + ${{ if ne(parameters.MirrorRepo, '') }}: + isMirrorRepoSelected: true + gitHubOrganization: ${{ parameters.GitHubOrg }} + mirrorRepo: ${{ parameters.MirrorRepo }} + mirrorBranch: ${{ parameters.MirrorBranch }} + condition: ${{ parameters.condition }} + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish Localization Files + pathToPublish: '$(Build.ArtifactStagingDirectory)/loc' + publishLocation: Container + artifactName: Loc + condition: ${{ parameters.condition }} + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish LocProject.json + pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/' + publishLocation: Container + artifactName: Loc + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/src/arcade/eng/common/core-templates/job/publish-build-assets.yml b/src/arcade/eng/common/core-templates/job/publish-build-assets.yml new file mode 100644 index 000000000..4f1dc42e0 --- /dev/null +++ b/src/arcade/eng/common/core-templates/job/publish-build-assets.yml @@ -0,0 +1,159 @@ +parameters: + configuration: 'Debug' + + # Optional: condition for the job to run + condition: '' + + # Optional: 'true' if future jobs should run even if this job fails + continueOnError: false + + # Optional: dependencies of the job + dependsOn: '' + + # Optional: Include PublishBuildArtifacts task + enablePublishBuildArtifacts: false + + # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool + pool: {} + + # Optional: should run as a public build even in the internal project + # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects. 
+ runAsPublic: false + + # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing + publishAssetsImmediately: false + + artifactsPublishingAdditionalParameters: '' + + signingValidationAdditionalParameters: '' + + is1ESPipeline: '' + + # Optional: whether or not the build has assets it wants to publish to BAR + isAssetlessBuild: false + +jobs: +- job: Asset_Registry_Publish + + dependsOn: ${{ parameters.dependsOn }} + timeoutInMinutes: 150 + + ${{ if eq(parameters.publishAssetsImmediately, 'true') }}: + displayName: Publish Assets + ${{ else }}: + displayName: Publish to Build Asset Registry + + variables: + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - group: Publish-Build-Assets + - group: AzureDevOps-Artifact-Feeds-Pats + - name: runCodesignValidationInjection + value: false + # unconditional - needed for logs publishing (redactor tool version) + - template: /eng/common/core-templates/post-build/common-variables.yml + + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - checkout: self + fetchDepth: 3 + clean: true + + - ${{ if eq(parameters.isAssetlessBuild, 'false') }}: + - task: DownloadPipelineArtifact@2 + displayName: Download Asset Manifests + inputs: + artifactName: AssetManifests + targetPath: '$(Build.StagingDirectory)/AssetManifests' + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: NuGetAuthenticate@1 + + - task: AzureCLI@2 + displayName: Publish Build Assets + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1 + arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet + /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests' + /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }} + /p:MaestroApiEndpoint=https://maestro.dot.net + /p:OfficialBuildId=$(Build.BuildNumber) + condition: ${{ parameters.condition }} + continueOnError: ${{ parameters.continueOnError }} + + - task: powershell@2 + displayName: Create ReleaseConfigs Artifact + inputs: + targetType: inline + script: | + New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force + $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt" + Add-Content -Path $filePath -Value $(BARBuildId) + Add-Content -Path $filePath -Value "$(DefaultChannels)" + Add-Content -Path $filePath -Value $(IsStableBuild) + + $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" 
+ if (Test-Path -Path $symbolExclusionfile) + { + Write-Host "SymbolExclusionFile exists" + Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs" + } + + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish ReleaseConfigs Artifact + pathToPublish: '$(Build.StagingDirectory)/ReleaseConfigs' + publishLocation: Container + artifactName: ReleaseConfigs + + - ${{ if or(eq(parameters.publishAssetsImmediately, 'true'), eq(parameters.isAssetlessBuild, 'true')) }}: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: > + -BuildId $(BARBuildId) + -PublishingInfraVersion 3 + -AzdoToken '$(System.AccessToken)' + -WaitPublishingFinish true + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' + + - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: + - template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + JobLabel: 'Publish_Artifacts_Logs' diff --git a/src/arcade/eng/common/core-templates/job/source-index-stage1.yml b/src/arcade/eng/common/core-templates/job/source-index-stage1.yml new file mode 100644 index 000000000..30530359a --- /dev/null +++ b/src/arcade/eng/common/core-templates/job/source-index-stage1.yml @@ -0,0 +1,44 @@ +parameters: + runAsPublic: false + sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci" + preSteps: [] + binlogPath: artifacts/log/Debug/Build.binlog + condition: '' + dependsOn: '' + pool: '' + is1ESPipeline: '' + +jobs: +- job: SourceIndexStage1 + dependsOn: ${{ parameters.dependsOn }} + condition: ${{ parameters.condition }} + variables: + - name: BinlogPath + value: ${{ parameters.binlogPath }} + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + ${{ if ne(parameters.pool, '') }}: + pool: ${{ parameters.pool }} + ${{ if eq(parameters.pool, '') }}: + pool: + ${{ if eq(variables['System.TeamProject'], 'public') }}: + name: $(DncEngPublicBuildPool) + image: windows.vs2022.amd64.open + ${{ if eq(variables['System.TeamProject'], 'internal') }}: + name: $(DncEngInternalBuildPool) + image: windows.vs2022.amd64 + + steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. 
Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ each preStep in parameters.preSteps }}: + - ${{ preStep }} + - script: ${{ parameters.sourceIndexBuildCommand }} + displayName: Build Repository + + - template: /eng/common/core-templates/steps/source-index-stage1-publish.yml + parameters: + binLogPath: ${{ parameters.binLogPath }} \ No newline at end of file diff --git a/src/arcade/eng/common/core-templates/jobs/codeql-build.yml b/src/arcade/eng/common/core-templates/jobs/codeql-build.yml new file mode 100644 index 000000000..693b00b37 --- /dev/null +++ b/src/arcade/eng/common/core-templates/jobs/codeql-build.yml @@ -0,0 +1,32 @@ +parameters: + # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md + continueOnError: false + # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job + jobs: [] + # Optional: if specified, restore and use this version of Guardian instead of the default. + overrideGuardianVersion: '' + is1ESPipeline: '' + +jobs: +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + enableMicrobuild: false + enablePublishBuildArtifacts: false + enablePublishTestResults: false + enablePublishBuildAssets: false + enableTelemetry: true + + variables: + - group: Publish-Build-Assets + # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in + # sync with the packages.config file. + - name: DefaultGuardianVersion + value: 0.109.0 + - name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config + - name: GuardianVersion + value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} + + jobs: ${{ parameters.jobs }} + diff --git a/src/arcade/eng/common/core-templates/post-build/common-variables.yml b/src/arcade/eng/common/core-templates/post-build/common-variables.yml new file mode 100644 index 000000000..d5627a994 --- /dev/null +++ b/src/arcade/eng/common/core-templates/post-build/common-variables.yml @@ -0,0 +1,22 @@ +variables: + - group: Publish-Build-Assets + + # Whether the build is internal or not + - name: IsInternalBuild + value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }} + + # Default Maestro++ API Endpoint and API Version + - name: MaestroApiEndPoint + value: "https://maestro.dot.net" + - name: MaestroApiVersion + value: "2020-02-20" + + - name: SourceLinkCLIVersion + value: 3.0.0 + - name: SymbolToolVersion + value: 1.0.1 + - name: BinlogToolVersion + value: 1.0.11 + + - name: runCodesignValidationInjection + value: false diff --git a/src/arcade/eng/common/core-templates/post-build/post-build.yml b/src/arcade/eng/common/core-templates/post-build/post-build.yml new file mode 100644 index 000000000..5757915ed --- /dev/null +++ b/src/arcade/eng/common/core-templates/post-build/post-build.yml @@ -0,0 +1,328 @@ +parameters: + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. + # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? 
+ type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: requireDefaultChannels + displayName: Fail the build if there are no default channel(s) registrations for the current build + type: boolean + default: false + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + + - name: isAssetlessBuild + type: boolean + displayName: Is Assetless Build + default: false + + # These parameters let the user customize the call to sdk-task.ps1 for publishing + # symbols & general artifacts as well as for signing validation + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + + # Which stages should finish execution before post-build stages start + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false + + - name: is1ESPipeline + type: boolean + default: false + +stages: +- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + - stage: Validate + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Validate Build Assets + variables: + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobs: + - job: + displayName: NuGet Validation + condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: windows.vs2022.amd64 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + + - job: + displayName: Signing Validation + condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true')) + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + itemPattern: | + ** + !**/Microsoft.SourceBuild.Intermediate.*.nupkg + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. 
+ - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + StageLabel: 'Validation' + JobLabel: 'Signing' + BinlogToolVersion: $(BinlogToolVersion) + + - job: + displayName: SourceLink Validation + condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true') + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: $(DncEngInternalBuildPool) + image: 1es-windows-2022 + os: windows + ${{ else }}: + name: $(DncEngInternalBuildPool) + demands: ImageOverride -equals windows.vs2022.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true + +- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: + - stage: publish_using_darc + ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: + dependsOn: ${{ parameters.publishDependsOn }} + ${{ else }}: + dependsOn: ${{ parameters.validateDependsOn }} + displayName: Publish using Darc + variables: + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + jobs: + - job: + displayName: Publish Using Darc + timeoutInMinutes: 120 + pool: + # We don't use the collection uri here because it might vary (.visualstudio.com vs. 
dev.azure.com) + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + name: AzurePipelines-EO + image: 1ESPT-Windows2022 + demands: Cmd + os: windows + # If it's not devdiv, it's dnceng + ${{ else }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: + name: NetCore1ESPool-Publishing-Internal + image: windows.vs2019.amd64 + os: windows + ${{ else }}: + name: NetCore1ESPool-Publishing-Internal + demands: ImageOverride -equals windows.vs2019.amd64 + steps: + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: NuGetAuthenticate@1 + + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: > + -BuildId $(BARBuildId) + -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} + -AzdoToken '$(System.AccessToken)' + -WaitPublishingFinish true + -RequireDefaultChannels ${{ parameters.requireDefaultChannels }} + -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' + -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' + -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' diff --git a/src/arcade/eng/common/core-templates/post-build/setup-maestro-vars.yml b/src/arcade/eng/common/core-templates/post-build/setup-maestro-vars.yml new file mode 100644 index 000000000..f7602980d --- /dev/null +++ b/src/arcade/eng/common/core-templates/post-build/setup-maestro-vars.yml @@ -0,0 +1,74 @@ +parameters: + BARBuildId: '' + PromoteToChannelIds: '' + is1ESPipeline: '' + +steps: + - ${{ if eq(parameters.is1ESPipeline, '') }}: + - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error + + - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}: + - task: DownloadBuildArtifacts@0 + displayName: Download Release Configs + inputs: + buildType: current + artifactName: ReleaseConfigs + checkDownloadedFiles: true + + - task: AzureCLI@2 + name: setReleaseVars + displayName: Set Release Configs Vars + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: pscore + scriptLocation: inlineScript + inlineScript: | + try { + if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') { + $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt + + $BarId = $Content | Select -Index 0 + $Channels = $Content | Select -Index 1 + $IsStableBuild = $Content | Select -Index 2 + + $AzureDevOpsProject = $Env:System_TeamProject + $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId + $AzureDevOpsBuildId = $Env:Build_BuildId + } + else { + . 
$(Build.SourcesDirectory)\eng\common\tools.ps1 + $darc = Get-Darc + $buildInfo = & $darc get-build ` + --id ${{ parameters.BARBuildId }} ` + --extended ` + --output-format json ` + --ci ` + | convertFrom-Json + + $BarId = ${{ parameters.BARBuildId }} + $Channels = $Env:PromoteToMaestroChannels -split "," + $Channels = $Channels -join "][" + $Channels = "[$Channels]" + + $IsStableBuild = $buildInfo.stable + $AzureDevOpsProject = $buildInfo.azureDevOpsProject + $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId + $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId + } + + Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId" + Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels" + Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild" + + Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject" + Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId" + Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId" + } + catch { + Write-Host $_ + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + exit 1 + } + env: + PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }} diff --git a/src/arcade/eng/common/core-templates/steps/cleanup-microbuild.yml b/src/arcade/eng/common/core-templates/steps/cleanup-microbuild.yml new file mode 100644 index 000000000..c0fdcd337 --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/cleanup-microbuild.yml @@ -0,0 +1,28 @@ +parameters: + # Enable cleanup tasks for MicroBuild + enableMicrobuild: false + # Enable cleanup tasks for MicroBuild on Mac and Linux + # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' + enableMicrobuildForMacAndLinux: false + continueOnError: false + +steps: + - ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - task: MicroBuildCleanup@1 + displayName: Execute Microbuild cleanup tasks + condition: and( + always(), + or( + and( + eq(variables['Agent.Os'], 'Windows_NT'), + in(variables['_SignType'], 'real', 'test') + ), + and( + ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }}, + ne(variables['Agent.Os'], 'Windows_NT'), + eq(variables['_SignType'], 'real') + ) + )) + continueOnError: ${{ parameters.continueOnError }} + env: + TeamName: $(_TeamName) diff --git a/src/arcade/eng/common/core-templates/steps/component-governance.yml b/src/arcade/eng/common/core-templates/steps/component-governance.yml new file mode 100644 index 000000000..cf0649aa9 --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/component-governance.yml @@ -0,0 +1,16 @@ +parameters: + disableComponentGovernance: false + componentGovernanceIgnoreDirectories: '' + is1ESPipeline: false + displayName: 'Component Detection' + +steps: +- ${{ if eq(parameters.disableComponentGovernance, 'true') }}: + - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true" + displayName: Set skipComponentGovernanceDetection variable +- ${{ if ne(parameters.disableComponentGovernance, 'true') }}: + - task: ComponentGovernanceComponentDetection@0 + continueOnError: true + displayName: ${{ parameters.displayName }} + inputs: + ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} diff --git a/src/arcade/eng/common/core-templates/steps/enable-internal-runtimes.yml b/src/arcade/eng/common/core-templates/steps/enable-internal-runtimes.yml new file mode 100644 index 000000000..6bdbf62ac --- /dev/null +++ 
b/src/arcade/eng/common/core-templates/steps/enable-internal-runtimes.yml @@ -0,0 +1,32 @@ +# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' +# variable with the base64-encoded SAS token, by default + +parameters: +- name: federatedServiceConnection + type: string + default: 'dotnetbuilds-internal-read' +- name: outputVariableName + type: string + default: 'dotnetbuilds-internal-container-read-token-base64' +- name: expiryInHours + type: number + default: 1 +- name: base64Encode + type: boolean + default: true +- name: is1ESPipeline + type: boolean + default: false + +steps: +- ${{ if ne(variables['System.TeamProject'], 'public') }}: + - template: /eng/common/core-templates/steps/get-delegation-sas.yml + parameters: + federatedServiceConnection: ${{ parameters.federatedServiceConnection }} + outputVariableName: ${{ parameters.outputVariableName }} + expiryInHours: ${{ parameters.expiryInHours }} + base64Encode: ${{ parameters.base64Encode }} + storageAccount: dotnetbuilds + container: internal + permissions: rl + is1ESPipeline: ${{ parameters.is1ESPipeline }} \ No newline at end of file diff --git a/src/arcade/eng/common/core-templates/steps/enable-internal-sources.yml b/src/arcade/eng/common/core-templates/steps/enable-internal-sources.yml new file mode 100644 index 000000000..64f881bff --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/enable-internal-sources.yml @@ -0,0 +1,47 @@ +parameters: +# This is the Azure federated service connection that we log into to get an access token. +- name: nugetFederatedServiceConnection + type: string + default: 'dnceng-artifacts-feeds-read' +- name: is1ESPipeline + type: boolean + default: false +# Legacy parameters to allow for PAT usage +- name: legacyCredential + type: string + default: '' + +steps: +- ${{ if ne(variables['System.TeamProject'], 'public') }}: + - ${{ if ne(parameters.legacyCredential, '') }}: + - task: PowerShell@2 + displayName: Setup Internal Feeds + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token + env: + Token: ${{ parameters.legacyCredential }} + # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate. + # If running on DevDiv, NuGetAuthenticate is not really an option. It's scoped to a single feed, and we have many feeds that + # may be added. Instead, we'll use the traditional approach (add cred to nuget.config), but use an account token. + - ${{ else }}: + - ${{ if eq(variables['System.TeamProject'], 'internal') }}: + - task: PowerShell@2 + displayName: Setup Internal Feeds + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config + - ${{ else }}: + - template: /eng/common/templates/steps/get-federated-access-token.yml + parameters: + federatedServiceConnection: ${{ parameters.nugetFederatedServiceConnection }} + outputVariableName: 'dnceng-artifacts-feeds-read-access-token' + - task: PowerShell@2 + displayName: Setup Internal Feeds + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token) + # This is required in certain scenarios to install the ADO credential provider. + # It installed by default in some msbuild invocations (e.g. 
VS msbuild), but needs to be installed for others + # (e.g. dotnet msbuild). + - task: NuGetAuthenticate@1 diff --git a/src/arcade/eng/common/core-templates/steps/generate-sbom.yml b/src/arcade/eng/common/core-templates/steps/generate-sbom.yml new file mode 100644 index 000000000..44a9636cd --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/generate-sbom.yml @@ -0,0 +1,54 @@ +# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated. +# PackageName - The name of the package this SBOM represents. +# PackageVersion - The version of the package this SBOM represents. +# ManifestDirPath - The path of the directory where the generated manifest files will be placed +# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. + +parameters: + PackageVersion: 10.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + PackageName: '.NET' + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom + IgnoreDirectories: '' + sbomContinueOnError: true + is1ESPipeline: false + # disable publishArtifacts if some other step is publishing the artifacts (like job.yml). + publishArtifacts: true + +steps: +- task: PowerShell@2 + displayName: Prep for SBOM generation in (Non-linux) + condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin')) + inputs: + filePath: ./eng/common/generate-sbom-prep.ps1 + arguments: ${{parameters.manifestDirPath}} + +# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461 +- script: | + chmod +x ./eng/common/generate-sbom-prep.sh + ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}} + displayName: Prep for SBOM generation in (Linux) + condition: eq(variables['Agent.Os'], 'Linux') + continueOnError: ${{ parameters.sbomContinueOnError }} + +- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0 + displayName: 'Generate SBOM manifest' + continueOnError: ${{ parameters.sbomContinueOnError }} + inputs: + PackageName: ${{ parameters.packageName }} + BuildDropPath: ${{ parameters.buildDropPath }} + PackageVersion: ${{ parameters.packageVersion }} + ManifestDirPath: ${{ parameters.manifestDirPath }}/$(ARTIFACT_NAME) + ${{ if ne(parameters.IgnoreDirectories, '') }}: + AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}' + +- ${{ if eq(parameters.publishArtifacts, 'true')}}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish SBOM manifest + continueOnError: ${{parameters.sbomContinueOnError}} + targetPath: '${{ parameters.manifestDirPath }}' + artifactName: $(ARTIFACT_NAME) + diff --git a/src/arcade/eng/common/core-templates/steps/get-delegation-sas.yml b/src/arcade/eng/common/core-templates/steps/get-delegation-sas.yml new file mode 100644 index 000000000..d2901470a --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/get-delegation-sas.yml @@ -0,0 +1,46 @@ +parameters: +- name: federatedServiceConnection + type: string +- name: outputVariableName + type: string +- name: expiryInHours + type: number + default: 1 +- name: base64Encode + type: boolean + default: false +- name: storageAccount + type: string +- name: container + type: string +- name: permissions + type: string + default: 'rl' +- name: is1ESPipeline + type: boolean + default: false + +steps: +- task: AzureCLI@2 + displayName: 'Generate 
delegation SAS Token for ${{ parameters.storageAccount }}/${{ parameters.container }}' + inputs: + azureSubscription: ${{ parameters.federatedServiceConnection }} + scriptType: 'pscore' + scriptLocation: 'inlineScript' + inlineScript: | + # Calculate the expiration of the SAS token and convert to UTC + $expiry = (Get-Date).AddHours(${{ parameters.expiryInHours }}).ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ") + + $sas = az storage container generate-sas --account-name ${{ parameters.storageAccount }} --name ${{ parameters.container }} --permissions ${{ parameters.permissions }} --expiry $expiry --auth-mode login --as-user -o tsv + + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to generate SAS token." + exit 1 + } + + if ('${{ parameters.base64Encode }}' -eq 'true') { + $sas = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($sas)) + } + + Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" + Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true]$sas" diff --git a/src/arcade/eng/common/core-templates/steps/get-federated-access-token.yml b/src/arcade/eng/common/core-templates/steps/get-federated-access-token.yml new file mode 100644 index 000000000..3a4d4410c --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/get-federated-access-token.yml @@ -0,0 +1,42 @@ +parameters: +- name: federatedServiceConnection + type: string +- name: outputVariableName + type: string +- name: is1ESPipeline + type: boolean +- name: stepName + type: string + default: 'getFederatedAccessToken' +- name: condition + type: string + default: '' +# Resource to get a token for. Common values include: +# - '499b84ac-1321-427f-aa17-267ca6975798' for Azure DevOps +# - 'https://storage.azure.com/' for storage +# Defaults to Azure DevOps +- name: resource + type: string + default: '499b84ac-1321-427f-aa17-267ca6975798' +- name: isStepOutputVariable + type: boolean + default: false + +steps: +- task: AzureCLI@2 + displayName: 'Getting federated access token for feeds' + name: ${{ parameters.stepName }} + ${{ if ne(parameters.condition, '') }}: + condition: ${{ parameters.condition }} + inputs: + azureSubscription: ${{ parameters.federatedServiceConnection }} + scriptType: 'pscore' + scriptLocation: 'inlineScript' + inlineScript: | + $accessToken = az account get-access-token --query accessToken --resource ${{ parameters.resource }} --output tsv + if ($LASTEXITCODE -ne 0) { + Write-Error "Failed to get access token for resource '${{ parameters.resource }}'" + exit 1 + } + Write-Host "Setting '${{ parameters.outputVariableName }}' with the access token value" + Write-Host "##vso[task.setvariable variable=${{ parameters.outputVariableName }};issecret=true;isOutput=${{ parameters.isStepOutputVariable }}]$accessToken" \ No newline at end of file diff --git a/src/arcade/eng/common/core-templates/steps/install-microbuild.yml b/src/arcade/eng/common/core-templates/steps/install-microbuild.yml new file mode 100644 index 000000000..2bcf974ee --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/install-microbuild.yml @@ -0,0 +1,85 @@ +parameters: + # Enable install tasks for MicroBuild + enableMicrobuild: false + # Enable install tasks for MicroBuild on Mac and Linux + # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' + enableMicrobuildForMacAndLinux: false + # Location of the MicroBuild output folder + microBuildOutputFolder: '$(Build.SourcesDirectory)' + continueOnError: false + +steps: + 
- ${{ if eq(parameters.enableMicrobuild, 'true') }}: + - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: + # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable + - task: UseDotNet@2 + displayName: Install .NET 8.0 SDK for MicroBuild Plugin + inputs: + packageType: sdk + version: 8.0.x + installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet + workingDirectory: ${{ parameters.microBuildOutputFolder }} + condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) + + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}: + azureSubscription: 'MicroBuild Signing Task (DevDiv)' + useEsrpCli: true + env: + TeamName: $(_TeamName) + MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + continueOnError: ${{ parameters.continueOnError }} + condition: and( + succeeded(), + or( + and( + eq(variables['Agent.Os'], 'Windows_NT'), + in(variables['_SignType'], 'real', 'test') + ), + and( + ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }}, + ne(variables['Agent.Os'], 'Windows_NT'), + eq(variables['_SignType'], 'real') + ) + )) + + # Workaround for ESRP CLI on Linux - https://github.com/dotnet/source-build/issues/4964 + - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: + - task: UseDotNet@2 + displayName: Install .NET 9.0 SDK for ESRP CLI Workaround + inputs: + packageType: sdk + version: 9.0.x + installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet + workingDirectory: ${{ parameters.microBuildOutputFolder }} + condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) + + - task: PowerShell@2 + displayName: Workaround for ESRP CLI on Linux + inputs: + targetType: 'inline' + script: | + Write-Host "Copying Linux Path" + $MBSIGN_APPFOLDER = '$(MBSIGN_APPFOLDER)' + $MBSIGN_APPFOLDER = ($MBSIGN_APPFOLDER -replace '/build', '') + + $versionRegex = '\d+\.\d+\.\d+' + $package = Get-ChildItem -Path $MBSIGN_APPFOLDER -Directory | + Where-Object { $_.Name -match $versionRegex } + + if ($package.Count -ne 1) { + Write-Host "There should be exactly one matching subfolder, but found $($package.Count)." 
+ exit 1 + } + + $MBSIGN_APPFOLDER = $package[0].FullName + '/build' + $MBSIGN_APPFOLDER | Write-Host + $SignConfigPath = $MBSIGN_APPFOLDER + '/signconfig.xml' + Copy-Item -Path "$(MBSIGN_APPFOLDER)/signconfig.xml" -Destination $SignConfigPath -Force + condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) diff --git a/src/arcade/eng/common/core-templates/steps/publish-build-artifacts.yml b/src/arcade/eng/common/core-templates/steps/publish-build-artifacts.yml new file mode 100644 index 000000000..f24ce3466 --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/publish-build-artifacts.yml @@ -0,0 +1,20 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false +- name: args + type: object + default: {} +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - template: /eng/common/templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + ${{ each parameter in parameters.args }}: + ${{ parameter.key }}: ${{ parameter.value }} +- ${{ else }}: + - template: /eng/common/templates-official/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + ${{ each parameter in parameters.args }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/core-templates/steps/publish-logs.yml b/src/arcade/eng/common/core-templates/steps/publish-logs.yml new file mode 100644 index 000000000..de24d0087 --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/publish-logs.yml @@ -0,0 +1,61 @@ +parameters: + StageLabel: '' + JobLabel: '' + CustomSensitiveDataList: '' + # A default - in case value from eng/common/core-templates/post-build/common-variables.yml is not passed + BinlogToolVersion: '1.0.11' + is1ESPipeline: false + +steps: +- task: Powershell@2 + displayName: Prepare Binlogs to Upload + inputs: + targetType: inline + script: | + New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + continueOnError: true + condition: always() + +- task: PowerShell@2 + displayName: Redact Logs + inputs: + filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 + # For now this needs to have explicit list of all sensitive data. 
Taken from eng/publishing/v3/publish.yml + # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + # If the file exists - sensitive data for redaction will be sourced from it + # (single entry per line, lines starting with '# ' are considered comments and skipped) + arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' + -BinlogToolVersion ${{parameters.BinlogToolVersion}} + -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + '$(publishing-dnceng-devdiv-code-r-build-re)' + '$(MaestroAccessToken)' + '$(dn-bot-all-orgs-artifact-feeds-rw)' + '$(akams-client-id)' + '$(microsoft-symbol-server-pat)' + '$(symweb-symbol-server-pat)' + '$(dnceng-symbol-server-pat)' + '$(dn-bot-all-orgs-build-rw-code-rw)' + '$(System.AccessToken)' + ${{parameters.CustomSensitiveDataList}} + continueOnError: true + condition: always() + +- task: CopyFiles@2 + displayName: Gather post build logs + inputs: + SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' + condition: always() + +- template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + args: + displayName: Publish Logs + pathToPublish: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' + publishLocation: Container + artifactName: PostBuildLogs + continueOnError: true + condition: always() diff --git a/src/arcade/eng/common/core-templates/steps/publish-pipeline-artifacts.yml b/src/arcade/eng/common/core-templates/steps/publish-pipeline-artifacts.yml new file mode 100644 index 000000000..2efec04dc --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,20 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: args + type: object + default: {} + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - template: /eng/common/templates/steps/publish-pipeline-artifacts.yml + parameters: + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} +- ${{ else }}: + - template: /eng/common/templates-official/steps/publish-pipeline-artifacts.yml + parameters: + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/core-templates/steps/retain-build.yml b/src/arcade/eng/common/core-templates/steps/retain-build.yml new file mode 100644 index 000000000..83d97a26a --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/retain-build.yml @@ -0,0 +1,28 @@ +parameters: + # Optional azure devops PAT with build execute permissions for the build's organization, + # only needed if the build that should be retained ran on a different organization than + # the pipeline where this template is executing from + Token: '' + # Optional BuildId to retain, defaults to the current running build + BuildId: '' + # Azure devops Organization URI for the build in the https://dev.azure.com/ format. + # Defaults to the organization the current pipeline is running on + AzdoOrgUri: '$(System.CollectionUri)' + # Azure devops project for the build. 
Defaults to the project the current pipeline is running on + AzdoProject: '$(System.TeamProject)' + +steps: + - task: powershell@2 + inputs: + targetType: 'filePath' + filePath: eng/common/retain-build.ps1 + pwsh: true + arguments: > + -AzdoOrgUri: ${{parameters.AzdoOrgUri}} + -AzdoProject ${{parameters.AzdoProject}} + -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }} + -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}} + displayName: Enable permanent build retention + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + BUILD_ID: $(Build.BuildId) \ No newline at end of file diff --git a/src/arcade/eng/common/core-templates/steps/send-to-helix.yml b/src/arcade/eng/common/core-templates/steps/send-to-helix.yml new file mode 100644 index 000000000..68fa739c4 --- /dev/null +++ b/src/arcade/eng/common/core-templates/steps/send-to-helix.yml @@ -0,0 +1,93 @@ +# Please remember to update the documentation if you make changes to these parameters! +parameters: + HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/ + HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/' + HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number + HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues + HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group + HelixProjectPath: 'eng/common/helixpublish.proj' # optional -- path to the project file to build relative to BUILD_SOURCESDIRECTORY + HelixProjectArguments: '' # optional -- arguments passed to the build command + HelixConfiguration: '' # optional -- additional property attached to a job + HelixPreCommands: '' # optional -- commands to run before Helix work item execution + HelixPostCommands: '' # optional -- commands to run after Helix work item execution + WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects + WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects + WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 
00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects + CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload + XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true + XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects + XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects + XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner + XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects + IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion + DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json + WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget." + IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set + HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net ) + Creator: '' # optional -- if the build is external, use this to specify who is sending the job + DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO + condition: succeeded() # optional -- condition for step to execute; defaults to succeeded() + continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false + +steps: + - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"' + displayName: ${{ parameters.DisplayNamePrefix }} (Windows) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + 
XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} + - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/${{ parameters.HelixProjectPath }} /restore /p:TreatWarningsAsErrors=false ${{ parameters.HelixProjectArguments }} /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog + displayName: ${{ parameters.DisplayNamePrefix }} (Unix) + env: + BuildConfig: $(_BuildConfig) + HelixSource: ${{ parameters.HelixSource }} + HelixType: ${{ parameters.HelixType }} + HelixBuild: ${{ parameters.HelixBuild }} + HelixConfiguration: ${{ parameters.HelixConfiguration }} + HelixTargetQueues: ${{ parameters.HelixTargetQueues }} + HelixAccessToken: ${{ parameters.HelixAccessToken }} + HelixPreCommands: ${{ parameters.HelixPreCommands }} + HelixPostCommands: ${{ parameters.HelixPostCommands }} + WorkItemDirectory: ${{ parameters.WorkItemDirectory }} + WorkItemCommand: ${{ parameters.WorkItemCommand }} + WorkItemTimeout: ${{ parameters.WorkItemTimeout }} + CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }} + XUnitProjects: ${{ parameters.XUnitProjects }} + XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }} + XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }} + XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }} + XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }} + IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }} + DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }} + DotNetCliVersion: ${{ parameters.DotNetCliVersion }} + WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }} + HelixBaseUri: ${{ parameters.HelixBaseUri }} + Creator: ${{ parameters.Creator }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT')) + continueOnError: ${{ parameters.continueOnError }} diff --git a/src/arcade/eng/common/core-templates/variables/pool-providers.yml b/src/arcade/eng/common/core-templates/variables/pool-providers.yml new file mode 100644 index 000000000..41053d382 --- /dev/null +++ b/src/arcade/eng/common/core-templates/variables/pool-providers.yml @@ -0,0 +1,8 @@ +parameters: + is1ESPipeline: false + +variables: + - ${{ if eq(parameters.is1ESPipeline, 'true') }}: + - template: /eng/common/templates-official/variables/pool-providers.yml + - ${{ else }}: + - template: /eng/common/templates/variables/pool-providers.yml \ No newline at end of file diff --git a/src/arcade/eng/common/cross/arm/tizen/tizen.patch b/src/arcade/eng/common/cross/arm/tizen/tizen.patch new file mode 100644 index 000000000..fb12ade72 --- /dev/null +++ b/src/arcade/eng/common/cross/arm/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 
2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf32-littlearm) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) ) diff --git a/src/arcade/eng/common/cross/arm64/tizen/tizen.patch b/src/arcade/eng/common/cross/arm64/tizen/tizen.patch new file mode 100644 index 000000000..2cebc5473 --- /dev/null +++ b/src/arcade/eng/common/cross/arm64/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf64-littleaarch64) +-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-aarch64.so.1 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) ) diff --git a/src/arcade/eng/common/cross/armel/tizen/tizen.patch b/src/arcade/eng/common/cross/armel/tizen/tizen.patch new file mode 100644 index 000000000..ca7c7c1ff --- /dev/null +++ b/src/arcade/eng/common/cross/armel/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf32-littlearm) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) ) diff --git a/src/arcade/eng/common/cross/build-android-rootfs.sh b/src/arcade/eng/common/cross/build-android-rootfs.sh new file mode 100644 index 000000000..fbd8d8084 --- /dev/null +++ b/src/arcade/eng/common/cross/build-android-rootfs.sh @@ -0,0 +1,146 @@ +#!/usr/bin/env bash +set -e +__NDK_Version=r21 + +usage() +{ + echo "Creates a toolchain and sysroot used for cross-compiling for Android." + echo + echo "Usage: $0 [BuildArch] [ApiLevel] [--ndk NDKVersion]" + echo + echo "BuildArch is the target architecture of Android. Currently only arm64 is supported." + echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html" + echo "NDKVersion is the version of Android NDK. The default is r21. See https://developer.android.com/ndk/downloads/revision_history" + echo + echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior" + echo "by setting the TOOLCHAIN_DIR environment variable" + echo + echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation," + echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK." + echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android." 
+ exit 1 +} + +__ApiLevel=28 # The minimum platform for arm64 is API level 21 but the minimum version that support glob(3) is 28. See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h +__BuildArch=arm64 +__AndroidArch=aarch64 +__AndroidToolchain=aarch64-linux-android + +while :; do + if [[ "$#" -le 0 ]]; then + break + fi + + i=$1 + + lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")" + case $lowerI in + -?|-h|--help) + usage + exit 1 + ;; + arm64) + __BuildArch=arm64 + __AndroidArch=aarch64 + __AndroidToolchain=aarch64-linux-android + ;; + arm) + __BuildArch=arm + __AndroidArch=arm + __AndroidToolchain=arm-linux-androideabi + ;; + --ndk) + shift + __NDK_Version=$1 + ;; + *[0-9]) + __ApiLevel=$i + ;; + *) + __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i" + ;; + esac + shift +done + +if [[ "$__NDK_Version" == "r21" ]] || [[ "$__NDK_Version" == "r22" ]]; then + __NDK_File_Arch_Spec=-x86_64 + __SysRoot=sysroot +else + __NDK_File_Arch_Spec= + __SysRoot=toolchains/llvm/prebuilt/linux-x86_64/sysroot +fi + +# Obtain the location of the bash script to figure out where the root of the repo is. +__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs" + +if [[ ! -f "$__CrossDir" ]]; then + mkdir -p "$__CrossDir" +fi + +# Resolve absolute path to avoid `../` in build logs +__CrossDir="$( cd "$__CrossDir" && pwd )" + +__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version" +__lldb_Dir="$__CrossDir/lldb" +__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version" + +if [[ -n "$TOOLCHAIN_DIR" ]]; then + __ToolchainDir=$TOOLCHAIN_DIR +fi + +if [[ -n "$NDK_DIR" ]]; then + __NDK_Dir=$NDK_DIR +fi + +echo "Target API level: $__ApiLevel" +echo "Target architecture: $__BuildArch" +echo "NDK version: $__NDK_Version" +echo "NDK location: $__NDK_Dir" +echo "Target Toolchain location: $__ToolchainDir" + +# Download the NDK if required +if [ ! -d $__NDK_Dir ]; then + echo Downloading the NDK into $__NDK_Dir + mkdir -p $__NDK_Dir + wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux$__NDK_File_Arch_Spec.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux.zip + unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux.zip -d $__CrossDir +fi + +if [ ! -d $__lldb_Dir ]; then + mkdir -p $__lldb_Dir + echo Downloading LLDB into $__lldb_Dir + wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip + unzip -q $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir +fi + +echo "Download dependencies..." 
+__TmpDir=$__CrossDir/tmp/$__BuildArch/
+mkdir -p "$__TmpDir"
+
+# combined dependencies for coreclr, installer and libraries
+__AndroidPackages="libicu"
+__AndroidPackages+=" libandroid-glob"
+__AndroidPackages+=" liblzma"
+__AndroidPackages+=" krb5"
+__AndroidPackages+=" openssl"
+
+for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/main/binary-$__AndroidArch/Packages |\
+    grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do
+
+    if [[ "$path" != "Filename:" ]]; then
+        echo "Working on: $path"
+        wget -qO- https://packages.termux.dev/termux-main-21/$path | dpkg -x - "$__TmpDir"
+    fi
+done
+
+cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/$__SysRoot/usr/"
+
+# Generate platform file for build.sh script to assign to __DistroRid
+echo "Generating platform file..."
+echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/$__SysRoot/android_platform
+
+echo "Now to build coreclr, libraries and host; run:"
+echo ROOTFS_DIR=$(realpath $__ToolchainDir/$__SysRoot) ./build.sh clr+libs+host --cross --arch $__BuildArch
diff --git a/src/arcade/eng/common/cross/build-rootfs.sh b/src/arcade/eng/common/cross/build-rootfs.sh
new file mode 100644
index 000000000..d6f005b5d
--- /dev/null
+++ b/src/arcade/eng/common/cross/build-rootfs.sh
@@ -0,0 +1,835 @@
+#!/usr/bin/env bash
+
+set -e
+
+usage()
+{
+    echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir <directory>]"
+    echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86"
+    echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
+    echo " for alpine can be specified with version: alpineX.YY or alpineedge"
+    echo " for FreeBSD can be: freebsd13, freebsd14"
+    echo " for illumos can be: illumos"
+    echo " for Haiku can be: haiku."
+    echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0 no-lldb. Ignored for alpine and FreeBSD"
+    echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
+    echo "--skipunmount - optional, will skip the unmount of rootfs folder."
+    echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)."
+    echo "--skipemulation - optional, will skip qemu and debootstrap requirement when building environment for debian based systems."
+    echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
+    echo "--jobs N - optional, restrict to N jobs."
+ exit 1 +} + +__CodeName=xenial +__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__BuildArch=arm +__AlpineArch=armv7 +__FreeBSDArch=arm +__FreeBSDMachineArch=armv7 +__IllumosArch=arm7 +__HaikuArch=arm +__QEMUArch=arm +__UbuntuArch=armhf +__UbuntuRepo= +__UbuntuSuites="updates security backports" +__LLDB_Package="liblldb-3.9-dev" +__SkipUnmount=0 + +# base development support +__UbuntuPackages="build-essential" + +__AlpinePackages="alpine-base" +__AlpinePackages+=" build-base" +__AlpinePackages+=" linux-headers" +__AlpinePackages+=" lldb-dev" +__AlpinePackages+=" python3" +__AlpinePackages+=" libedit" + +# symlinks fixer +__UbuntuPackages+=" symlinks" + +# runtime dependencies +__UbuntuPackages+=" libicu-dev" +__UbuntuPackages+=" liblttng-ust-dev" +__UbuntuPackages+=" libunwind8-dev" + +__AlpinePackages+=" gettext-dev" +__AlpinePackages+=" icu-dev" +__AlpinePackages+=" libunwind-dev" +__AlpinePackages+=" lttng-ust-dev" +__AlpinePackages+=" compiler-rt" + +# runtime libraries' dependencies +__UbuntuPackages+=" libcurl4-openssl-dev" +__UbuntuPackages+=" libkrb5-dev" +__UbuntuPackages+=" libssl-dev" +__UbuntuPackages+=" zlib1g-dev" +__UbuntuPackages+=" libbrotli-dev" + +__AlpinePackages+=" curl-dev" +__AlpinePackages+=" krb5-dev" +__AlpinePackages+=" openssl-dev" +__AlpinePackages+=" zlib-dev" + +__FreeBSDBase="13.4-RELEASE" +__FreeBSDPkg="1.21.3" +__FreeBSDABI="13" +__FreeBSDPackages="libunwind" +__FreeBSDPackages+=" icu" +__FreeBSDPackages+=" libinotify" +__FreeBSDPackages+=" openssl" +__FreeBSDPackages+=" krb5" +__FreeBSDPackages+=" terminfo-db" + +__IllumosPackages="icu" +__IllumosPackages+=" mit-krb5" +__IllumosPackages+=" openssl" +__IllumosPackages+=" zlib" + +__HaikuPackages="gcc_syslibs" +__HaikuPackages+=" gcc_syslibs_devel" +__HaikuPackages+=" gmp" +__HaikuPackages+=" gmp_devel" +__HaikuPackages+=" icu[0-9]+" +__HaikuPackages+=" icu[0-9]*_devel" +__HaikuPackages+=" krb5" +__HaikuPackages+=" krb5_devel" +__HaikuPackages+=" libiconv" +__HaikuPackages+=" libiconv_devel" +__HaikuPackages+=" llvm[0-9]*_libunwind" +__HaikuPackages+=" llvm[0-9]*_libunwind_devel" +__HaikuPackages+=" mpfr" +__HaikuPackages+=" mpfr_devel" +__HaikuPackages+=" openssl3" +__HaikuPackages+=" openssl3_devel" +__HaikuPackages+=" zlib" +__HaikuPackages+=" zlib_devel" + +# ML.NET dependencies +__UbuntuPackages+=" libomp5" +__UbuntuPackages+=" libomp-dev" + +# Taken from https://github.com/alpinelinux/alpine-chroot-install/blob/6d08f12a8a70dd9b9dc7d997c88aa7789cc03c42/alpine-chroot-install#L85-L133 +__AlpineKeys=' +4a6a0840:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1yHJxQgsHQREclQu4Ohe\nqxTxd1tHcNnvnQTu/UrTky8wWvgXT+jpveroeWWnzmsYlDI93eLI2ORakxb3gA2O\nQ0Ry4ws8vhaxLQGC74uQR5+/yYrLuTKydFzuPaS1dK19qJPXB8GMdmFOijnXX4SA\njixuHLe1WW7kZVtjL7nufvpXkWBGjsfrvskdNA/5MfxAeBbqPgaq0QMEfxMAn6/R\nL5kNepi/Vr4S39Xvf2DzWkTLEK8pcnjNkt9/aafhWqFVW7m3HCAII6h/qlQNQKSo\nGuH34Q8GsFG30izUENV9avY7hSLq7nggsvknlNBZtFUcmGoQrtx3FmyYsIC8/R+B\nywIDAQAB +5243ef4b:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvNijDxJ8kloskKQpJdx+\nmTMVFFUGDoDCbulnhZMJoKNkSuZOzBoFC94omYPtxnIcBdWBGnrm6ncbKRlR+6oy\nDO0W7c44uHKCFGFqBhDasdI4RCYP+fcIX/lyMh6MLbOxqS22TwSLhCVjTyJeeH7K\naA7vqk+QSsF4TGbYzQDDpg7+6aAcNzg6InNePaywA6hbT0JXbxnDWsB+2/LLSF2G\nmnhJlJrWB1WGjkz23ONIWk85W4S0XB/ewDefd4Ly/zyIciastA7Zqnh7p3Ody6Q0\nsS2MJzo7p3os1smGjUF158s6m/JbVh4DN6YIsxwl2OjDOz9R0OycfJSDaBVIGZzg\ncQIDAQAB 
+524d27bb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr8s1q88XpuJWLCZALdKj\nlN8wg2ePB2T9aIcaxryYE/Jkmtu+ZQ5zKq6BT3y/udt5jAsMrhHTwroOjIsF9DeG\ne8Y3vjz+Hh4L8a7hZDaw8jy3CPag47L7nsZFwQOIo2Cl1SnzUc6/owoyjRU7ab0p\niWG5HK8IfiybRbZxnEbNAfT4R53hyI6z5FhyXGS2Ld8zCoU/R4E1P0CUuXKEN4p0\n64dyeUoOLXEWHjgKiU1mElIQj3k/IF02W89gDj285YgwqA49deLUM7QOd53QLnx+\nxrIrPv3A+eyXMFgexNwCKQU9ZdmWa00MjjHlegSGK8Y2NPnRoXhzqSP9T9i2HiXL\nVQIDAQAB +5261cecb:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwlzMkl7b5PBdfMzGdCT0\ncGloRr5xGgVmsdq5EtJvFkFAiN8Ac9MCFy/vAFmS8/7ZaGOXoCDWbYVLTLOO2qtX\nyHRl+7fJVh2N6qrDDFPmdgCi8NaE+3rITWXGrrQ1spJ0B6HIzTDNEjRKnD4xyg4j\ng01FMcJTU6E+V2JBY45CKN9dWr1JDM/nei/Pf0byBJlMp/mSSfjodykmz4Oe13xB\nCa1WTwgFykKYthoLGYrmo+LKIGpMoeEbY1kuUe04UiDe47l6Oggwnl+8XD1MeRWY\nsWgj8sF4dTcSfCMavK4zHRFFQbGp/YFJ/Ww6U9lA3Vq0wyEI6MCMQnoSMFwrbgZw\nwwIDAQAB +58199dcc:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3v8/ye/V/t5xf4JiXLXa\nhWFRozsnmn3hobON20GdmkrzKzO/eUqPOKTpg2GtvBhK30fu5oY5uN2ORiv2Y2ht\neLiZ9HVz3XP8Fm9frha60B7KNu66FO5P2o3i+E+DWTPqqPcCG6t4Znk2BypILcit\nwiPKTsgbBQR2qo/cO01eLLdt6oOzAaF94NH0656kvRewdo6HG4urbO46tCAizvCR\nCA7KGFMyad8WdKkTjxh8YLDLoOCtoZmXmQAiwfRe9pKXRH/XXGop8SYptLqyVVQ+\ntegOD9wRs2tOlgcLx4F/uMzHN7uoho6okBPiifRX+Pf38Vx+ozXh056tjmdZkCaV\naQIDAQAB +58cbb476:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoSPnuAGKtRIS5fEgYPXD\n8pSGvKAmIv3A08LBViDUe+YwhilSHbYXUEAcSH1KZvOo1WT1x2FNEPBEFEFU1Eyc\n+qGzbA03UFgBNvArurHQ5Z/GngGqE7IarSQFSoqewYRtFSfp+TL9CUNBvM0rT7vz\n2eMu3/wWG+CBmb92lkmyWwC1WSWFKO3x8w+Br2IFWvAZqHRt8oiG5QtYvcZL6jym\nY8T6sgdDlj+Y+wWaLHs9Fc+7vBuyK9C4O1ORdMPW15qVSl4Lc2Wu1QVwRiKnmA+c\nDsH/m7kDNRHM7TjWnuj+nrBOKAHzYquiu5iB3Qmx+0gwnrSVf27Arc3ozUmmJbLj\nzQIDAQAB +58e4f17d:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvBxJN9ErBgdRcPr5g4hV\nqyUSGZEKuvQliq2Z9SRHLh2J43+EdB6A+yzVvLnzcHVpBJ+BZ9RV30EM9guck9sh\nr+bryZcRHyjG2wiIEoduxF2a8KeWeQH7QlpwGhuobo1+gA8L0AGImiA6UP3LOirl\nI0G2+iaKZowME8/tydww4jx5vG132JCOScMjTalRsYZYJcjFbebQQolpqRaGB4iG\nWqhytWQGWuKiB1A22wjmIYf3t96l1Mp+FmM2URPxD1gk/BIBnX7ew+2gWppXOK9j\n1BJpo0/HaX5XoZ/uMqISAAtgHZAqq+g3IUPouxTphgYQRTRYpz2COw3NF43VYQrR\nbQIDAQAB +60ac2099:MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwR4uJVtJOnOFGchnMW5Y\nj5/waBdG1u5BTMlH+iQMcV5+VgWhmpZHJCBz3ocD+0IGk2I68S5TDOHec/GSC0lv\n6R9o6F7h429GmgPgVKQsc8mPTPtbjJMuLLs4xKc+viCplXc0Nc0ZoHmCH4da6fCV\ntdpHQjVe6F9zjdquZ4RjV6R6JTiN9v924dGMAkbW/xXmamtz51FzondKC52Gh8Mo\n/oA0/T0KsCMCi7tb4QNQUYrf+Xcha9uus4ww1kWNZyfXJB87a2kORLiWMfs2IBBJ\nTmZ2Fnk0JnHDb8Oknxd9PvJPT0mvyT8DA+KIAPqNvOjUXP4bnjEHJcoCP9S5HkGC\nIQIDAQAB +6165ee59:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAutQkua2CAig4VFSJ7v54\nALyu/J1WB3oni7qwCZD3veURw7HxpNAj9hR+S5N/pNeZgubQvJWyaPuQDm7PTs1+\ntFGiYNfAsiibX6Rv0wci3M+z2XEVAeR9Vzg6v4qoofDyoTbovn2LztaNEjTkB+oK\ntlvpNhg1zhou0jDVYFniEXvzjckxswHVb8cT0OMTKHALyLPrPOJzVtM9C1ew2Nnc\n3848xLiApMu3NBk0JqfcS3Bo5Y2b1FRVBvdt+2gFoKZix1MnZdAEZ8xQzL/a0YS5\nHd0wj5+EEKHfOd3A75uPa/WQmA+o0cBFfrzm69QDcSJSwGpzWrD1ScH3AK8nWvoj\nv7e9gukK/9yl1b4fQQ00vttwJPSgm9EnfPHLAtgXkRloI27H6/PuLoNvSAMQwuCD\nhQRlyGLPBETKkHeodfLoULjhDi1K2gKJTMhtbnUcAA7nEphkMhPWkBpgFdrH+5z4\nLxy+3ek0cqcI7K68EtrffU8jtUj9LFTUC8dERaIBs7NgQ/LfDbDfGh9g6qVj1hZl\nk9aaIPTm/xsi8v3u+0qaq7KzIBc9s59JOoA8TlpOaYdVgSQhHHLBaahOuAigH+VI\nisbC9vmqsThF2QdDtQt37keuqoda2E6sL7PUvIyVXDRfwX7uMDjlzTxHTymvq2Ck\nhtBqojBnThmjJQFgZXocHG8CAwEAAQ== 
+61666e3f:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAlEyxkHggKCXC2Wf5Mzx4\nnZLFZvU2bgcA3exfNPO/g1YunKfQY+Jg4fr6tJUUTZ3XZUrhmLNWvpvSwDS19ZmC\nIXOu0+V94aNgnhMsk9rr59I8qcbsQGIBoHzuAl8NzZCgdbEXkiY90w1skUw8J57z\nqCsMBydAueMXuWqF5nGtYbi5vHwK42PffpiZ7G5Kjwn8nYMW5IZdL6ZnMEVJUWC9\nI4waeKg0yskczYDmZUEAtrn3laX9677ToCpiKrvmZYjlGl0BaGp3cxggP2xaDbUq\nqfFxWNgvUAb3pXD09JM6Mt6HSIJaFc9vQbrKB9KT515y763j5CC2KUsilszKi3mB\nHYe5PoebdjS7D1Oh+tRqfegU2IImzSwW3iwA7PJvefFuc/kNIijfS/gH/cAqAK6z\nbhdOtE/zc7TtqW2Wn5Y03jIZdtm12CxSxwgtCF1NPyEWyIxAQUX9ACb3M0FAZ61n\nfpPrvwTaIIxxZ01L3IzPLpbc44x/DhJIEU+iDt6IMTrHOphD9MCG4631eIdB0H1b\n6zbNX1CXTsafqHRFV9XmYYIeOMggmd90s3xIbEujA6HKNP/gwzO6CDJ+nHFDEqoF\nSkxRdTkEqjTjVKieURW7Swv7zpfu5PrsrrkyGnsRrBJJzXlm2FOOxnbI2iSL1B5F\nrO5kbUxFeZUIDq+7Yv4kLWcCAwEAAQ== +616a9724:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnC+bR4bHf/L6QdU4puhQ\ngl1MHePszRC38bzvVFDUJsmCaMCL2suCs2A2yxAgGb9pu9AJYLAmxQC4mM3jNqhg\n/E7yuaBbek3O02zN/ctvflJ250wZCy+z0ZGIp1ak6pu1j14IwHokl9j36zNfGtfv\nADVOcdpWITFFlPqwq1qt/H3UsKVmtiF3BNWWTeUEQwKvlU8ymxgS99yn0+4OPyNT\nL3EUeS+NQJtDS01unau0t7LnjUXn+XIneWny8bIYOQCuVR6s/gpIGuhBaUqwaJOw\n7jkJZYF2Ij7uPb4b5/R3vX2FfxxqEHqssFSg8FFUNTZz3qNZs0CRVyfA972g9WkJ\nhPfn31pQYil4QGRibCMIeU27YAEjXoqfJKEPh4UWMQsQLrEfdGfb8VgwrPbniGfU\nL3jKJR3VAafL9330iawzVQDlIlwGl6u77gEXMl9K0pfazunYhAp+BMP+9ot5ckK+\nosmrqj11qMESsAj083GeFdfV3pXEIwUytaB0AKEht9DbqUfiE/oeZ/LAXgySMtVC\nsbC4ESmgVeY2xSBIJdDyUap7FR49GGrw0W49NUv9gRgQtGGaNVQQO9oGL2PBC41P\niWF9GLoX30HIz1P8PF/cZvicSSPkQf2Z6TV+t0ebdGNS5DjapdnCrq8m9Z0pyKsQ\nuxAL2a7zX8l5i1CZh1ycUGsCAwEAAQ== +616abc23:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0MfCDrhODRCIxR9Dep1s\neXafh5CE5BrF4WbCgCsevyPIdvTeyIaW4vmO3bbG4VzhogDZju+R3IQYFuhoXP5v\nY+zYJGnwrgz3r5wYAvPnLEs1+dtDKYOgJXQj+wLJBW1mzRDL8FoRXOe5iRmn1EFS\nwZ1DoUvyu7/J5r0itKicZp3QKED6YoilXed+1vnS4Sk0mzN4smuMR9eO1mMCqNp9\n9KTfRDHTbakIHwasECCXCp50uXdoW6ig/xUAFanpm9LtK6jctNDbXDhQmgvAaLXZ\nLvFqoaYJ/CvWkyYCgL6qxvMvVmPoRv7OPcyni4xR/WgWa0MSaEWjgPx3+yj9fiMA\n1S02pFWFDOr5OUF/O4YhFJvUCOtVsUPPfA/Lj6faL0h5QI9mQhy5Zb9TTaS9jB6p\nLw7u0dJlrjFedk8KTJdFCcaGYHP6kNPnOxMylcB/5WcztXZVQD5WpCicGNBxCGMm\nW64SgrV7M07gQfL/32QLsdqPUf0i8hoVD8wfQ3EpbQzv6Fk1Cn90bZqZafg8XWGY\nwddhkXk7egrr23Djv37V2okjzdqoyLBYBxMz63qQzFoAVv5VoY2NDTbXYUYytOvG\nGJ1afYDRVWrExCech1mX5ZVUB1br6WM+psFLJFoBFl6mDmiYt0vMYBddKISsvwLl\nIJQkzDwtXzT2cSjoj3T5QekCAwEAAQ== +616ac3bc:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvaaoSLab+IluixwKV5Od\n0gib2YurjPatGIbn5Ov2DLUFYiebj2oJINXJSwUOO+4WcuHFEqiL/1rya+k5hLZt\nhnPL1tn6QD4rESznvGSasRCQNT2vS/oyZbTYJRyAtFkEYLlq0t3S3xBxxHWuvIf0\nqVxVNYpQWyM3N9RIeYBR/euXKJXileSHk/uq1I5wTC0XBIHWcthczGN0m9wBEiWS\n0m3cnPk4q0Ea8mUJ91Rqob19qETz6VbSPYYpZk3qOycjKosuwcuzoMpwU8KRiMFd\n5LHtX0Hx85ghGsWDVtS0c0+aJa4lOMGvJCAOvDfqvODv7gKlCXUpgumGpLdTmaZ8\n1RwqspAe3IqBcdKTqRD4m2mSg23nVx2FAY3cjFvZQtfooT7q1ItRV5RgH6FhQSl7\n+6YIMJ1Bf8AAlLdRLpg+doOUGcEn+pkDiHFgI8ylH1LKyFKw+eXaAml/7DaWZk1d\ndqggwhXOhc/UUZFQuQQ8A8zpA13PcbC05XxN2hyP93tCEtyynMLVPtrRwDnHxFKa\nqKzs3rMDXPSXRn3ZZTdKH3069ApkEjQdpcwUh+EmJ1Ve/5cdtzT6kKWCjKBFZP/s\n91MlRrX2BTRdHaU5QJkUheUtakwxuHrdah2F94lRmsnQlpPr2YseJu6sIE+Dnx4M\nCfhdVbQL2w54R645nlnohu8CAwEAAQ== 
+616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ== +616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ== +616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== +66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ== +' +__Keyring= +__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg" +__SkipSigCheck=0 +__SkipEmulation=0 +__UseMirror=0 + +__UnprocessedBuildArgs= +while :; do + if [[ "$#" -le 0 ]]; then + break + fi + + lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + case $lowerI in + -\?|-h|--help) + usage + ;; + arm) + __BuildArch=arm + __UbuntuArch=armhf + __AlpineArch=armv7 + __QEMUArch=arm + ;; + arm64) + __BuildArch=arm64 + __UbuntuArch=arm64 + __AlpineArch=aarch64 + 
__QEMUArch=aarch64 + __FreeBSDArch=arm64 + __FreeBSDMachineArch=aarch64 + ;; + armel) + __BuildArch=armel + __UbuntuArch=armel + __UbuntuRepo="http://archive.debian.org/debian/" + __CodeName=buster + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + __LLDB_Package="liblldb-6.0-dev" + __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp5/}" + __UbuntuSuites= + ;; + armv6) + __BuildArch=armv6 + __UbuntuArch=armhf + __QEMUArch=arm + __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" + __CodeName=buster + __KeyringFile="/usr/share/keyrings/raspbian-archive-keyring.gpg" + __LLDB_Package="liblldb-6.0-dev" + __UbuntuSuites= + + if [[ -e "$__KeyringFile" ]]; then + __Keyring="--keyring $__KeyringFile" + fi + ;; + loongarch64) + __BuildArch=loongarch64 + __AlpineArch=loongarch64 + __QEMUArch=loongarch64 + __UbuntuArch=loong64 + __UbuntuSuites=unreleased + __LLDB_Package="liblldb-19-dev" + + if [[ "$__CodeName" == "sid" ]]; then + __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/" + fi + ;; + riscv64) + __BuildArch=riscv64 + __AlpineArch=riscv64 + __AlpinePackages="${__AlpinePackages// lldb-dev/}" + __QEMUArch=riscv64 + __UbuntuArch=riscv64 + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + unset __LLDB_Package + ;; + ppc64le) + __BuildArch=ppc64le + __AlpineArch=ppc64le + __QEMUArch=ppc64le + __UbuntuArch=ppc64el + __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp5/}" + unset __LLDB_Package + ;; + s390x) + __BuildArch=s390x + __AlpineArch=s390x + __QEMUArch=s390x + __UbuntuArch=s390x + __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" + __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" + __UbuntuPackages="${__UbuntuPackages// libomp5/}" + unset __LLDB_Package + ;; + x64) + __BuildArch=x64 + __AlpineArch=x86_64 + __UbuntuArch=amd64 + __FreeBSDArch=amd64 + __FreeBSDMachineArch=amd64 + __illumosArch=x86_64 + __HaikuArch=x86_64 + __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" + ;; + x86) + __BuildArch=x86 + __UbuntuArch=i386 + __AlpineArch=x86 + __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" + ;; + lldb*) + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + + [ -z "${version##*.*}" ] && minorVersion="${version#*.}" + if [ -z "$minorVersion" ]; then + minorVersion=0 + fi + + # for versions > 6.0, lldb has dropped the minor version + if [ "$majorVersion" -le 6 ]; then + version="$majorVersion.$minorVersion" + else + version="$majorVersion" + fi + + __LLDB_Package="liblldb-${version}-dev" + ;; + no-lldb) + unset __LLDB_Package + ;; + llvm*) + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + __LLVM_MajorVersion="${version%%.*}" + + [ -z "${version##*.*}" ] && __LLVM_MinorVersion="${version#*.}" + if [ -z "$__LLVM_MinorVersion" ]; then + __LLVM_MinorVersion=0 + fi + + # for versions > 6.0, lldb has dropped the minor version + if [ "$__LLVM_MajorVersion" -gt 6 ]; then + __LLVM_MinorVersion= + fi + + ;; + xenial) # Ubuntu 16.04 + __CodeName=xenial + ;; + bionic) # Ubuntu 18.04 + __CodeName=bionic + ;; + focal) # Ubuntu 20.04 + __CodeName=focal + ;; + jammy) # Ubuntu 22.04 + __CodeName=jammy + ;; + noble) # Ubuntu 24.04 + __CodeName=noble + if [[ -n "$__LLDB_Package" ]]; then + __LLDB_Package="liblldb-18-dev" + fi + ;; + stretch) # Debian 9 + __CodeName=stretch 
+ __LLDB_Package="liblldb-6.0-dev" + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + buster) # Debian 10 + __CodeName=buster + __LLDB_Package="liblldb-6.0-dev" + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://archive.debian.org/debian/" + fi + ;; + bullseye) # Debian 11 + __CodeName=bullseye + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + bookworm) # Debian 12 + __CodeName=bookworm + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + sid) # Debian sid + __CodeName=sid + __UbuntuSuites= + + # Debian-Ports architectures need different values + case "$__UbuntuArch" in + amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x) + __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.debian.org/debian/" + fi + ;; + *) + __KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg" + + if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/" + fi + ;; + esac + + if [[ -e "$__KeyringFile" ]]; then + __Keyring="--keyring $__KeyringFile" + fi + ;; + tizen) + __CodeName= + __UbuntuRepo= + __Tizen=tizen + ;; + alpine*) + __CodeName=alpine + __UbuntuRepo= + + if [[ "$lowerI" == "alpineedge" ]]; then + __AlpineVersion=edge + else + version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" + __AlpineMajorVersion="${version%%.*}" + __AlpineMinorVersion="${version#*.}" + __AlpineVersion="$__AlpineMajorVersion.$__AlpineMinorVersion" + fi + ;; + freebsd13) + __CodeName=freebsd + __SkipUnmount=1 + ;; + freebsd14) + __CodeName=freebsd + __FreeBSDBase="14.2-RELEASE" + __FreeBSDABI="14" + __SkipUnmount=1 + ;; + illumos) + __CodeName=illumos + __SkipUnmount=1 + ;; + haiku) + __CodeName=haiku + __SkipUnmount=1 + ;; + --skipunmount) + __SkipUnmount=1 + ;; + --skipsigcheck) + __SkipSigCheck=1 + ;; + --skipemulation) + __SkipEmulation=1 + ;; + --rootfsdir|-rootfsdir) + shift + __RootfsDir="$1" + ;; + --use-mirror) + __UseMirror=1 + ;; + --use-jobs) + shift + MAXJOBS=$1 + ;; + *) + __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1" + ;; + esac + + shift +done + +case "$__AlpineVersion" in + 3.14) __AlpinePackages+=" llvm11-libs" ;; + 3.15) __AlpinePackages+=" llvm12-libs" ;; + 3.16) __AlpinePackages+=" llvm13-libs" ;; + 3.17) __AlpinePackages+=" llvm15-libs" ;; + edge) __AlpineLlvmLibsLookup=1 ;; + *) + if [[ "$__AlpineArch" =~ s390x|ppc64le ]]; then + __AlpineVersion=3.15 # minimum version that supports lldb-dev + __AlpinePackages+=" llvm12-libs" + elif [[ "$__AlpineArch" == "x86" ]]; then + __AlpineVersion=3.17 # minimum version that supports lldb-dev + __AlpinePackages+=" llvm15-libs" + elif [[ "$__AlpineArch" == "riscv64" || "$__AlpineArch" == "loongarch64" ]]; then + __AlpineVersion=3.21 # minimum version that supports lldb-dev + __AlpinePackages+=" llvm19-libs" + elif [[ -n "$__AlpineMajorVersion" ]]; then + # use whichever alpine version is provided and select the latest toolchain libs + __AlpineLlvmLibsLookup=1 + else + __AlpineVersion=3.13 # 3.13 to maximize compatibility + __AlpinePackages+=" llvm10-libs" + fi +esac + +if [[ "$__AlpineVersion" =~ 3\.1[345] ]]; then + # compiler-rt--static was 
merged in compiler-rt package in alpine 3.16 + # for older versions, we need compiler-rt--static, so replace the name + __AlpinePackages="${__AlpinePackages/compiler-rt/compiler-rt-static}" +fi + +__UbuntuPackages+=" ${__LLDB_Package:-}" + +if [[ -z "$__UbuntuRepo" ]]; then + __UbuntuRepo="http://ports.ubuntu.com/" +fi + +if [[ -n "$__LLVM_MajorVersion" ]]; then + __UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev" +fi + +if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then + __RootfsDir="$ROOTFS_DIR" +fi + +if [[ -z "$__RootfsDir" ]]; then + __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch" +fi + +if [[ -d "$__RootfsDir" ]]; then + if [[ "$__SkipUnmount" == "0" ]]; then + umount "$__RootfsDir"/* || true + fi + rm -rf "$__RootfsDir" +fi + +mkdir -p "$__RootfsDir" +__RootfsDir="$( cd "$__RootfsDir" && pwd )" + +__hasWget= +ensureDownloadTool() +{ + if command -v wget &> /dev/null; then + __hasWget=1 + elif command -v curl &> /dev/null; then + __hasWget=0 + else + >&2 echo "ERROR: either wget or curl is required by this script." + exit 1 + fi +} + +if [[ "$__CodeName" == "alpine" ]]; then + __ApkToolsVersion=2.12.11 + __ApkToolsDir="$(mktemp -d)" + __ApkKeysDir="$(mktemp -d)" + arch="$(uname -m)" + + ensureDownloadTool + + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" + else + curl -SLO --create-dirs --output-dir "$__ApkToolsDir" "https://gitlab.alpinelinux.org/api/v4/projects/5/packages/generic/v$__ApkToolsVersion/$arch/apk.static" + fi + if [[ "$arch" == "x86_64" ]]; then + __ApkToolsSHA512SUM="53e57b49230da07ef44ee0765b9592580308c407a8d4da7125550957bb72cb59638e04f8892a18b584451c8d841d1c7cb0f0ab680cc323a3015776affaa3be33" + elif [[ "$arch" == "aarch64" ]]; then + __ApkToolsSHA512SUM="9e2b37ecb2b56c05dad23d379be84fd494c14bd730b620d0d576bda760588e1f2f59a7fcb2f2080577e0085f23a0ca8eadd993b4e61c2ab29549fdb71969afd0" + else + echo "WARNING: add missing hash for your host architecture. 
To find the value, use: 'find /tmp -name apk.static -exec sha512sum {} \;'" + fi + echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c + chmod +x "$__ApkToolsDir/apk.static" + + if [[ "$__AlpineVersion" == "edge" ]]; then + version=edge + else + version="v$__AlpineVersion" + fi + + for line in $__AlpineKeys; do + id="${line%%:*}" + content="${line#*:}" + + echo -e "-----BEGIN PUBLIC KEY-----\n$content\n-----END PUBLIC KEY-----" > "$__ApkKeysDir/alpine-devel@lists.alpinelinux.org-$id.rsa.pub" + done + + if [[ "$__SkipSigCheck" == "1" ]]; then + __ApkSignatureArg="--allow-untrusted" + else + __ApkSignatureArg="--keys-dir $__ApkKeysDir" + fi + + if [[ "$__SkipEmulation" == "1" ]]; then + __NoEmulationArg="--no-scripts" + fi + + # initialize DB + # shellcheck disable=SC2086 + "$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add + + if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then + # shellcheck disable=SC2086 + __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ + search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')" + fi + + # install all packages in one go + # shellcheck disable=SC2086 + "$__ApkToolsDir/apk.static" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \ + add $__AlpinePackages + + rm -r "$__ApkToolsDir" +elif [[ "$__CodeName" == "freebsd" ]]; then + mkdir -p "$__RootfsDir"/usr/local/etc + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + + ensureDownloadTool + + if [[ "$__hasWget" == 1 ]]; then + wget -O- "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + else + curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version + fi + echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf + echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf + mkdir -p "$__RootfsDir"/tmp + # get and build package manager + if [[ "$__hasWget" == 1 ]]; then + wget -O- "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + else + curl -SL "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf - + fi + cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" + # needed for install to succeed + mkdir -p "$__RootfsDir"/host/etc + ./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install + rm -rf 
"$__RootfsDir/tmp/pkg-${__FreeBSDPkg}" + # install packages we need. + INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update + # shellcheck disable=SC2086 + INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages +elif [[ "$__CodeName" == "illumos" ]]; then + mkdir "$__RootfsDir/tmp" + pushd "$__RootfsDir/tmp" + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + + ensureDownloadTool + + echo "Downloading sysroot." + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + else + curl -SL https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf - + fi + echo "Building binutils. Please wait.." + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf - + else + curl -SL https://ftp.gnu.org/gnu/binutils/binutils-2.42.tar.xz | tar -xJf - + fi + mkdir build-binutils && cd build-binutils + ../binutils-2.42/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" + make -j "$JOBS" && make install && cd .. + echo "Building gcc. Please wait.." + if [[ "$__hasWget" == 1 ]]; then + wget -O- https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf - + else + curl -SL https://ftp.gnu.org/gnu/gcc/gcc-13.3.0/gcc-13.3.0.tar.xz | tar -xJf - + fi + CFLAGS="-fPIC" + CXXFLAGS="-fPIC" + CXXFLAGS_FOR_TARGET="-fPIC" + CFLAGS_FOR_TARGET="-fPIC" + export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET + mkdir build-gcc && cd build-gcc + ../gcc-13.3.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.11" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \ + --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \ + --disable-libquadmath-support --disable-shared --enable-tls + make -j "$JOBS" && make install && cd .. + BaseUrl=https://pkgsrc.smartos.org + if [[ "$__UseMirror" == 1 ]]; then + BaseUrl=https://pkgsrc.smartos.skylime.net + fi + BaseUrl="$BaseUrl/packages/SmartOS/2019Q4/${__illumosArch}/All" + echo "Downloading manifest" + if [[ "$__hasWget" == 1 ]]; then + wget "$BaseUrl" + else + curl -SLO "$BaseUrl" + fi + echo "Downloading dependencies." + read -ra array <<<"$__IllumosPackages" + for package in "${array[@]}"; do + echo "Installing '$package'" + # find last occurrence of package in listing and extract its name + package="$(sed -En '/.*href="('"$package"'-[0-9].*).tgz".*/h;$!d;g;s//\1/p' All)" + echo "Resolved name '$package'" + if [[ "$__hasWget" == 1 ]]; then + wget "$BaseUrl"/"$package".tgz + else + curl -SLO "$BaseUrl"/"$package".tgz + fi + ar -x "$package".tgz + tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null + done + echo "Cleaning up temporary files." 
+ popd + rm -rf "$__RootfsDir"/{tmp,+*} + mkdir -p "$__RootfsDir"/usr/include/net + mkdir -p "$__RootfsDir"/usr/include/netpacket + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h + wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h + wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h + wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h + else + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h + curl -SLO --create-dirs --output-dir "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h + fi +elif [[ "$__CodeName" == "haiku" ]]; then + JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"} + + echo "Building Haiku sysroot for $__HaikuArch" + mkdir -p "$__RootfsDir/tmp" + pushd "$__RootfsDir/tmp" + + mkdir "$__RootfsDir/tmp/download" + + ensureDownloadTool + + echo "Downloading Haiku package tools" + git clone https://github.com/haiku/haiku-toolchains-ubuntu --depth 1 "$__RootfsDir/tmp/script" + if [[ "$__hasWget" == 1 ]]; then + wget -O "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" + else + curl -SLo "$__RootfsDir/tmp/download/hosttools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --hosttools)" + fi + + unzip -o "$__RootfsDir/tmp/download/hosttools.zip" -d "$__RootfsDir/tmp/bin" + + HaikuBaseUrl="https://eu.hpkg.haiku-os.org/haiku/master/$__HaikuArch/current" + HaikuPortsBaseUrl="https://eu.hpkg.haiku-os.org/haikuports/master/$__HaikuArch/current" + + echo "Downloading HaikuPorts package repository index..." + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo" + else + curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuPortsBaseUrl/repo" + fi + + echo "Downloading Haiku packages" + read -ra array <<<"$__HaikuPackages" + for package in "${array[@]}"; do + echo "Downloading $package..." + hpkgFilename="$(LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package_repo" list -f "$__RootfsDir/tmp/download/repo" | + grep -E "${package}-" | sort -V | tail -n 1 | xargs)" + if [ -z "$hpkgFilename" ]; then + >&2 echo "ERROR: package $package missing." + exit 1 + fi + echo "Resolved filename: $hpkgFilename..." + hpkgDownloadUrl="$HaikuPortsBaseUrl/packages/$hpkgFilename" + if [[ "$__hasWget" == 1 ]]; then + wget -P "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + else + curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$hpkgDownloadUrl" + fi + done + for package in haiku haiku_devel; do + echo "Downloading $package..." 
+ if [[ "$__hasWget" == 1 ]]; then + hpkgVersion="$(wget -qO- "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + wget -P "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + else + hpkgVersion="$(curl -sSL "$HaikuBaseUrl" | sed -n 's/^.*version: "\([^"]*\)".*$/\1/p')" + curl -SLO --create-dirs --output-dir "$__RootfsDir/tmp/download" "$HaikuBaseUrl/packages/$package-$hpkgVersion-1-$__HaikuArch.hpkg" + fi + done + + # Set up the sysroot + echo "Setting up sysroot and extracting required packages" + mkdir -p "$__RootfsDir/boot/system" + for file in "$__RootfsDir/tmp/download/"*.hpkg; do + echo "Extracting $file..." + LD_LIBRARY_PATH="$__RootfsDir/tmp/bin" "$__RootfsDir/tmp/bin/package" extract -C "$__RootfsDir/boot/system" "$file" + done + + # Download buildtools + echo "Downloading Haiku buildtools" + if [[ "$__hasWget" == 1 ]]; then + wget -O "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)" + else + curl -SLo "$__RootfsDir/tmp/download/buildtools.zip" "$("$__RootfsDir/tmp/script/fetch.sh" --buildtools --arch=$__HaikuArch)" + fi + unzip -o "$__RootfsDir/tmp/download/buildtools.zip" -d "$__RootfsDir" + + # Cleaning up temporary files + echo "Cleaning up temporary files" + popd + rm -rf "$__RootfsDir/tmp" +elif [[ -n "$__CodeName" ]]; then + __Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)" + + if [[ "$__SkipEmulation" == "1" ]]; then + if [[ -z "$AR" ]]; then + if command -v ar &>/dev/null; then + AR="$(command -v ar)" + elif command -v llvm-ar &>/dev/null; then + AR="$(command -v llvm-ar)" + else + echo "Unable to find ar or llvm-ar on PATH, add them to PATH or set AR environment variable pointing to the available AR tool" + exit 1 + fi + fi + + PYTHON=${PYTHON_EXECUTABLE:-python3} + + # shellcheck disable=SC2086,SC2046 + echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ + $(for suite in $__Suites; do echo -n "--suite $suite "; done) \ + $__UbuntuPackages + + # shellcheck disable=SC2086,SC2046 + "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ + $(for suite in $__Suites; do echo -n "--suite $suite "; done) \ + $__UbuntuPackages + + exit 0 + fi + + __UpdateOptions= + if [[ "$__SkipSigCheck" == "0" ]]; then + __Keyring="$__Keyring --force-check-gpg" + else + __Keyring= + __UpdateOptions="--allow-unauthenticated --allow-insecure-repositories" + fi + + # shellcheck disable=SC2086 + echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" + + # shellcheck disable=SC2086 + if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then + echo "debootstrap failed! 
dumping debootstrap.log" + cat "$__RootfsDir/debootstrap/debootstrap.log" + exit 1 + fi + + rm -rf "$__RootfsDir"/etc/apt/*.{sources,list} "$__RootfsDir"/etc/apt/sources.list.d + mkdir -p "$__RootfsDir/etc/apt/sources.list.d/" + + # shellcheck disable=SC2086 + cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" < token2) - (token1 < token2) + else: + return -1 if isinstance(token1, str) else 1 + + return len(tokens1) - len(tokens2) + +def compare_debian_versions(version1, version2): + """Compare two Debian package versions.""" + epoch1, upstream1, revision1 = parse_debian_version(version1) + epoch2, upstream2, revision2 = parse_debian_version(version2) + + if epoch1 != epoch2: + return epoch1 - epoch2 + + result = compare_upstream_version(upstream1, upstream2) + if result != 0: + return result + + return compare_upstream_version(revision1, revision2) + +def resolve_dependencies(packages, aliases, desired_packages): + """Recursively resolves dependencies for the desired packages.""" + resolved = [] + to_process = deque(desired_packages) + + while to_process: + current = to_process.popleft() + resolved_package = current if current in packages else aliases.get(current, [None])[0] + + if not resolved_package: + print(f"Error: Package '{current}' was not found in the available packages.") + sys.exit(1) + + if resolved_package not in resolved: + resolved.append(resolved_package) + + deps = packages.get(resolved_package, {}).get("Depends", "") + if deps: + deps = [dep.split(' ')[0] for dep in deps.split(', ') if dep] + for dep in deps: + if dep not in resolved and dep not in to_process and dep in packages: + to_process.append(dep) + + return resolved + +def parse_package_index(content): + """Parses the Packages.gz file and returns package information.""" + packages = {} + aliases = {} + entries = re.split(r'\n\n+', content) + + for entry in entries: + fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE)) + if "Package" in fields: + package_name = fields["Package"] + version = fields.get("Version") + filename = fields.get("Filename") + depends = fields.get("Depends") + provides = fields.get("Provides", None) + + # Only update if package_name is not in packages or if the new version is higher + if package_name not in packages or compare_debian_versions(version, packages[package_name]["Version"]) > 0: + packages[package_name] = { + "Version": version, + "Filename": filename, + "Depends": depends + } + + # Update aliases if package provides any alternatives + if provides: + provides_list = [x.strip() for x in provides.split(",")] + for alias in provides_list: + # Strip version specifiers + alias_name = re.sub(r'\s*\(=.*\)', '', alias) + if alias_name not in aliases: + aliases[alias_name] = [] + if package_name not in aliases[alias_name]: + aliases[alias_name].append(package_name) + + return packages, aliases + +def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, desired_packages): + """Downloads .deb files and extracts them.""" + resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages) + print(f"Resolved packages (including dependencies): {resolved_packages}") + + packages_to_download = {} + + for pkg in resolved_packages: + if pkg in packages_info: + packages_to_download[pkg] = packages_info[pkg] + + if pkg in aliases: + for alias in aliases[pkg]: + if alias in packages_info: + packages_to_download[alias] = packages_info[alias] + + asyncio.run(download_deb_files_parallel(mirror, packages_to_download, tmp_dir)) + + 
package_to_deb_file_map = {} + for pkg in resolved_packages: + pkg_info = packages_info.get(pkg) + if pkg_info: + deb_filename = pkg_info.get("Filename") + if deb_filename: + deb_file_path = os.path.join(tmp_dir, os.path.basename(deb_filename)) + package_to_deb_file_map[pkg] = deb_file_path + + for pkg in reversed(resolved_packages): + deb_file = package_to_deb_file_map.get(pkg) + if deb_file and os.path.exists(deb_file): + extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool) + + print("All done!") + +def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool): + """Extract .deb file contents""" + + os.makedirs(extract_dir, exist_ok=True) + + with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir: + result = subprocess.run(f"{ar_tool} t {os.path.abspath(deb_file)}", cwd=tmp_subdir, check=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + tar_filename = None + for line in result.stdout.decode().splitlines(): + if line.startswith("data.tar"): + tar_filename = line.strip() + break + + if not tar_filename: + raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.") + + tar_file_path = os.path.join(tmp_subdir, tar_filename) + print(f"Extracting {tar_filename} from {deb_file}..") + + subprocess.run(f"{ar_tool} p {os.path.abspath(deb_file)} {tar_filename} > {tar_file_path}", check=True, shell=True) + + file_extension = os.path.splitext(tar_file_path)[1].lower() + + if file_extension == ".xz": + mode = "r:xz" + elif file_extension == ".gz": + mode = "r:gz" + elif file_extension == ".zst": + # zstd is not supported by standard library yet + decompressed_tar_path = tar_file_path.replace(".zst", "") + with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file: + dctx = zstandard.ZstdDecompressor() + dctx.copy_stream(zst_file, decompressed_file) + + tar_file_path = decompressed_tar_path + mode = "r" + else: + raise ValueError(f"Unsupported compression format: {file_extension}") + + with tarfile.open(tar_file_path, mode) as tar: + tar.extractall(path=extract_dir, filter='fully_trusted') + +def finalize_setup(rootfsdir): + lib_dir = os.path.join(rootfsdir, 'lib') + usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib') + + if os.path.exists(lib_dir): + if os.path.islink(lib_dir): + os.remove(lib_dir) + else: + os.makedirs(usr_lib_dir, exist_ok=True) + + for item in os.listdir(lib_dir): + src = os.path.join(lib_dir, item) + dest = os.path.join(usr_lib_dir, item) + + if os.path.isdir(src): + shutil.copytree(src, dest, dirs_exist_ok=True) + else: + shutil.copy2(src, dest) + + shutil.rmtree(lib_dir) + + os.symlink(usr_lib_dir, lib_dir) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS") + parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)") + parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)") + parser.add_argument("--rootfsdir", required=True, help="Destination directory.") + parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.') + parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)") + parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)") + parser.add_argument("packages", nargs="+", help="List of package names to be installed.") + + args = parser.parse_args() + + if 
args.mirror is None: + if args.distro == "ubuntu": + args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports" + elif args.distro == "debian": + args.mirror = "http://ftp.debian.org/debian-ports" + else: + raise Exception("Unsupported distro") + + DESIRED_PACKAGES = args.packages + [ # base packages + "dpkg", + "busybox", + "libc-bin", + "base-files", + "base-passwd", + "debianutils" + ] + + print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}") + + package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite)) + + packages_info, aliases = parse_package_index(package_index_content) + + with tempfile.TemporaryDirectory() as tmp_dir: + install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES) + + finalize_setup(args.rootfsdir) diff --git a/src/arcade/eng/common/cross/riscv64/tizen/tizen.patch b/src/arcade/eng/common/cross/riscv64/tizen/tizen.patch new file mode 100644 index 000000000..eb6d1c074 --- /dev/null +++ b/src/arcade/eng/common/cross/riscv64/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf64-littleriscv) +-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-riscv64-lp64d.so.1 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-riscv64-lp64d.so.1 ) ) diff --git a/src/arcade/eng/common/cross/tizen-build-rootfs.sh b/src/arcade/eng/common/cross/tizen-build-rootfs.sh new file mode 100644 index 000000000..ba31c9328 --- /dev/null +++ b/src/arcade/eng/common/cross/tizen-build-rootfs.sh @@ -0,0 +1,82 @@ +#!/usr/bin/env bash +set -e + +ARCH=$1 +LINK_ARCH=$ARCH + +case "$ARCH" in + arm) + TIZEN_ARCH="armv7hl" + ;; + armel) + TIZEN_ARCH="armv7l" + LINK_ARCH="arm" + ;; + arm64) + TIZEN_ARCH="aarch64" + ;; + x86) + TIZEN_ARCH="i686" + ;; + x64) + TIZEN_ARCH="x86_64" + LINK_ARCH="x86" + ;; + riscv64) + TIZEN_ARCH="riscv64" + LINK_ARCH="riscv" + ;; + *) + echo "Unsupported architecture for tizen: $ARCH" + exit 1 +esac + +__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) +__TIZEN_CROSSDIR="$__CrossDir/${ARCH}/tizen" + +if [[ -z "$ROOTFS_DIR" ]]; then + echo "ROOTFS_DIR is not defined." 
+    exit 1;
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR $TIZEN_ARCH
+echo "<<End downloading files"
+
+echo ">>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+    rpm2cpio $f | cpio -idm --quiet
+done
+echo "<<End constructing Tizen rootfs"
+
+echo ">>Start configuring Tizen rootfs"
+ln -sfn asm-${LINK_ARCH} ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+if [[ "$TIZEN_ARCH" == "riscv64" ]]; then
+    echo "Fixing broken symlinks in $PWD"
+    rm ./usr/lib64/libresolv.so
+    ln -s ../../lib64/libresolv.so.2 ./usr/lib64/libresolv.so
+    rm ./usr/lib64/libpthread.so
+    ln -s ../../lib64/libpthread.so.0 ./usr/lib64/libpthread.so
+    rm ./usr/lib64/libdl.so
+    ln -s ../../lib64/libdl.so.2 ./usr/lib64/libdl.so
+    rm ./usr/lib64/libutil.so
+    ln -s ../../lib64/libutil.so.1 ./usr/lib64/libutil.so
+    rm ./usr/lib64/libm.so
+    ln -s ../../lib64/libm.so.6 ./usr/lib64/libm.so
+    rm ./usr/lib64/librt.so
+    ln -s ../../lib64/librt.so.1 ./usr/lib64/librt.so
+    rm ./lib/ld-linux-riscv64-lp64d.so.1
+    ln -s ../lib64/ld-linux-riscv64-lp64d.so.1 ./lib/ld-linux-riscv64-lp64d.so.1
+fi
+echo "</dev/null; then
+    VERBOSE=0
+fi
+
+Log()
+{
+    if [ $VERBOSE -ge $1 ]; then
+        echo ${@:2}
+    fi
+}
+
+Inform()
+{
+    Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+    Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+    >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+    URL=$1
+    FILE=$2
+    PROGRESS=$3
+    if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+        CURL_OPT="--progress-bar"
+    else
+        CURL_OPT="--silent"
+    fi
+    curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "Require 'curl' Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "Require 'xmllint' Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "Require 'sha256sum' Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ !
-d $TMPDIR ]; then + TMPDIR=./tizen_tmp + Debug "Create temporary directory : $TMPDIR" + mkdir -p $TMPDIR +fi + +TIZEN_ARCH=$2 + +TIZEN_URL=http://download.tizen.org/snapshots/TIZEN/Tizen +BUILD_XML=build.xml +REPOMD_XML=repomd.xml +PRIMARY_XML=primary.xml +TARGET_URL="http://__not_initialized" + +Xpath_get() +{ + XPATH_RESULT='' + XPATH=$1 + XML_FILE=$2 + RESULT=$(xmllint --xpath $XPATH $XML_FILE) + if [[ -z ${RESULT// } ]]; then + Error "Can not find target from $XML_FILE" + Debug "Xpath = $XPATH" + exit 1 + fi + XPATH_RESULT=$RESULT +} + +fetch_tizen_pkgs_init() +{ + TARGET=$1 + PROFILE=$2 + Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE" + + TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs + if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi + mkdir -p $TMP_PKG_DIR + + PKG_URL=$TIZEN_URL/$PROFILE/latest + + BUILD_XML_URL=$PKG_URL/$BUILD_XML + TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML + TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML + TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML + TMP_PRIMARYGZ=${TMP_PRIMARY}.gz + + Fetch $BUILD_XML_URL $TMP_BUILD + + Debug "fetch $BUILD_XML_URL to $TMP_BUILD" + + TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()" + Xpath_get $TARGET_XPATH $TMP_BUILD + TARGET_PATH=$XPATH_RESULT + TARGET_URL=$PKG_URL/$TARGET_PATH + + REPOMD_URL=$TARGET_URL/repodata/repomd.xml + PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)' + + Fetch $REPOMD_URL $TMP_REPOMD + + Debug "fetch $REPOMD_URL to $TMP_REPOMD" + + Xpath_get $PRIMARY_XPATH $TMP_REPOMD + PRIMARY_XML_PATH=$XPATH_RESULT + PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH + + Fetch $PRIMARY_URL $TMP_PRIMARYGZ + + Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ" + + gunzip $TMP_PRIMARYGZ + + Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY" +} + +fetch_tizen_pkgs() +{ + ARCH=$1 + PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)' + + PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())' + + for pkg in ${@:2} + do + Inform "Fetching... $pkg" + XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + PKG_PATH=$XPATH_RESULT + + XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg} + XPATH=${XPATH/_ARCH_/$ARCH} + Xpath_get $XPATH $TMP_PRIMARY + CHECKSUM=$XPATH_RESULT + + PKG_URL=$TARGET_URL/$PKG_PATH + PKG_FILE=$(basename $PKG_PATH) + PKG_PATH=$TMPDIR/$PKG_FILE + + Debug "Download $PKG_URL to $PKG_PATH" + Fetch $PKG_URL $PKG_PATH true + + echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null + if [ $? 
-ne 0 ]; then + Error "Fail to fetch $PKG_URL to $PKG_PATH" + Debug "Checksum = $CHECKSUM" + exit 1 + fi + done +} + +BASE="Tizen-Base" +UNIFIED="Tizen-Unified" + +Inform "Initialize ${TIZEN_ARCH} base" +fetch_tizen_pkgs_init standard $BASE +Inform "fetch common packages" +fetch_tizen_pkgs ${TIZEN_ARCH} gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils +Inform "fetch coreclr packages" +fetch_tizen_pkgs ${TIZEN_ARCH} libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu +if [ "$TIZEN_ARCH" != "riscv64" ]; then + fetch_tizen_pkgs ${TIZEN_ARCH} lldb lldb-devel +fi +Inform "fetch corefx packages" +fetch_tizen_pkgs ${TIZEN_ARCH} libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel + +Inform "Initialize standard unified" +fetch_tizen_pkgs_init standard $UNIFIED +Inform "fetch corefx packages" +fetch_tizen_pkgs ${TIZEN_ARCH} gssdp gssdp-devel tizen-release + diff --git a/src/arcade/eng/common/cross/toolchain.cmake b/src/arcade/eng/common/cross/toolchain.cmake new file mode 100644 index 000000000..0ff85cf03 --- /dev/null +++ b/src/arcade/eng/common/cross/toolchain.cmake @@ -0,0 +1,387 @@ +set(CROSS_ROOTFS $ENV{ROOTFS_DIR}) + +# reset platform variables (e.g. cmake 3.25 sets LINUX=1) +unset(LINUX) +unset(FREEBSD) +unset(ILLUMOS) +unset(ANDROID) +unset(TIZEN) +unset(HAIKU) + +set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH}) +if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version) + set(CMAKE_SYSTEM_NAME FreeBSD) + set(FREEBSD 1) +elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc) + set(CMAKE_SYSTEM_NAME SunOS) + set(ILLUMOS 1) +elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h) + set(CMAKE_SYSTEM_NAME Haiku) + set(HAIKU 1) +else() + set(CMAKE_SYSTEM_NAME Linux) + set(LINUX 1) +endif() +set(CMAKE_SYSTEM_VERSION 1) + +if(EXISTS ${CROSS_ROOTFS}/etc/tizen-release) + set(TIZEN 1) +elseif(EXISTS ${CROSS_ROOTFS}/android_platform) + set(ANDROID 1) +endif() + +if(TARGET_ARCH_NAME STREQUAL "arm") + set(CMAKE_SYSTEM_PROCESSOR armv7l) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf) + set(TOOLCHAIN "armv7-alpine-linux-musleabihf") + elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) + set(TOOLCHAIN "armv6-alpine-linux-musleabihf") + else() + set(TOOLCHAIN "arm-linux-gnueabihf") + endif() + if(TIZEN) + set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "arm64") + set(CMAKE_SYSTEM_PROCESSOR aarch64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl) + set(TOOLCHAIN "aarch64-alpine-linux-musl") + elseif(LINUX) + set(TOOLCHAIN "aarch64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu") + endif() + elseif(FREEBSD) + set(triple "aarch64-unknown-freebsd12") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "armel") + set(CMAKE_SYSTEM_PROCESSOR armv7l) + set(TOOLCHAIN "arm-linux-gnueabi") + if(TIZEN) + set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "armv6") + set(CMAKE_SYSTEM_PROCESSOR armv6l) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf) + set(TOOLCHAIN "armv6-alpine-linux-musleabihf") + else() + set(TOOLCHAIN "arm-linux-gnueabihf") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "loongarch64") + set(CMAKE_SYSTEM_PROCESSOR "loongarch64") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/loongarch64-alpine-linux-musl) + set(TOOLCHAIN 
"loongarch64-alpine-linux-musl") + else() + set(TOOLCHAIN "loongarch64-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "ppc64le") + set(CMAKE_SYSTEM_PROCESSOR ppc64le) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/powerpc64le-alpine-linux-musl) + set(TOOLCHAIN "powerpc64le-alpine-linux-musl") + else() + set(TOOLCHAIN "powerpc64le-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "riscv64") + set(CMAKE_SYSTEM_PROCESSOR riscv64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/riscv64-alpine-linux-musl) + set(TOOLCHAIN "riscv64-alpine-linux-musl") + else() + set(TOOLCHAIN "riscv64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "riscv64-tizen-linux-gnu") + endif() + endif() +elseif(TARGET_ARCH_NAME STREQUAL "s390x") + set(CMAKE_SYSTEM_PROCESSOR s390x) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/s390x-alpine-linux-musl) + set(TOOLCHAIN "s390x-alpine-linux-musl") + else() + set(TOOLCHAIN "s390x-linux-gnu") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x64") + set(CMAKE_SYSTEM_PROCESSOR x86_64) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/x86_64-alpine-linux-musl) + set(TOOLCHAIN "x86_64-alpine-linux-musl") + elseif(LINUX) + set(TOOLCHAIN "x86_64-linux-gnu") + if(TIZEN) + set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu") + endif() + elseif(FREEBSD) + set(triple "x86_64-unknown-freebsd12") + elseif(ILLUMOS) + set(TOOLCHAIN "x86_64-illumos") + elseif(HAIKU) + set(TOOLCHAIN "x86_64-unknown-haiku") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "x86") + set(CMAKE_SYSTEM_PROCESSOR i686) + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + set(TOOLCHAIN "i586-alpine-linux-musl") + else() + set(TOOLCHAIN "i686-linux-gnu") + endif() + if(TIZEN) + set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu") + endif() +else() + message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64 and x86 are supported!") +endif() + +if(DEFINED ENV{TOOLCHAIN}) + set(TOOLCHAIN $ENV{TOOLCHAIN}) +endif() + +# Specify include paths +if(TIZEN) + function(find_toolchain_dir prefix) + # Dynamically find the version subdirectory + file(GLOB DIRECTORIES "${prefix}/*") + list(GET DIRECTORIES 0 FIRST_MATCH) + get_filename_component(TOOLCHAIN_VERSION ${FIRST_MATCH} NAME) + + set(TIZEN_TOOLCHAIN_PATH "${prefix}/${TOOLCHAIN_VERSION}" PARENT_SCOPE) + endfunction() + + if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") + find_toolchain_dir("${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}") + else() + find_toolchain_dir("${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}") + endif() + + message(STATUS "TIZEN_TOOLCHAIN_PATH set to: ${TIZEN_TOOLCHAIN_PATH}") + + include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++) + include_directories(SYSTEM ${TIZEN_TOOLCHAIN_PATH}/include/c++/${TIZEN_TOOLCHAIN}) +endif() + +function(locate_toolchain_exec exec var) + set(TOOLSET_PREFIX ${TOOLCHAIN}-) + string(TOUPPER ${exec} EXEC_UPPERCASE) + if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "") + set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE) + return() + endif() + + find_program(EXEC_LOCATION_${exec} + NAMES + "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}" + "${TOOLSET_PREFIX}${exec}") + + if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND") + message(FATAL_ERROR "Unable to find toolchain executable. 
Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.") + endif() + set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE) +endfunction() + +if(ANDROID) + if(TARGET_ARCH_NAME STREQUAL "arm") + set(ANDROID_ABI armeabi-v7a) + elseif(TARGET_ARCH_NAME STREQUAL "arm64") + set(ANDROID_ABI arm64-v8a) + endif() + + # extract platform number required by the NDK's toolchain + file(READ "${CROSS_ROOTFS}/android_platform" RID_FILE_CONTENTS) + string(REPLACE "RID=" "" ANDROID_RID "${RID_FILE_CONTENTS}") + string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "${ANDROID_RID}") + + set(ANDROID_TOOLCHAIN clang) + set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository + set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib") + set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include") + + # include official NDK toolchain script + include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake) +elseif(FREEBSD) + # we cross-compile by instructing clang + set(CMAKE_C_COMPILER_TARGET ${triple}) + set(CMAKE_CXX_COMPILER_TARGET ${triple}) + set(CMAKE_ASM_COMPILER_TARGET ${triple}) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld") +elseif(ILLUMOS) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") + + include_directories(SYSTEM ${CROSS_ROOTFS}/include) + + locate_toolchain_exec(gcc CMAKE_C_COMPILER) + locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) +elseif(HAIKU) + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + set(CMAKE_PROGRAM_PATH "${CMAKE_PROGRAM_PATH};${CROSS_ROOTFS}/cross-tools-x86_64/bin") + set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}") + set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp") + set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp") + + locate_toolchain_exec(gcc CMAKE_C_COMPILER) + locate_toolchain_exec(g++ CMAKE_CXX_COMPILER) + + # let CMake set up the correct search paths + include(Platform/Haiku) +else() + set(CMAKE_SYSROOT "${CROSS_ROOTFS}") + + set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") + set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") + set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr") +endif() + +# Specify link flags + +function(add_toolchain_linker_flag Flag) + set(Config "${ARGV1}") + set(CONFIG_SUFFIX "") + if (NOT Config STREQUAL "") + set(CONFIG_SUFFIX "_${Config}") + endif() + set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) + set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE) +endfunction() + +if(LINUX) + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}") +endif() + +if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") + if(TIZEN) + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") + endif() +elseif(TARGET_ARCH_NAME MATCHES "^(arm64|x64|riscv64)$") 
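+  # For Tizen, point the linker at the sysroot's lib64 directories, both for direct library
+  # search (-L) and for resolving transitive shared-library dependencies (-Wl,--rpath-link).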
+ if(TIZEN) + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") + + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64") + add_toolchain_linker_flag("-Wl,--rpath-link=${TIZEN_TOOLCHAIN_PATH}") + endif() +elseif(TARGET_ARCH_NAME STREQUAL "s390x") + add_toolchain_linker_flag("--target=${TOOLCHAIN}") +elseif(TARGET_ARCH_NAME STREQUAL "x86") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + add_toolchain_linker_flag("--target=${TOOLCHAIN}") + add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/gcc/${TOOLCHAIN}") + endif() + add_toolchain_linker_flag(-m32) + if(TIZEN) + add_toolchain_linker_flag("-B${TIZEN_TOOLCHAIN_PATH}") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib") + add_toolchain_linker_flag("-L${TIZEN_TOOLCHAIN_PATH}") + endif() +elseif(ILLUMOS) + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64") + add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib") +elseif(HAIKU) + add_toolchain_linker_flag("-lnetwork") + add_toolchain_linker_flag("-lroot") +endif() + +# Specify compile options + +if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|loongarch64|ppc64le|riscv64|s390x|x64|x86)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU) + set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN}) + set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN}) + set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN}) +endif() + +if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$") + add_compile_options(-mthumb) + if (NOT DEFINED CLR_ARM_FPU_TYPE) + set (CLR_ARM_FPU_TYPE vfpv3) + endif (NOT DEFINED CLR_ARM_FPU_TYPE) + + add_compile_options (-mfpu=${CLR_ARM_FPU_TYPE}) + if (NOT DEFINED CLR_ARM_FPU_CAPABILITY) + set (CLR_ARM_FPU_CAPABILITY 0x7) + endif (NOT DEFINED CLR_ARM_FPU_CAPABILITY) + + add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY}) + + # persist variables across multiple try_compile passes + list(APPEND CMAKE_TRY_COMPILE_PLATFORM_VARIABLES CLR_ARM_FPU_TYPE CLR_ARM_FPU_CAPABILITY) + + if(TARGET_ARCH_NAME STREQUAL "armel") + add_compile_options(-mfloat-abi=softfp) + endif() +elseif(TARGET_ARCH_NAME STREQUAL "s390x") + add_compile_options("--target=${TOOLCHAIN}") +elseif(TARGET_ARCH_NAME STREQUAL "x86") + if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/i586-alpine-linux-musl) + add_compile_options(--target=${TOOLCHAIN}) + endif() + add_compile_options(-m32) + add_compile_options(-Wno-error=unused-command-line-argument) +endif() + +if(TIZEN) + if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|x86)$") + add_compile_options(-Wno-deprecated-declarations) # compile-time option + add_compile_options(-D__extern_always_inline=inline) # compile-time option + endif() +endif() + +# Set LLDB include and library paths for builds that need lldb. 
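+# When LLVM_CROSS_HOME (x86) or LLVM_ARM_HOME (arm/armel) points at a prebuilt LLVM, take the lldb
+# headers and liblldb from there; otherwise fall back to the llvm-3.x headers and libraries inside
+# the cross rootfs.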
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$") + if(TARGET_ARCH_NAME STREQUAL "x86") + set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}") + else() # arm/armel case + set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}") + endif() + if(LLVM_CROSS_DIR) + set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "") + set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "") + set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "") + set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "") + else() + if(TARGET_ARCH_NAME STREQUAL "x86") + set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "") + set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include") + if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}") + set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}") + else() + set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include") + endif() + else() # arm/armel case + set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "") + set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "") + endif() + endif() +endif() + +# Set C++ standard library options if specified +set(CLR_CMAKE_CXX_STANDARD_LIBRARY "" CACHE STRING "Standard library flavor to link against. Only supported with the Clang compiler.") +if (CLR_CMAKE_CXX_STANDARD_LIBRARY) + add_compile_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>) + add_link_options($<$:--stdlib=${CLR_CMAKE_CXX_STANDARD_LIBRARY}>) +endif() + +option(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC "Statically link against the C++ standard library" OFF) +if(CLR_CMAKE_CXX_STANDARD_LIBRARY_STATIC) + add_link_options($<$:-static-libstdc++>) +endif() + +set(CLR_CMAKE_CXX_ABI_LIBRARY "" CACHE STRING "C++ ABI implementation library to link against. Only supported with the Clang compiler.") +if (CLR_CMAKE_CXX_ABI_LIBRARY) + # The user may specify the ABI library with the 'lib' prefix, like 'libstdc++'. Strip the prefix here so the linker finds the right library. + string(REGEX REPLACE "^lib(.+)" "\\1" CLR_CMAKE_CXX_ABI_LIBRARY ${CLR_CMAKE_CXX_ABI_LIBRARY}) + # We need to specify this as a linker-backend option as Clang will filter this option out when linking to libc++. + add_link_options("LINKER:-l${CLR_CMAKE_CXX_ABI_LIBRARY}") +endif() + +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/src/arcade/eng/common/darc-init.ps1 b/src/arcade/eng/common/darc-init.ps1 new file mode 100644 index 000000000..e33743105 --- /dev/null +++ b/src/arcade/eng/common/darc-init.ps1 @@ -0,0 +1,47 @@ +param ( + $darcVersion = $null, + $versionEndpoint = 'https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20', + $verbosity = 'minimal', + $toolpath = $null +) + +. $PSScriptRoot\tools.ps1 + +function InstallDarcCli ($darcVersion, $toolpath) { + $darcCliPackageName = 'microsoft.dotnet.darc' + + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + $toolList = & "$dotnet" tool list -g + + if ($toolList -like "*$darcCliPackageName*") { + & "$dotnet" tool uninstall $darcCliPackageName -g + } + + # If the user didn't explicitly specify the darc version, + # query the Maestro API for the correct version of darc to install. 
+ if (-not $darcVersion) { + $darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content + } + + $arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json' + + Write-Host "Installing Darc CLI version $darcVersion..." + Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' + if (-not $toolpath) { + Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g" + & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g + }else { + Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'" + & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath" + } +} + +try { + InstallDarcCli $darcVersion $toolpath +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Darc' -Message $_ + ExitWithExitCode 1 +} \ No newline at end of file diff --git a/src/arcade/eng/common/dotnet-install.cmd b/src/arcade/eng/common/dotnet-install.cmd new file mode 100644 index 000000000..b1c2642e7 --- /dev/null +++ b/src/arcade/eng/common/dotnet-install.cmd @@ -0,0 +1,2 @@ +@echo off +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*" \ No newline at end of file diff --git a/src/arcade/eng/common/dotnet-install.ps1 b/src/arcade/eng/common/dotnet-install.ps1 new file mode 100644 index 000000000..811f0f717 --- /dev/null +++ b/src/arcade/eng/common/dotnet-install.ps1 @@ -0,0 +1,28 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string] $verbosity = 'minimal', + [string] $architecture = '', + [string] $version = 'Latest', + [string] $runtime = 'dotnet', + [string] $RuntimeSourceFeed = '', + [string] $RuntimeSourceFeedKey = '' +) + +. $PSScriptRoot\tools.ps1 + +$dotnetRoot = Join-Path $RepoRoot '.dotnet' + +$installdir = $dotnetRoot +try { + if ($architecture -and $architecture.Trim() -eq 'x86') { + $installdir = Join-Path $installdir 'x86' + } + InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + ExitWithExitCode 1 +} + +ExitWithExitCode 0 diff --git a/src/arcade/eng/common/dotnet-install.sh b/src/arcade/eng/common/dotnet-install.sh new file mode 100644 index 000000000..7b9d97e3b --- /dev/null +++ b/src/arcade/eng/common/dotnet-install.sh @@ -0,0 +1,94 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. 
"$scriptroot/tools.sh" + +version='Latest' +architecture='' +runtime='dotnet' +runtimeSourceFeed='' +runtimeSourceFeedKey='' +while [[ $# > 0 ]]; do + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -version|-v) + shift + version="$1" + ;; + -architecture|-a) + shift + architecture="$1" + ;; + -runtime|-r) + shift + runtime="$1" + ;; + -runtimesourcefeed) + shift + runtimeSourceFeed="$1" + ;; + -runtimesourcefeedkey) + shift + runtimeSourceFeedKey="$1" + ;; + *) + Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1" + exit 1 + ;; + esac + shift +done + +# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples +cpuname=$(uname -m) +case $cpuname in + arm64|aarch64) + buildarch=arm64 + if [ "$(getconf LONG_BIT)" -lt 64 ]; then + # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) + buildarch=arm + fi + ;; + loongarch64) + buildarch=loongarch64 + ;; + amd64|x86_64) + buildarch=x64 + ;; + armv*l) + buildarch=arm + ;; + i[3-6]86) + buildarch=x86 + ;; + riscv64) + buildarch=riscv64 + ;; + *) + echo "Unknown CPU $cpuname detected, treating it as x64" + buildarch=x64 + ;; +esac + +dotnetRoot="${repo_root}.dotnet" +if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then + dotnetRoot="$dotnetRoot/$architecture" +fi + +InstallDotNet "$dotnetRoot" $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || { + local exit_code=$? + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2 + ExitWithExitCode $exit_code +} + +ExitWithExitCode 0 diff --git a/src/arcade/eng/common/enable-cross-org-publishing.ps1 b/src/arcade/eng/common/enable-cross-org-publishing.ps1 new file mode 100644 index 000000000..da09da4f1 --- /dev/null +++ b/src/arcade/eng/common/enable-cross-org-publishing.ps1 @@ -0,0 +1,13 @@ +param( + [string] $token +) + + +. $PSScriptRoot\pipeline-logging-functions.ps1 + +# Write-PipelineSetVariable will no-op if a variable named $ci is not defined +# Since this script is only ever called in AzDO builds, just universally set it +$ci = $true + +Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false +Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false diff --git a/src/arcade/eng/common/generate-locproject.ps1 b/src/arcade/eng/common/generate-locproject.ps1 new file mode 100644 index 000000000..524aaa57f --- /dev/null +++ b/src/arcade/eng/common/generate-locproject.ps1 @@ -0,0 +1,189 @@ +Param( + [Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here + [string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json + [switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one + [switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally +) + +# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here: +# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task + +Set-StrictMode -Version 2.0 +$ErrorActionPreference = "Stop" +. 
$PSScriptRoot\pipeline-logging-functions.ps1 + +$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json" +$exclusions = @{ Exclusions = @() } +if (Test-Path -Path $exclusionsFilePath) +{ + $exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json +} + +Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work + +# Template files +$jsonFiles = @() +$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern +$jsonTemplateFiles | ForEach-Object { + $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json + + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json" + $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru +} + +$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern + +$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them +if (-not $wxlFiles) { + $wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files + if ($wxlEnFiles) { + $wxlFiles = @() + $wxlEnFiles | ForEach-Object { + $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" + $wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru + } + } +} + +$macosHtmlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\.lproj\\.+\.html$" } # add installer HTML files +$macosHtmlFiles = @() +if ($macosHtmlEnFiles) { + $macosHtmlEnFiles | ForEach-Object { + $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" + $macosHtmlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru + } +} + +$xlfFiles = @() + +$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf" +$langXlfFiles = @() +if ($allXlfFiles) { + $null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches '[langcode].xlf' + $firstLangCode = $Matches.1 + $langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf" +} +$langXlfFiles | ForEach-Object { + $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf + + $destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf" + $xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru +} + +$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles + +$locJson = @{ + Projects = @( + @{ + LanguageSet = $LanguageSet + LocItems = @( + $locFiles | ForEach-Object { + $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) + { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') { + Remove-Item -Path $sourceFile + } + if ($continue) + { + if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\" + } + } else { + 
return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnName" + OutputPath = $outputPath + } + } + } + } + ) + }, + @{ + LanguageSet = $LanguageSet + CloneLanguageSet = "WiX_CloneLanguages" + LssFiles = @( "wxl_loc.lss" ) + LocItems = @( + $wxlFiles | ForEach-Object { + $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if ($continue) + { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = $outputPath + } + } + } + ) + }, + @{ + LanguageSet = $LanguageSet + CloneLanguageSet = "VS_macOS_CloneLanguages" + LssFiles = @( ".\eng\common\loc\P22DotNetHtmlLocalization.lss" ) + LocItems = @( + $macosHtmlFiles | ForEach-Object { + $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + $lciFile = $sourceFile + ".lci" + if ($continue) { + $result = @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = $outputPath + } + if (Test-Path $lciFile -PathType Leaf) { + $result["LciFile"] = $lciFile + } + return $result + } + } + ) + } + ) +} + +$json = ConvertTo-Json $locJson -Depth 5 +Write-Host "LocProject.json generated:`n`n$json`n`n" +Pop-Location + +if (!$UseCheckedInLocProjectJson) { + New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created + Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json +} +else { + New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created + Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json + + if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) { + Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them." + + exit 1 + } + else { + Write-Host "Generated LocProject.json and current LocProject.json are identical." + } +} diff --git a/src/arcade/eng/common/generate-sbom-prep.ps1 b/src/arcade/eng/common/generate-sbom-prep.ps1 new file mode 100644 index 000000000..a0c7d792a --- /dev/null +++ b/src/arcade/eng/common/generate-sbom-prep.ps1 @@ -0,0 +1,29 @@ +Param( + [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed +) + +. $PSScriptRoot\pipeline-logging-functions.ps1 + +# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly +# with their own overwriting ours. So we create it as a sub directory of the requested manifest path. 
+$ArtifactName = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" +$SafeArtifactName = $ArtifactName -replace '["/:<>\\|?@*"() ]', '_' +$SbomGenerationDir = Join-Path $ManifestDirPath $SafeArtifactName + +Write-Host "Artifact name before : $ArtifactName" +Write-Host "Artifact name after : $SafeArtifactName" + +Write-Host "Creating dir $ManifestDirPath" + +# create directory for sbom manifest to be placed +if (!(Test-Path -path $SbomGenerationDir)) +{ + New-Item -ItemType Directory -path $SbomGenerationDir + Write-Host "Successfully created directory $SbomGenerationDir" +} +else{ + Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." +} + +Write-Host "Updating artifact name" +Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$SafeArtifactName" diff --git a/src/arcade/eng/common/generate-sbom-prep.sh b/src/arcade/eng/common/generate-sbom-prep.sh new file mode 100644 index 000000000..b8ecca72b --- /dev/null +++ b/src/arcade/eng/common/generate-sbom-prep.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" +. $scriptroot/pipeline-logging-functions.sh + + +# replace all special characters with _, some builds use special characters like : in Agent.Jobname, that is not a permissible name while uploading artifacts. +artifact_name=$SYSTEM_STAGENAME"_"$AGENT_JOBNAME"_SBOM" +safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}" +manifest_dir=$1 + +# Normally - we'd listen to the manifest path given, but 1ES templates will overwrite if this level gets uploaded directly +# with their own overwriting ours. So we create it as a sub directory of the requested manifest path. +sbom_generation_dir="$manifest_dir/$safe_artifact_name" + +if [ ! -d "$sbom_generation_dir" ] ; then + mkdir -p "$sbom_generation_dir" + echo "Sbom directory created." $sbom_generation_dir +else + Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder." 
+fi + +echo "Artifact name before : "$artifact_name +echo "Artifact name after : "$safe_artifact_name +export ARTIFACT_NAME=$safe_artifact_name +echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name" + +exit 0 diff --git a/src/arcade/eng/common/helixpublish.proj b/src/arcade/eng/common/helixpublish.proj new file mode 100644 index 000000000..c1323bf41 --- /dev/null +++ b/src/arcade/eng/common/helixpublish.proj @@ -0,0 +1,27 @@ + + + + + msbuild + + + + + %(Identity) + + + + + + $(WorkItemDirectory) + $(WorkItemCommand) + $(WorkItemTimeout) + + + + + + + + + diff --git a/src/arcade/eng/common/init-tools-native.cmd b/src/arcade/eng/common/init-tools-native.cmd new file mode 100644 index 000000000..438cd548c --- /dev/null +++ b/src/arcade/eng/common/init-tools-native.cmd @@ -0,0 +1,3 @@ +@echo off +powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*" +exit /b %ErrorLevel% \ No newline at end of file diff --git a/src/arcade/eng/common/init-tools-native.ps1 b/src/arcade/eng/common/init-tools-native.ps1 new file mode 100644 index 000000000..27ccdb9ec --- /dev/null +++ b/src/arcade/eng/common/init-tools-native.ps1 @@ -0,0 +1,203 @@ +<# +.SYNOPSIS +Entry point script for installing native tools + +.DESCRIPTION +Reads $RepoRoot\global.json file to determine native assets to install +and executes installers for those tools + +.PARAMETER BaseUri +Base file directory or Url from which to acquire tool archives + +.PARAMETER InstallDirectory +Directory to install native toolset. This is a command-line override for the default +Install directory precedence order: +- InstallDirectory command-line override +- NETCOREENG_INSTALL_DIRECTORY environment variable +- (default) %USERPROFILE%/.netcoreeng/native + +.PARAMETER Clean +Switch specifying to not install anything, but cleanup native asset folders + +.PARAMETER Force +Clean and then install tools + +.PARAMETER DownloadRetries +Total number of retry attempts + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds + +.PARAMETER GlobalJsonFile +File path to global.json file + +.PARAMETER PathPromotion +Optional switch to enable either promote native tools specified in the global.json to the path (in Azure Pipelines) +or break the build if a native tool is not found on the path (on a local dev machine) + +.NOTES +#> +[CmdletBinding(PositionalBinding=$false)] +Param ( + [string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external', + [string] $InstallDirectory, + [switch] $Clean = $False, + [switch] $Force = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30, + [string] $GlobalJsonFile, + [switch] $PathPromotion +) + +if (!$GlobalJsonFile) { + $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json' +} + +Set-StrictMode -version 2.0 +$ErrorActionPreference='Stop' + +. 
$PSScriptRoot\pipeline-logging-functions.ps1 +Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1') + +try { + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq 'Continue' + + $EngCommonBaseDir = Join-Path $PSScriptRoot 'native\' + $NativeBaseDir = $InstallDirectory + if (!$NativeBaseDir) { + $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory + } + $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir + $InstallBin = Join-Path $NativeBaseDir 'bin' + $InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1' + + # Process tools list + Write-Host "Processing $GlobalJsonFile" + If (-Not (Test-Path $GlobalJsonFile)) { + Write-Host "Unable to find '$GlobalJsonFile'" + exit 0 + } + $NativeTools = Get-Content($GlobalJsonFile) -Raw | + ConvertFrom-Json | + Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue + if ($NativeTools) { + if ($PathPromotion -eq $True) { + $ArcadeToolsDirectory = "$env:SYSTEMDRIVE\arcade-tools" + if (Test-Path $ArcadeToolsDirectory) { # if this directory exists, we should use native tools on machine + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + $InstalledTools = @{} + + if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { + if ($ToolVersion -eq "latest") { + $ToolVersion = "" + } + $ToolDirectories = (Get-ChildItem -Path "$ArcadeToolsDirectory" -Filter "$ToolName-$ToolVersion*" | Sort-Object -Descending) + if ($ToolDirectories -eq $null) { + Write-Error "Unable to find directory for $ToolName $ToolVersion; please make sure the tool is installed on this image." + exit 1 + } + $ToolDirectory = $ToolDirectories[0] + $BinPathFile = "$($ToolDirectory.FullName)\binpath.txt" + if (-not (Test-Path -Path "$BinPathFile")) { + Write-Error "Unable to find binpath.txt in '$($ToolDirectory.FullName)' ($ToolName $ToolVersion); artifact is either installed incorrectly or is not a bootstrappable tool." + exit 1 + } + $BinPath = Get-Content "$BinPathFile" + $ToolPath = Convert-Path -Path $BinPath + Write-Host "Adding $ToolName to the path ($ToolPath)..." + Write-Host "##vso[task.prependpath]$ToolPath" + $env:PATH = "$ToolPath;$env:PATH" + $InstalledTools += @{ $ToolName = $ToolDirectory.FullName } + } + } + return $InstalledTools + } else { + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + + if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) { + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "$ToolName not found on path. Please install $ToolName $ToolVersion before proceeding." + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "If this is running on a build machine, the arcade-tools directory was not found, which means there's an error with the image." 
+ } + } + exit 0 + } + } else { + $NativeTools.PSObject.Properties | ForEach-Object { + $ToolName = $_.Name + $ToolVersion = $_.Value + $LocalInstallerArguments = @{ ToolName = "$ToolName" } + $LocalInstallerArguments += @{ InstallPath = "$InstallBin" } + $LocalInstallerArguments += @{ BaseUri = "$BaseUri" } + $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" } + $LocalInstallerArguments += @{ Version = "$ToolVersion" } + + if ($Verbose) { + $LocalInstallerArguments += @{ Verbose = $True } + } + if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') { + if($Force) { + $LocalInstallerArguments += @{ Force = $True } + } + } + if ($Clean) { + $LocalInstallerArguments += @{ Clean = $True } + } + + Write-Verbose "Installing $ToolName version $ToolVersion" + Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'" + & $InstallerPath @LocalInstallerArguments + if ($LASTEXITCODE -Ne "0") { + $errMsg = "$ToolName installation failed" + if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) { + $showNativeToolsWarning = $true + if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) { + $showNativeToolsWarning = $false + } + if ($showNativeToolsWarning) { + Write-Warning $errMsg + } + $toolInstallationFailure = $true + } else { + # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482 + Write-Host $errMsg + exit 1 + } + } + } + + if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) { + # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482 + Write-Host 'Native tools bootstrap failed' + exit 1 + } + } + } + else { + Write-Host 'No native tools defined in global.json' + exit 0 + } + + if ($Clean) { + exit 0 + } + if (Test-Path $InstallBin) { + Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin) + Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)" + return $InstallBin + } + elseif (-not ($PathPromotion)) { + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed' + exit 1 + } + exit 0 +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/init-tools-native.sh b/src/arcade/eng/common/init-tools-native.sh new file mode 100644 index 000000000..3e6a8d6ac --- /dev/null +++ b/src/arcade/eng/common/init-tools-native.sh @@ -0,0 +1,238 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external' +install_directory='' +clean=false +force=false +download_retries=5 +retry_wait_time_seconds=30 +global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json" +declare -a native_assets + +. $scriptroot/pipeline-logging-functions.sh +. 
$scriptroot/native/common-library.sh + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --baseuri) + base_uri=$2 + shift 2 + ;; + --installdirectory) + install_directory=$2 + shift 2 + ;; + --clean) + clean=true + shift 1 + ;; + --force) + force=true + shift 1 + ;; + --donotabortonfailure) + donotabortonfailure=true + shift 1 + ;; + --donotdisplaywarnings) + donotdisplaywarnings=true + shift 1 + ;; + --downloadretries) + download_retries=$2 + shift 2 + ;; + --retrywaittimeseconds) + retry_wait_time_seconds=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --installdirectory Directory to install native toolset." + echo " This is a command-line override for the default" + echo " Install directory precedence order:" + echo " - InstallDirectory command-line override" + echo " - NETCOREENG_INSTALL_DIRECTORY environment variable" + echo " - (default) %USERPROFILE%/.netcoreeng/native" + echo "" + echo " --clean Switch specifying not to install anything, but cleanup native asset folders" + echo " --donotabortonfailure Switch specifiying whether to abort native tools installation on failure" + echo " --donotdisplaywarnings Switch specifiying whether to display warnings during native tools installation on failure" + echo " --force Clean and then install tools" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --baseuri Base URI for where to download native tools from" + echo " --downloadretries Number of times a download should be attempted" + echo " --retrywaittimeseconds Wait time between download attempts" + echo "" + exit 0 + ;; + esac +done + +function ReadGlobalJsonNativeTools { + # happy path: we have a proper JSON parsing tool `jq(1)` in PATH! + if command -v jq &> /dev/null; then + + # jq: read each key/value pair under "native-tools" entry and emit: + # KEY="" VALUE="" + # followed by a null byte. + # + # bash: read line with null byte delimeter and push to array (for later `eval`uation). + + while IFS= read -rd '' line; do + native_assets+=("$line") + done < <(jq -r '. | + select(has("native-tools")) | + ."native-tools" | + keys[] as $k | + @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' "$global_json_file") + + return + fi + + # Warning: falling back to manually parsing JSON, which is not recommended. + + # Following routine matches the output and escaping logic of jq(1)'s @sh formatter used above. + # It has been tested with several weird strings with escaped characters in entries (key and value) + # and results were compared with the output of jq(1) in binary representation using xxd(1); + # just before the assignment to 'native_assets' array (above and below). + + # try to capture the section under "native-tools". + if [[ ! "$(cat "$global_json_file")" =~ \"native-tools\"[[:space:]\:\{]*([^\}]+) ]]; then + return + fi + + section="${BASH_REMATCH[1]}" + + parseStarted=0 + possibleEnd=0 + escaping=0 + escaped=0 + isKey=1 + + for (( i=0; i<${#section}; i++ )); do + char="${section:$i:1}" + if ! ((parseStarted)) && [[ "$char" =~ [[:space:],:] ]]; then continue; fi + + if ! ((escaping)) && [[ "$char" == "\\" ]]; then + escaping=1 + elif ((escaping)) && ! ((escaped)); then + escaped=1 + fi + + if ! ((parseStarted)) && [[ "$char" == "\"" ]]; then + parseStarted=1 + possibleEnd=0 + elif [[ "$char" == "'" ]]; then + token="$token'\\\''" + possibleEnd=0 + elif ((escaping)) || [[ "$char" != "\"" ]]; then + token="$token$char" + possibleEnd=1 + fi + + if ((possibleEnd)) && ! 
((escaping)) && [[ "$char" == "\"" ]]; then + # Use printf to unescape token to match jq(1)'s @sh formatting rules. + # do not use 'token="$(printf "$token")"' syntax, as $() eats the trailing linefeed. + printf -v token "'$token'" + + if ((isKey)); then + KEY="$token" + isKey=0 + else + line="KEY=$KEY VALUE=$token" + native_assets+=("$line") + isKey=1 + fi + + # reset for next token + parseStarted=0 + token= + elif ((escaping)) && ((escaped)); then + escaping=0 + escaped=0 + fi + done +} + +native_base_dir=$install_directory +if [[ -z $install_directory ]]; then + native_base_dir=$(GetNativeInstallDirectory) +fi + +install_bin="${native_base_dir}/bin" +installed_any=false + +ReadGlobalJsonNativeTools + +if [[ ${#native_assets[@]} -eq 0 ]]; then + echo "No native tools defined in global.json" + exit 0; +else + native_installer_dir="$scriptroot/native" + for index in "${!native_assets[@]}"; do + eval "${native_assets["$index"]}" + + installer_path="$native_installer_dir/install-$KEY.sh" + installer_command="$installer_path" + installer_command+=" --baseuri $base_uri" + installer_command+=" --installpath $install_bin" + installer_command+=" --version $VALUE" + echo $installer_command + + if [[ $force = true ]]; then + installer_command+=" --force" + fi + + if [[ $clean = true ]]; then + installer_command+=" --clean" + fi + + if [[ -a $installer_path ]]; then + $installer_command + if [[ $? != 0 ]]; then + if [[ $donotabortonfailure = true ]]; then + if [[ $donotdisplaywarnings != true ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed" + fi + else + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed" + exit 1 + fi + else + $installed_any = true + fi + else + if [[ $donotabortonfailure == true ]]; then + if [[ $donotdisplaywarnings != true ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script" + fi + else + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script" + exit 1 + fi + fi + done +fi + +if [[ $clean = true ]]; then + exit 0 +fi + +if [[ -d $install_bin ]]; then + echo "Native tools are available from $install_bin" + echo "##vso[task.prependpath]$install_bin" +else + if [[ $installed_any = true ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed" + exit 1 + fi +fi + +exit 0 diff --git a/src/arcade/eng/common/internal-feed-operations.ps1 b/src/arcade/eng/common/internal-feed-operations.ps1 new file mode 100644 index 000000000..92b77347d --- /dev/null +++ b/src/arcade/eng/common/internal-feed-operations.ps1 @@ -0,0 +1,132 @@ +param( + [Parameter(Mandatory=$true)][string] $Operation, + [string] $AuthToken, + [string] $CommitSha, + [string] $RepoName, + [switch] $IsFeedPrivate +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +. $PSScriptRoot\tools.ps1 + +# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed +# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in +# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. 
This should ONLY be called from identified +# internal builds +function SetupCredProvider { + param( + [string] $AuthToken + ) + + # Install the Cred Provider NuGet plugin + Write-Host 'Setting up Cred Provider NuGet plugin in the agent...' + Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..." + + $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1' + + Write-Host "Writing the contents of 'installcredprovider.ps1' locally..." + Invoke-WebRequest $url -OutFile installcredprovider.ps1 + + Write-Host 'Installing plugin...' + .\installcredprovider.ps1 -Force + + Write-Host "Deleting local copy of 'installcredprovider.ps1'..." + Remove-Item .\installcredprovider.ps1 + + if (-Not("$env:USERPROFILE\.nuget\plugins\netcore")) { + Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!' + ExitWithExitCode 1 + } + else { + Write-Host 'CredProvider plugin was installed correctly!' + } + + # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable + # feeds successfully + + $nugetConfigPath = Join-Path $RepoRoot "NuGet.config" + + if (-Not (Test-Path -Path $nugetConfigPath)) { + Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!' + ExitWithExitCode 1 + } + + $endpoints = New-Object System.Collections.ArrayList + $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value} + + if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) { + foreach ($stableRestoreResource in $nugetConfigPackageSources) { + $trimmedResource = ([string]$stableRestoreResource).Trim() + [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"}) + } + } + + if (($endpoints | Measure-Object).Count -gt 0) { + $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress + + # Create the environment variables the AzDo way + Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{ + 'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' + 'issecret' = 'false' + } + + # We don't want sessions cached since we will be updating the endpoints quite frequently + Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{ + 'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED' + 'issecret' = 'false' + } + } + else + { + Write-Host 'No internal endpoints found in NuGet.config' + } +} + +#Workaround for https://github.com/microsoft/msbuild/issues/4430 +function InstallDotNetSdkAndRestoreArcade { + $dotnetTempDir = Join-Path $RepoRoot "dotnet" + $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*) + $dotnet = "$dotnetTempDir\dotnet.exe" + $restoreProjPath = "$PSScriptRoot\restore.proj" + + Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..." + InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion" + + '' | Out-File "$restoreProjPath" + + & $dotnet restore $restoreProjPath + + Write-Host 'Arcade SDK restored!' 
+ + if (Test-Path -Path $restoreProjPath) { + Remove-Item $restoreProjPath + } + + if (Test-Path -Path $dotnetTempDir) { + Remove-Item $dotnetTempDir -Recurse + } +} + +try { + Push-Location $PSScriptRoot + + if ($Operation -like 'setup') { + SetupCredProvider $AuthToken + } + elseif ($Operation -like 'install-restore') { + InstallDotNetSdkAndRestoreArcade + } + else { + Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!" + ExitWithExitCode 1 + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Arcade' -Message $_ + ExitWithExitCode 1 +} +finally { + Pop-Location +} diff --git a/src/arcade/eng/common/internal-feed-operations.sh b/src/arcade/eng/common/internal-feed-operations.sh new file mode 100644 index 000000000..9378223ba --- /dev/null +++ b/src/arcade/eng/common/internal-feed-operations.sh @@ -0,0 +1,141 @@ +#!/usr/bin/env bash + +set -e + +# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed +# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in +# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. +# This should ONLY be called from identified internal builds +function SetupCredProvider { + local authToken=$1 + + # Install the Cred Provider NuGet plugin + echo "Setting up Cred Provider NuGet plugin in the agent..."... + echo "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..." + + local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh" + + echo "Writing the contents of 'installcredprovider.ps1' locally..." + local installcredproviderPath="installcredprovider.sh" + if command -v curl > /dev/null; then + curl $url > "$installcredproviderPath" + else + wget -q -O "$installcredproviderPath" "$url" + fi + + echo "Installing plugin..." + . "$installcredproviderPath" + + echo "Deleting local copy of 'installcredprovider.sh'..." + rm installcredprovider.sh + + if [ ! -d "$HOME/.nuget/plugins" ]; then + Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!' + ExitWithExitCode 1 + else + echo "CredProvider plugin was installed correctly!" + fi + + # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable + # feeds successfully + + local nugetConfigPath="{$repo_root}NuGet.config" + + if [ ! "$nugetConfigPath" ]; then + Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!" 
+ ExitWithExitCode 1 + fi + + local endpoints='[' + local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"` + local pattern="value=\"(.*)\"" + + for value in $nugetConfigPackageValues + do + if [[ $value =~ $pattern ]]; then + local endpoint="${BASH_REMATCH[1]}" + endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"}," + fi + done + + endpoints=${endpoints%?} + endpoints+=']' + + if [ ${#endpoints} -gt 2 ]; then + local endpointCredentials="{\"endpointCredentials\": "$endpoints"}" + + echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials" + echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False" + else + echo "No internal endpoints found in NuGet.config" + fi +} + +# Workaround for https://github.com/microsoft/msbuild/issues/4430 +function InstallDotNetSdkAndRestoreArcade { + local dotnetTempDir="$repo_root/dotnet" + local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*) + local restoreProjPath="$repo_root/eng/common/restore.proj" + + echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..." + echo "" > "$restoreProjPath" + + InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion" + + local res=`$dotnetTempDir/dotnet restore $restoreProjPath` + echo "Arcade SDK restored!" + + # Cleanup + if [ "$restoreProjPath" ]; then + rm "$restoreProjPath" + fi + + if [ "$dotnetTempDir" ]; then + rm -r $dotnetTempDir + fi +} + +source="${BASH_SOURCE[0]}" +operation='' +authToken='' +repoName='' + +while [[ $# > 0 ]]; do + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + --operation) + operation=$2 + shift + ;; + --authtoken) + authToken=$2 + shift + ;; + *) + echo "Invalid argument: $1" + usage + exit 1 + ;; + esac + + shift +done + +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. "$scriptroot/tools.sh" + +if [ "$operation" = "setup" ]; then + SetupCredProvider $authToken +elif [ "$operation" = "install-restore" ]; then + InstallDotNetSdkAndRestoreArcade +else + echo "Unknown operation '$operation'!" 
+fi diff --git a/src/arcade/eng/common/internal/Directory.Build.props b/src/arcade/eng/common/internal/Directory.Build.props new file mode 100644 index 000000000..f1d041c33 --- /dev/null +++ b/src/arcade/eng/common/internal/Directory.Build.props @@ -0,0 +1,11 @@ + + + + + false + false + + + + + diff --git a/src/arcade/eng/common/internal/NuGet.config b/src/arcade/eng/common/internal/NuGet.config new file mode 100644 index 000000000..19d3d311b --- /dev/null +++ b/src/arcade/eng/common/internal/NuGet.config @@ -0,0 +1,7 @@ + + + + + + + diff --git a/src/arcade/eng/common/internal/Tools.csproj b/src/arcade/eng/common/internal/Tools.csproj new file mode 100644 index 000000000..feaa6d208 --- /dev/null +++ b/src/arcade/eng/common/internal/Tools.csproj @@ -0,0 +1,22 @@ + + + + + net472 + false + false + + + + + + + + + + + + + + + diff --git a/src/arcade/eng/common/loc/P22DotNetHtmlLocalization.lss b/src/arcade/eng/common/loc/P22DotNetHtmlLocalization.lss new file mode 100644 index 000000000..5d892d619 --- /dev/null +++ b/src/arcade/eng/common/loc/P22DotNetHtmlLocalization.lss @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/arcade/eng/common/msbuild.ps1 b/src/arcade/eng/common/msbuild.ps1 new file mode 100644 index 000000000..f041e5ddd --- /dev/null +++ b/src/arcade/eng/common/msbuild.ps1 @@ -0,0 +1,28 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string] $verbosity = 'minimal', + [bool] $warnAsError = $true, + [bool] $nodeReuse = $true, + [switch] $ci, + [switch] $prepareMachine, + [switch] $excludePrereleaseVS, + [string] $msbuildEngine = $null, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs +) + +. $PSScriptRoot\tools.ps1 + +try { + if ($ci) { + $nodeReuse = $false + } + + MSBuild @extraArgs +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Build' -Message $_ + ExitWithExitCode 1 +} + +ExitWithExitCode 0 \ No newline at end of file diff --git a/src/arcade/eng/common/msbuild.sh b/src/arcade/eng/common/msbuild.sh new file mode 100644 index 000000000..20d3dad54 --- /dev/null +++ b/src/arcade/eng/common/msbuild.sh @@ -0,0 +1,58 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +verbosity='minimal' +warn_as_error=true +node_reuse=true +prepare_machine=false +extra_args='' + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --verbosity) + verbosity=$2 + shift 2 + ;; + --warnaserror) + warn_as_error=$2 + shift 2 + ;; + --nodereuse) + node_reuse=$2 + shift 2 + ;; + --ci) + ci=true + shift 1 + ;; + --preparemachine) + prepare_machine=true + shift 1 + ;; + *) + extra_args="$extra_args $1" + shift 1 + ;; + esac +done + +. 
"$scriptroot/tools.sh" + +if [[ "$ci" == true ]]; then + node_reuse=false +fi + +MSBuild $extra_args +ExitWithExitCode 0 diff --git a/src/arcade/eng/common/native/CommonLibrary.psm1 b/src/arcade/eng/common/native/CommonLibrary.psm1 new file mode 100644 index 000000000..f71f6af6c --- /dev/null +++ b/src/arcade/eng/common/native/CommonLibrary.psm1 @@ -0,0 +1,401 @@ +<# +.SYNOPSIS +Helper module to install an archive to a directory + +.DESCRIPTION +Helper module to download and extract an archive to a specified directory + +.PARAMETER Uri +Uri of artifact to download + +.PARAMETER InstallDirectory +Directory to extract artifact contents to + +.PARAMETER Force +Force download / extraction if file or contents already exist. Default = False + +.PARAMETER DownloadRetries +Total number of retry attempts. Default = 5 + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds. Default = 30 + +.NOTES +Returns False if download or extraction fail, True otherwise +#> +function DownloadAndExtract { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Uri, + [Parameter(Mandatory=$True)] + [string] $InstallDirectory, + [switch] $Force = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30 + ) + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq "Continue" + + $TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri + + # Download native tool + $DownloadStatus = CommonLibrary\Get-File -Uri $Uri ` + -Path $TempToolPath ` + -DownloadRetries $DownloadRetries ` + -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` + -Force:$Force ` + -Verbose:$Verbose + + if ($DownloadStatus -Eq $False) { + Write-Error "Download failed from $Uri" + return $False + } + + # Extract native tool + $UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath ` + -OutputDirectory $InstallDirectory ` + -Force:$Force ` + -Verbose:$Verbose + + if ($UnzipStatus -Eq $False) { + # Retry Download one more time with Force=true + $DownloadRetryStatus = CommonLibrary\Get-File -Uri $Uri ` + -Path $TempToolPath ` + -DownloadRetries 1 ` + -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` + -Force:$True ` + -Verbose:$Verbose + + if ($DownloadRetryStatus -Eq $False) { + Write-Error "Last attempt of download failed as well" + return $False + } + + # Retry unzip again one more time with Force=true + $UnzipRetryStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath ` + -OutputDirectory $InstallDirectory ` + -Force:$True ` + -Verbose:$Verbose + if ($UnzipRetryStatus -Eq $False) + { + Write-Error "Last attempt of unzip failed as well" + # Clean up partial zips and extracts + if (Test-Path $TempToolPath) { + Remove-Item $TempToolPath -Force + } + if (Test-Path $InstallDirectory) { + Remove-Item $InstallDirectory -Force -Recurse + } + return $False + } + } + + return $True +} + +<# +.SYNOPSIS +Download a file, retry on failure + +.DESCRIPTION +Download specified file and retry if attempt fails + +.PARAMETER Uri +Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded + +.PARAMETER Path +Path to download or copy uri file to + +.PARAMETER Force +Overwrite existing file if present. Default = False + +.PARAMETER DownloadRetries +Total number of retry attempts. 
Default = 5 + +.PARAMETER RetryWaitTimeInSeconds +Wait time between retry attempts in seconds Default = 30 + +#> +function Get-File { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Uri, + [Parameter(Mandatory=$True)] + [string] $Path, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30, + [switch] $Force = $False + ) + $Attempt = 0 + + if ($Force) { + if (Test-Path $Path) { + Remove-Item $Path -Force + } + } + if (Test-Path $Path) { + Write-Host "File '$Path' already exists, skipping download" + return $True + } + + $DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent + if (-Not (Test-Path $DownloadDirectory)) { + New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null + } + + $TempPath = "$Path.tmp" + if (Test-Path -IsValid -Path $Uri) { + Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'" + Copy-Item -Path $Uri -Destination $TempPath + Write-Verbose "Moving temporary file to '$Path'" + Move-Item -Path $TempPath -Destination $Path + return $? + } + else { + Write-Verbose "Downloading $Uri" + # Don't display the console progress UI - it's a huge perf hit + $ProgressPreference = 'SilentlyContinue' + while($Attempt -Lt $DownloadRetries) + { + try { + Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath + Write-Verbose "Downloaded to temporary location '$TempPath'" + Move-Item -Path $TempPath -Destination $Path + Write-Verbose "Moved temporary file to '$Path'" + return $True + } + catch { + $Attempt++ + if ($Attempt -Lt $DownloadRetries) { + $AttemptsLeft = $DownloadRetries - $Attempt + Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds" + Start-Sleep -Seconds $RetryWaitTimeInSeconds + } + else { + Write-Error $_ + Write-Error $_.Exception + } + } + } + } + + return $False +} + +<# +.SYNOPSIS +Generate a shim for a native tool + +.DESCRIPTION +Creates a wrapper script (shim) that passes arguments forward to native tool assembly + +.PARAMETER ShimName +The name of the shim + +.PARAMETER ShimDirectory +The directory where shims are stored + +.PARAMETER ToolFilePath +Path to file that shim forwards to + +.PARAMETER Force +Replace shim if already present. Default = False + +.NOTES +Returns $True if generating shim succeeds, $False otherwise +#> +function New-ScriptShim { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $ShimName, + [Parameter(Mandatory=$True)] + [string] $ShimDirectory, + [Parameter(Mandatory=$True)] + [string] $ToolFilePath, + [Parameter(Mandatory=$True)] + [string] $BaseUri, + [switch] $Force + ) + try { + Write-Verbose "Generating '$ShimName' shim" + + if (-Not (Test-Path $ToolFilePath)){ + Write-Error "Specified tool file path '$ToolFilePath' does not exist" + return $False + } + + # WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs + # Many of the checks for installed programs expect a .exe extension for Windows tools, rather + # than a .bat or .cmd file. 
+ # Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer + if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) { + $InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" ` + -InstallDirectory $ShimDirectory\WinShimmer ` + -Force:$Force ` + -DownloadRetries 2 ` + -RetryWaitTimeInSeconds 5 ` + -Verbose:$Verbose + } + + if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) { + Write-Host "$ShimName.exe already exists; replacing..." + Remove-Item (Join-Path $ShimDirectory "$ShimName.exe") + } + + & "$ShimDirectory\WinShimmer\winshimmer.exe" $ShimName $ToolFilePath $ShimDirectory + return $True + } + catch { + Write-Host $_ + Write-Host $_.Exception + return $False + } +} + +<# +.SYNOPSIS +Returns the machine architecture of the host machine + +.NOTES +Returns 'x64' on 64 bit machines + Returns 'x86' on 32 bit machines +#> +function Get-MachineArchitecture { + $ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE + $ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432 + if($ProcessorArchitecture -Eq "X86") + { + if(($ProcessorArchitectureW6432 -Eq "") -Or + ($ProcessorArchitectureW6432 -Eq "X86")) { + return "x86" + } + $ProcessorArchitecture = $ProcessorArchitectureW6432 + } + if (($ProcessorArchitecture -Eq "AMD64") -Or + ($ProcessorArchitecture -Eq "IA64") -Or + ($ProcessorArchitecture -Eq "ARM64") -Or + ($ProcessorArchitecture -Eq "LOONGARCH64") -Or + ($ProcessorArchitecture -Eq "RISCV64")) { + return "x64" + } + return "x86" +} + +<# +.SYNOPSIS +Get the name of a temporary folder under the native install directory +#> +function Get-TempDirectory { + return Join-Path (Get-NativeInstallDirectory) "temp/" +} + +function Get-TempPathFilename { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $Path + ) + $TempDir = CommonLibrary\Get-TempDirectory + $TempFilename = Split-Path $Path -leaf + $TempPath = Join-Path $TempDir $TempFilename + return $TempPath +} + +<# +.SYNOPSIS +Returns the base directory to use for native tool installation + +.NOTES +Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable +is set, or otherwise returns an install directory under the %USERPROFILE% +#> +function Get-NativeInstallDirectory { + $InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY + if (!$InstallDir) { + $InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/" + } + return $InstallDir +} + +<# +.SYNOPSIS +Unzip an archive + +.DESCRIPTION +Powershell module to unzip an archive to a specified directory + +.PARAMETER ZipPath (Required) +Path to archive to unzip + +.PARAMETER OutputDirectory (Required) +Output directory for archive contents + +.PARAMETER Force +Overwrite output directory contents if they already exist + +.NOTES +- Returns True and does not perform an extraction if output directory already exists but Overwrite is not True. 
+- Returns True if unzip operation is successful +- Returns False if Overwrite is True and it is unable to remove contents of OutputDirectory +- Returns False if unable to extract zip archive +#> +function Expand-Zip { + [CmdletBinding(PositionalBinding=$false)] + Param ( + [Parameter(Mandatory=$True)] + [string] $ZipPath, + [Parameter(Mandatory=$True)] + [string] $OutputDirectory, + [switch] $Force + ) + + Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'" + try { + if ((Test-Path $OutputDirectory) -And (-Not $Force)) { + Write-Host "Directory '$OutputDirectory' already exists, skipping extract" + return $True + } + if (Test-Path $OutputDirectory) { + Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory" + Remove-Item $OutputDirectory -Force -Recurse + if ($? -Eq $False) { + Write-Error "Unable to remove '$OutputDirectory'" + return $False + } + } + + $TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp" + if (Test-Path $TempOutputDirectory) { + Remove-Item $TempOutputDirectory -Force -Recurse + } + New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null + + Add-Type -assembly "system.io.compression.filesystem" + [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory") + if ($? -Eq $False) { + Write-Error "Unable to extract '$ZipPath'" + return $False + } + + Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory + } + catch { + Write-Host $_ + Write-Host $_.Exception + + return $False + } + return $True +} + +export-modulemember -function DownloadAndExtract +export-modulemember -function Expand-Zip +export-modulemember -function Get-File +export-modulemember -function Get-MachineArchitecture +export-modulemember -function Get-NativeInstallDirectory +export-modulemember -function Get-TempDirectory +export-modulemember -function Get-TempPathFilename +export-modulemember -function New-ScriptShim diff --git a/src/arcade/eng/common/native/common-library.sh b/src/arcade/eng/common/native/common-library.sh new file mode 100644 index 000000000..080c2c283 --- /dev/null +++ b/src/arcade/eng/common/native/common-library.sh @@ -0,0 +1,172 @@ +#!/usr/bin/env bash + +function GetNativeInstallDirectory { + local install_dir + + if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then + install_dir=$HOME/.netcoreeng/native/ + else + install_dir=$NETCOREENG_INSTALL_DIRECTORY + fi + + echo $install_dir + return 0 +} + +function GetTempDirectory { + + echo $(GetNativeInstallDirectory)temp/ + return 0 +} + +function ExpandZip { + local zip_path=$1 + local output_directory=$2 + local force=${3:-false} + + echo "Extracting $zip_path to $output_directory" + if [[ -d $output_directory ]] && [[ $force = false ]]; then + echo "Directory '$output_directory' already exists, skipping extract" + return 0 + fi + + if [[ -d $output_directory ]]; then + echo "'Force flag enabled, but '$output_directory' exists. Removing directory" + rm -rf $output_directory + if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'" + return 1 + fi + fi + + echo "Creating directory: '$output_directory'" + mkdir -p $output_directory + + echo "Extracting archive" + tar -xf $zip_path -C $output_directory + if [[ $? 
!= 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'" + return 1 + fi + + return 0 +} + +function GetCurrentOS { + local unameOut="$(uname -s)" + case $unameOut in + Linux*) echo "Linux";; + Darwin*) echo "MacOS";; + esac + return 0 +} + +function GetFile { + local uri=$1 + local path=$2 + local force=${3:-false} + local download_retries=${4:-5} + local retry_wait_time_seconds=${5:-30} + + if [[ -f $path ]]; then + if [[ $force = false ]]; then + echo "File '$path' already exists. Skipping download" + return 0 + else + rm -rf $path + fi + fi + + if [[ -f $uri ]]; then + echo "'$uri' is a file path, copying file to '$path'" + cp $uri $path + return $? + fi + + echo "Downloading $uri" + # Use curl if available, otherwise use wget + if command -v curl > /dev/null; then + curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail + else + wget -q -O "$path" "$uri" --tries="$download_retries" + fi + + return $? +} + +function GetTempPathFileName { + local path=$1 + + local temp_dir=$(GetTempDirectory) + local temp_file_name=$(basename $path) + echo $temp_dir$temp_file_name + return 0 +} + +function DownloadAndExtract { + local uri=$1 + local installDir=$2 + local force=${3:-false} + local download_retries=${4:-5} + local retry_wait_time_seconds=${5:-30} + + local temp_tool_path=$(GetTempPathFileName $uri) + + echo "downloading to: $temp_tool_path" + + # Download file + GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds + if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'." + return 1 + fi + + # Extract File + echo "extracting from $temp_tool_path to $installDir" + ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds + if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'." + return 1 + fi + + return 0 +} + +function NewScriptShim { + local shimpath=$1 + local tool_file_path=$2 + local force=${3:-false} + + echo "Generating '$shimpath' shim" + if [[ -f $shimpath ]]; then + if [[ $force = false ]]; then + echo "File '$shimpath' already exists." >&2 + return 1 + else + rm -rf $shimpath + fi + fi + + if [[ ! -f $tool_file_path ]]; then + # try to see if the path is lower cased + tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")" + if [[ ! -f $tool_file_path ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist" + return 1 + fi + fi + + local shim_contents=$'#!/usr/bin/env bash\n' + shim_contents+="SHIMARGS="$'$1\n' + shim_contents+="$tool_file_path"$' $SHIMARGS\n' + + # Write shim file + echo "$shim_contents" > $shimpath + + chmod +x $shimpath + + echo "Finished generating shim '$shimpath'" + + return $? +} + diff --git a/src/arcade/eng/common/native/init-compiler.sh b/src/arcade/eng/common/native/init-compiler.sh new file mode 100644 index 000000000..9a0e1f2b4 --- /dev/null +++ b/src/arcade/eng/common/native/init-compiler.sh @@ -0,0 +1,146 @@ +#!/bin/sh +# +# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables +# +# NOTE: some scripts source this file and rely on stdout being empty, make sure +# to not output *anything* here, unless it is an error message that fails the +# build. 
+ +if [ -z "$build_arch" ] || [ -z "$compiler" ]; then + echo "Usage..." + echo "build_arch= compiler= init-compiler.sh" + echo "Specify the target architecture." + echo "Specify the name of compiler (clang or gcc)." + exit 1 +fi + +case "$compiler" in + clang*|-clang*|--clang*) + # clangx.y or clang-x.y + version="$(echo "$compiler" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + + # LLVM based on v18 released in early 2024, with two releases per year + maxVersion="$((18 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 6)))" + compiler=clang + ;; + + gcc*|-gcc*|--gcc*) + # gccx.y or gcc-x.y + version="$(echo "$compiler" | tr -d '[:alpha:]-=')" + majorVersion="${version%%.*}" + + # GCC based on v14 released in early 2024, with one release per year + maxVersion="$((14 + ((($(date +%Y) - 2024) * 12 + $(date +%-m) - 3) / 12)))" + compiler=gcc + ;; +esac + +cxxCompiler="$compiler++" + +# clear the existing CC and CXX from environment +CC= +CXX= +LDFLAGS= + +if [ "$compiler" = "gcc" ]; then cxxCompiler="g++"; fi + +check_version_exists() { + desired_version=-1 + + # Set up the environment to be used for building with the desired compiler. + if command -v "$compiler-$1" > /dev/null; then + desired_version="-$1" + elif command -v "$compiler$1" > /dev/null; then + desired_version="$1" + fi + + echo "$desired_version" +} + +__baseOS="$(uname)" +set_compiler_version_from_CC() { + if [ "$__baseOS" = "Darwin" ]; then + # On Darwin, the versions from -version/-dumpversion refer to Xcode + # versions, not llvm versions, so we can't rely on them. + return + fi + + version="$("$CC" -dumpversion)" + if [ -z "$version" ]; then + echo "Error: $CC -dumpversion didn't provide a version" + exit 1 + fi + + # gcc and clang often display 3 part versions. However, gcc can show only 1 part in some environments. + IFS=. read -r majorVersion _ < /dev/null; then + echo "Error: No compatible version of $compiler was found within the range of $minVersion to $maxVersion. Please upgrade your toolchain or specify the compiler explicitly using CLR_CC and CLR_CXX environment variables." + exit 1 + fi + + CC="$(command -v "$compiler" 2> /dev/null)" + CXX="$(command -v "$cxxCompiler" 2> /dev/null)" + set_compiler_version_from_CC + fi + else + desired_version="$(check_version_exists "$majorVersion")" + if [ "$desired_version" = "-1" ]; then + echo "Error: Could not find specific version of $compiler: $majorVersion." + exit 1 + fi + fi + + if [ -z "$CC" ]; then + CC="$(command -v "$compiler$desired_version" 2> /dev/null)" + CXX="$(command -v "$cxxCompiler$desired_version" 2> /dev/null)" + if [ -z "$CXX" ]; then CXX="$(command -v "$cxxCompiler" 2> /dev/null)"; fi + set_compiler_version_from_CC + fi +else + if [ ! -f "$CLR_CC" ]; then + echo "Error: CLR_CC is set but path '$CLR_CC' does not exist" + exit 1 + fi + CC="$CLR_CC" + CXX="$CLR_CXX" + set_compiler_version_from_CC +fi + +if [ -z "$CC" ]; then + echo "Error: Unable to find $compiler." + exit 1 +fi + +if [ "$__baseOS" != "Darwin" ]; then + # On Darwin, we always want to use the Apple linker. + + # Only lld version >= 9 can be considered stable. lld supports s390x starting from 18.0. 
+ if [ "$compiler" = "clang" ] && [ -n "$majorVersion" ] && [ "$majorVersion" -ge 9 ] && { [ "$build_arch" != "s390x" ] || [ "$majorVersion" -ge 18 ]; }; then + if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then + LDFLAGS="-fuse-ld=lld" + fi + fi +fi + +SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version" 2> /dev/null)" + +export CC CXX LDFLAGS SCAN_BUILD_COMMAND diff --git a/src/arcade/eng/common/native/init-distro-rid.sh b/src/arcade/eng/common/native/init-distro-rid.sh new file mode 100644 index 000000000..83ea7aab0 --- /dev/null +++ b/src/arcade/eng/common/native/init-distro-rid.sh @@ -0,0 +1,110 @@ +#!/bin/sh + +# getNonPortableDistroRid +# +# Input: +# targetOs: (str) +# targetArch: (str) +# rootfsDir: (str) +# +# Return: +# non-portable rid +getNonPortableDistroRid() +{ + targetOs="$1" + targetArch="$2" + rootfsDir="$3" + nonPortableRid="" + + if [ "$targetOs" = "linux" ]; then + # shellcheck disable=SC1091 + if [ -e "${rootfsDir}/etc/os-release" ]; then + . "${rootfsDir}/etc/os-release" + if echo "${VERSION_ID:-}" | grep -qE '^([[:digit:]]|\.)+$'; then + nonPortableRid="${ID}.${VERSION_ID}-${targetArch}" + else + # Rolling release distros either do not set VERSION_ID, set it as blank or + # set it to non-version looking string (such as TEMPLATE_VERSION_ID on ArchLinux); + # so omit it here to be consistent with everything else. + nonPortableRid="${ID}-${targetArch}" + fi + elif [ -e "${rootfsDir}/android_platform" ]; then + # shellcheck disable=SC1091 + . "${rootfsDir}/android_platform" + nonPortableRid="$RID" + fi + fi + + if [ "$targetOs" = "freebsd" ]; then + # $rootfsDir can be empty. freebsd-version is a shell script and should always work. + __freebsd_major_version=$("$rootfsDir"/bin/freebsd-version | cut -d'.' -f1) + nonPortableRid="freebsd.$__freebsd_major_version-${targetArch}" + elif command -v getprop >/dev/null && getprop ro.product.system.model | grep -qi android; then + __android_sdk_version=$(getprop ro.build.version.sdk) + nonPortableRid="android.$__android_sdk_version-${targetArch}" + elif [ "$targetOs" = "illumos" ]; then + __uname_version=$(uname -v) + nonPortableRid="illumos-${targetArch}" + elif [ "$targetOs" = "solaris" ]; then + __uname_version=$(uname -v) + __solaris_major_version=$(echo "$__uname_version" | cut -d'.' -f1) + nonPortableRid="solaris.$__solaris_major_version-${targetArch}" + elif [ "$targetOs" = "haiku" ]; then + __uname_release="$(uname -r)" + nonPortableRid=haiku.r"$__uname_release"-"$targetArch" + fi + + echo "$nonPortableRid" | tr '[:upper:]' '[:lower:]' +} + +# initDistroRidGlobal +# +# Input: +# os: (str) +# arch: (str) +# rootfsDir?: (nullable:string) +# +# Return: +# None +# +# Notes: +# It is important to note that the function does not return anything, but it +# exports the following variables on success: +# __DistroRid : Non-portable rid of the target platform. +# __PortableTargetOS : OS-part of the portable rid that corresponds to the target platform. +initDistroRidGlobal() +{ + targetOs="$1" + targetArch="$2" + rootfsDir="" + if [ $# -ge 3 ]; then + rootfsDir="$3" + fi + + if [ -n "${rootfsDir}" ]; then + # We may have a cross build. Check for the existence of the rootfsDir + if [ ! -e "${rootfsDir}" ]; then + echo "Error: rootfsDir has been passed, but the location is not valid." 
+ exit 1 + fi + fi + + __DistroRid=$(getNonPortableDistroRid "${targetOs}" "${targetArch}" "${rootfsDir}") + + if [ -z "${__PortableTargetOS:-}" ]; then + __PortableTargetOS="$targetOs" + + STRINGS="$(command -v strings || true)" + if [ -z "$STRINGS" ]; then + STRINGS="$(command -v llvm-strings || true)" + fi + + # Check for musl-based distros (e.g. Alpine Linux, Void Linux). + if "${rootfsDir}/usr/bin/ldd" --version 2>&1 | grep -q musl || + ( [ -n "$STRINGS" ] && "$STRINGS" "${rootfsDir}/usr/bin/ldd" 2>&1 | grep -q musl ); then + __PortableTargetOS="linux-musl" + fi + fi + + export __DistroRid __PortableTargetOS +} diff --git a/src/arcade/eng/common/native/init-os-and-arch.sh b/src/arcade/eng/common/native/init-os-and-arch.sh new file mode 100644 index 000000000..38921d433 --- /dev/null +++ b/src/arcade/eng/common/native/init-os-and-arch.sh @@ -0,0 +1,85 @@ +#!/bin/sh + +# Use uname to determine what the OS is. +OSName=$(uname -s | tr '[:upper:]' '[:lower:]') + +if command -v getprop && getprop ro.product.system.model 2>&1 | grep -qi android; then + OSName="android" +fi + +case "$OSName" in +freebsd|linux|netbsd|openbsd|sunos|android|haiku) + os="$OSName" ;; +darwin) + os=osx ;; +*) + echo "Unsupported OS $OSName detected!" + exit 1 ;; +esac + +# On Solaris, `uname -m` is discouraged, see https://docs.oracle.com/cd/E36784_01/html/E36870/uname-1.html +# and `uname -p` returns processor type (e.g. i386 on amd64). +# The appropriate tool to determine CPU is isainfo(1) https://docs.oracle.com/cd/E36784_01/html/E36870/isainfo-1.html. +if [ "$os" = "sunos" ]; then + if uname -o 2>&1 | grep -q illumos; then + os="illumos" + else + os="solaris" + fi + CPUName=$(isainfo -n) +else + # For the rest of the operating systems, use uname(1) to determine what the CPU is. + CPUName=$(uname -m) +fi + +case "$CPUName" in + arm64|aarch64) + arch=arm64 + if [ "$(getconf LONG_BIT)" -lt 64 ]; then + # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) + arch=arm + fi + ;; + + loongarch64) + arch=loongarch64 + ;; + + riscv64) + arch=riscv64 + ;; + + amd64|x86_64) + arch=x64 + ;; + + armv7l|armv8l) + # shellcheck disable=SC1091 + if (NAME=""; . /etc/os-release; test "$NAME" = "Tizen"); then + arch=armel + else + arch=arm + fi + ;; + + armv6l) + arch=armv6 + ;; + + i[3-6]86) + echo "Unsupported CPU $CPUName detected, build might not succeed!" + arch=x86 + ;; + + s390x) + arch=s390x + ;; + + ppc64le) + arch=ppc64le + ;; + *) + echo "Unknown CPU $CPUName detected!" + exit 1 + ;; +esac diff --git a/src/arcade/eng/common/native/install-cmake-test.sh b/src/arcade/eng/common/native/install-cmake-test.sh new file mode 100644 index 000000000..8a5e7cf0d --- /dev/null +++ b/src/arcade/eng/common/native/install-cmake-test.sh @@ -0,0 +1,117 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. 
$scriptroot/common-library.sh + +base_uri= +install_path= +version= +clean=false +force=false +download_retries=5 +retry_wait_time_seconds=30 + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --baseuri) + base_uri=$2 + shift 2 + ;; + --installpath) + install_path=$2 + shift 2 + ;; + --version) + version=$2 + shift 2 + ;; + --clean) + clean=true + shift 1 + ;; + --force) + force=true + shift 1 + ;; + --downloadretries) + download_retries=$2 + shift 2 + ;; + --retrywaittimeseconds) + retry_wait_time_seconds=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --baseuri Base file directory or Url from which to acquire tool archives" + echo " --installpath Base directory to install native tool to" + echo " --clean Don't install the tool, just clean up the current install of the tool" + echo " --force Force install of tools even if they previously exist" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --downloadretries Total number of retry attempts" + echo " --retrywaittimeseconds Wait time between retry attempts in seconds" + echo "" + exit 0 + ;; + esac +done + +tool_name="cmake-test" +tool_os=$(GetCurrentOS) +tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")" +tool_arch="x86_64" +tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" +tool_install_directory="$install_path/$tool_name/$version" +tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name" +shim_path="$install_path/$tool_name.sh" +uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz" + +# Clean up tool and installers +if [[ $clean = true ]]; then + echo "Cleaning $tool_install_directory" + if [[ -d $tool_install_directory ]]; then + rm -rf $tool_install_directory + fi + + echo "Cleaning $shim_path" + if [[ -f $shim_path ]]; then + rm -rf $shim_path + fi + + tool_temp_path=$(GetTempPathFileName $uri) + echo "Cleaning $tool_temp_path" + if [[ -f $tool_temp_path ]]; then + rm -rf $tool_temp_path + fi + + exit 0 +fi + +# Install tool +if [[ -f $tool_file_path ]] && [[ $force = false ]]; then + echo "$tool_name ($version) already exists, skipping install" + exit 0 +fi + +DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds + +if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed' + exit 1 +fi + +# Generate Shim +# Always rewrite shims so that we are referencing the expected version +NewScriptShim $shim_path $tool_file_path true + +if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed' + exit 1 +fi + +exit 0 diff --git a/src/arcade/eng/common/native/install-cmake.sh b/src/arcade/eng/common/native/install-cmake.sh new file mode 100644 index 000000000..de496beeb --- /dev/null +++ b/src/arcade/eng/common/native/install-cmake.sh @@ -0,0 +1,117 @@ +#!/usr/bin/env bash + +source="${BASH_SOURCE[0]}" +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +. 
$scriptroot/common-library.sh + +base_uri= +install_path= +version= +clean=false +force=false +download_retries=5 +retry_wait_time_seconds=30 + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --baseuri) + base_uri=$2 + shift 2 + ;; + --installpath) + install_path=$2 + shift 2 + ;; + --version) + version=$2 + shift 2 + ;; + --clean) + clean=true + shift 1 + ;; + --force) + force=true + shift 1 + ;; + --downloadretries) + download_retries=$2 + shift 2 + ;; + --retrywaittimeseconds) + retry_wait_time_seconds=$2 + shift 2 + ;; + --help) + echo "Common settings:" + echo " --baseuri Base file directory or Url from which to acquire tool archives" + echo " --installpath Base directory to install native tool to" + echo " --clean Don't install the tool, just clean up the current install of the tool" + echo " --force Force install of tools even if they previously exist" + echo " --help Print help and exit" + echo "" + echo "Advanced settings:" + echo " --downloadretries Total number of retry attempts" + echo " --retrywaittimeseconds Wait time between retry attempts in seconds" + echo "" + exit 0 + ;; + esac +done + +tool_name="cmake" +tool_os=$(GetCurrentOS) +tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")" +tool_arch="x86_64" +tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch" +tool_install_directory="$install_path/$tool_name/$version" +tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name" +shim_path="$install_path/$tool_name.sh" +uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz" + +# Clean up tool and installers +if [[ $clean = true ]]; then + echo "Cleaning $tool_install_directory" + if [[ -d $tool_install_directory ]]; then + rm -rf $tool_install_directory + fi + + echo "Cleaning $shim_path" + if [[ -f $shim_path ]]; then + rm -rf $shim_path + fi + + tool_temp_path=$(GetTempPathFileName $uri) + echo "Cleaning $tool_temp_path" + if [[ -f $tool_temp_path ]]; then + rm -rf $tool_temp_path + fi + + exit 0 +fi + +# Install tool +if [[ -f $tool_file_path ]] && [[ $force = false ]]; then + echo "$tool_name ($version) already exists, skipping install" + exit 0 +fi + +DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds + +if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed' + exit 1 +fi + +# Generate Shim +# Always rewrite shims so that we are referencing the expected version +NewScriptShim $shim_path $tool_file_path true + +if [[ $? != 0 ]]; then + Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed' + exit 1 +fi + +exit 0 diff --git a/src/arcade/eng/common/native/install-dependencies.sh b/src/arcade/eng/common/native/install-dependencies.sh new file mode 100644 index 000000000..477a44f33 --- /dev/null +++ b/src/arcade/eng/common/native/install-dependencies.sh @@ -0,0 +1,62 @@ +#!/bin/sh + +set -e + +# This is a simple script primarily used for CI to install necessary dependencies +# +# Usage: +# +# ./install-dependencies.sh + +os="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + +if [ -z "$os" ]; then + . "$(dirname "$0")"/init-os-and-arch.sh +fi + +case "$os" in + linux) + if [ -e /etc/os-release ]; then + . 
/etc/os-release + fi + + if [ "$ID" = "debian" ] || [ "$ID_LIKE" = "debian" ]; then + apt update + + apt install -y build-essential gettext locales cmake llvm clang lld lldb liblldb-dev libunwind8-dev libicu-dev liblttng-ust-dev \ + libssl-dev libkrb5-dev pigz cpio + + localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 + elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then + pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)" + $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio + elif [ "$ID" = "alpine" ]; then + apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio + else + echo "Unsupported distro. distro: $ID" + exit 1 + fi + ;; + + osx|maccatalyst|ios|iossimulator|tvos|tvossimulator) + echo "Installed xcode version: $(xcode-select -p)" + + export HOMEBREW_NO_INSTALL_CLEANUP=1 + export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 + # Skip brew update for now, see https://github.com/actions/setup-python/issues/577 + # brew update --preinstall + brew bundle --no-upgrade --file=- < +[CmdletBinding(PositionalBinding=$false)] +Param ( + [Parameter(Mandatory=$True)] + [string] $ToolName, + [Parameter(Mandatory=$True)] + [string] $InstallPath, + [Parameter(Mandatory=$True)] + [string] $BaseUri, + [Parameter(Mandatory=$True)] + [string] $Version, + [string] $CommonLibraryDirectory = $PSScriptRoot, + [switch] $Force = $False, + [switch] $Clean = $False, + [int] $DownloadRetries = 5, + [int] $RetryWaitTimeInSeconds = 30 +) + +. $PSScriptRoot\..\pipeline-logging-functions.ps1 + +# Import common library modules +Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1") + +try { + # Define verbose switch if undefined + $Verbose = $VerbosePreference -Eq "Continue" + + $Arch = CommonLibrary\Get-MachineArchitecture + $ToolOs = "win64" + if($Arch -Eq "x32") { + $ToolOs = "win32" + } + $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch" + $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\" + $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip" + $ShimPath = Join-Path $InstallPath "$ToolName.exe" + + if ($Clean) { + Write-Host "Cleaning $ToolInstallDirectory" + if (Test-Path $ToolInstallDirectory) { + Remove-Item $ToolInstallDirectory -Force -Recurse + } + Write-Host "Cleaning $ShimPath" + if (Test-Path $ShimPath) { + Remove-Item $ShimPath -Force + } + $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri + Write-Host "Cleaning $ToolTempPath" + if (Test-Path $ToolTempPath) { + Remove-Item $ToolTempPath -Force + } + exit 0 + } + + # Install tool + if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) { + Write-Verbose "$ToolName ($Version) already exists, skipping install" + } + else { + $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri ` + -InstallDirectory $ToolInstallDirectory ` + -Force:$Force ` + -DownloadRetries $DownloadRetries ` + -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds ` + -Verbose:$Verbose + + if ($InstallStatus -Eq $False) { + Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping" + exit 1 + } + } + + $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName } + if (@($ToolFilePath).Length -Gt 1) { + Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))" + exit 1 + } elseif (@($ToolFilePath).Length -Lt 
1) { + Write-Host "$ToolName was not found in $ToolInstallDirectory." + exit 1 + } + + # Generate shim + # Always rewrite shims so that we are referencing the expected version + $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName ` + -ShimDirectory $InstallPath ` + -ToolFilePath "$ToolFilePath" ` + -BaseUri $BaseUri ` + -Force:$Force ` + -Verbose:$Verbose + + if ($GenerateShimStatus -Eq $False) { + Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping" + return 1 + } + + exit 0 +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_ + exit 1 +} diff --git a/src/arcade/eng/common/pipeline-logging-functions.ps1 b/src/arcade/eng/common/pipeline-logging-functions.ps1 new file mode 100644 index 000000000..8e422c561 --- /dev/null +++ b/src/arcade/eng/common/pipeline-logging-functions.ps1 @@ -0,0 +1,260 @@ +# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified. + +# NOTE: You should not be calling these method directly as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1 + +$script:loggingCommandPrefix = '##vso[' +$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"? + New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' } + New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' } + New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' } + New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' } +) +# TODO: BUG: Escape % ??? +# TODO: Add test to verify don't need to escape "=". 
+ +# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set +function Write-PipelineTelemetryError { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Category, + [Parameter(Mandatory = $true)] + [string]$Message, + [Parameter(Mandatory = $false)] + [string]$Type = 'error', + [string]$ErrCode, + [string]$SourcePath, + [string]$LineNumber, + [string]$ColumnNumber, + [switch]$AsOutput, + [switch]$Force) + + $PSBoundParameters.Remove('Category') | Out-Null + + if ($Force -Or ((Test-Path variable:ci) -And $ci)) { + $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message" + } + $PSBoundParameters.Remove('Message') | Out-Null + $PSBoundParameters.Add('Message', $Message) + Write-PipelineTaskError @PSBoundParameters +} + +# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set +function Write-PipelineTaskError { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Message, + [Parameter(Mandatory = $false)] + [string]$Type = 'error', + [string]$ErrCode, + [string]$SourcePath, + [string]$LineNumber, + [string]$ColumnNumber, + [switch]$AsOutput, + [switch]$Force + ) + + if (!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) { + if ($Type -eq 'error') { + Write-Host $Message -ForegroundColor Red + return + } + elseif ($Type -eq 'warning') { + Write-Host $Message -ForegroundColor Yellow + return + } + } + + if (($Type -ne 'error') -and ($Type -ne 'warning')) { + Write-Host $Message + return + } + $PSBoundParameters.Remove('Force') | Out-Null + if (-not $PSBoundParameters.ContainsKey('Type')) { + $PSBoundParameters.Add('Type', 'error') + } + Write-LogIssue @PSBoundParameters +} + +function Write-PipelineSetVariable { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Name, + [string]$Value, + [switch]$Secret, + [switch]$AsOutput, + [bool]$IsMultiJobVariable = $true) + + if ((Test-Path variable:ci) -And $ci) { + Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{ + 'variable' = $Name + 'isSecret' = $Secret + 'isOutput' = $IsMultiJobVariable + } -AsOutput:$AsOutput + } +} + +function Write-PipelinePrependPath { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Path, + [switch]$AsOutput) + + if ((Test-Path variable:ci) -And $ci) { + Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput + } +} + +function Write-PipelineSetResult { + [CmdletBinding()] + param( + [ValidateSet("Succeeded", "SucceededWithIssues", "Failed", "Cancelled", "Skipped")] + [Parameter(Mandatory = $true)] + [string]$Result, + [string]$Message) + if ((Test-Path variable:ci) -And $ci) { + Write-LoggingCommand -Area 'task' -Event 'complete' -Data $Message -Properties @{ + 'result' = $Result + } + } +} + +<######################################## +# Private functions. 
+########################################> +function Format-LoggingCommandData { + [CmdletBinding()] + param([string]$Value, [switch]$Reverse) + + if (!$Value) { + return '' + } + + if (!$Reverse) { + foreach ($mapping in $script:loggingCommandEscapeMappings) { + $Value = $Value.Replace($mapping.Token, $mapping.Replacement) + } + } + else { + for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) { + $mapping = $script:loggingCommandEscapeMappings[$i] + $Value = $Value.Replace($mapping.Replacement, $mapping.Token) + } + } + + return $Value +} + +function Format-LoggingCommand { + [CmdletBinding()] + param( + [Parameter(Mandatory = $true)] + [string]$Area, + [Parameter(Mandatory = $true)] + [string]$Event, + [string]$Data, + [hashtable]$Properties) + + # Append the preamble. + [System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder + $null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event) + + # Append the properties. + if ($Properties) { + $first = $true + foreach ($key in $Properties.Keys) { + [string]$value = Format-LoggingCommandData $Properties[$key] + if ($value) { + if ($first) { + $null = $sb.Append(' ') + $first = $false + } + else { + $null = $sb.Append(';') + } + + $null = $sb.Append("$key=$value") + } + } + } + + # Append the tail and output the value. + $Data = Format-LoggingCommandData $Data + $sb.Append(']').Append($Data).ToString() +} + +function Write-LoggingCommand { + [CmdletBinding(DefaultParameterSetName = 'Parameters')] + param( + [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')] + [string]$Area, + [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')] + [string]$Event, + [Parameter(ParameterSetName = 'Parameters')] + [string]$Data, + [Parameter(ParameterSetName = 'Parameters')] + [hashtable]$Properties, + [Parameter(Mandatory = $true, ParameterSetName = 'Object')] + $Command, + [switch]$AsOutput) + + if ($PSCmdlet.ParameterSetName -eq 'Object') { + Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput + return + } + + $command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties + if ($AsOutput) { + $command + } + else { + Write-Host $command + } +} + +function Write-LogIssue { + [CmdletBinding()] + param( + [ValidateSet('warning', 'error')] + [Parameter(Mandatory = $true)] + [string]$Type, + [string]$Message, + [string]$ErrCode, + [string]$SourcePath, + [string]$LineNumber, + [string]$ColumnNumber, + [switch]$AsOutput) + + $command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{ + 'type' = $Type + 'code' = $ErrCode + 'sourcepath' = $SourcePath + 'linenumber' = $LineNumber + 'columnnumber' = $ColumnNumber + } + if ($AsOutput) { + return $command + } + + if ($Type -eq 'error') { + $foregroundColor = $host.PrivateData.ErrorForegroundColor + $backgroundColor = $host.PrivateData.ErrorBackgroundColor + if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) { + $foregroundColor = [System.ConsoleColor]::Red + $backgroundColor = [System.ConsoleColor]::Black + } + } + else { + $foregroundColor = $host.PrivateData.WarningForegroundColor + $backgroundColor = $host.PrivateData.WarningBackgroundColor + if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) { + $foregroundColor = [System.ConsoleColor]::Yellow + $backgroundColor = 
[System.ConsoleColor]::Black + } + } + + Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor +} diff --git a/src/arcade/eng/common/pipeline-logging-functions.sh b/src/arcade/eng/common/pipeline-logging-functions.sh new file mode 100644 index 000000000..6a0b2255e --- /dev/null +++ b/src/arcade/eng/common/pipeline-logging-functions.sh @@ -0,0 +1,206 @@ +#!/usr/bin/env bash + +function Write-PipelineTelemetryError { + local telemetry_category='' + local force=false + local function_args=() + local message='' + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -category|-c) + telemetry_category=$2 + shift + ;; + -force|-f) + force=true + ;; + -*) + function_args+=("$1 $2") + shift + ;; + *) + message=$* + ;; + esac + shift + done + + if [[ $force != true ]] && [[ "$ci" != true ]]; then + echo "$message" >&2 + return + fi + + if [[ $force == true ]]; then + function_args+=("-force") + fi + message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message" + function_args+=("$message") + Write-PipelineTaskError ${function_args[@]} +} + +function Write-PipelineTaskError { + local message_type="error" + local sourcepath='' + local linenumber='' + local columnnumber='' + local error_code='' + local force=false + + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -type|-t) + message_type=$2 + shift + ;; + -sourcepath|-s) + sourcepath=$2 + shift + ;; + -linenumber|-ln) + linenumber=$2 + shift + ;; + -columnnumber|-cn) + columnnumber=$2 + shift + ;; + -errcode|-e) + error_code=$2 + shift + ;; + -force|-f) + force=true + ;; + *) + break + ;; + esac + + shift + done + + if [[ $force != true ]] && [[ "$ci" != true ]]; then + echo "$@" >&2 + return + fi + + local message="##vso[task.logissue" + + message="$message type=$message_type" + + if [ -n "$sourcepath" ]; then + message="$message;sourcepath=$sourcepath" + fi + + if [ -n "$linenumber" ]; then + message="$message;linenumber=$linenumber" + fi + + if [ -n "$columnnumber" ]; then + message="$message;columnnumber=$columnnumber" + fi + + if [ -n "$error_code" ]; then + message="$message;code=$error_code" + fi + + message="$message]$*" + echo "$message" +} + +function Write-PipelineSetVariable { + if [[ "$ci" != true ]]; then + return + fi + + local name='' + local value='' + local secret=false + local as_output=false + local is_multi_job_variable=true + + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -name|-n) + name=$2 + shift + ;; + -value|-v) + value=$2 + shift + ;; + -secret|-s) + secret=true + ;; + -as_output|-a) + as_output=true + ;; + -is_multi_job_variable|-i) + is_multi_job_variable=$2 + shift + ;; + esac + shift + done + + value=${value/;/%3B} + value=${value/\\r/%0D} + value=${value/\\n/%0A} + value=${value/]/%5D} + + local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value" + + if [[ "$as_output" == true ]]; then + $message + else + echo "$message" + fi +} + +function Write-PipelinePrependPath { + local prepend_path='' + + while [[ $# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -path|-p) + prepend_path=$2 + shift + ;; + esac + shift + done + + export PATH="$prepend_path:$PATH" + + if [[ "$ci" == true ]]; then + echo "##vso[task.prependpath]$prepend_path" + fi +} + +function Write-PipelineSetResult { + local result='' + local message='' + + while [[ 
$# -gt 0 ]]; do + opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -result|-r) + result=$2 + shift + ;; + -message|-m) + message=$2 + shift + ;; + esac + shift + done + + if [[ "$ci" == true ]]; then + echo "##vso[task.complete result=$result;]$message" + fi +} diff --git a/src/arcade/eng/common/post-build/check-channel-consistency.ps1 b/src/arcade/eng/common/post-build/check-channel-consistency.ps1 new file mode 100644 index 000000000..61208d2d1 --- /dev/null +++ b/src/arcade/eng/common/post-build/check-channel-consistency.ps1 @@ -0,0 +1,48 @@ +param( + [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to + [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation +) + +try { + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + + # `tools.ps1` checks $ci to perform some actions. Since the post-build + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + $disableConfigureToolsetImport = $true + . $PSScriptRoot\..\tools.ps1 + + if ($PromoteToChannels -eq "") { + Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/main/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info." + ExitWithExitCode 0 + } + + # Check that every channel that Maestro told to promote the build to + # is available in YAML + $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ } + + $hasErrors = $false + + foreach ($id in $PromoteToChannelsIds) { + if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) { + Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng." + $hasErrors = $true + } + } + + # The `Write-PipelineTaskError` doesn't error the script and we might report several errors + # in the previous lines. The check below makes sure that we return an error state from the + # script if we reported any validation error + if ($hasErrors) { + ExitWithExitCode 1 + } + + Write-Host 'done.' +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration." + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/post-build/nuget-validation.ps1 b/src/arcade/eng/common/post-build/nuget-validation.ps1 new file mode 100644 index 000000000..e5de00c89 --- /dev/null +++ b/src/arcade/eng/common/post-build/nuget-validation.ps1 @@ -0,0 +1,22 @@ +# This script validates NuGet package metadata information using this +# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage + +param( + [Parameter(Mandatory=$true)][string] $PackagesPath # Path to where the packages to be validated are +) + +# `tools.ps1` checks $ci to perform some actions. Since the post-build +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +$disableConfigureToolsetImport = $true +. 
$PSScriptRoot\..\tools.ps1 + +try { + & $PSScriptRoot\nuget-verification.ps1 ${PackagesPath}\*.nupkg +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/post-build/nuget-verification.ps1 b/src/arcade/eng/common/post-build/nuget-verification.ps1 new file mode 100644 index 000000000..a365194a9 --- /dev/null +++ b/src/arcade/eng/common/post-build/nuget-verification.ps1 @@ -0,0 +1,121 @@ +<# +.SYNOPSIS + Verifies that Microsoft NuGet packages have proper metadata. +.DESCRIPTION + Downloads a verification tool and runs metadata validation on the provided NuGet packages. This script writes an + error if any of the provided packages fail validation. All arguments provided to this PowerShell script that do not + match PowerShell parameters are passed on to the verification tool downloaded during the execution of this script. +.PARAMETER NuGetExePath + The path to the nuget.exe binary to use. If not provided, nuget.exe will be downloaded into the -DownloadPath + directory. +.PARAMETER PackageSource + The package source to use to download the verification tool. If not provided, nuget.org will be used. +.PARAMETER DownloadPath + The directory path to download the verification tool and nuget.exe to. If not provided, + %TEMP%\NuGet.VerifyNuGetPackage will be used. +.PARAMETER args + Arguments that will be passed to the verification tool. +.EXAMPLE + PS> .\verify.ps1 *.nupkg + Verifies the metadata of all .nupkg files in the current working directory. +.EXAMPLE + PS> .\verify.ps1 --help + Displays the help text of the downloaded verification tool. +.LINK + https://github.com/NuGet/NuGetGallery/blob/master/src/VerifyMicrosoftPackage/README.md +#> + +# This script was copied from https://github.com/NuGet/NuGetGallery/blob/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1 + +[CmdletBinding(PositionalBinding = $false)] +param( + [string]$NuGetExePath, + [string]$PackageSource = "https://api.nuget.org/v3/index.json", + [string]$DownloadPath, + [Parameter(ValueFromRemainingArguments = $true)] + [string[]]$args +) + +# The URL to download nuget.exe. +$nugetExeUrl = "https://dist.nuget.org/win-x86-commandline/v4.9.4/nuget.exe" + +# The package ID of the verification tool. +$packageId = "NuGet.VerifyMicrosoftPackage" + +# The location that nuget.exe and the verification tool will be downloaded to. +if (!$DownloadPath) { + $DownloadPath = (Join-Path $env:TEMP "NuGet.VerifyMicrosoftPackage") +} + +$fence = New-Object -TypeName string -ArgumentList '=', 80 + +# Create the download directory, if it doesn't already exist. +if (!(Test-Path $DownloadPath)) { + New-Item -ItemType Directory $DownloadPath | Out-Null +} +Write-Host "Using download path: $DownloadPath" + +if ($NuGetExePath) { + $nuget = $NuGetExePath +} else { + $downloadedNuGetExe = Join-Path $DownloadPath "nuget.exe" + + # Download nuget.exe, if it doesn't already exist. + if (!(Test-Path $downloadedNuGetExe)) { + Write-Host "Downloading nuget.exe from $nugetExeUrl..." + $ProgressPreference = 'SilentlyContinue' + try { + Invoke-WebRequest $nugetExeUrl -OutFile $downloadedNuGetExe + $ProgressPreference = 'Continue' + } catch { + $ProgressPreference = 'Continue' + Write-Error $_ + Write-Error "nuget.exe failed to download." + exit + } + } + + $nuget = $downloadedNuGetExe +} + +Write-Host "Using nuget.exe path: $nuget" +Write-Host " " + +# Download the latest version of the verification tool. 
+Write-Host "Downloading the latest version of $packageId from $packageSource..." +Write-Host $fence +& $nuget install $packageId ` + -Prerelease ` + -OutputDirectory $DownloadPath ` + -Source $PackageSource +Write-Host $fence +Write-Host " " + +if ($LASTEXITCODE -ne 0) { + Write-Error "nuget.exe failed to fetch the verify tool." + exit +} + +# Find the most recently downloaded tool +Write-Host "Finding the most recently downloaded verification tool." +$verifyProbePath = Join-Path $DownloadPath "$packageId.*" +$verifyPath = Get-ChildItem -Path $verifyProbePath -Directory ` + | Sort-Object -Property LastWriteTime -Descending ` + | Select-Object -First 1 +$verify = Join-Path $verifyPath "tools\NuGet.VerifyMicrosoftPackage.exe" +Write-Host "Using verification tool: $verify" +Write-Host " " + +# Execute the verification tool. +Write-Host "Executing the verify tool..." +Write-Host $fence +& $verify $args +Write-Host $fence +Write-Host " " + +# Respond to the exit code. +if ($LASTEXITCODE -ne 0) { + Write-Error "The verify tool found some problems." +} else { + Write-Output "The verify tool succeeded." +} diff --git a/src/arcade/eng/common/post-build/publish-using-darc.ps1 b/src/arcade/eng/common/post-build/publish-using-darc.ps1 new file mode 100644 index 000000000..1eda208a3 --- /dev/null +++ b/src/arcade/eng/common/post-build/publish-using-darc.ps1 @@ -0,0 +1,69 @@ +param( + [Parameter(Mandatory=$true)][int] $BuildId, + [Parameter(Mandatory=$true)][int] $PublishingInfraVersion, + [Parameter(Mandatory=$true)][string] $AzdoToken, + [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net', + [Parameter(Mandatory=$true)][string] $WaitPublishingFinish, + [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, + [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters, + [Parameter(Mandatory=$false)][string] $RequireDefaultChannels, + [Parameter(Mandatory=$false)][string] $SkipAssetsPublishing +) + +try { + # `tools.ps1` checks $ci to perform some actions. Since the post-build + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + $disableConfigureToolsetImport = $true + . $PSScriptRoot\..\tools.ps1 + + $darc = Get-Darc + + $optionalParams = [System.Collections.ArrayList]::new() + + if ("" -ne $ArtifactsPublishingAdditionalParameters) { + $optionalParams.Add("--artifact-publishing-parameters") | Out-Null + $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null + } + + if ("" -ne $SymbolPublishingAdditionalParameters) { + $optionalParams.Add("--symbol-publishing-parameters") | Out-Null + $optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null + } + + if ("false" -eq $WaitPublishingFinish) { + $optionalParams.Add("--no-wait") | Out-Null + } + + if ("true" -eq $RequireDefaultChannels) { + $optionalParams.Add("--default-channels-required") | Out-Null + } + + if ("true" -eq $SkipAssetsPublishing) { + $optionalParams.Add("--skip-assets-publishing") | Out-Null + } + + & $darc add-build-to-channel ` + --id $buildId ` + --publishing-infra-version $PublishingInfraVersion ` + --default-channels ` + --source-branch main ` + --azdev-pat "$AzdoToken" ` + --bar-uri "$MaestroApiEndPoint" ` + --ci ` + --verbose ` + @optionalParams + + if ($LastExitCode -ne 0) { + Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..." + exit 1 + } + + Write-Host 'done.' 
+} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels." + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/post-build/redact-logs.ps1 b/src/arcade/eng/common/post-build/redact-logs.ps1 new file mode 100644 index 000000000..b7fc19591 --- /dev/null +++ b/src/arcade/eng/common/post-build/redact-logs.ps1 @@ -0,0 +1,89 @@ +[CmdletBinding(PositionalBinding=$False)] +param( + [Parameter(Mandatory=$true, Position=0)][string] $InputPath, + [Parameter(Mandatory=$true)][string] $BinlogToolVersion, + [Parameter(Mandatory=$false)][string] $DotnetPath, + [Parameter(Mandatory=$false)][string] $PackageFeed = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json', + # File with strings to redact - separated by newlines. + # For comments start the line with '# ' - such lines are ignored + [Parameter(Mandatory=$false)][string] $TokensFilePath, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact +) + +try { + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + + # `tools.ps1` checks $ci to perform some actions. Since the post-build + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + $disableConfigureToolsetImport = $true + . $PSScriptRoot\..\tools.ps1 + + $packageName = 'binlogtool' + + $dotnet = $DotnetPath + + if (!$dotnet) { + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + } + + $toolList = & "$dotnet" tool list -g + + if ($toolList -like "*$packageName*") { + & "$dotnet" tool uninstall $packageName -g + } + + $toolPath = "$PSScriptRoot\..\..\..\.tools" + $verbosity = 'minimal' + + New-Item -ItemType Directory -Force -Path $toolPath + + Push-Location -Path $toolPath + + try { + Write-Host "Installing Binlog redactor CLI..." + Write-Host "'$dotnet' new tool-manifest" + & "$dotnet" new tool-manifest + Write-Host "'$dotnet' tool install $packageName --local --add-source '$PackageFeed' -v $verbosity --version $BinlogToolVersion" + & "$dotnet" tool install $packageName --local --add-source "$PackageFeed" -v $verbosity --version $BinlogToolVersion + + if (Test-Path $TokensFilePath) { + Write-Host "Adding additional sensitive data for redaction from file: " $TokensFilePath + $TokensToRedact += Get-Content -Path $TokensFilePath | Foreach {$_.Trim()} | Where { $_ -notmatch "^# " } + } + + $optionalParams = [System.Collections.ArrayList]::new() + + Foreach ($p in $TokensToRedact) + { + if($p -match '^\$\(.*\)$') + { + Write-Host ("Ignoring token {0} as it is probably unexpanded AzDO variable" -f $p) + } + elseif($p) + { + $optionalParams.Add("-p:" + $p) | Out-Null + } + } + + & $dotnet binlogtool redact --input:$InputPath --recurse --in-place ` + @optionalParams + + if ($LastExitCode -ne 0) { + Write-PipelineTelemetryError -Category 'Redactor' -Type 'warning' -Message "Problems using Redactor tool (exit code: $LastExitCode). But ignoring them now." + } + } + finally { + Pop-Location + } + + Write-Host 'done.' +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Category 'Redactor' -Message "There was an error while trying to redact logs. 
Error: $_" + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/post-build/sourcelink-validation.ps1 b/src/arcade/eng/common/post-build/sourcelink-validation.ps1 new file mode 100644 index 000000000..1976ef70f --- /dev/null +++ b/src/arcade/eng/common/post-build/sourcelink-validation.ps1 @@ -0,0 +1,327 @@ +param( + [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored + [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation + [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade + [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages + [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +# `tools.ps1` checks $ci to perform some actions. Since the post-build +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +$disableConfigureToolsetImport = $true +. $PSScriptRoot\..\tools.ps1 + +# Cache/HashMap (File -> Exist flag) used to consult whether a file exist +# in the repository at a specific commit point. This is populated by inserting +# all files present in the repo at a specific commit point. +$global:RepoFiles = @{} + +# Maximum number of jobs to run in parallel +$MaxParallelJobs = 16 + +$MaxRetries = 5 +$RetryWaitTimeInSeconds = 30 + +# Wait time between check for system load +$SecondsBetweenLoadChecks = 10 + +if (!$InputPath -or !(Test-Path $InputPath)){ + Write-Host "No files to validate." + ExitWithExitCode 0 +} + +$ValidatePackage = { + param( + [string] $PackagePath # Full path to a Symbols.NuGet package + ) + + . $using:PSScriptRoot\..\tools.ps1 + + # Ensure input file exist + if (!(Test-Path $PackagePath)) { + Write-Host "Input file does not exist: $PackagePath" + return [pscustomobject]@{ + result = 1 + packagePath = $PackagePath + } + } + + # Extensions for which we'll look for SourceLink information + # For now we'll only care about Portable & Embedded PDBs + $RelevantExtensions = @('.dll', '.exe', '.pdb') + + Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...' 
+ + $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) + $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId + $FailedFiles = 0 + + Add-Type -AssemblyName System.IO.Compression.FileSystem + + [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null + + try { + $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) + + $zip.Entries | + Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | + ForEach-Object { + $FileName = $_.FullName + $Extension = [System.IO.Path]::GetExtension($_.Name) + $FakeName = -Join((New-Guid), $Extension) + $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName + + # We ignore resource DLLs + if ($FileName.EndsWith('.resources.dll')) { + return [pscustomobject]@{ + result = 0 + packagePath = $PackagePath + } + } + + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true) + + $ValidateFile = { + param( + [string] $FullPath, # Full path to the module that has to be checked + [string] $RealPath, + [ref] $FailedFiles + ) + + $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools" + $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe" + $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String + + if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) { + $NumFailedLinks = 0 + + # We only care about Http addresses + $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches + + if ($Matches.Count -ne 0) { + $Matches.Value | + ForEach-Object { + $Link = $_ + $CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/" + + $FilePath = $Link.Replace($CommitUrl, "") + $Status = 200 + $Cache = $using:RepoFiles + + $attempts = 0 + + while ($attempts -lt $using:MaxRetries) { + if ( !($Cache.ContainsKey($FilePath)) ) { + try { + $Uri = $Link -as [System.URI] + + if ($Link -match "submodules") { + # Skip submodule links until sourcelink properly handles submodules + $Status = 200 + } + elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) { + # Only GitHub links are valid + $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode + } + else { + # If it's not a github link, we want to break out of the loop and not retry. + $Status = 0 + $attempts = $using:MaxRetries + } + } + catch { + Write-Host $_ + $Status = 0 + } + } + + if ($Status -ne 200) { + $attempts++ + + if ($attempts -lt $using:MaxRetries) + { + $attemptsLeft = $using:MaxRetries - $attempts + Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds" + Start-Sleep -Seconds $using:RetryWaitTimeInSeconds + } + else { + if ($NumFailedLinks -eq 0) { + if ($FailedFiles.Value -eq 0) { + Write-Host + } + + Write-Host "`tFile $RealPath has broken links:" + } + + Write-Host "`t`tFailed to retrieve $Link" + + $NumFailedLinks++ + } + } + else { + break + } + } + } + } + + if ($NumFailedLinks -ne 0) { + $FailedFiles.value++ + $global:LASTEXITCODE = 1 + } + } + } + + &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles) + } + } + catch { + Write-Host $_ + } + finally { + $zip.Dispose() + } + + if ($FailedFiles -eq 0) { + Write-Host 'Passed.' + return [pscustomobject]@{ + result = 0 + packagePath = $PackagePath + } + } + else { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links." 
+ return [pscustomobject]@{ + result = 1 + packagePath = $PackagePath + } + } +} + +function CheckJobResult( + $result, + $packagePath, + [ref]$ValidationFailures, + [switch]$logErrors) { + if ($result -ne '0') { + if ($logErrors) { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links." + } + $ValidationFailures.Value++ + } +} + +function ValidateSourceLinkLinks { + if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) { + if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format / or -. '$GHRepoName'" + ExitWithExitCode 1 + } + else { + $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2'; + } + } + + if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40 chars hexadecimal string. '$GHCommit'" + ExitWithExitCode 1 + } + + if ($GHRepoName -ne '' -and $GHCommit -ne '') { + $RepoTreeURL = -Join('http://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1') + $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript') + + try { + # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash + $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree + + foreach ($file in $Data) { + $Extension = [System.IO.Path]::GetExtension($file.path) + + if ($CodeExtensions.Contains($Extension)) { + $RepoFiles[$file.path] = 1 + } + } + } + catch { + Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching." + } + } + elseif ($GHRepoName -ne '' -or $GHCommit -ne '') { + Write-Host 'For using the http caching mechanism both GHRepoName and GHCommit should be informed.' + } + + if (Test-Path $ExtractPath) { + Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue + } + + $ValidationFailures = 0 + + # Process each NuGet package in parallel + Get-ChildItem "$InputPath\*.symbols.nupkg" | + ForEach-Object { + Write-Host "Starting $($_.FullName)" + Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null + $NumJobs = @(Get-Job -State 'Running').Count + + while ($NumJobs -ge $MaxParallelJobs) { + Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again." + sleep $SecondsBetweenLoadChecks + $NumJobs = @(Get-Job -State 'Running').Count + } + + foreach ($Job in @(Get-Job -State 'Completed')) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors + Remove-Job -Id $Job.Id + } + } + + foreach ($Job in @(Get-Job)) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) + Remove-Job -Id $Job.Id + } + if ($ValidationFailures -gt 0) { + Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation." 
+ ExitWithExitCode 1 + } +} + +function InstallSourcelinkCli { + $sourcelinkCliPackageName = 'sourcelink' + + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + $toolList = & "$dotnet" tool list --global + + if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) { + Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed." + } + else { + Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..." + Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' + & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global + } +} + +try { + InstallSourcelinkCli + + foreach ($Job in @(Get-Job)) { + Remove-Job -Id $Job.Id + } + + ValidateSourceLinkLinks +} +catch { + Write-Host $_.Exception + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'SourceLink' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/post-build/symbols-validation.ps1 b/src/arcade/eng/common/post-build/symbols-validation.ps1 new file mode 100644 index 000000000..7146e593f --- /dev/null +++ b/src/arcade/eng/common/post-build/symbols-validation.ps1 @@ -0,0 +1,337 @@ +param( + [Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored + [Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation + [Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use + [Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs + [Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error + [Parameter(Mandatory = $false)][switch] $Clean, # Clean extracted symbols directory after checking symbols + [Parameter(Mandatory = $false)][string] $SymbolExclusionFile # Exclude the symbols in the file from publishing to symbol server +) + +. $PSScriptRoot\..\tools.ps1 +# Maximum number of jobs to run in parallel +$MaxParallelJobs = 16 + +# Max number of retries +$MaxRetry = 5 + +# Wait time between check for system load +$SecondsBetweenLoadChecks = 10 + +# Set error codes +Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1 +Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2 + +$WindowsPdbVerificationParam = "" +if ($CheckForWindowsPdbs) { + $WindowsPdbVerificationParam = "--windows-pdbs" +} + +$ExclusionSet = New-Object System.Collections.Generic.HashSet[string]; + +if (!$InputPath -or !(Test-Path $InputPath)){ + Write-Host "No symbols to validate." + ExitWithExitCode 0 +} + +#Check if the path exists +if ($SymbolExclusionFile -and (Test-Path $SymbolExclusionFile)){ + [string[]]$Exclusions = Get-Content "$SymbolExclusionFile" + $Exclusions | foreach { if($_ -and $_.Trim()){$ExclusionSet.Add($_)} } +} +else{ + Write-Host "Symbol Exclusion file does not exists. No symbols to exclude." 
+} + +$CountMissingSymbols = { + param( + [string] $PackagePath, # Path to a NuGet package + [string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs + ) + + Add-Type -AssemblyName System.IO.Compression.FileSystem + + Write-Host "Validating $PackagePath " + + # Ensure input file exist + if (!(Test-Path $PackagePath)) { + Write-PipelineTaskError "Input file does not exist: $PackagePath" + return [pscustomobject]@{ + result = $using:ERROR_FILEDOESNOTEXIST + packagePath = $PackagePath + } + } + + # Extensions for which we'll look for symbols + $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib') + + # How many files are missing symbol information + $MissingSymbols = 0 + + $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) + $PackageGuid = New-Guid + $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid + $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols' + + try { + [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath) + } + catch { + Write-Host "Something went wrong extracting $PackagePath" + Write-Host $_ + return [pscustomobject]@{ + result = $using:ERROR_BADEXTRACT + packagePath = $PackagePath + } + } + + Get-ChildItem -Recurse $ExtractPath | + Where-Object { $RelevantExtensions -contains $_.Extension } | + ForEach-Object { + $FileName = $_.FullName + if ($FileName -Match '\\ref\\') { + Write-Host "`t Ignoring reference assembly file " $FileName + return + } + + $FirstMatchingSymbolDescriptionOrDefault = { + param( + [string] $FullPath, # Full path to the module that has to be checked + [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols + [string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs. + [string] $SymbolsPath + ) + + $FileName = [System.IO.Path]::GetFileName($FullPath) + $Extension = [System.IO.Path]::GetExtension($FullPath) + + # Those below are potential symbol files that the `dotnet symbol` might + # return. Which one will be returned depend on the type of file we are + # checking and which type of file was uploaded. 
+ + # The file itself is returned + $SymbolPath = $SymbolsPath + '\' + $FileName + + # PDB file for the module + $PdbPath = $SymbolPath.Replace($Extension, '.pdb') + + # PDB file for R2R module (created by crossgen) + $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb') + + # DBG file for a .so library + $SODbg = $SymbolPath.Replace($Extension, '.so.dbg') + + # DWARF file for a .dylib + $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf') + + $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools" + $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe" + + $totalRetries = 0 + + while ($totalRetries -lt $using:MaxRetry) { + + # Save the output and get diagnostic output + $output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String + + if ((Test-Path $PdbPath) -and (Test-path $SymbolPath)) { + return 'Module and PDB for Module' + } + elseif ((Test-Path $NGenPdb) -and (Test-Path $PdbPath) -and (Test-Path $SymbolPath)) { + return 'Dll, PDB and NGen PDB' + } + elseif ((Test-Path $SODbg) -and (Test-Path $SymbolPath)) { + return 'So and DBG for SO' + } + elseif ((Test-Path $DylibDwarf) -and (Test-Path $SymbolPath)) { + return 'Dylib and Dwarf for Dylib' + } + elseif (Test-Path $SymbolPath) { + return 'Module' + } + else + { + $totalRetries++ + } + } + + return $null + } + + $FileRelativePath = $FileName.Replace("$ExtractPath\", "") + if (($($using:ExclusionSet) -ne $null) -and ($($using:ExclusionSet).Contains($FileRelativePath) -or ($($using:ExclusionSet).Contains($FileRelativePath.Replace("\", "/"))))){ + Write-Host "Skipping $FileName from symbol validation" + } + + else { + $FileGuid = New-Guid + $ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid + + $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault ` + -FullPath $FileName ` + -TargetServerParam '--microsoft-symbol-server' ` + -SymbolsPath "$ExpandedSymbolsPath-msdl" ` + -WindowsPdbVerificationParam $WindowsPdbVerificationParam + $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault ` + -FullPath $FileName ` + -TargetServerParam '--internal-server' ` + -SymbolsPath "$ExpandedSymbolsPath-symweb" ` + -WindowsPdbVerificationParam $WindowsPdbVerificationParam + + Write-Host -NoNewLine "`t Checking file " $FileName "... " + + if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) { + Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)" + } + else { + $MissingSymbols++ + + if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) { + Write-Host 'No symbols found on MSDL or SymWeb!' + } + else { + if ($SymbolsOnMSDL -eq $null) { + Write-Host 'No symbols found on MSDL!' + } + else { + Write-Host 'No symbols found on SymWeb!' 
+ } + } + } + } + } + + if ($using:Clean) { + Remove-Item $ExtractPath -Recurse -Force + } + + Pop-Location + + return [pscustomobject]@{ + result = $MissingSymbols + packagePath = $PackagePath + } +} + +function CheckJobResult( + $result, + $packagePath, + [ref]$DupedSymbols, + [ref]$TotalFailures) { + if ($result -eq $ERROR_BADEXTRACT) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files" + $DupedSymbols.Value++ + } + elseif ($result -eq $ERROR_FILEDOESNOTEXIST) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath does not exist" + $TotalFailures.Value++ + } + elseif ($result -gt '0') { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath" + $TotalFailures.Value++ + } + else { + Write-Host "All symbols verified for package $packagePath" + } +} + +function CheckSymbolsAvailable { + if (Test-Path $ExtractPath) { + Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue + } + + $TotalPackages = 0 + $TotalFailures = 0 + $DupedSymbols = 0 + + Get-ChildItem "$InputPath\*.nupkg" | + ForEach-Object { + $FileName = $_.Name + $FullName = $_.FullName + + # These packages from Arcade-Services include some native libraries that + # our current symbol uploader can't handle. Below is a workaround until + # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted. + if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') { + Write-Host "Ignoring Arcade-services file: $FileName" + Write-Host + return + } + elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') { + Write-Host "Ignoring Arcade-services file: $FileName" + Write-Host + return + } + + $TotalPackages++ + + Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null + + $NumJobs = @(Get-Job -State 'Running').Count + + while ($NumJobs -ge $MaxParallelJobs) { + Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again." + sleep $SecondsBetweenLoadChecks + $NumJobs = @(Get-Job -State 'Running').Count + } + + foreach ($Job in @(Get-Job -State 'Completed')) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures) + Remove-Job -Id $Job.Id + } + Write-Host + } + + foreach ($Job in @(Get-Job)) { + $jobResult = Wait-Job -Id $Job.Id | Receive-Job + CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures) + } + + if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) { + if ($TotalFailures -gt 0) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages" + } + + if ($DupedSymbols -gt 0) { + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted" + } + + ExitWithExitCode 1 + } + else { + Write-Host "All symbols validated!" + } +} + +function InstallDotnetSymbol { + $dotnetSymbolPackageName = 'dotnet-symbol' + + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + $toolList = & "$dotnet" tool list --global + + if (($toolList -like "*$dotnetSymbolPackageName*") -and ($toolList -like "*$dotnetSymbolVersion*")) { + Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed." 
+ } + else { + Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..." + Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.' + & "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global + } +} + +try { + InstallDotnetSymbol + + foreach ($Job in @(Get-Job)) { + Remove-Job -Id $Job.Id + } + + CheckSymbolsAvailable +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/retain-build.ps1 b/src/arcade/eng/common/retain-build.ps1 new file mode 100644 index 000000000..e7ba975ad --- /dev/null +++ b/src/arcade/eng/common/retain-build.ps1 @@ -0,0 +1,45 @@ + +Param( +[Parameter(Mandatory=$true)][int] $buildId, +[Parameter(Mandatory=$true)][string] $azdoOrgUri, +[Parameter(Mandatory=$true)][string] $azdoProject, +[Parameter(Mandatory=$true)][string] $token +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +function Get-AzDOHeaders( + [string] $token) +{ + $base64AuthInfo = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":${token}")) + $headers = @{"Authorization"="Basic $base64AuthInfo"} + return $headers +} + +function Update-BuildRetention( + [string] $azdoOrgUri, + [string] $azdoProject, + [int] $buildId, + [string] $token) +{ + $headers = Get-AzDOHeaders -token $token + $requestBody = "{ + `"keepForever`": `"true`" + }" + + $requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0" + write-Host "Attempting to retain build using the following URI: ${requestUri} ..." + + try { + Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Header $headers -contentType "application/json" + Write-Host "Updated retention settings for build ${buildId}." + } + catch { + Write-Error "Failed to update retention settings for build: $_.Exception.Response.StatusDescription" + exit 1 + } +} + +Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token +exit 0 diff --git a/src/arcade/eng/common/sdk-task.ps1 b/src/arcade/eng/common/sdk-task.ps1 new file mode 100644 index 000000000..a9d2a2d26 --- /dev/null +++ b/src/arcade/eng/common/sdk-task.ps1 @@ -0,0 +1,100 @@ +[CmdletBinding(PositionalBinding=$false)] +Param( + [string] $configuration = 'Debug', + [string] $task, + [string] $verbosity = 'minimal', + [string] $msbuildEngine = $null, + [switch] $restore, + [switch] $prepareMachine, + [switch][Alias('nobl')]$excludeCIBinaryLog, + [switch] $help, + [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties +) + +$ci = $true +$binaryLog = if ($excludeCIBinaryLog) { $false } else { $true } +$warnAsError = $true + +. $PSScriptRoot\tools.ps1 + +function Print-Usage() { + Write-Host "Common settings:" + Write-Host " -task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)" + Write-Host " -restore Restore dependencies" + Write-Host " -verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]" + Write-Host " -help Print help and exit" + Write-Host "" + + Write-Host "Advanced settings:" + Write-Host " -prepareMachine Prepare machine for CI run" + Write-Host " -msbuildEngine Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)." 
+ Write-Host " -excludeCIBinaryLog When running on CI, allow no binary log (short: -nobl)" + Write-Host "" + Write-Host "Command line arguments not listed above are passed thru to msbuild." +} + +function Build([string]$target) { + $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" } + $log = Join-Path $LogDir "$task$logSuffix.binlog" + $binaryLogArg = if ($binaryLog) { "/bl:$log" } else { "" } + $outputPath = Join-Path $ToolsetDir "$task\" + + MSBuild $taskProject ` + $binaryLogArg ` + /t:$target ` + /p:Configuration=$configuration ` + /p:RepoRoot=$RepoRoot ` + /p:BaseIntermediateOutputPath=$outputPath ` + /v:$verbosity ` + @properties +} + +try { + if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) { + Print-Usage + exit 0 + } + + if ($task -eq "") { + Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task '" + Print-Usage + ExitWithExitCode 1 + } + + if( $msbuildEngine -eq "vs") { + # Ensure desktop MSBuild is available for sdk tasks. + if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) { + $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty + } + if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.13.0" -MemberType NoteProperty + } + if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { + $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true + } + if ($xcopyMSBuildToolsFolder -eq $null) { + throw 'Unable to get xcopy downloadable version of msbuild' + } + + $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe" + } + + $taskProject = GetSdkTaskProject $task + if (!(Test-Path $taskProject)) { + Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task" + ExitWithExitCode 1 + } + + if ($restore) { + Build 'Restore' + } + + Build 'Execute' +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Category 'Build' -Message $_ + ExitWithExitCode 1 +} + +ExitWithExitCode 0 diff --git a/src/arcade/eng/common/sdk-task.sh b/src/arcade/eng/common/sdk-task.sh new file mode 100644 index 000000000..2f83adc02 --- /dev/null +++ b/src/arcade/eng/common/sdk-task.sh @@ -0,0 +1,116 @@ +#!/usr/bin/env bash + +show_usage() { + echo "Common settings:" + echo " --task Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)" + echo " --restore Restore dependencies" + echo " --verbosity Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]" + echo " --help Print help and exit" + echo "" + + echo "Advanced settings:" + echo " --excludeCIBinarylog Don't output binary log (short: -nobl)" + echo "" + echo "Command line arguments not listed above are passed thru to msbuild." 
+} + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +Build() { + local target=$1 + local log_suffix="" + [[ "$target" != "Execute" ]] && log_suffix=".$target" + local log="$log_dir/$task$log_suffix.binlog" + local binaryLogArg="" + [[ $binary_log == true ]] && binaryLogArg="/bl:$log" + local output_path="$toolset_dir/$task/" + + MSBuild "$taskProject" \ + $binaryLogArg \ + /t:"$target" \ + /p:Configuration="$configuration" \ + /p:RepoRoot="$repo_root" \ + /p:BaseIntermediateOutputPath="$output_path" \ + /v:"$verbosity" \ + $properties +} + +binary_log=true +configuration="Debug" +verbosity="minimal" +exclude_ci_binary_log=false +restore=false +help=false +properties='' + +while (($# > 0)); do + lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" + case $lowerI in + --task) + task=$2 + shift 2 + ;; + --restore) + restore=true + shift 1 + ;; + --verbosity) + verbosity=$2 + shift 2 + ;; + --excludecibinarylog|--nobl) + binary_log=false + exclude_ci_binary_log=true + shift 1 + ;; + --help) + help=true + shift 1 + ;; + *) + properties="$properties $1" + shift 1 + ;; + esac +done + +ci=true +warnAsError=true + +if $help; then + show_usage + exit 0 +fi + +. "$scriptroot/tools.sh" +InitializeToolset + +if [[ -z "$task" ]]; then + Write-PipelineTelemetryError -Category 'Task' -Name 'MissingTask' -Message "Missing required parameter '-task '" + ExitWithExitCode 1 +fi + +taskProject=$(GetSdkTaskProject "$task") +if [[ ! -e "$taskProject" ]]; then + Write-PipelineTelemetryError -Category 'Task' -Name 'UnknownTask' -Message "Unknown task: $task" + ExitWithExitCode 1 +fi + +if $restore; then + Build "Restore" +fi + +Build "Execute" + + +ExitWithExitCode 0 diff --git a/src/arcade/eng/common/sdl/NuGet.config b/src/arcade/eng/common/sdl/NuGet.config new file mode 100644 index 000000000..3849bdb3c --- /dev/null +++ b/src/arcade/eng/common/sdl/NuGet.config @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/src/arcade/eng/common/sdl/configure-sdl-tool.ps1 b/src/arcade/eng/common/sdl/configure-sdl-tool.ps1 new file mode 100644 index 000000000..27f5a4115 --- /dev/null +++ b/src/arcade/eng/common/sdl/configure-sdl-tool.ps1 @@ -0,0 +1,130 @@ +Param( + [string] $GuardianCliLocation, + [string] $WorkingDirectory, + [string] $TargetDirectory, + [string] $GdnFolder, + # The list of Guardian tools to configure. For each object in the array: + # - If the item is a [hashtable], it must contain these entries: + # - Name = The tool name as Guardian knows it. + # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique + # among all tool entries with the same Name. + # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")' + # - If the item is a [string] $v, it is treated as '@{ Name="$v" }' + [object[]] $ToolsList, + [string] $GuardianLoggerLevel='Standard', + # Optional: Additional params to add to any tool using CredScan. + [string[]] $CrScanAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using PoliCheck. 
+ [string[]] $PoliCheckAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using CodeQL/Semmle. + [string[]] $CodeQLAdditionalRunConfigParams, + # Optional: Additional params to add to any tool using Binskim. + [string[]] $BinskimAdditionalRunConfigParams +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + # Normalize tools list: all in [hashtable] form with defined values for each key. + $ToolsList = $ToolsList | + ForEach-Object { + if ($_ -is [string]) { + $_ = @{ Name = $_ } + } + + if (-not ($_['Scenario'])) { $_.Scenario = "" } + if (-not ($_['Args'])) { $_.Args = @() } + $_ + } + + Write-Host "List of tools to configure:" + $ToolsList | ForEach-Object { $_ | Out-String | Write-Host } + + # We store config files in the r directory of .gdn + $gdnConfigPath = Join-Path $GdnFolder 'r' + $ValidPath = Test-Path $GuardianCliLocation + + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." + ExitWithExitCode 1 + } + + foreach ($tool in $ToolsList) { + # Put together the name and scenario to make a unique key. + $toolConfigName = $tool.Name + if ($tool.Scenario) { + $toolConfigName += "_" + $tool.Scenario + } + + Write-Host "=== Configuring $toolConfigName..." + + $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig" + + # For some tools, add default and automatic args. + switch -Exact ($tool.Name) { + 'credscan' { + if ($targetDirectory) { + $tool.Args += "`"TargetDirectory < $TargetDirectory`"" + } + $tool.Args += "`"OutputType < pre`"" + $tool.Args += $CrScanAdditionalRunConfigParams + } + 'policheck' { + if ($targetDirectory) { + $tool.Args += "`"Target < $TargetDirectory`"" + } + $tool.Args += $PoliCheckAdditionalRunConfigParams + } + {$_ -in 'semmle', 'codeql'} { + if ($targetDirectory) { + $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`"" + } + $tool.Args += $CodeQLAdditionalRunConfigParams + } + 'binskim' { + if ($targetDirectory) { + # Binskim crashes due to specific PDBs. GitHub issue: https://github.com/microsoft/binskim/issues/924. + # We are excluding all `_.pdb` files from the scan. + $tool.Args += "`"Target < $TargetDirectory\**;-:file|$TargetDirectory\**\_.pdb`"" + } + $tool.Args += $BinskimAdditionalRunConfigParams + } + } + + # Create variable pointing to the args array directly so we can use splat syntax later. + $toolArgs = $tool.Args + + # Configure the tool. If args array is provided or the current tool has some default arguments + # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}", + # one per parameter. 
Doc page for "guardian configure": + # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure + Exec-BlockVerbosely { + & $GuardianCliLocation configure ` + --working-directory $WorkingDirectory ` + --tool $tool.Name ` + --output-path $gdnConfigFile ` + --logger-level $GuardianLoggerLevel ` + --noninteractive ` + --force ` + $(if ($toolArgs) { "--args" }) @toolArgs + Exit-IfNZEC "Sdl" + } + + Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile" + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/sdl/execute-all-sdl-tools.ps1 b/src/arcade/eng/common/sdl/execute-all-sdl-tools.ps1 new file mode 100644 index 000000000..4715d75e9 --- /dev/null +++ b/src/arcade/eng/common/sdl/execute-all-sdl-tools.ps1 @@ -0,0 +1,167 @@ +Param( + [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified) + [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified) + [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified + [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade) + [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master + [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located + [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located + [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault + + # Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list + # format. + [object[]] $SourceToolsList, + # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools + # list format. + [object[]] $ArtifactToolsList, + # Optional: list of SDL tools to run without automatically specifying a target directory. See + # 'configure-sdl-tool.ps1' for tools list format. + [object[]] $CustomToolsList, + + [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs. + [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs. + [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber) + [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed + [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs. 
+ [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs. + [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs. + [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs. + [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. + [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs. + [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error + [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1") + [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1") + [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1") + [string[]] $BinskimAdditionalRunConfigParams, # Optional: Additional Params to custom build a Binskim run config in the format @("xyz < abc","sdf < 1") + [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run +) + +try { + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + $disableConfigureToolsetImport = $true + $global:LASTEXITCODE = 0 + + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + #Replace repo names to the format of org/repo + if (!($Repository.contains('/'))) { + $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2'; + } + else{ + $RepoName = $Repository; + } + + if ($GuardianPackageName) { + $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd')) + } else { + $guardianCliLocation = $GuardianCliLocation + } + + $workingDirectory = (Split-Path $SourceDirectory -Parent) + $ValidPath = Test-Path $guardianCliLocation + + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.' 
+ ExitWithExitCode 1 + } + + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel + } + $gdnFolder = Join-Path $workingDirectory '.gdn' + + if ($TsaOnboard) { + if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) { + Exec-BlockVerbosely { + & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + } else { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.' + ExitWithExitCode 1 + } + } + + # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory. + function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) { + if ($tools -and $tools.Count -gt 0) { + Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') ` + -GuardianCliLocation $guardianCliLocation ` + -WorkingDirectory $workingDirectory ` + -TargetDirectory $targetDirectory ` + -GdnFolder $gdnFolder ` + -ToolsList $tools ` + -AzureDevOpsAccessToken $AzureDevOpsAccessToken ` + -GuardianLoggerLevel $GuardianLoggerLevel ` + -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams ` + -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams ` + -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams ` + -BinskimAdditionalRunConfigParams $BinskimAdditionalRunConfigParams + if ($BreakOnFailure) { + Exit-IfNZEC "Sdl" + } + } + } + } + + # Configure Artifact and Source tools with default Target directories. + Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory + Configure-ToolsList $SourceToolsList $SourceDirectory + # Configure custom tools with no default Target directory. + Configure-ToolsList $CustomToolsList $null + + # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call. + # (If we used "run" multiple times, each run would overwrite data from earlier runs.) 
+ Exec-BlockVerbosely { + & $(Join-Path $PSScriptRoot 'run-sdl.ps1') ` + -GuardianCliLocation $guardianCliLocation ` + -WorkingDirectory $SourceDirectory ` + -UpdateBaseline $UpdateBaseline ` + -GdnFolder $gdnFolder + } + + if ($TsaPublish) { + if ($TsaBranchName -and $BuildNumber) { + if (-not $TsaRepositoryName) { + $TsaRepositoryName = "$($Repository)-$($BranchName)" + } + Exec-BlockVerbosely { + & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + } else { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.' + ExitWithExitCode 1 + } + } + + if ($BreakOnFailure) { + Write-Host "Failing the build in case of breaking results..." + Exec-BlockVerbosely { + & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel + } + } else { + Write-Host "Letting the build pass even if there were breaking results..." + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + exit 1 +} diff --git a/src/arcade/eng/common/sdl/extract-artifact-archives.ps1 b/src/arcade/eng/common/sdl/extract-artifact-archives.ps1 new file mode 100644 index 000000000..68da4fbf2 --- /dev/null +++ b/src/arcade/eng/common/sdl/extract-artifact-archives.ps1 @@ -0,0 +1,63 @@ +# This script looks for each archive file in a directory and extracts it into the target directory. +# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**". +# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip. +param( + # Full path to directory where archives are stored. + [Parameter(Mandatory=$true)][string] $InputPath, + # Full path to directory to extract archives into. May be the same as $InputPath. + [Parameter(Mandatory=$true)][string] $ExtractPath +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + Measure-Command { + $jobs = @() + + # Find archive files for non-Windows and Windows builds. + $archiveFiles = @( + Get-ChildItem (Join-Path $InputPath "*.tar.gz") + Get-ChildItem (Join-Path $InputPath "*.zip") + ) + + foreach ($targzFile in $archiveFiles) { + $jobs += Start-Job -ScriptBlock { + $file = $using:targzFile + $fileName = [System.IO.Path]::GetFileName($file) + $extractDir = Join-Path $using:ExtractPath "$fileName.extracted" + + New-Item $extractDir -ItemType Directory -Force | Out-Null + + Write-Host "Extracting '$file' to '$extractDir'..." + + # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early. 
+ # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the + # error. Save output so it can be stored in the exception string along with context. + $output = tar -xf $file -C $extractDir 2>&1 + # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we + # don't have access to the outer scope. + if ($LASTEXITCODE -ne 0) { + throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'" + } + + Write-Host "Extracted to $extractDir" + } + } + + Receive-Job $jobs -Wait + } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/sdl/extract-artifact-packages.ps1 b/src/arcade/eng/common/sdl/extract-artifact-packages.ps1 new file mode 100644 index 000000000..f031ed5b2 --- /dev/null +++ b/src/arcade/eng/common/sdl/extract-artifact-packages.ps1 @@ -0,0 +1,82 @@ +param( + [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored + [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true + +function ExtractArtifacts { + if (!(Test-Path $InputPath)) { + Write-Host "Input Path does not exist: $InputPath" + ExitWithExitCode 0 + } + $Jobs = @() + Get-ChildItem "$InputPath\*.nupkg" | + ForEach-Object { + $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName + } + + foreach ($Job in $Jobs) { + Wait-Job -Id $Job.Id | Receive-Job + } +} + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + $ExtractPackage = { + param( + [string] $PackagePath # Full path to a NuGet package + ) + + if (!(Test-Path $PackagePath)) { + Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath" + ExitWithExitCode 1 + } + + $RelevantExtensions = @('.dll', '.exe', '.pdb') + Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...' 
+ + $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath) + $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId + + Add-Type -AssemblyName System.IO.Compression.FileSystem + + [System.IO.Directory]::CreateDirectory($ExtractPath); + + try { + $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath) + + $zip.Entries | + Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} | + ForEach-Object { + $TargetPath = Join-Path -Path $ExtractPath -ChildPath (Split-Path -Path $_.FullName) + [System.IO.Directory]::CreateDirectory($TargetPath); + + $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.FullName + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile) + } + } + catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 + } + finally { + $zip.Dispose() + } + } + Measure-Command { ExtractArtifacts } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/sdl/init-sdl.ps1 b/src/arcade/eng/common/sdl/init-sdl.ps1 new file mode 100644 index 000000000..3ac1d92b3 --- /dev/null +++ b/src/arcade/eng/common/sdl/init-sdl.ps1 @@ -0,0 +1,55 @@ +Param( + [string] $GuardianCliLocation, + [string] $Repository, + [string] $BranchName='master', + [string] $WorkingDirectory, + [string] $AzureDevOpsAccessToken, + [string] $GuardianLoggerLevel='Standard' +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +# `tools.ps1` checks $ci to perform some actions. Since the SDL +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +. $PSScriptRoot\..\tools.ps1 + +# Don't display the console progress UI - it's a huge perf hit +$ProgressPreference = 'SilentlyContinue' + +# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file +$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken")) +$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn") +$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0" +$zipFile = "$WorkingDirectory/gdn.zip" + +Add-Type -AssemblyName System.IO.Compression.FileSystem +$gdnFolder = (Join-Path $WorkingDirectory '.gdn') + +try { + # if the folder does not exist, we'll do a guardian init and push it to the remote repository + Write-Host 'Initializing Guardian...' + Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel" + & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE." 
+ ExitWithExitCode $LASTEXITCODE + } + # We create the mainbaseline so it can be edited later + Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline" + & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline + if ($LASTEXITCODE -ne 0) { + Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE." + ExitWithExitCode $LASTEXITCODE + } + ExitWithExitCode 0 +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/sdl/packages.config b/src/arcade/eng/common/sdl/packages.config new file mode 100644 index 000000000..e5f543ea6 --- /dev/null +++ b/src/arcade/eng/common/sdl/packages.config @@ -0,0 +1,4 @@ + + + + diff --git a/src/arcade/eng/common/sdl/run-sdl.ps1 b/src/arcade/eng/common/sdl/run-sdl.ps1 new file mode 100644 index 000000000..2eac8c78f --- /dev/null +++ b/src/arcade/eng/common/sdl/run-sdl.ps1 @@ -0,0 +1,49 @@ +Param( + [string] $GuardianCliLocation, + [string] $WorkingDirectory, + [string] $GdnFolder, + [string] $UpdateBaseline, + [string] $GuardianLoggerLevel='Standard' +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +try { + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . $PSScriptRoot\..\tools.ps1 + + # We store config files in the r directory of .gdn + $gdnConfigPath = Join-Path $GdnFolder 'r' + $ValidPath = Test-Path $GuardianCliLocation + + if ($ValidPath -eq $False) + { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location." + ExitWithExitCode 1 + } + + $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig' + Write-Host "Discovered Guardian config files:" + $gdnConfigFiles | Out-String | Write-Host + + Exec-BlockVerbosely { + & $GuardianCliLocation run ` + --working-directory $WorkingDirectory ` + --baseline mainbaseline ` + --update-baseline $UpdateBaseline ` + --logger-level $GuardianLoggerLevel ` + --config @gdnConfigFiles + Exit-IfNZEC "Sdl" + } +} +catch { + Write-Host $_.ScriptStackTrace + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/sdl/sdl.ps1 b/src/arcade/eng/common/sdl/sdl.ps1 new file mode 100644 index 000000000..648c5068d --- /dev/null +++ b/src/arcade/eng/common/sdl/sdl.ps1 @@ -0,0 +1,38 @@ + +function Install-Gdn { + param( + [Parameter(Mandatory=$true)] + [string]$Path, + + # If omitted, install the latest version of Guardian, otherwise install that specific version. + [string]$Version + ) + + $ErrorActionPreference = 'Stop' + Set-StrictMode -Version 2.0 + $disableConfigureToolsetImport = $true + $global:LASTEXITCODE = 0 + + # `tools.ps1` checks $ci to perform some actions. Since the SDL + # scripts don't necessarily execute in the same agent that run the + # build.ps1/sh script this variable isn't automatically set. + $ci = $true + . 
$PSScriptRoot\..\tools.ps1 + + $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache") + + if ($Version) { + $argumentList += "-Version $Version" + } + + Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait + + $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path + + if (!$gdnCliPath) + { + Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian' + } + + return $gdnCliPath.FullName +} \ No newline at end of file diff --git a/src/arcade/eng/common/sdl/trim-assets-version.ps1 b/src/arcade/eng/common/sdl/trim-assets-version.ps1 new file mode 100644 index 000000000..0daa2a9e9 --- /dev/null +++ b/src/arcade/eng/common/sdl/trim-assets-version.ps1 @@ -0,0 +1,75 @@ +<# +.SYNOPSIS +Install and run the 'Microsoft.DotNet.VersionTools.Cli' tool with the 'trim-artifacts-version' command to trim the version from the NuGet assets file name. + +.PARAMETER InputPath +Full path to directory where artifact packages are stored + +.PARAMETER Recursive +Search for NuGet packages recursively + +#> + +Param( + [string] $InputPath, + [bool] $Recursive = $true +) + +$CliToolName = "Microsoft.DotNet.VersionTools.Cli" + +function Install-VersionTools-Cli { + param( + [Parameter(Mandatory=$true)][string]$Version + ) + + Write-Host "Installing the package '$CliToolName' with a version of '$version' ..." + $feed = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" + + $argumentList = @("tool", "install", "--local", "$CliToolName", "--add-source $feed", "--no-cache", "--version $Version", "--create-manifest-if-needed") + Start-Process "$dotnet" -Verbose -ArgumentList $argumentList -NoNewWindow -Wait +} + +# ------------------------------------------------------------------- + +if (!(Test-Path $InputPath)) { + Write-Host "Input Path '$InputPath' does not exist" + ExitWithExitCode 1 +} + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version 2.0 + +$disableConfigureToolsetImport = $true +$global:LASTEXITCODE = 0 + +# `tools.ps1` checks $ci to perform some actions. Since the SDL +# scripts don't necessarily execute in the same agent that run the +# build.ps1/sh script this variable isn't automatically set. +$ci = $true +. $PSScriptRoot\..\tools.ps1 + +try { + $dotnetRoot = InitializeDotNetCli -install:$true + $dotnet = "$dotnetRoot\dotnet.exe" + + $toolsetVersion = Read-ArcadeSdkVersion + Install-VersionTools-Cli -Version $toolsetVersion + + $cliToolFound = (& "$dotnet" tool list --local | Where-Object {$_.Split(' ')[0] -eq $CliToolName}) + if ($null -eq $cliToolFound) { + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "The '$CliToolName' tool is not installed." + ExitWithExitCode 1 + } + + Exec-BlockVerbosely { + & "$dotnet" $CliToolName trim-assets-version ` + --assets-path $InputPath ` + --recursive $Recursive + Exit-IfNZEC "Sdl" + } +} +catch { + Write-Host $_ + Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_ + ExitWithExitCode 1 +} diff --git a/src/arcade/eng/common/template-guidance.md b/src/arcade/eng/common/template-guidance.md new file mode 100644 index 000000000..98bbc1ded --- /dev/null +++ b/src/arcade/eng/common/template-guidance.md @@ -0,0 +1,133 @@ +# Overview + +Arcade provides templates for public (`/templates`) and 1ES pipeline templates (`/templates-official`) scenarios. 
Pipelines which are required to be managed by 1ES pipeline templates should reference `/templates-official`; all other pipelines may reference `/templates`. + +## How to use + +Basic guidance is: + +- 1ES Pipeline Template or 1ES Microbuild template runs should reference `eng/common/templates-official`. Any internal production-grade pipeline should use these templates. + +- All other runs should reference `eng/common/templates`. + +See [azure-pipelines.yml](../../azure-pipelines.yml) (templates-official example) or [azure-pipelines-pr.yml](../../azure-pipelines-pr.yml) (templates example) for examples. + +#### The `templateIs1ESManaged` parameter + +The `templateIs1ESManaged` parameter is available on most templates and affects which of the variants is used for nested templates. See [Development Notes](#development-notes) below for more information on the `templateIs1ESManaged` parameter. + +- For templates under `job/`, `jobs/`, `steps`, or `post-build/`, this parameter must be explicitly set. + +## Multiple outputs + +1ES pipeline templates impose a policy where every publish artifact execution results in additional security scans being injected into your pipeline. When using `templates-official/jobs/jobs.yml`, Arcade reduces the number of additional security injections by gathering all publishing outputs into the [Build.ArtifactStagingDirectory](https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#build-variables-devops-services), and utilizing the [outputParentDirectory](https://eng.ms/docs/cloud-ai-platform/devdiv/one-engineering-system-1es/1es-docs/1es-pipeline-templates/features/outputs#multiple-outputs) feature of 1ES pipeline templates. When implementing your pipeline, if you ensure publish artifacts are located in the `$(Build.ArtifactStagingDirectory)`, and utilize the 1ES-provided template context, then you can reduce the number of security scans for your pipeline. + +Example: +``` yaml +# azure-pipelines.yml +extends: + template: azure-pipelines/MicroBuild.1ES.Official.yml@MicroBuildTemplate + parameters: + stages: + - stage: build + jobs: + - template: /eng/common/templates-official/jobs/jobs.yml@self + parameters: + # 1ES makes use of outputs to reduce security task injection overhead + templateContext: + outputs: + - output: pipelineArtifact + displayName: 'Publish logs from source' + continueOnError: true + condition: always() + targetPath: $(Build.ArtifactStagingDirectory)/artifacts/log + artifactName: Logs + jobs: + - job: Windows + steps: + - script: echo "friendly neighborhood" > artifacts/marvel/spiderman.txt + # copy build outputs to artifact staging directory for publishing + - task: CopyFiles@2 + displayName: Gather build output + inputs: + SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel' + Contents: '**' + TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel' +``` + +Note: Multiple outputs are ONLY applicable to 1ES PT publishing (only usable when referencing `templates-official`).
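+
+For contrast with the 1ES example above, a public (non-1ES) pipeline references the `templates` variant directly. The sketch below is illustrative only; the stage, job, pool, and image names are placeholders rather than values taken from a specific repository, and `_BuildConfig` is assumed to be defined by the repo's own variables:
+
+``` yaml
+# azure-pipelines-pr.yml (sketch; names and pools are illustrative)
+variables:
+- template: /eng/common/templates/variables/pool-providers.yml
+
+stages:
+- stage: build
+  displayName: Build
+  jobs:
+  - template: /eng/common/templates/jobs/jobs.yml
+    parameters:
+      enablePublishBuildArtifacts: true
+      jobs:
+      - job: Windows_NT
+        pool:
+          name: $(DncEngPublicBuildPool)
+          demands: ImageOverride -equals windows.vs2022.amd64.open
+        steps:
+        # _BuildConfig is assumed to be set elsewhere in the repo's pipeline variables
+        - script: eng\common\cibuild.cmd -configuration $(_BuildConfig) -prepareMachine
+          displayName: Build and Test
+```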
+ +## Development notes + +**Folder / file structure** + +``` text +eng\common\ + [templates || templates-official]\ + job\ + job.yml (shim + artifact publishing logic) + onelocbuild.yml (shim) + publish-build-assets.yml (shim) + source-build.yml (shim) + source-index-stage1.yml (shim) + jobs\ + codeql-build.yml (shim) + jobs.yml (shim) + source-build.yml (shim) + post-build\ + post-build.yml (shim) + common-variables.yml (shim) + setup-maestro-vars.yml (shim) + steps\ + publish-build-artifacts.yml (logic) + publish-pipeline-artifacts.yml (logic) + component-governance.yml (shim) + generate-sbom.yml (shim) + publish-logs.yml (shim) + retain-build.yml (shim) + send-to-helix.yml (shim) + source-build.yml (shim) + variables\ + pool-providers.yml (logic + redirect) # templates/variables/pool-providers.yml will redirect to templates-official/variables/pool-providers.yml if you are running in the internal project + sdl-variables.yml (logic) + core-templates\ + job\ + job.yml (logic) + onelocbuild.yml (logic) + publish-build-assets.yml (logic) + source-build.yml (logic) + source-index-stage1.yml (logic) + jobs\ + codeql-build.yml (logic) + jobs.yml (logic) + source-build.yml (logic) + post-build\ + common-variables.yml (logic) + post-build.yml (logic) + setup-maestro-vars.yml (logic) + steps\ + component-governance.yml (logic) + generate-sbom.yml (logic) + publish-build-artifacts.yml (redirect) + publish-logs.yml (logic) + publish-pipeline-artifacts.yml (redirect) + retain-build.yml (logic) + send-to-helix.yml (logic) + source-build.yml (logic) + variables\ + pool-providers.yml (redirect) +``` + +In the listing above, a file is designated as "shim", "logic", or "redirect". + +- shim - represents a YAML file which is an intermediate step between pipeline logic and .NET Core Engineering's templates (`core-templates`) and defines the `is1ESPipeline` parameter value. + +- logic - represents actual base template logic. + +- redirect - represents a file in `core-templates` which redirects to the "logic" file in either `templates` or `templates-official`. + +Logic for Arcade's templates lives **primarily** in the `core-templates` folder. The exceptions to the location of the logic files are around artifact publishing, which is handled differently between 1ES pipeline templates and standard templates. `templates` and `templates-official` provide shim entry points which redirect to `core-templates` while also defining the `is1ESPipeline` parameter. If a shim is referenced in `templates`, then `is1ESPipeline` is set to `false`. If a shim is referenced in `templates-official`, then `is1ESPipeline` is set to `true`. + +Within `templates` and `templates-official`, the templates at the "stages" and "jobs" / "job" levels have been replaced with shims. Templates at the "steps" and "variables" level are typically too granular to be replaced with shims and instead persist logic which is directly applicable to either scenario. + +Within `core-templates`, there are a handful of places where logic is dependent on which shim entry point was used. In those places, we redirect back to the respective logic file in `templates` or `templates-official`.
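+
+As a rough sketch of that redirect pattern (simplified for illustration; the exact parameter handling in the real `core-templates` files may differ), a redirect file dispatches on `is1ESPipeline` and forwards its parameters to the matching variant:
+
+``` yaml
+# core-templates/steps/publish-build-artifacts.yml (redirect, sketch)
+steps:
+- ${{ if eq(parameters.is1ESPipeline, true) }}:
+  # 1ES entry point: use the 1ES.PublishBuildArtifacts-based logic
+  - template: /eng/common/templates-official/steps/publish-build-artifacts.yml
+    parameters:
+      ${{ each parameter in parameters }}:
+        ${{ parameter.key }}: ${{ parameter.value }}
+- ${{ else }}:
+  # standard entry point: use the PublishBuildArtifacts-based logic
+  - template: /eng/common/templates/steps/publish-build-artifacts.yml
+    parameters:
+      ${{ each parameter in parameters }}:
+        ${{ parameter.key }}: ${{ parameter.value }}
+```
+
+Either branch receives the same caller parameters, so repositories only ever interact with the shim entry points.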
diff --git a/src/arcade/eng/common/templates-official/job/job.yml b/src/arcade/eng/common/templates-official/job/job.yml new file mode 100644 index 000000000..a8a943287 --- /dev/null +++ b/src/arcade/eng/common/templates-official/job/job.yml @@ -0,0 +1,83 @@ +parameters: +# Sbom related params + enableSbom: true + runAsPublic: false + PackageVersion: 9.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + +jobs: +- template: /eng/common/core-templates/job/job.yml + parameters: + is1ESPipeline: true + + componentGovernanceSteps: + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - template: /eng/common/templates/steps/generate-sbom.yml + parameters: + PackageVersion: ${{ parameters.packageVersion }} + BuildDropPath: ${{ parameters.buildDropPath }} + ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom + publishArtifacts: false + + # publish artifacts + # for 1ES managed templates, use the templateContext.output to handle multiple outputs. + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - output: buildArtifacts + displayName: Publish pipeline artifacts + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + condition: always() + retryCountOnTaskFailure: 10 # for any logs being locked + continueOnError: true + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - output: pipelineArtifact + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log' + artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)_Attempt$(System.JobAttempt)') }} + displayName: 'Publish logs' + continueOnError: true + condition: always() + retryCountOnTaskFailure: 10 # for any logs being locked + sbomEnabled: false # we don't need SBOM for logs + + - ${{ if eq(parameters.enablePublishBuildArtifacts, true) }}: + - output: buildArtifacts + displayName: Publish Logs + PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + publishLocation: Container + ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }} + continueOnError: true + condition: always() + sbomEnabled: false # we don't need SBOM for logs + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - output: pipelineArtifact + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/eng/common/BuildConfiguration' + artifactName: 'BuildConfiguration' + displayName: 'Publish build retry configuration' + continueOnError: true + sbomEnabled: false # we don't need SBOM for BuildConfiguration + + - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}: + - output: pipelineArtifact + displayName: Publish SBOM manifest + continueOnError: true + targetPath: $(Build.ArtifactStagingDirectory)/sbom + artifactName: $(ARTIFACT_NAME) + + # add any outputs provided via root yaml + - ${{ if ne(parameters.templateContext.outputs, '') }}: + - ${{ 
each output in parameters.templateContext.outputs }}: + - ${{ output }} + + # add any remaining templateContext properties + ${{ each context in parameters.templateContext }}: + ${{ if and(ne(context.key, 'outputParentDirectory'), ne(context.key, 'outputs')) }}: + ${{ context.key }}: ${{ context.value }} + + ${{ each parameter in parameters }}: + ${{ if and(ne(parameter.key, 'templateContext'), ne(parameter.key, 'is1ESPipeline')) }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/job/onelocbuild.yml b/src/arcade/eng/common/templates-official/job/onelocbuild.yml new file mode 100644 index 000000000..0f0c514b9 --- /dev/null +++ b/src/arcade/eng/common/templates-official/job/onelocbuild.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/job/onelocbuild.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/job/publish-build-assets.yml b/src/arcade/eng/common/templates-official/job/publish-build-assets.yml new file mode 100644 index 000000000..d667a70e8 --- /dev/null +++ b/src/arcade/eng/common/templates-official/job/publish-build-assets.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/job/publish-build-assets.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/job/source-build.yml b/src/arcade/eng/common/templates-official/job/source-build.yml new file mode 100644 index 000000000..1a480034b --- /dev/null +++ b/src/arcade/eng/common/templates-official/job/source-build.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/job/source-index-stage1.yml b/src/arcade/eng/common/templates-official/job/source-index-stage1.yml new file mode 100644 index 000000000..6d5ead316 --- /dev/null +++ b/src/arcade/eng/common/templates-official/job/source-index-stage1.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/job/source-index-stage1.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/jobs/codeql-build.yml b/src/arcade/eng/common/templates-official/jobs/codeql-build.yml new file mode 100644 index 000000000..a726322ec --- /dev/null +++ b/src/arcade/eng/common/templates-official/jobs/codeql-build.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/jobs/codeql-build.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/jobs/jobs.yml b/src/arcade/eng/common/templates-official/jobs/jobs.yml new file mode 100644 index 000000000..007deddae --- /dev/null +++ b/src/arcade/eng/common/templates-official/jobs/jobs.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/jobs/source-build.yml b/src/arcade/eng/common/templates-official/jobs/source-build.yml new file mode 100644 index 
000000000..483e7b611 --- /dev/null +++ b/src/arcade/eng/common/templates-official/jobs/source-build.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/jobs/source-build.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates-official/post-build/common-variables.yml b/src/arcade/eng/common/templates-official/post-build/common-variables.yml new file mode 100644 index 000000000..c32fc4923 --- /dev/null +++ b/src/arcade/eng/common/templates-official/post-build/common-variables.yml @@ -0,0 +1,8 @@ +variables: +- template: /eng/common/core-templates/post-build/common-variables.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates-official/post-build/post-build.yml b/src/arcade/eng/common/templates-official/post-build/post-build.yml new file mode 100644 index 000000000..2364c0fd4 --- /dev/null +++ b/src/arcade/eng/common/templates-official/post-build/post-build.yml @@ -0,0 +1,8 @@ +stages: +- template: /eng/common/core-templates/post-build/post-build.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/post-build/setup-maestro-vars.yml b/src/arcade/eng/common/templates-official/post-build/setup-maestro-vars.yml new file mode 100644 index 000000000..024397d87 --- /dev/null +++ b/src/arcade/eng/common/templates-official/post-build/setup-maestro-vars.yml @@ -0,0 +1,8 @@ +steps: +- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates-official/steps/component-governance.yml b/src/arcade/eng/common/templates-official/steps/component-governance.yml new file mode 100644 index 000000000..30bb3985c --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/component-governance.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/component-governance.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/steps/enable-internal-runtimes.yml b/src/arcade/eng/common/templates-official/steps/enable-internal-runtimes.yml new file mode 100644 index 000000000..f9dd238c6 --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/enable-internal-runtimes.yml @@ -0,0 +1,9 @@ +# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' +# variable with the base64-encoded SAS token, by default +steps: +- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/steps/enable-internal-sources.yml b/src/arcade/eng/common/templates-official/steps/enable-internal-sources.yml new file mode 100644 index 000000000..e6d571822 --- /dev/null +++ 
b/src/arcade/eng/common/templates-official/steps/enable-internal-sources.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/enable-internal-sources.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates-official/steps/generate-sbom.yml b/src/arcade/eng/common/templates-official/steps/generate-sbom.yml new file mode 100644 index 000000000..9a89a4706 --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/generate-sbom.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/generate-sbom.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/steps/get-delegation-sas.yml b/src/arcade/eng/common/templates-official/steps/get-delegation-sas.yml new file mode 100644 index 000000000..c5a9c1f82 --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/get-delegation-sas.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/get-delegation-sas.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/steps/get-federated-access-token.yml b/src/arcade/eng/common/templates-official/steps/get-federated-access-token.yml new file mode 100644 index 000000000..c8dcf6b81 --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/get-federated-access-token.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/get-federated-access-token.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates-official/steps/publish-build-artifacts.yml b/src/arcade/eng/common/templates-official/steps/publish-build-artifacts.yml new file mode 100644 index 000000000..fcf6637b2 --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/publish-build-artifacts.yml @@ -0,0 +1,46 @@ +parameters: +- name: displayName + type: string + default: 'Publish to Build Artifact' + +- name: condition + type: string + default: succeeded() + +- name: artifactName + type: string + +- name: pathToPublish + type: string + +- name: continueOnError + type: boolean + default: false + +- name: publishLocation + type: string + default: 'Container' + +- name: is1ESPipeline + type: boolean + default: true + +- name: retryCountOnTaskFailure + type: string + default: 10 + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error +- task: 1ES.PublishBuildArtifacts@1 + displayName: ${{ parameters.displayName }} + condition: ${{ parameters.condition }} + ${{ if parameters.continueOnError }}: + continueOnError: ${{ parameters.continueOnError }} + inputs: + PublishLocation: ${{ parameters.publishLocation }} + PathtoPublish: ${{ parameters.pathToPublish }} + ${{ if parameters.artifactName }}: + ArtifactName: ${{ parameters.artifactName }} + ${{ if parameters.retryCountOnTaskFailure }}: + retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }} diff --git a/src/arcade/eng/common/templates-official/steps/publish-logs.yml b/src/arcade/eng/common/templates-official/steps/publish-logs.yml new file mode 100644 index 
000000000..579fd531e --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/publish-logs.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/steps/publish-pipeline-artifacts.yml b/src/arcade/eng/common/templates-official/steps/publish-pipeline-artifacts.yml new file mode 100644 index 000000000..172f9f0fd --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,28 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: true + +- name: args + type: object + default: {} + +steps: +- ${{ if ne(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates-official cannot be referenced from a non-1ES managed template': error +- task: 1ES.PublishPipelineArtifact@1 + displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }} + ${{ if parameters.args.condition }}: + condition: ${{ parameters.args.condition }} + ${{ else }}: + condition: succeeded() + ${{ if parameters.args.continueOnError }}: + continueOnError: ${{ parameters.args.continueOnError }} + inputs: + targetPath: ${{ parameters.args.targetPath }} + ${{ if parameters.args.artifactName }}: + artifactName: ${{ parameters.args.artifactName }} + ${{ if parameters.args.properties }}: + properties: ${{ parameters.args.properties }} + ${{ if parameters.args.sbomEnabled }}: + sbomEnabled: ${{ parameters.args.sbomEnabled }} diff --git a/src/arcade/eng/common/templates-official/steps/retain-build.yml b/src/arcade/eng/common/templates-official/steps/retain-build.yml new file mode 100644 index 000000000..559455150 --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/retain-build.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/retain-build.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/steps/send-to-helix.yml b/src/arcade/eng/common/templates-official/steps/send-to-helix.yml new file mode 100644 index 000000000..6500f21bf --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/send-to-helix.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/send-to-helix.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/steps/source-build.yml b/src/arcade/eng/common/templates-official/steps/source-build.yml new file mode 100644 index 000000000..8f92c49e7 --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/source-build.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/source-build.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/steps/source-index-stage1-publish.yml b/src/arcade/eng/common/templates-official/steps/source-index-stage1-publish.yml new file mode 100644 index 000000000..9b8b80942 --- /dev/null +++ b/src/arcade/eng/common/templates-official/steps/source-index-stage1-publish.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml + parameters: + is1ESPipeline: true + + ${{ each parameter in 
parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates-official/variables/pool-providers.yml b/src/arcade/eng/common/templates-official/variables/pool-providers.yml new file mode 100644 index 000000000..1f308b24e --- /dev/null +++ b/src/arcade/eng/common/templates-official/variables/pool-providers.yml @@ -0,0 +1,45 @@ +# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, +# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches. + +# Motivation: +# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS +# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing +# (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS. +# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services +# team needs to move resources around and create new and potentially differently-named pools. Using this template +# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming. + +# How to use: +# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do). +# If we find alternate naming conventions in broad usage it can be added to the condition below. +# +# First, import the template in an arcade-ified repo to pick up the variables, e.g.: +# +# variables: +# - template: /eng/common/templates-official/variables/pool-providers.yml +# +# ... then anywhere specifying the pool provider use the runtime variables, +# $(DncEngInternalBuildPool) +# +# pool: +# name: $(DncEngInternalBuildPool) +# image: 1es-windows-2022 + +variables: + # Coalesce the target and source branches so we know when a PR targets a release branch + # If these variables are somehow missing, fall back to main (tends to have more capacity) + + # Any new -Svc alternative pools should have variables added here to allow for splitting work + + - name: DncEngInternalBuildPool + value: $[ + replace( + replace( + eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), + True, + 'NetCore1ESPool-Svc-Internal' + ), + False, + 'NetCore1ESPool-Internal' + ) + ] \ No newline at end of file diff --git a/src/arcade/eng/common/templates-official/variables/sdl-variables.yml b/src/arcade/eng/common/templates-official/variables/sdl-variables.yml new file mode 100644 index 000000000..dbdd66d4a --- /dev/null +++ b/src/arcade/eng/common/templates-official/variables/sdl-variables.yml @@ -0,0 +1,7 @@ +variables: +# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in +# sync with the packages.config file. 
+- name: DefaultGuardianVersion + value: 0.109.0 +- name: GuardianPackagesConfigFile + value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config \ No newline at end of file diff --git a/src/arcade/eng/common/templates/job/job.yml b/src/arcade/eng/common/templates/job/job.yml new file mode 100644 index 000000000..7cbf668c2 --- /dev/null +++ b/src/arcade/eng/common/templates/job/job.yml @@ -0,0 +1,84 @@ +parameters: + enablePublishBuildArtifacts: false + disableComponentGovernance: '' + componentGovernanceIgnoreDirectories: '' +# Sbom related params + enableSbom: true + runAsPublic: false + PackageVersion: 9.0.0 + BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + +jobs: +- template: /eng/common/core-templates/job/job.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ if and(ne(parameter.key, 'steps'), ne(parameter.key, 'is1ESPipeline')) }}: + ${{ parameter.key }}: ${{ parameter.value }} + + steps: + - ${{ each step in parameters.steps }}: + - ${{ step }} + + componentGovernanceSteps: + - template: /eng/common/templates/steps/component-governance.yml + parameters: + ${{ if eq(parameters.disableComponentGovernance, '') }}: + ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}: + disableComponentGovernance: false + ${{ else }}: + disableComponentGovernance: true + ${{ else }}: + disableComponentGovernance: ${{ parameters.disableComponentGovernance }} + componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }} + + artifactPublishSteps: + - ${{ if ne(parameters.artifacts.publish, '') }}: + - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}: + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: false + args: + displayName: Publish pipeline artifacts + pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts' + publishLocation: Container + artifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }} + continueOnError: true + condition: always() + retryCountOnTaskFailure: 10 # for any logs being locked + - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: false + args: + targetPath: '$(Build.ArtifactStagingDirectory)/artifacts/log' + artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }} + displayName: 'Publish logs' + continueOnError: true + condition: always() + retryCountOnTaskFailure: 10 # for any logs being locked + sbomEnabled: false # we don't need SBOM for logs + + - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}: + - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + parameters: + is1ESPipeline: false + args: + displayName: Publish Logs + pathToPublish: '$(Build.ArtifactStagingDirectory)/artifacts/log/$(_BuildConfig)' + publishLocation: Container + artifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, 
'$(Agent.Os)_$(Agent.JobName)_Attempt$(System.JobAttempt)' ) }} + continueOnError: true + condition: always() + + - ${{ if eq(parameters.enableBuildRetry, 'true') }}: + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml + parameters: + is1ESPipeline: false + args: + targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' + artifactName: 'BuildConfiguration' + displayName: 'Publish build retry configuration' + continueOnError: true + sbomEnabled: false # we don't need SBOM for BuildConfiguration diff --git a/src/arcade/eng/common/templates/job/onelocbuild.yml b/src/arcade/eng/common/templates/job/onelocbuild.yml new file mode 100644 index 000000000..ff829dc4c --- /dev/null +++ b/src/arcade/eng/common/templates/job/onelocbuild.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/job/onelocbuild.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/job/publish-build-assets.yml b/src/arcade/eng/common/templates/job/publish-build-assets.yml new file mode 100644 index 000000000..ab2edec2a --- /dev/null +++ b/src/arcade/eng/common/templates/job/publish-build-assets.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/job/publish-build-assets.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/job/source-build.yml b/src/arcade/eng/common/templates/job/source-build.yml new file mode 100644 index 000000000..e44d47b1d --- /dev/null +++ b/src/arcade/eng/common/templates/job/source-build.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/job/source-build.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/job/source-index-stage1.yml b/src/arcade/eng/common/templates/job/source-index-stage1.yml new file mode 100644 index 000000000..89f329159 --- /dev/null +++ b/src/arcade/eng/common/templates/job/source-index-stage1.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/job/source-index-stage1.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/jobs/codeql-build.yml b/src/arcade/eng/common/templates/jobs/codeql-build.yml new file mode 100644 index 000000000..517f24d6a --- /dev/null +++ b/src/arcade/eng/common/templates/jobs/codeql-build.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/jobs/codeql-build.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/jobs/jobs.yml b/src/arcade/eng/common/templates/jobs/jobs.yml new file mode 100644 index 000000000..388e9037b --- /dev/null +++ b/src/arcade/eng/common/templates/jobs/jobs.yml @@ -0,0 +1,7 @@ +jobs: +- template: /eng/common/core-templates/jobs/jobs.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/jobs/source-build.yml b/src/arcade/eng/common/templates/jobs/source-build.yml new file mode 100644 index 000000000..818d4c326 --- /dev/null +++ b/src/arcade/eng/common/templates/jobs/source-build.yml @@ -0,0 +1,7 @@ +jobs: +- 
template: /eng/common/core-templates/jobs/source-build.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates/post-build/common-variables.yml b/src/arcade/eng/common/templates/post-build/common-variables.yml new file mode 100644 index 000000000..7fa105875 --- /dev/null +++ b/src/arcade/eng/common/templates/post-build/common-variables.yml @@ -0,0 +1,8 @@ +variables: +- template: /eng/common/core-templates/post-build/common-variables.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates/post-build/post-build.yml b/src/arcade/eng/common/templates/post-build/post-build.yml new file mode 100644 index 000000000..53ede714b --- /dev/null +++ b/src/arcade/eng/common/templates/post-build/post-build.yml @@ -0,0 +1,8 @@ +stages: +- template: /eng/common/core-templates/post-build/post-build.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates/post-build/setup-maestro-vars.yml b/src/arcade/eng/common/templates/post-build/setup-maestro-vars.yml new file mode 100644 index 000000000..a79fab5b4 --- /dev/null +++ b/src/arcade/eng/common/templates/post-build/setup-maestro-vars.yml @@ -0,0 +1,8 @@ +steps: +- template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + # Specifies whether to use 1ES + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates/steps/component-governance.yml b/src/arcade/eng/common/templates/steps/component-governance.yml new file mode 100644 index 000000000..c12a5f8d2 --- /dev/null +++ b/src/arcade/eng/common/templates/steps/component-governance.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/component-governance.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/steps/enable-internal-runtimes.yml b/src/arcade/eng/common/templates/steps/enable-internal-runtimes.yml new file mode 100644 index 000000000..b21a8038c --- /dev/null +++ b/src/arcade/eng/common/templates/steps/enable-internal-runtimes.yml @@ -0,0 +1,10 @@ +# Obtains internal runtime download credentials and populates the 'dotnetbuilds-internal-container-read-token-base64' +# variable with the base64-encoded SAS token, by default + +steps: +- template: /eng/common/core-templates/steps/enable-internal-runtimes.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/steps/enable-internal-sources.yml b/src/arcade/eng/common/templates/steps/enable-internal-sources.yml new file mode 100644 index 000000000..5f87e9abb --- /dev/null +++ b/src/arcade/eng/common/templates/steps/enable-internal-sources.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/enable-internal-sources.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value 
}} \ No newline at end of file diff --git a/src/arcade/eng/common/templates/steps/generate-sbom.yml b/src/arcade/eng/common/templates/steps/generate-sbom.yml new file mode 100644 index 000000000..26dc00a2e --- /dev/null +++ b/src/arcade/eng/common/templates/steps/generate-sbom.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/generate-sbom.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/steps/get-delegation-sas.yml b/src/arcade/eng/common/templates/steps/get-delegation-sas.yml new file mode 100644 index 000000000..83760c979 --- /dev/null +++ b/src/arcade/eng/common/templates/steps/get-delegation-sas.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/get-delegation-sas.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/steps/get-federated-access-token.yml b/src/arcade/eng/common/templates/steps/get-federated-access-token.yml new file mode 100644 index 000000000..31e151d9d --- /dev/null +++ b/src/arcade/eng/common/templates/steps/get-federated-access-token.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/get-federated-access-token.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates/steps/publish-build-artifacts.yml b/src/arcade/eng/common/templates/steps/publish-build-artifacts.yml new file mode 100644 index 000000000..605e602e9 --- /dev/null +++ b/src/arcade/eng/common/templates/steps/publish-build-artifacts.yml @@ -0,0 +1,46 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: displayName + type: string + default: 'Publish to Build Artifact' + +- name: condition + type: string + default: succeeded() + +- name: artifactName + type: string + +- name: pathToPublish + type: string + +- name: continueOnError + type: boolean + default: false + +- name: publishLocation + type: string + default: 'Container' + +- name: retryCountOnTaskFailure + type: string + default: 10 + +steps: +- ${{ if eq(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates cannot be referenced from a 1ES managed template': error +- task: PublishBuildArtifacts@1 + displayName: ${{ parameters.displayName }} + condition: ${{ parameters.condition }} + ${{ if parameters.continueOnError }}: + continueOnError: ${{ parameters.continueOnError }} + inputs: + PublishLocation: ${{ parameters.publishLocation }} + PathtoPublish: ${{ parameters.pathToPublish }} + ${{ if parameters.artifactName }}: + ArtifactName: ${{ parameters.artifactName }} + ${{ if parameters.retryCountOnTaskFailure }}: + retryCountOnTaskFailure: ${{ parameters.retryCountOnTaskFailure }} diff --git a/src/arcade/eng/common/templates/steps/publish-logs.yml b/src/arcade/eng/common/templates/steps/publish-logs.yml new file mode 100644 index 000000000..4ea86bd88 --- /dev/null +++ b/src/arcade/eng/common/templates/steps/publish-logs.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/steps/publish-pipeline-artifacts.yml 
b/src/arcade/eng/common/templates/steps/publish-pipeline-artifacts.yml new file mode 100644 index 000000000..5dd698b21 --- /dev/null +++ b/src/arcade/eng/common/templates/steps/publish-pipeline-artifacts.yml @@ -0,0 +1,34 @@ +parameters: +- name: is1ESPipeline + type: boolean + default: false + +- name: args + type: object + default: {} + +steps: +- ${{ if eq(parameters.is1ESPipeline, true) }}: + - 'eng/common/templates cannot be referenced from a 1ES managed template': error +- task: PublishPipelineArtifact@1 + displayName: ${{ coalesce(parameters.args.displayName, 'Publish to Build Artifact') }} + ${{ if parameters.args.condition }}: + condition: ${{ parameters.args.condition }} + ${{ else }}: + condition: succeeded() + ${{ if parameters.args.continueOnError }}: + continueOnError: ${{ parameters.args.continueOnError }} + inputs: + targetPath: ${{ parameters.args.targetPath }} + ${{ if parameters.args.artifactName }}: + artifactName: ${{ parameters.args.artifactName }} + ${{ if parameters.args.publishLocation }}: + publishLocation: ${{ parameters.args.publishLocation }} + ${{ if parameters.args.fileSharePath }}: + fileSharePath: ${{ parameters.args.fileSharePath }} + ${{ if parameters.args.Parallel }}: + parallel: ${{ parameters.args.Parallel }} + ${{ if parameters.args.parallelCount }}: + parallelCount: ${{ parameters.args.parallelCount }} + ${{ if parameters.args.properties }}: + properties: ${{ parameters.args.properties }} \ No newline at end of file diff --git a/src/arcade/eng/common/templates/steps/retain-build.yml b/src/arcade/eng/common/templates/steps/retain-build.yml new file mode 100644 index 000000000..8e841ace3 --- /dev/null +++ b/src/arcade/eng/common/templates/steps/retain-build.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/retain-build.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/steps/send-to-helix.yml b/src/arcade/eng/common/templates/steps/send-to-helix.yml new file mode 100644 index 000000000..39f99fc27 --- /dev/null +++ b/src/arcade/eng/common/templates/steps/send-to-helix.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/send-to-helix.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/steps/source-build.yml b/src/arcade/eng/common/templates/steps/source-build.yml new file mode 100644 index 000000000..23c1d6f4e --- /dev/null +++ b/src/arcade/eng/common/templates/steps/source-build.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/source-build.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/steps/source-index-stage1-publish.yml b/src/arcade/eng/common/templates/steps/source-index-stage1-publish.yml new file mode 100644 index 000000000..182cec33a --- /dev/null +++ b/src/arcade/eng/common/templates/steps/source-index-stage1-publish.yml @@ -0,0 +1,7 @@ +steps: +- template: /eng/common/core-templates/steps/source-index-stage1-publish.yml + parameters: + is1ESPipeline: false + + ${{ each parameter in parameters }}: + ${{ parameter.key }}: ${{ parameter.value }} diff --git a/src/arcade/eng/common/templates/variables/pool-providers.yml b/src/arcade/eng/common/templates/variables/pool-providers.yml new file mode 100644 index 
000000000..e0b19c14a --- /dev/null +++ b/src/arcade/eng/common/templates/variables/pool-providers.yml @@ -0,0 +1,59 @@ +# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool, +# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches. + +# Motivation: +# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS +# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing +# (allowing release builds and main PR builds to not intefere with each other) and billing (required for COGS. +# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services +# team needs to move resources around and create new and potentially differently-named pools. Using this template +# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming. + +# How to use: +# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do). +# If we find alternate naming conventions in broad usage it can be added to the condition below. +# +# First, import the template in an arcade-ified repo to pick up the variables, e.g.: +# +# variables: +# - template: /eng/common/templates/variables/pool-providers.yml +# +# ... then anywhere specifying the pool provider use the runtime variables, +# $(DncEngInternalBuildPool) and $ (DncEngPublicBuildPool), e.g.: +# +# pool: +# name: $(DncEngInternalBuildPool) +# demands: ImageOverride -equals windows.vs2019.amd64 +variables: + - ${{ if eq(variables['System.TeamProject'], 'internal') }}: + - template: /eng/common/templates-official/variables/pool-providers.yml + - ${{ else }}: + # Coalesce the target and source branches so we know when a PR targets a release branch + # If these variables are somehow missing, fall back to main (tends to have more capacity) + + # Any new -Svc alternative pools should have variables added here to allow for splitting work + - name: DncEngPublicBuildPool + value: $[ + replace( + replace( + eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), + True, + 'NetCore-Svc-Public' + ), + False, + 'NetCore-Public' + ) + ] + + - name: DncEngInternalBuildPool + value: $[ + replace( + replace( + eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), + True, + 'NetCore1ESPool-Svc-Internal' + ), + False, + 'NetCore1ESPool-Internal' + ) + ] diff --git a/src/arcade/eng/common/tools.ps1 b/src/arcade/eng/common/tools.ps1 new file mode 100644 index 000000000..7373e5305 --- /dev/null +++ b/src/arcade/eng/common/tools.ps1 @@ -0,0 +1,960 @@ +# Initialize variables if they aren't already defined. +# These may be defined as parameters of the importing script, or set after importing this script. + +# CI mode - set to true on CI server for PR validation build or official build. +[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false } + +# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names. 
+[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' } + +# Set to true to opt out of outputting binary log while running in CI +[bool]$excludeCIBinarylog = if (Test-Path variable:excludeCIBinarylog) { $excludeCIBinarylog } else { $false } + +# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build. +[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci -and !$excludeCIBinarylog } + +# Set to true to use the pipelines logger which will enable Azure logging output. +# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md +# This flag is meant as a temporary opt-out for the feature while we validate it across +# our consumers. It will be deleted in the future. +[bool]$pipelinesLog = if (Test-Path variable:pipelinesLog) { $pipelinesLog } else { $ci } + +# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes). +[bool]$prepareMachine = if (Test-Path variable:prepareMachine) { $prepareMachine } else { $false } + +# True to restore toolsets and dependencies. +[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true } + +# Adjusts msbuild verbosity level. +[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' } + +# Set to true to reuse msbuild nodes. Recommended to not reuse on CI. +[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci } + +# Configures warning treatment in msbuild. +[bool]$warnAsError = if (Test-Path variable:warnAsError) { $warnAsError } else { $true } + +# Specifies which msbuild engine to use for build: 'vs', 'dotnet' or unspecified (determined based on presence of tools.vs in global.json). +[string]$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null } + +# True to attempt to use a .NET Core SDK already installed on the machine that meets the requirements +# specified in global.json, instead of downloading one. +[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true } + +# Enable repos to use a particular version of the on-line dotnet-install scripts. +# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1 +[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' } + +# True to use global NuGet cache instead of restoring packages to repository-local directory. +[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci } + +# True to exclude prerelease versions of Visual Studio during the build +[bool]$excludePrereleaseVS = if (Test-Path variable:excludePrereleaseVS) { $excludePrereleaseVS } else { $false } + +# An array of names of processes to stop on script exit if prepareMachine is true. +$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') } + +$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null } + +set-strictmode -version 2.0 +$ErrorActionPreference = 'Stop' +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + +# If specified, provides an alternate path for getting .NET Core SDKs and Runtimes. This script will still try public sources first.
+[string]$runtimeSourceFeed = if (Test-Path variable:runtimeSourceFeed) { $runtimeSourceFeed } else { $null } +# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed +[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null } + +# True if the build is a product build +[bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false } + +[String[]]$properties = if (Test-Path variable:properties) { $properties } else { @() } + +function Create-Directory ([string[]] $path) { + New-Item -Path $path -Force -ItemType 'Directory' | Out-Null +} + +function Unzip([string]$zipfile, [string]$outpath) { + Add-Type -AssemblyName System.IO.Compression.FileSystem + [System.IO.Compression.ZipFile]::ExtractToDirectory($zipfile, $outpath) +} + +# This will exec a process using the console and return it's exit code. +# This will not throw when the process fails. +# Returns process exit code. +function Exec-Process([string]$command, [string]$commandArgs) { + $startInfo = New-Object System.Diagnostics.ProcessStartInfo + $startInfo.FileName = $command + $startInfo.Arguments = $commandArgs + $startInfo.UseShellExecute = $false + $startInfo.WorkingDirectory = Get-Location + + $process = New-Object System.Diagnostics.Process + $process.StartInfo = $startInfo + $process.Start() | Out-Null + + $finished = $false + try { + while (-not $process.WaitForExit(100)) { + # Non-blocking loop done to allow ctr-c interrupts + } + + $finished = $true + return $global:LASTEXITCODE = $process.ExitCode + } + finally { + # If we didn't finish then an error occurred or the user hit ctrl-c. Either + # way kill the process + if (-not $finished) { + $process.Kill() + } + } +} + +# Take the given block, print it, print what the block probably references from the current set of +# variables using low-effort string matching, then run the block. +# +# This is intended to replace the pattern of manually copy-pasting a command, wrapping it in quotes, +# and printing it using "Write-Host". The copy-paste method is more readable in build logs, but less +# maintainable and less reliable. It is easy to make a mistake and modify the command without +# properly updating the "Write-Host" line, resulting in misleading build logs. The probability of +# this mistake makes the pattern hard to trust when it shows up in build logs. Finding the bug in +# existing source code can also be difficult, because the strings are not aligned to each other and +# the line may be 300+ columns long. +# +# By removing the need to maintain two copies of the command, Exec-BlockVerbosely avoids the issues. +# +# In Bash (or any posix-like shell), "set -x" prints usable verbose output automatically. +# "Set-PSDebug" appears to be similar at first glance, but unfortunately, it isn't very useful: it +# doesn't print any info about the variables being used by the command, which is normally the +# interesting part to diagnose. +function Exec-BlockVerbosely([scriptblock] $block) { + Write-Host "--- Running script block:" + $blockString = $block.ToString().Trim() + Write-Host $blockString + + Write-Host "--- List of variables that might be used:" + # For each variable x in the environment, check the block for a reference to x via simple "$x" or + # "@x" syntax. This doesn't detect other ways to reference variables ("${x}" nor "$variable:x", + # among others). 
It only catches what this function was originally written for: simple + # command-line commands. + $variableTable = Get-Variable | + Where-Object { + $blockString.Contains("`$$($_.Name)") -or $blockString.Contains("@$($_.Name)") + } | + Format-Table -AutoSize -HideTableHeaders -Wrap | + Out-String + Write-Host $variableTable.Trim() + + Write-Host "--- Executing:" + & $block + Write-Host "--- Done running script block!" +} + +# createSdkLocationFile parameter enables a file being generated under the toolset directory +# which writes the sdk's location into. This is only necessary for cmd --> powershell invocations +# as dot sourcing isn't possible. +function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { + if (Test-Path variable:global:_DotNetInstallDir) { + return $global:_DotNetInstallDir + } + + # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism + $env:DOTNET_MULTILEVEL_LOOKUP=0 + + # Disable first run since we do not need all ASP.NET packages restored. + $env:DOTNET_NOLOGO=1 + + # Disable telemetry on CI. + if ($ci) { + $env:DOTNET_CLI_TELEMETRY_OPTOUT=1 + } + + # Find the first path on %PATH% that contains the dotnet.exe + if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) { + $dotnetExecutable = GetExecutableFileName 'dotnet' + $dotnetCmd = Get-Command $dotnetExecutable -ErrorAction SilentlyContinue + + if ($dotnetCmd -ne $null) { + $env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent + } + } + + $dotnetSdkVersion = $GlobalJson.tools.dotnet + + # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version, + # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues. + if ((-not $globalJsonHasRuntimes) -and (-not [string]::IsNullOrEmpty($env:DOTNET_INSTALL_DIR)) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) { + $dotnetRoot = $env:DOTNET_INSTALL_DIR + } else { + $dotnetRoot = Join-Path $RepoRoot '.dotnet' + + if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) { + if ($install) { + InstallDotNetSdk $dotnetRoot $dotnetSdkVersion + } else { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'" + ExitWithExitCode 1 + } + } + + $env:DOTNET_INSTALL_DIR = $dotnetRoot + } + + # Creates a temporary file under the toolset dir. + # The following code block is protecting against concurrent access so that this function can + # be called in parallel. + if ($createSdkLocationFile) { + do { + $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName()) + } + until (!(Test-Path $sdkCacheFileTemp)) + Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot + + try { + Move-Item -Force $sdkCacheFileTemp (Join-Path $ToolsetDir 'sdk.txt') + } catch { + # Somebody beat us + Remove-Item -Path $sdkCacheFileTemp + } + } + + # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom + # build steps from using anything other than what we've downloaded. + # It also ensures that VS msbuild will use the downloaded sdk targets. 
+ $env:PATH = "$dotnetRoot;$env:PATH" + + # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build + Write-PipelinePrependPath -Path $dotnetRoot + + Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0' + Write-PipelineSetVariable -Name 'DOTNET_NOLOGO' -Value '1' + + return $global:_DotNetInstallDir = $dotnetRoot +} + +function Retry($downloadBlock, $maxRetries = 5) { + $retries = 1 + + while($true) { + try { + & $downloadBlock + break + } + catch { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + } + + if (++$retries -le $maxRetries) { + $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff + Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)." + Start-Sleep -Seconds $delayInSeconds + } + else { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts." + break + } + } +} + +function GetDotNetInstallScript([string] $dotnetRoot) { + $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1' + if (!(Test-Path $installScript)) { + Create-Directory $dotnetRoot + $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit + $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" + + Retry({ + Write-Host "GET $uri" + Invoke-WebRequest $uri -OutFile $installScript + }) + } + + return $installScript +} + +function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '', [switch] $noPath) { + InstallDotNet $dotnetRoot $version $architecture '' $false $runtimeSourceFeed $runtimeSourceFeedKey -noPath:$noPath +} + +function InstallDotNet([string] $dotnetRoot, + [string] $version, + [string] $architecture = '', + [string] $runtime = '', + [bool] $skipNonVersionedFiles = $false, + [string] $runtimeSourceFeed = '', + [string] $runtimeSourceFeedKey = '', + [switch] $noPath) { + + $dotnetVersionLabel = "'sdk v$version'" + + if ($runtime -ne '' -and $runtime -ne 'sdk') { + $runtimePath = $dotnetRoot + $runtimePath = $runtimePath + "\shared" + if ($runtime -eq "dotnet") { $runtimePath = $runtimePath + "\Microsoft.NETCore.App" } + if ($runtime -eq "aspnetcore") { $runtimePath = $runtimePath + "\Microsoft.AspNetCore.App" } + if ($runtime -eq "windowsdesktop") { $runtimePath = $runtimePath + "\Microsoft.WindowsDesktop.App" } + $runtimePath = $runtimePath + "\" + $version + + $dotnetVersionLabel = "runtime toolset '$runtime/$architecture v$version'" + + if (Test-Path $runtimePath) { + Write-Host " Runtime toolset '$runtime/$architecture v$version' already installed." 
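+      # The requested runtime is already present, so there is nothing left to install; note that
+      # 'Exit' below ends the calling script, not just this function.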
+ $installSuccess = $true + Exit + } + } + + $installScript = GetDotNetInstallScript $dotnetRoot + $installParameters = @{ + Version = $version + InstallDir = $dotnetRoot + } + + if ($architecture) { $installParameters.Architecture = $architecture } + if ($runtime) { $installParameters.Runtime = $runtime } + if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles } + if ($noPath) { $installParameters.NoPath = $True } + + $variations = @() + $variations += @($installParameters) + + $dotnetBuilds = $installParameters.Clone() + $dotnetbuilds.AzureFeed = "https://ci.dot.net/public" + $variations += @($dotnetBuilds) + + if ($runtimeSourceFeed) { + $runtimeSource = $installParameters.Clone() + $runtimeSource.AzureFeed = $runtimeSourceFeed + if ($runtimeSourceFeedKey) { + $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey) + $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes) + $runtimeSource.FeedCredential = $decodedString + } + $variations += @($runtimeSource) + } + + $installSuccess = $false + foreach ($variation in $variations) { + if ($variation | Get-Member AzureFeed) { + $location = $variation.AzureFeed + } else { + $location = "public location"; + } + Write-Host " Attempting to install $dotnetVersionLabel from $location." + try { + & $installScript @variation + $installSuccess = $true + break + } + catch { + Write-Host " Failed to install $dotnetVersionLabel from $location." + } + } + if (-not $installSuccess) { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install $dotnetVersionLabel from any of the specified locations." + ExitWithExitCode 1 + } +} + +# +# Locates Visual Studio MSBuild installation. +# The preference order for MSBuild to use is as follows: +# +# 1. MSBuild from an active VS command prompt +# 2. MSBuild from a compatible VS installation +# 3. MSBuild from the xcopy tool package +# +# Returns full path to msbuild.exe. +# Throws on failure. +# +function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) { + if (-not (IsWindowsPlatform)) { + throw "Cannot initialize Visual Studio on non-Windows" + } + + if (Test-Path variable:global:_MSBuildExe) { + return $global:_MSBuildExe + } + + # Minimum VS version to require. + $vsMinVersionReqdStr = '17.7' + $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr) + + # If the version of msbuild is going to be xcopied, + # use this version. Version matches a package here: + # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.13.0 + $defaultXCopyMSBuildVersion = '17.13.0' + + if (!$vsRequirements) { + if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { + $vsRequirements = $GlobalJson.tools.vs + } + else { + $vsRequirements = New-Object PSObject -Property @{ version = $vsMinVersionReqdStr } + } + } + $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr } + $vsMinVersion = [Version]::new($vsMinVersionStr) + + # Try msbuild command available in the environment. 
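+  # VSINSTALLDIR is set by the Visual Studio Developer Command Prompt, so when it is present
+  # msbuild.exe is expected to already be on PATH.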
+ if ($env:VSINSTALLDIR -ne $null) { + $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue + if ($msbuildCmd -ne $null) { + # Workaround for https://github.com/dotnet/roslyn/issues/35793 + # Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+ + $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0]) + + if ($msbuildVersion -ge $vsMinVersion) { + return $global:_MSBuildExe = $msbuildCmd.Path + } + + # Report error - the developer environment is initialized with incompatible VS version. + throw "Developer Command Prompt for VS $($env:VisualStudioVersion) is not recent enough. Please upgrade to $vsMinVersionStr or build from a plain CMD window" + } + } + + # Locate Visual Studio installation or download x-copy msbuild. + $vsInfo = LocateVisualStudio $vsRequirements + if ($vsInfo -ne $null) { + # Ensure vsInstallDir has a trailing slash + $vsInstallDir = Join-Path $vsInfo.installationPath "\" + $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0] + + InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion + } else { + if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') { + $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild' + $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0] + } else { + #if vs version provided in global.json is incompatible (too low) then use the default version for xcopy msbuild download + if($vsMinVersion -lt $vsMinVersionReqd){ + Write-Host "Using xcopy-msbuild version of $defaultXCopyMSBuildVersion since VS version $vsMinVersionStr provided in global.json is not compatible" + $xcopyMSBuildVersion = $defaultXCopyMSBuildVersion + $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0] + } + else{ + # If the VS version IS compatible, look for an xcopy msbuild package + # with a version matching VS. + # Note: If this version does not exist, then an explicit version of xcopy msbuild + # can be specified in global.json. This will be required for pre-release versions of msbuild. + $vsMajorVersion = $vsMinVersion.Major + $vsMinorVersion = $vsMinVersion.Minor + $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0" + } + } + + $vsInstallDir = $null + if ($xcopyMSBuildVersion.Trim() -ine "none") { + $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install + if ($vsInstallDir -eq $null) { + throw "Could not xcopy msbuild. Please check that package 'Microsoft.DotNet.Arcade.MSBuild.Xcopy @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'." 
+ } + } + if ($vsInstallDir -eq $null) { + throw 'Unable to find Visual Studio that has required version and components installed' + } + } + + $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" } + + $local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin" + $local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false } + if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) { + $global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe" + } else { + $global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe" + } + + return $global:_MSBuildExe +} + +function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) { + $env:VSINSTALLDIR = $vsInstallDir + Set-Item "env:VS$($vsMajorVersion)0COMNTOOLS" (Join-Path $vsInstallDir "Common7\Tools\") + + $vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\" + if (Test-Path $vsSdkInstallDir) { + Set-Item "env:VSSDK$($vsMajorVersion)0Install" $vsSdkInstallDir + $env:VSSDKInstall = $vsSdkInstallDir + } +} + +function InstallXCopyMSBuild([string]$packageVersion) { + return InitializeXCopyMSBuild $packageVersion -install $true +} + +function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) { + $packageName = 'Microsoft.DotNet.Arcade.MSBuild.Xcopy' + $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion" + $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg" + + if (!(Test-Path $packageDir)) { + if (!$install) { + return $null + } + + Create-Directory $packageDir + + Write-Host "Downloading $packageName $packageVersion" + $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit + Retry({ + Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath + }) + + if (!(Test-Path $packagePath)) { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "See https://dev.azure.com/dnceng/internal/_wiki/wikis/DNCEng%20Services%20Wiki/1074/Updating-Microsoft.DotNet.Arcade.MSBuild.Xcopy-WAS-RoslynTools.MSBuild-(xcopy-msbuild)-generation?anchor=troubleshooting for help troubleshooting issues with XCopy MSBuild" + throw + } + Unzip $packagePath $packageDir + } + + return Join-Path $packageDir 'tools' +} + +# +# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json. +# +# The following properties of tools.vs are recognized: +# "version": "{major}.{minor}" +# Two part minimal VS version, e.g. "15.9", "16.0", etc. +# "components": ["componentId1", "componentId2", ...] +# Array of ids of workload components that must be available in the VS instance. +# See e.g. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-enterprise?view=vs-2017 +# +# Returns JSON describing the located VS instance (same format as returned by vswhere), +# or $null if no instance meeting the requirements is found on the machine. +# +function LocateVisualStudio([object]$vsRequirements = $null){ + if (-not (IsWindowsPlatform)) { + throw "Cannot run vswhere on non-Windows platforms." 
+ } + + if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') { + $vswhereVersion = $GlobalJson.tools.vswhere + } else { + $vswhereVersion = '2.5.2' + } + + $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion" + $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe' + + if (!(Test-Path $vsWhereExe)) { + Create-Directory $vsWhereDir + Write-Host 'Downloading vswhere' + Retry({ + Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe + }) + } + + if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } + $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*') + + if (!$excludePrereleaseVS) { + $args += '-prerelease' + } + + if (Get-Member -InputObject $vsRequirements -Name 'version') { + $args += '-version' + $args += $vsRequirements.version + } + + if (Get-Member -InputObject $vsRequirements -Name 'components') { + foreach ($component in $vsRequirements.components) { + $args += '-requires' + $args += $component + } + } + + $vsInfo =& $vsWhereExe $args | ConvertFrom-Json + + if ($lastExitCode -ne 0) { + return $null + } + + # use first matching instance + return $vsInfo[0] +} + +function InitializeBuildTool() { + if (Test-Path variable:global:_BuildTool) { + # If the requested msbuild parameters do not match, clear the cached variables. + if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) { + Remove-Item variable:global:_BuildTool + Remove-Item variable:global:_MSBuildExe + } else { + return $global:_BuildTool + } + } + + if (-not $msbuildEngine) { + $msbuildEngine = GetDefaultMSBuildEngine + } + + # Initialize dotnet cli if listed in 'tools' + $dotnetRoot = $null + if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { + $dotnetRoot = InitializeDotNetCli -install:$restore + } + + if ($msbuildEngine -eq 'dotnet') { + if (!$dotnetRoot) { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'." + ExitWithExitCode 1 + } + $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet') + + $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net' } + } elseif ($msbuildEngine -eq "vs") { + try { + $msbuildPath = InitializeVisualStudioMSBuild -install:$restore + } catch { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_ + ExitWithExitCode 1 + } + + $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "netframework"; ExcludePrereleaseVS = $excludePrereleaseVS } + } else { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'." + ExitWithExitCode 1 + } + + return $global:_BuildTool = $buildTool +} + +function GetDefaultMSBuildEngine() { + # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows. + if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { + return 'vs' + } + + if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') { + return 'dotnet' + } + + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'." + ExitWithExitCode 1 +} + +function GetNuGetPackageCachePath() { + if ($env:NUGET_PACKAGES -eq $null) { + # Use local cache on CI to ensure deterministic build. 
+ # Avoid using the http cache as workaround for https://github.com/NuGet/Home/issues/3116 + # use global cache in dev builds to avoid cost of downloading packages. + # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968 + if ($useGlobalNuGetCache) { + $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\' + } else { + $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\' + $env:RESTORENOHTTPCACHE = $true + } + } + + return $env:NUGET_PACKAGES +} + +# Returns a full path to an Arcade SDK task project file. +function GetSdkTaskProject([string]$taskName) { + return Join-Path (Split-Path (InitializeToolset) -Parent) "SdkTasks\$taskName.proj" +} + +function InitializeNativeTools() { + if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) { + $nativeArgs= @{} + if ($ci) { + $nativeArgs = @{ + InstallDirectory = "$ToolsDir" + } + } + if ($env:NativeToolsOnMachine) { + Write-Host "Variable NativeToolsOnMachine detected, enabling native tool path promotion..." + $nativeArgs += @{ PathPromotion = $true } + } + & "$PSScriptRoot/init-tools-native.ps1" @nativeArgs + } +} + +function Read-ArcadeSdkVersion() { + return $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk' +} + +function InitializeToolset() { + # For Unified Build/Source-build support, check whether the environment variable is + # set. If it is, then use this as the toolset build project. + if ($env:_InitializeToolset -ne $null) { + return $global:_InitializeToolset = $env:_InitializeToolset + } + + if (Test-Path variable:global:_InitializeToolset) { + return $global:_InitializeToolset + } + + $nugetCache = GetNuGetPackageCachePath + + $toolsetVersion = Read-ArcadeSdkVersion + $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt" + + if (Test-Path $toolsetLocationFile) { + $path = Get-Content $toolsetLocationFile -TotalCount 1 + if (Test-Path $path) { + return $global:_InitializeToolset = $path + } + } + + if (-not $restore) { + Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored." + ExitWithExitCode 1 + } + + $buildTool = InitializeBuildTool + + $proj = Join-Path $ToolsetDir 'restore.proj' + $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' } + + '' | Set-Content $proj + + MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile + + $path = Get-Content $toolsetLocationFile -Encoding UTF8 -TotalCount 1 + if (!(Test-Path $path)) { + throw "Invalid toolset path: $path" + } + + return $global:_InitializeToolset = $path +} + +function ExitWithExitCode([int] $exitCode) { + if ($ci -and $prepareMachine) { + Stop-Processes + } + exit $exitCode +} + +# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print a Azure Pipeline error for +# diagnostics, then exit the script with the $LASTEXITCODE. +function Exit-IfNZEC([string] $category = "General") { + Write-Host "Exit code $LASTEXITCODE" + if ($LASTEXITCODE -ne 0) { + $message = "Last command failed with exit code $LASTEXITCODE." + Write-PipelineTelemetryError -Force -Category $category -Message $message + ExitWithExitCode $LASTEXITCODE + } +} + +function Stop-Processes() { + Write-Host 'Killing running build processes...' 
+ foreach ($processName in $processesToStopOnExit) { + Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process + } +} + +# +# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function. +# The arguments are automatically quoted. +# Terminates the script if the build fails. +# +function MSBuild() { + if ($pipelinesLog) { + $buildTool = InitializeBuildTool + + if ($ci -and $buildTool.Tool -eq 'dotnet') { + $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20 + $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20 + Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20' + Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20' + } + + Enable-Nuget-EnhancedRetry + + $toolsetBuildProject = InitializeToolset + $basePath = Split-Path -parent $toolsetBuildProject + $possiblePaths = @( + # new scripts need to work with old packages, so we need to look for the old names/versions + (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')), + + # This list doesn't need to be updated anymore and can eventually be removed. + (Join-Path $basePath (Join-Path net9.0 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path net9.0 'Microsoft.DotNet.Arcade.Sdk.dll')), + (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.ArcadeLogging.dll')), + (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.Arcade.Sdk.dll')) + ) + $selectedPath = $null + foreach ($path in $possiblePaths) { + if (Test-Path $path -PathType Leaf) { + $selectedPath = $path + break + } + } + if (-not $selectedPath) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.' + ExitWithExitCode 1 + } + $args += "/logger:$selectedPath" + } + + MSBuild-Core @args +} + +# +# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function. +# The arguments are automatically quoted. +# Terminates the script if the build fails. +# +function MSBuild-Core() { + if ($ci) { + if (!$binaryLog -and !$excludeCIBinarylog) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build, or explicitly opted-out from with the -excludeCIBinarylog switch.' + ExitWithExitCode 1 + } + + if ($nodeReuse) { + Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.' + ExitWithExitCode 1 + } + } + + Enable-Nuget-EnhancedRetry + + $buildTool = InitializeBuildTool + + $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci" + + if ($warnAsError) { + $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true' + } + else { + $cmdArgs += ' /p:TreatWarningsAsErrors=false' + } + + foreach ($arg in $args) { + if ($null -ne $arg -and $arg.Trim() -ne "") { + if ($arg.EndsWith('\')) { + $arg = $arg + "\" + } + $cmdArgs += " `"$arg`"" + } + } + + # Be sure quote the path in case there are spaces in the dotnet installation location. + $env:ARCADE_BUILD_TOOL_COMMAND = "`"$($buildTool.Path)`" $cmdArgs" + + $exitCode = Exec-Process $buildTool.Path $cmdArgs + + if ($exitCode -ne 0) { + # We should not Write-PipelineTaskError here because that message shows up in the build summary + # The build already logged an error, that's the reason it failed. Producing an error here only adds noise. + Write-Host "Build failed with exit code $exitCode. 
Check errors above." -ForegroundColor Red + + $buildLog = GetMSBuildBinaryLogCommandLineArgument $args + if ($null -ne $buildLog) { + Write-Host "See log: $buildLog" -ForegroundColor DarkGray + } + + # When running on Azure Pipelines, override the returned exit code to avoid double logging. + # Skip this when the build is a child of the VMR orchestrator build. + if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild -and -not($properties -like "*DotNetBuildRepo=true*")) { + Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed." + # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error + # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error + ExitWithExitCode 0 + } else { + ExitWithExitCode $exitCode + } + } +} + +function GetMSBuildBinaryLogCommandLineArgument($arguments) { + foreach ($argument in $arguments) { + if ($argument -ne $null) { + $arg = $argument.Trim() + if ($arg.StartsWith('/bl:', "OrdinalIgnoreCase")) { + return $arg.Substring('/bl:'.Length) + } + + if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) { + return $arg.Substring('/binaryLogger:'.Length) + } + } + } + + return $null +} + +function GetExecutableFileName($baseName) { + if (IsWindowsPlatform) { + return "$baseName.exe" + } + else { + return $baseName + } +} + +function IsWindowsPlatform() { + return [environment]::OSVersion.Platform -eq [PlatformID]::Win32NT +} + +function Get-Darc($version) { + $darcPath = "$TempDir\darc\$([guid]::NewGuid())" + if ($version -ne $null) { + & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath -darcVersion $version | Out-Host + } else { + & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath | Out-Host + } + return "$darcPath\darc.exe" +} + +. $PSScriptRoot\pipeline-logging-functions.ps1 + +$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\') +$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..') +$ArtifactsDir = Join-Path $RepoRoot 'artifacts' +$ToolsetDir = Join-Path $ArtifactsDir 'toolset' +$ToolsDir = Join-Path $RepoRoot '.tools' +$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration +$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration +$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json +# true if global.json contains a "runtimes" section +$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false } + +Create-Directory $ToolsetDir +Create-Directory $TempDir +Create-Directory $LogDir + +Write-PipelineSetVariable -Name 'Artifacts' -Value $ArtifactsDir +Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir +Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir +Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir +Write-PipelineSetVariable -Name 'TMP' -Value $TempDir + +# Import custom tools configuration, if present in the repo. +# Note: Import in global scope so that the script set top-level variables without qualification. +if (!$disableConfigureToolsetImport) { + $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1' + if (Test-Path $configureToolsetScript) { + . 
$configureToolsetScript
+    if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) {
+      if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) {
+        Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code'
+        ExitWithExitCode $LastExitCode
+      }
+    }
+  }
+}
+
+#
+# If the $ci flag is set, turn on (and log that we did) special environment variables for improved NuGet client retry logic.
+#
+function Enable-Nuget-EnhancedRetry() {
+  if ($ci) {
+    Write-Host "Setting NUGET enhanced retry environment variables"
+    $env:NUGET_ENABLE_ENHANCED_HTTP_RETRY = 'true'
+    $env:NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT = 6
+    $env:NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS = 1000
+    $env:NUGET_RETRY_HTTP_429 = 'true'
+    Write-PipelineSetVariable -Name 'NUGET_ENABLE_ENHANCED_HTTP_RETRY' -Value 'true'
+    Write-PipelineSetVariable -Name 'NUGET_ENHANCED_MAX_NETWORK_TRY_COUNT' -Value '6'
+    Write-PipelineSetVariable -Name 'NUGET_ENHANCED_NETWORK_RETRY_DELAY_MILLISECONDS' -Value '1000'
+    Write-PipelineSetVariable -Name 'NUGET_RETRY_HTTP_429' -Value 'true'
+  }
+}
diff --git a/src/arcade/eng/common/tools.sh b/src/arcade/eng/common/tools.sh
new file mode 100644
index 000000000..d51f300c7
--- /dev/null
+++ b/src/arcade/eng/common/tools.sh
@@ -0,0 +1,591 @@
+#!/usr/bin/env bash
+
+# Initialize variables if they aren't already defined.
+
+# CI mode - set to true on CI server for PR validation build or official build.
+ci=${ci:-false}
+
+# Set to true to use the pipelines logger which will enable Azure logging output.
+# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
+# This flag is meant as a temporary option for the feature while we validate it across
+# our consumers. It will be deleted in the future.
+if [[ "$ci" == true ]]; then
+  pipelines_log=${pipelines_log:-true}
+else
+  pipelines_log=${pipelines_log:-false}
+fi
+
+# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
+configuration=${configuration:-'Debug'}
+
+# Set to true to opt out of outputting a binary log while running in CI
+exclude_ci_binary_log=${exclude_ci_binary_log:-false}
+
+if [[ "$ci" == true && "$exclude_ci_binary_log" == false ]]; then
+  binary_log_default=true
+else
+  binary_log_default=false
+fi
+
+# Set to true to output binary log from msbuild. Note that emitting a binary log slows down the build.
+binary_log=${binary_log:-$binary_log_default}
+
+# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
+prepare_machine=${prepare_machine:-false}
+
+# True to restore toolsets and dependencies.
+restore=${restore:-true}
+
+# Adjusts msbuild verbosity level.
+verbosity=${verbosity:-'minimal'}
+
+# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
+if [[ "$ci" == true ]]; then
+  node_reuse=${node_reuse:-false}
+else
+  node_reuse=${node_reuse:-true}
+fi
+
+# Configures warning treatment in msbuild.
+warn_as_error=${warn_as_error:-true}
+
+# True to attempt using a .NET Core SDK that is already installed on the machine and meets the
+# requirements specified in global.json, instead of downloading one.
+use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
+
+# Enable repos to use a particular version of the on-line dotnet-install scripts. 
+# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh +dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} + +# True to use global NuGet cache instead of restoring packages to repository-local directory. +if [[ "$ci" == true ]]; then + use_global_nuget_cache=${use_global_nuget_cache:-false} +else + use_global_nuget_cache=${use_global_nuget_cache:-true} +fi + +# Used when restoring .NET SDK from alternative feeds +runtime_source_feed=${runtime_source_feed:-''} +runtime_source_feed_key=${runtime_source_feed_key:-''} + +# True if the build is a product build +product_build=${product_build:-false} + +# Resolve any symlinks in the given path. +function ResolvePath { + local path=$1 + + while [[ -h $path ]]; do + local dir="$( cd -P "$( dirname "$path" )" && pwd )" + path="$(readlink "$path")" + + # if $path was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $path != /* ]] && path="$dir/$path" + done + + # return value + _ResolvePath="$path" +} + +# ReadVersionFromJson [json key] +function ReadGlobalVersion { + local key=$1 + + if command -v jq &> /dev/null; then + _ReadGlobalVersion="$(jq -r ".[] | select(has(\"$key\")) | .\"$key\"" "$global_json_file")" + elif [[ "$(cat "$global_json_file")" =~ \"$key\"[[:space:]\:]*\"([^\"]+) ]]; then + _ReadGlobalVersion=${BASH_REMATCH[1]} + fi + + if [[ -z "$_ReadGlobalVersion" ]]; then + Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file" + ExitWithExitCode 1 + fi +} + +function InitializeDotNetCli { + if [[ -n "${_InitializeDotNetCli:-}" ]]; then + return + fi + + local install=$1 + + # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism + export DOTNET_MULTILEVEL_LOOKUP=0 + + # Disable first run since we want to control all package sources + export DOTNET_NOLOGO=1 + + # Disable telemetry on CI + if [[ $ci == true ]]; then + export DOTNET_CLI_TELEMETRY_OPTOUT=1 + fi + + # LTTNG is the logging infrastructure used by Core CLR. Need this variable set + # so it doesn't output warnings to the console. + export LTTNG_HOME="$HOME" + + # Find the first path on $PATH that contains the dotnet.exe + if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then + local dotnet_path=`command -v dotnet` + if [[ -n "$dotnet_path" ]]; then + ResolvePath "$dotnet_path" + export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"` + fi + fi + + ReadGlobalVersion "dotnet" + local dotnet_sdk_version=$_ReadGlobalVersion + local dotnet_root="" + + # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version, + # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues. + if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then + dotnet_root="$DOTNET_INSTALL_DIR" + else + dotnet_root="${repo_root}.dotnet" + + export DOTNET_INSTALL_DIR="$dotnet_root" + + if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then + if [[ "$install" == true ]]; then + InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version" + else + Write-PipelineTelemetryError -category 'InitializeToolset' "Unable to find dotnet with SDK version '$dotnet_sdk_version'" + ExitWithExitCode 1 + fi + fi + fi + + # Add dotnet to PATH. 
This prevents any bare invocation of dotnet in custom + # build steps from using anything other than what we've downloaded. + Write-PipelinePrependPath -path "$dotnet_root" + + Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0" + Write-PipelineSetVariable -name "DOTNET_NOLOGO" -value "1" + + # return value + _InitializeDotNetCli="$dotnet_root" +} + +function InstallDotNetSdk { + local root=$1 + local version=$2 + local architecture="unset" + if [[ $# -ge 3 ]]; then + architecture=$3 + fi + InstallDotNet "$root" "$version" $architecture 'sdk' 'true' $runtime_source_feed $runtime_source_feed_key +} + +function InstallDotNet { + local root=$1 + local version=$2 + local runtime=$4 + + local dotnetVersionLabel="'$runtime v$version'" + if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then + runtimePath="$root" + runtimePath="$runtimePath/shared" + case "$runtime" in + dotnet) + runtimePath="$runtimePath/Microsoft.NETCore.App" + ;; + aspnetcore) + runtimePath="$runtimePath/Microsoft.AspNetCore.App" + ;; + windowsdesktop) + runtimePath="$runtimePath/Microsoft.WindowsDesktop.App" + ;; + *) + ;; + esac + runtimePath="$runtimePath/$version" + + dotnetVersionLabel="runtime toolset '$runtime/$architecture v$version'" + + if [ -d "$runtimePath" ]; then + echo " Runtime toolset '$runtime/$architecture v$version' already installed." + local installSuccess=1 + return + fi + fi + + GetDotNetInstallScript "$root" + local install_script=$_GetDotNetInstallScript + + local installParameters=(--version $version --install-dir "$root") + + if [[ -n "${3:-}" ]] && [ "$3" != 'unset' ]; then + installParameters+=(--architecture $3) + fi + if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then + installParameters+=(--runtime $4) + fi + if [[ "$#" -ge "5" ]] && [[ "$5" != 'false' ]]; then + installParameters+=(--skip-non-versioned-files) + fi + + local variations=() # list of variable names with parameter arrays in them + + local public_location=("${installParameters[@]}") + variations+=(public_location) + + local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://ci.dot.net/public") + variations+=(dotnetbuilds) + + if [[ -n "${6:-}" ]]; then + variations+=(private_feed) + local private_feed=("${installParameters[@]}" --azure-feed $6) + if [[ -n "${7:-}" ]]; then + # The 'base64' binary on alpine uses '-d' and doesn't support '--decode' + # '-d'. To work around this, do a simple detection and switch the parameter + # accordingly. + decodeArg="--decode" + if base64 --help 2>&1 | grep -q "BusyBox"; then + decodeArg="-d" + fi + decodedFeedKey=`echo $7 | base64 $decodeArg` + private_feed+=(--feed-credential $decodedFeedKey) + fi + fi + + local installSuccess=0 + for variationName in "${variations[@]}"; do + local name="$variationName[@]" + local variation=("${!name}") + echo " Attempting to install $dotnetVersionLabel from $variationName." + bash "$install_script" "${variation[@]}" && installSuccess=1 + if [[ "$installSuccess" -eq 1 ]]; then + break + fi + + echo " Failed to install $dotnetVersionLabel from $variationName." + done + + if [[ "$installSuccess" -eq 0 ]]; then + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install $dotnetVersionLabel from any of the specified locations." + ExitWithExitCode 1 + fi +} + +function with_retries { + local maxRetries=5 + local retries=1 + echo "Trying to run '$@' for maximum of $maxRetries attempts." + while [[ $((retries++)) -le $maxRetries ]]; do + "$@" + + if [[ $? == 0 ]]; then + echo "Ran '$@' successfully." 
+ return 0 + fi + + timeout=$((3**$retries-1)) + echo "Failed to execute '$@'. Waiting $timeout seconds before next attempt ($retries out of $maxRetries)." 1>&2 + sleep $timeout + done + + echo "Failed to execute '$@' for $maxRetries times." 1>&2 + + return 1 +} + +function GetDotNetInstallScript { + local root=$1 + local install_script="$root/dotnet-install.sh" + local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" + + if [[ ! -a "$install_script" ]]; then + mkdir -p "$root" + + echo "Downloading '$install_script_url'" + + # Use curl if available, otherwise use wget + if command -v curl > /dev/null; then + # first, try directly, if this fails we will retry with verbose logging + curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || { + if command -v openssl &> /dev/null; then + echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation" + echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443 || true + fi + echo "Will now retry the same URL with verbose logging." + with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || { + local exit_code=$? + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." + ExitWithExitCode $exit_code + } + } + else + with_retries wget -v -O "$install_script" "$install_script_url" || { + local exit_code=$? + Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')." + ExitWithExitCode $exit_code + } + fi + fi + # return value + _GetDotNetInstallScript="$install_script" +} + +function InitializeBuildTool { + if [[ -n "${_InitializeBuildTool:-}" ]]; then + return + fi + + InitializeDotNetCli $restore + + # return values + _InitializeBuildTool="$_InitializeDotNetCli/dotnet" + _InitializeBuildToolCommand="msbuild" +} + +# Set RestoreNoHttpCache as a workaround for https://github.com/NuGet/Home/issues/3116 +function GetNuGetPackageCachePath { + if [[ -z ${NUGET_PACKAGES:-} ]]; then + if [[ "$use_global_nuget_cache" == true ]]; then + export NUGET_PACKAGES="$HOME/.nuget/packages/" + else + export NUGET_PACKAGES="$repo_root/.packages/" + export RESTORENOHTTPCACHE=true + fi + fi + + # return value + _GetNuGetPackageCachePath=$NUGET_PACKAGES +} + +function InitializeNativeTools() { + if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then + return + fi + if grep -Fq "native-tools" $global_json_file + then + local nativeArgs="" + if [[ "$ci" == true ]]; then + nativeArgs="--installDirectory $tools_dir" + fi + "$_script_dir/init-tools-native.sh" $nativeArgs + fi +} + +function InitializeToolset { + if [[ -n "${_InitializeToolset:-}" ]]; then + return + fi + + GetNuGetPackageCachePath + + ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk" + + local toolset_version=$_ReadGlobalVersion + local toolset_location_file="$toolset_dir/$toolset_version.txt" + + if [[ -a "$toolset_location_file" ]]; then + local path=`cat "$toolset_location_file"` + if [[ -a "$path" ]]; then + # return value + _InitializeToolset="$path" + return + fi + fi + + if [[ "$restore" != true ]]; then + Write-PipelineTelemetryError -category 'InitializeToolset' "Toolset version $toolset_version has not been restored." 
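+    # A distinct exit code (2) separates 'toolset not restored' from the generic failures that exit with 1.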
+ ExitWithExitCode 2 + fi + + local proj="$toolset_dir/restore.proj" + + local bl="" + if [[ "$binary_log" == true ]]; then + bl="/bl:$log_dir/ToolsetRestore.binlog" + fi + + echo '' > "$proj" + MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file" + + local toolset_build_proj=`cat "$toolset_location_file"` + + if [[ ! -a "$toolset_build_proj" ]]; then + Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj" + ExitWithExitCode 3 + fi + + # return value + _InitializeToolset="$toolset_build_proj" +} + +function ExitWithExitCode { + if [[ "$ci" == true && "$prepare_machine" == true ]]; then + StopProcesses + fi + exit $1 +} + +function StopProcesses { + echo "Killing running build processes..." + pkill -9 "dotnet" || true + pkill -9 "vbcscompiler" || true + return 0 +} + +function MSBuild { + local args=( "$@" ) + if [[ "$pipelines_log" == true ]]; then + InitializeBuildTool + InitializeToolset + + if [[ "$ci" == true ]]; then + export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20 + export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20 + Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20" + Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20" + fi + + local toolset_dir="${_InitializeToolset%/*}" + # new scripts need to work with old packages, so we need to look for the old names/versions + local selectedPath= + local possiblePaths=() + possiblePaths+=( "$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/net/Microsoft.DotNet.Arcade.Sdk.dll" ) + + # This list doesn't need to be updated anymore and can eventually be removed. + possiblePaths+=( "$toolset_dir/net9.0/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/net9.0/Microsoft.DotNet.Arcade.Sdk.dll" ) + possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" ) + possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" ) + for path in "${possiblePaths[@]}"; do + if [[ -f $path ]]; then + selectedPath=$path + break + fi + done + if [[ -z "$selectedPath" ]]; then + Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly." + ExitWithExitCode 1 + fi + args+=( "-logger:$selectedPath" ) + fi + + MSBuild-Core "${args[@]}" +} + +function MSBuild-Core { + if [[ "$ci" == true ]]; then + if [[ "$binary_log" != true && "$exclude_ci_binary_log" != true ]]; then + Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build, or explicitly opted-out from with the -noBinaryLog switch." + ExitWithExitCode 1 + fi + + if [[ "$node_reuse" == true ]]; then + Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build." + ExitWithExitCode 1 + fi + fi + + InitializeBuildTool + + local warnaserror_switch="" + if [[ $warn_as_error == true ]]; then + warnaserror_switch="/warnaserror" + fi + + function RunBuildTool { + export ARCADE_BUILD_TOOL_COMMAND="$_InitializeBuildTool $@" + + "$_InitializeBuildTool" "$@" || { + local exit_code=$? + # We should not Write-PipelineTaskError here because that message shows up in the build summary + # The build already logged an error, that's the reason it failed. Producing an error here only adds noise. + echo "Build failed with exit code $exit_code. Check errors above." + + # When running on Azure Pipelines, override the returned exit code to avoid double logging. 
+ # Skip this when the build is a child of the VMR orchestrator build. + if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then + Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." + # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error + # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error + ExitWithExitCode 0 + else + ExitWithExitCode $exit_code + fi + } + } + + RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@" +} + +function GetDarc { + darc_path="$temp_dir/darc" + version="$1" + + if [[ -n "$version" ]]; then + version="--darcversion $version" + fi + + "$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version +} + +# Returns a full path to an Arcade SDK task project file. +function GetSdkTaskProject { + taskName=$1 + echo "$(dirname $_InitializeToolset)/SdkTasks/$taskName.proj" +} + +ResolvePath "${BASH_SOURCE[0]}" +_script_dir=`dirname "$_ResolvePath"` + +. "$_script_dir/pipeline-logging-functions.sh" + +eng_root=`cd -P "$_script_dir/.." && pwd` +repo_root=`cd -P "$_script_dir/../.." && pwd` +repo_root="${repo_root}/" +artifacts_dir="${repo_root}artifacts" +toolset_dir="$artifacts_dir/toolset" +tools_dir="${repo_root}.tools" +log_dir="$artifacts_dir/log/$configuration" +temp_dir="$artifacts_dir/tmp/$configuration" + +global_json_file="${repo_root}global.json" +# determine if global.json contains a "runtimes" entry +global_json_has_runtimes=false +if command -v jq &> /dev/null; then + if jq -e '.tools | has("runtimes")' "$global_json_file" &> /dev/null; then + global_json_has_runtimes=true + fi +elif [[ "$(cat "$global_json_file")" =~ \"runtimes\"[[:space:]\:]*\{ ]]; then + global_json_has_runtimes=true +fi + +# HOME may not be defined in some scenarios, but it is required by NuGet +if [[ -z $HOME ]]; then + export HOME="${repo_root}artifacts/.home/" + mkdir -p "$HOME" +fi + +mkdir -p "$toolset_dir" +mkdir -p "$temp_dir" +mkdir -p "$log_dir" + +Write-PipelineSetVariable -name "Artifacts" -value "$artifacts_dir" +Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir" +Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir" +Write-PipelineSetVariable -name "Temp" -value "$temp_dir" +Write-PipelineSetVariable -name "TMP" -value "$temp_dir" + +# Import custom tools configuration, if present in the repo. +if [ -z "${disable_configure_toolset_import:-}" ]; then + configure_toolset_script="$eng_root/configure-toolset.sh" + if [[ -a "$configure_toolset_script" ]]; then + . "$configure_toolset_script" + fi +fi + +# TODO: https://github.com/dotnet/arcade/issues/1468 +# Temporary workaround to avoid breaking change. +# Remove once repos are updated. 
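+# Map the legacy camelCase variable name onto the snake_case name used by this script.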
+if [[ -n "${useInstalledDotNetCli:-}" ]]; then + use_installed_dotnet_cli="$useInstalledDotNetCli" +fi From 94c05e32cb87a5770780471ab9a7ca948561bf61 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 19 May 2025 05:01:54 +0000 Subject: [PATCH 17/43] Update dependencies from https://github.com/dotnet/arcade build 20250518.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25268.1 --- eng/Version.Details.xml | 4 +- eng/common/build.sh | 4 +- .../core-templates/steps/source-build.yml | 17 +- eng/common/templates/steps/vmr-sync.yml | 207 ++++++++++++++++++ eng/common/templates/vmr-build-pr.yml | 33 +++ eng/common/tools.ps1 | 4 +- eng/common/tools.sh | 8 +- eng/common/vmr-sync.ps1 | 138 ++++++++++++ eng/common/vmr-sync.sh | 205 +++++++++++++++++ global.json | 2 +- 10 files changed, 600 insertions(+), 22 deletions(-) create mode 100644 eng/common/templates/steps/vmr-sync.yml create mode 100644 eng/common/templates/vmr-build-pr.yml create mode 100644 eng/common/vmr-sync.ps1 create mode 100644 eng/common/vmr-sync.sh diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 39441f99c..8bdb8c6dc 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 80c4e4d26cb85c86f7e1be77d2d9eceeef0f3493 + 35a34fa5ab9b2f97d3f7bdf48a7c2100727308b3 https://github.com/dotnet/wpf diff --git a/eng/common/build.sh b/eng/common/build.sh index 27ae2c856..08f99154b 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -129,14 +129,14 @@ while [[ $# > 0 ]]; do -pack) pack=true ;; - -sourcebuild|-sb) + -sourcebuild|-source-build|-sb) build=true source_build=true product_build=true restore=true pack=true ;; - -productbuild|-pb) + -productbuild|-product-build|-pb) build=true product_build=true restore=true diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index c7c062e88..0dde553c3 100644 --- a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -38,14 +38,9 @@ steps: targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' fi - runtimeOsArgs= - if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then - runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}' - fi - - baseOsArgs= - if [ '${{ parameters.platform.baseOS }}' != '' ]; then - baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}' + baseRidArgs= + if [ '${{ parameters.platform.baseRID }}' != '' ]; then + baseRidArgs='/p:BaseRid=${{ parameters.platform.baseRID }}' fi portableBuildArgs= @@ -56,14 +51,12 @@ steps: ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \ --configuration $buildConfig \ --restore --build --pack -bl \ + --source-build \ ${{ parameters.platform.buildArguments }} \ $internalRuntimeDownloadArgs \ $targetRidArgs \ - $runtimeOsArgs \ - $baseOsArgs \ + $baseRidArgs \ $portableBuildArgs \ - /p:DotNetBuildSourceOnly=true \ - /p:DotNetBuildRepo=true \ displayName: Build - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml diff --git a/eng/common/templates/steps/vmr-sync.yml b/eng/common/templates/steps/vmr-sync.yml new file mode 100644 index 000000000..599afb618 --- /dev/null +++ b/eng/common/templates/steps/vmr-sync.yml @@ -0,0 +1,207 @@ +### These steps synchronize new code from product repositories into the VMR (https://github.com/dotnet/dotnet). +### They initialize the darc CLI and pull the new updates. 
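+### The synchronization itself is performed by eng/common/vmr-sync.sh (or vmr-sync.ps1 on Windows),
+### which runs 'darc vmr forwardflow'.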
+### Changes are applied locally onto the already cloned VMR (located in $vmrPath). + +parameters: +- name: targetRef + displayName: Target revision in dotnet/ to synchronize + type: string + default: $(Build.SourceVersion) + +- name: vmrPath + displayName: Path where the dotnet/dotnet is checked out to + type: string + default: $(Agent.BuildDirectory)/vmr + +- name: additionalSyncs + displayName: Optional list of package names whose repo's source will also be synchronized in the local VMR, e.g. NuGet.Protocol + type: object + default: [] + +steps: +- checkout: vmr + displayName: Clone dotnet/dotnet + path: vmr + clean: true + +- checkout: self + displayName: Clone $(Build.Repository.Name) + path: repo + fetchDepth: 0 + +# This step is needed so that when we get a detached HEAD / shallow clone, +# we still pull the commit into the temporary repo clone to use it during the sync. +# Also unshallow the clone so that forwardflow command would work. +- script: | + git branch repo-head + git rev-parse HEAD + displayName: Label PR commit + workingDirectory: $(Agent.BuildDirectory)/repo + +- script: | + vmr_sha=$(grep -oP '(?<=Sha=")[^"]*' $(Agent.BuildDirectory)/repo/eng/Version.Details.xml) + echo "##vso[task.setvariable variable=vmr_sha]$vmr_sha" + displayName: Obtain the vmr sha from Version.Details.xml (Unix) + condition: ne(variables['Agent.OS'], 'Windows_NT') + workingDirectory: $(Agent.BuildDirectory)/repo + +- powershell: | + [xml]$xml = Get-Content -Path $(Agent.BuildDirectory)/repo/eng/Version.Details.xml + $vmr_sha = $xml.SelectSingleNode("//Source").Sha + Write-Output "##vso[task.setvariable variable=vmr_sha]$vmr_sha" + displayName: Obtain the vmr sha from Version.Details.xml (Windows) + condition: eq(variables['Agent.OS'], 'Windows_NT') + workingDirectory: $(Agent.BuildDirectory)/repo + +- script: | + git fetch --all + git checkout $(vmr_sha) + displayName: Checkout VMR at correct sha for repo flow + workingDirectory: ${{ parameters.vmrPath }} + +- script: | + git config --global user.name "dotnet-maestro[bot]" + git config --global user.email "dotnet-maestro[bot]@users.noreply.github.com" + displayName: Set git author to dotnet-maestro[bot] + workingDirectory: ${{ parameters.vmrPath }} + +- script: | + ./eng/common/vmr-sync.sh \ + --vmr ${{ parameters.vmrPath }} \ + --tmp $(Agent.TempDirectory) \ + --azdev-pat '$(dn-bot-all-orgs-code-r)' \ + --ci \ + --debug + + if [ "$?" 
-ne 0 ]; then + echo "##vso[task.logissue type=error]Failed to synchronize the VMR" + exit 1 + fi + displayName: Sync repo into VMR (Unix) + condition: ne(variables['Agent.OS'], 'Windows_NT') + workingDirectory: $(Agent.BuildDirectory)/repo + +- script: | + git config --global diff.astextplain.textconv echo + git config --system core.longpaths true + displayName: Configure Windows git (longpaths, astextplain) + condition: eq(variables['Agent.OS'], 'Windows_NT') + +- powershell: | + ./eng/common/vmr-sync.ps1 ` + -vmr ${{ parameters.vmrPath }} ` + -tmp $(Agent.TempDirectory) ` + -azdevPat '$(dn-bot-all-orgs-code-r)' ` + -ci ` + -debugOutput + + if ($LASTEXITCODE -ne 0) { + echo "##vso[task.logissue type=error]Failed to synchronize the VMR" + exit 1 + } + displayName: Sync repo into VMR (Windows) + condition: eq(variables['Agent.OS'], 'Windows_NT') + workingDirectory: $(Agent.BuildDirectory)/repo + +- ${{ if eq(variables['Build.Reason'], 'PullRequest') }}: + - task: CopyFiles@2 + displayName: Collect failed patches + condition: failed() + inputs: + SourceFolder: '$(Agent.TempDirectory)' + Contents: '*.patch' + TargetFolder: '$(Build.ArtifactStagingDirectory)/FailedPatches' + + - publish: '$(Build.ArtifactStagingDirectory)/FailedPatches' + artifact: $(System.JobDisplayName)_FailedPatches + displayName: Upload failed patches + condition: failed() + +- ${{ each assetName in parameters.additionalSyncs }}: + # The vmr-sync script ends up staging files in the local VMR so we have to commit those + - script: + git commit --allow-empty -am "Forward-flow $(Build.Repository.Name)" + displayName: Commit local VMR changes + workingDirectory: ${{ parameters.vmrPath }} + + - script: | + set -ex + + echo "Searching for details of asset ${{ assetName }}..." + + # Use darc to get dependencies information + dependencies=$(./.dotnet/dotnet darc get-dependencies --name '${{ assetName }}' --ci) + + # Extract repository URL and commit hash + repository=$(echo "$dependencies" | grep 'Repo:' | sed 's/Repo:[[:space:]]*//' | head -1) + + if [ -z "$repository" ]; then + echo "##vso[task.logissue type=error]Asset ${{ assetName }} not found in the dependency list" + exit 1 + fi + + commit=$(echo "$dependencies" | grep 'Commit:' | sed 's/Commit:[[:space:]]*//' | head -1) + + echo "Updating the VMR from $repository / $commit..." + cd .. + git clone $repository ${{ assetName }} + cd ${{ assetName }} + git checkout $commit + git branch "sync/$commit" + + ./eng/common/vmr-sync.sh \ + --vmr ${{ parameters.vmrPath }} \ + --tmp $(Agent.TempDirectory) \ + --azdev-pat '$(dn-bot-all-orgs-code-r)' \ + --ci \ + --debug + + if [ "$?" -ne 0 ]; then + echo "##vso[task.logissue type=error]Failed to synchronize the VMR" + exit 1 + fi + displayName: Sync ${{ assetName }} into (Unix) + condition: ne(variables['Agent.OS'], 'Windows_NT') + workingDirectory: $(Agent.BuildDirectory)/repo + + - powershell: | + $ErrorActionPreference = 'Stop' + + Write-Host "Searching for details of asset ${{ assetName }}..." 
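+      # 'darc get-dependencies' prints one block per dependency; the Repo: and Commit: lines
+      # for this asset are parsed out below.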
+ + $dependencies = .\.dotnet\dotnet darc get-dependencies --name '${{ assetName }}' --ci + + $repository = $dependencies | Select-String -Pattern 'Repo:\s+([^\s]+)' | Select-Object -First 1 + $repository -match 'Repo:\s+([^\s]+)' | Out-Null + $repository = $matches[1] + + if ($repository -eq $null) { + Write-Error "Asset ${{ assetName }} not found in the dependency list" + exit 1 + } + + $commit = $dependencies | Select-String -Pattern 'Commit:\s+([^\s]+)' | Select-Object -First 1 + $commit -match 'Commit:\s+([^\s]+)' | Out-Null + $commit = $matches[1] + + Write-Host "Updating the VMR from $repository / $commit..." + cd .. + git clone $repository ${{ assetName }} + cd ${{ assetName }} + git checkout $commit + git branch "sync/$commit" + + .\eng\common\vmr-sync.ps1 ` + -vmr ${{ parameters.vmrPath }} ` + -tmp $(Agent.TempDirectory) ` + -azdevPat '$(dn-bot-all-orgs-code-r)' ` + -ci ` + -debugOutput + + if ($LASTEXITCODE -ne 0) { + echo "##vso[task.logissue type=error]Failed to synchronize the VMR" + exit 1 + } + displayName: Sync ${{ assetName }} into (Windows) + condition: ne(variables['Agent.OS'], 'Windows_NT') + workingDirectory: $(Agent.BuildDirectory)/repo diff --git a/eng/common/templates/vmr-build-pr.yml b/eng/common/templates/vmr-build-pr.yml new file mode 100644 index 000000000..670cf32c3 --- /dev/null +++ b/eng/common/templates/vmr-build-pr.yml @@ -0,0 +1,33 @@ +trigger: none +pr: + branches: + include: + - main + - release/* + paths: + exclude: + - documentation/* + - README.md + - CODEOWNERS + +variables: +- template: /eng/common/templates/variables/pool-providers.yml@self + +- name: skipComponentGovernanceDetection # we run CG on internal builds only + value: true + +- name: Codeql.Enabled # we run CodeQL on internal builds only + value: false + +resources: + repositories: + - repository: vmr + type: github + name: dotnet/dotnet + endpoint: dotnet + +stages: +- template: /eng/pipelines/templates/stages/vmr-build.yml@vmr + parameters: + isBuiltFromVmr: false + scope: lite diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 7373e5305..5f40a3f82 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -68,8 +68,6 @@ $ErrorActionPreference = 'Stop' # True if the build is a product build [bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false } -[String[]]$properties = if (Test-Path variable:properties) { $properties } else { @() } - function Create-Directory ([string[]] $path) { New-Item -Path $path -Force -ItemType 'Directory' | Out-Null } @@ -853,7 +851,7 @@ function MSBuild-Core() { # When running on Azure Pipelines, override the returned exit code to avoid double logging. # Skip this when the build is a child of the VMR orchestrator build. - if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild -and -not($properties -like "*DotNetBuildRepo=true*")) { + if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild) { Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed." # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error diff --git a/eng/common/tools.sh b/eng/common/tools.sh index d51f300c7..25f5932ee 100644 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -5,6 +5,9 @@ # CI mode - set to true on CI server for PR validation build or official build. 
ci=${ci:-false} +# Build mode +source_build=${source_build:-false} + # Set to true to use the pipelines logger which will enable Azure logging output. # https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md # This flag is meant as a temporary opt-opt for the feature while validate it across @@ -58,7 +61,8 @@ use_installed_dotnet_cli=${use_installed_dotnet_cli:-true} dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'} # True to use global NuGet cache instead of restoring packages to repository-local directory. -if [[ "$ci" == true ]]; then +# Keep in sync with NuGetPackageroot in Arcade SDK's RepositoryLayout.props. +if [[ "$ci" == true || "$source_build" == true ]]; then use_global_nuget_cache=${use_global_nuget_cache:-false} else use_global_nuget_cache=${use_global_nuget_cache:-true} @@ -503,7 +507,7 @@ function MSBuild-Core { # When running on Azure Pipelines, override the returned exit code to avoid double logging. # Skip this when the build is a child of the VMR orchestrator build. - if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true && "$properties" != *"DotNetBuildRepo=true"* ]]; then + if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true ]]; then Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error diff --git a/eng/common/vmr-sync.ps1 b/eng/common/vmr-sync.ps1 new file mode 100644 index 000000000..8c3c91ce8 --- /dev/null +++ b/eng/common/vmr-sync.ps1 @@ -0,0 +1,138 @@ +<# +.SYNOPSIS + +This script is used for synchronizing the current repository into a local VMR. +It pulls the current repository's code into the specified VMR directory for local testing or +Source-Build validation. + +.DESCRIPTION + +The tooling used for synchronization will clone the VMR repository into a temporary folder if +it does not already exist. These clones can be reused in future synchronizations, so it is +recommended to dedicate a folder for this to speed up re-runs. + +.EXAMPLE + Synchronize current repository into a local VMR: + ./vmr-sync.ps1 -vmrDir "$HOME/repos/dotnet" -tmpDir "$HOME/repos/tmp" + +.PARAMETER tmpDir +Required. Path to the temporary folder where repositories will be cloned + +.PARAMETER vmrBranch +Optional. Branch of the 'dotnet/dotnet' repo to synchronize. The VMR will be checked out to this branch + +.PARAMETER azdevPat +Optional. Azure DevOps PAT to use for cloning private repositories. + +.PARAMETER vmrDir +Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder + +.PARAMETER debugOutput +Optional. Enables debug logging in the darc vmr command. + +.PARAMETER ci +Optional. Denotes that the script is running in a CI environment. 
+#> +param ( + [Parameter(Mandatory=$true, HelpMessage="Path to the temporary folder where repositories will be cloned")] + [string][Alias('t', 'tmp')]$tmpDir, + [string][Alias('b', 'branch')]$vmrBranch, + [string]$remote, + [string]$azdevPat, + [string][Alias('v', 'vmr')]$vmrDir, + [switch]$ci, + [switch]$debugOutput +) + +function Fail { + Write-Host "> $($args[0])" -ForegroundColor 'Red' +} + +function Highlight { + Write-Host "> $($args[0])" -ForegroundColor 'Cyan' +} + +$verbosity = 'verbose' +if ($debugOutput) { + $verbosity = 'debug' +} +# Validation + +if (-not $tmpDir) { + Fail "Missing -tmpDir argument. Please specify the path to the temporary folder where the repositories will be cloned" + exit 1 +} + +# Sanitize the input + +if (-not $vmrDir) { + $vmrDir = Join-Path $tmpDir 'dotnet' +} + +if (-not (Test-Path -Path $tmpDir -PathType Container)) { + New-Item -ItemType Directory -Path $tmpDir | Out-Null +} + +# Prepare the VMR + +if (-not (Test-Path -Path $vmrDir -PathType Container)) { + Highlight "Cloning 'dotnet/dotnet' into $vmrDir.." + git clone https://github.com/dotnet/dotnet $vmrDir + + if ($vmrBranch) { + git -C $vmrDir switch -c $vmrBranch + } +} +else { + if ((git -C $vmrDir diff --quiet) -eq $false) { + Fail "There are changes in the working tree of $vmrDir. Please commit or stash your changes" + exit 1 + } + + if ($vmrBranch) { + Highlight "Preparing $vmrDir" + git -C $vmrDir checkout $vmrBranch + git -C $vmrDir pull + } +} + +Set-StrictMode -Version Latest + +# Prepare darc + +Highlight 'Installing .NET, preparing the tooling..' +. .\eng\common\tools.ps1 +$dotnetRoot = InitializeDotNetCli -install:$true +$dotnet = "$dotnetRoot\dotnet.exe" +& "$dotnet" tool restore + +Highlight "Starting the synchronization of VMR.." + +# Synchronize the VMR +$darcArgs = ( + "darc", "vmr", "forwardflow", + "--tmp", $tmpDir, + "--$verbosity", + $vmrDir +) + +if ($ci) { + $darcArgs += ("--ci") +} + +if ($azdevPat) { + $darcArgs += ("--azdev-pat", $azdevPat) +} + +& "$dotnet" $darcArgs + +if ($LASTEXITCODE -eq 0) { + Highlight "Synchronization succeeded" +} +else { + Fail "Synchronization of repo to VMR failed!" + Fail "'$vmrDir' is left in its last state (re-run of this script will reset it)." + Fail "Please inspect the logs which contain path to the failing patch file (use -debugOutput to get all the details)." + Fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script." + exit 1 +} diff --git a/eng/common/vmr-sync.sh b/eng/common/vmr-sync.sh new file mode 100644 index 000000000..86d77ccf5 --- /dev/null +++ b/eng/common/vmr-sync.sh @@ -0,0 +1,205 @@ +#!/bin/bash + +### This script is used for synchronizing the current repository into a local VMR. +### It pulls the current repository's code into the specified VMR directory for local testing or +### Source-Build validation. +### +### The tooling used for synchronization will clone the VMR repository into a temporary folder if +### it does not already exist. These clones can be reused in future synchronizations, so it is +### recommended to dedicate a folder for this to speed up re-runs. +### +### USAGE: +### Synchronize current repository into a local VMR: +### ./vmr-sync.sh --tmp "$HOME/repos/tmp" "$HOME/repos/dotnet" +### +### Options: +### -t, --tmp, --tmp-dir PATH +### Required. Path to the temporary folder where repositories will be cloned +### +### -b, --branch, --vmr-branch BRANCH_NAME +### Optional. Branch of the 'dotnet/dotnet' repo to synchronize. 
The VMR will be checked out to this branch +### +### --debug +### Optional. Turns on the most verbose logging for the VMR tooling +### +### --remote name:URI +### Optional. Additional remote to use during the synchronization +### This can be used to synchronize to a commit from a fork of the repository +### Example: 'runtime:https://github.com/yourfork/runtime' +### +### --azdev-pat +### Optional. Azure DevOps PAT to use for cloning private repositories. +### +### -v, --vmr, --vmr-dir PATH +### Optional. Path to the dotnet/dotnet repository. When null, gets cloned to the temporary folder + +source="${BASH_SOURCE[0]}" + +# resolve $source until the file is no longer a symlink +while [[ -h "$source" ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +function print_help () { + sed -n '/^### /,/^$/p' "$source" | cut -b 5- +} + +COLOR_RED=$(tput setaf 1 2>/dev/null || true) +COLOR_CYAN=$(tput setaf 6 2>/dev/null || true) +COLOR_CLEAR=$(tput sgr0 2>/dev/null || true) +COLOR_RESET=uniquesearchablestring +FAILURE_PREFIX='> ' + +function fail () { + echo "${COLOR_RED}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_RED}}${COLOR_CLEAR}" >&2 +} + +function highlight () { + echo "${COLOR_CYAN}$FAILURE_PREFIX${1//${COLOR_RESET}/${COLOR_CYAN}}${COLOR_CLEAR}" +} + +tmp_dir='' +vmr_dir='' +vmr_branch='' +additional_remotes='' +verbosity=verbose +azdev_pat='' +ci=false + +while [[ $# -gt 0 ]]; do + opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" + case "$opt" in + -t|--tmp|--tmp-dir) + tmp_dir=$2 + shift + ;; + -v|--vmr|--vmr-dir) + vmr_dir=$2 + shift + ;; + -b|--branch|--vmr-branch) + vmr_branch=$2 + shift + ;; + --remote) + additional_remotes="$additional_remotes $2" + shift + ;; + --azdev-pat) + azdev_pat=$2 + shift + ;; + --ci) + ci=true + ;; + -d|--debug) + verbosity=debug + ;; + -h|--help) + print_help + exit 0 + ;; + *) + fail "Invalid argument: $1" + print_help + exit 1 + ;; + esac + + shift +done + +# Validation + +if [[ -z "$tmp_dir" ]]; then + fail "Missing --tmp-dir argument. Please specify the path to the temporary folder where the repositories will be cloned" + exit 1 +fi + +# Sanitize the input + +if [[ -z "$vmr_dir" ]]; then + vmr_dir="$tmp_dir/dotnet" +fi + +if [[ ! -d "$tmp_dir" ]]; then + mkdir -p "$tmp_dir" +fi + +if [[ "$verbosity" == "debug" ]]; then + set -x +fi + +# Prepare the VMR + +if [[ ! -d "$vmr_dir" ]]; then + highlight "Cloning 'dotnet/dotnet' into $vmr_dir.." + git clone https://github.com/dotnet/dotnet "$vmr_dir" + + if [[ -n "$vmr_branch" ]]; then + git -C "$vmr_dir" switch -c "$vmr_branch" + fi +else + if ! git -C "$vmr_dir" diff --quiet; then + fail "There are changes in the working tree of $vmr_dir. Please commit or stash your changes" + exit 1 + fi + + if [[ -n "$vmr_branch" ]]; then + highlight "Preparing $vmr_dir" + git -C "$vmr_dir" checkout "$vmr_branch" + git -C "$vmr_dir" pull + fi +fi + +set -e + +# Prepare darc + +highlight 'Installing .NET, preparing the tooling..' +source "./eng/common/tools.sh" +InitializeDotNetCli true +dotnetDir=$( cd ./.dotnet/; pwd -P ) +dotnet=$dotnetDir/dotnet +"$dotnet" tool restore + +highlight "Starting the synchronization of VMR.." 
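# [Editorial note - the comment block below is not part of the upstream script; it is added here
# only to make the patch easier to read.] With the defaults parsed above (verbosity=verbose,
# ci=false, no extra remotes), an invocation such as
#   ./vmr-sync.sh --tmp "$HOME/repos/tmp" --vmr "$HOME/repos/dotnet"
# resolves in the block that follows to roughly:
#   "$dotnet" darc vmr forwardflow --tmp "$HOME/repos/tmp" --verbose "$HOME/repos/dotnet"
# with --ci, --azdev-pat and --additional-remotes appended only when the corresponding options
# were supplied.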
+set +e + +if [[ -n "$additional_remotes" ]]; then + additional_remotes="--additional-remotes $additional_remotes" +fi + +if [[ -n "$azdev_pat" ]]; then + azdev_pat="--azdev-pat $azdev_pat" +fi + +ci_arg='' +if [[ "$ci" == "true" ]]; then + ci_arg="--ci" +fi + +# Synchronize the VMR + +"$dotnet" darc vmr forwardflow \ + --tmp "$tmp_dir" \ + $azdev_pat \ + --$verbosity \ + $ci_arg \ + $additional_remotes \ + "$vmr_dir" + +if [[ $? == 0 ]]; then + highlight "Synchronization succeeded" +else + fail "Synchronization of repo to VMR failed!" + fail "'$vmr_dir' is left in its last state (re-run of this script will reset it)." + fail "Please inspect the logs which contain path to the failing patch file (use --debug to get all the details)." + fail "Once you make changes to the conflicting VMR patch, commit it locally and re-run this script." + exit 1 +fi diff --git a/global.json b/global.json index 2a9d40f3a..166fba594 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25259.2", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25268.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From e303b3f374383265bca03fcc2df74ce36237e245 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 26 May 2025 05:02:09 +0000 Subject: [PATCH 18/43] Update dependencies from https://github.com/dotnet/arcade build 20250521.2 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25271.2 --- eng/Version.Details.xml | 4 +-- eng/common/build.ps1 | 2 +- eng/common/build.sh | 2 +- .../core-templates/post-build/post-build.yml | 3 -- .../steps/install-microbuild.yml | 35 ------------------- global.json | 2 +- 6 files changed, 5 insertions(+), 43 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 8bdb8c6dc..913c8ff45 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 35a34fa5ab9b2f97d3f7bdf48a7c2100727308b3 + 12d3a9f5d6138e22270694574e73e4c58a815795 https://github.com/dotnet/wpf diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 index 6b3be1916..ae2309e31 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -127,7 +127,7 @@ function Build { /p:Deploy=$deploy ` /p:Test=$test ` /p:Pack=$pack ` - /p:DotNetBuildRepo=$productBuild ` + /p:DotNetBuild=$productBuild ` /p:IntegrationTest=$integrationTest ` /p:PerformanceTest=$performanceTest ` /p:Sign=$sign ` diff --git a/eng/common/build.sh b/eng/common/build.sh index 08f99154b..da906da20 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -241,7 +241,7 @@ function Build { /p:RepoRoot="$repo_root" \ /p:Restore=$restore \ /p:Build=$build \ - /p:DotNetBuildRepo=$product_build \ + /p:DotNetBuild=$product_build \ /p:DotNetBuildSourceOnly=$source_build \ /p:Rebuild=$rebuild \ /p:Test=$test \ diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml index 5757915ed..a151fd811 100644 --- a/eng/common/core-templates/post-build/post-build.yml +++ b/eng/common/core-templates/post-build/post-build.yml @@ -193,9 +193,6 @@ stages: buildId: $(AzDOBuildId) artifactName: PackageArtifacts checkDownloadedFiles: true - itemPattern: | - ** - !**/Microsoft.SourceBuild.Intermediate.*.nupkg # This is necessary whenever we want to publish/restore to an AzDO private feed # Since sdk-task.ps1 tries to restore packages we need to do 
this authentication here diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index 2bcf974ee..a3540ba00 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -48,38 +48,3 @@ steps: eq(variables['_SignType'], 'real') ) )) - - # Workaround for ESRP CLI on Linux - https://github.com/dotnet/source-build/issues/4964 - - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: - - task: UseDotNet@2 - displayName: Install .NET 9.0 SDK for ESRP CLI Workaround - inputs: - packageType: sdk - version: 9.0.x - installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet - workingDirectory: ${{ parameters.microBuildOutputFolder }} - condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) - - - task: PowerShell@2 - displayName: Workaround for ESRP CLI on Linux - inputs: - targetType: 'inline' - script: | - Write-Host "Copying Linux Path" - $MBSIGN_APPFOLDER = '$(MBSIGN_APPFOLDER)' - $MBSIGN_APPFOLDER = ($MBSIGN_APPFOLDER -replace '/build', '') - - $versionRegex = '\d+\.\d+\.\d+' - $package = Get-ChildItem -Path $MBSIGN_APPFOLDER -Directory | - Where-Object { $_.Name -match $versionRegex } - - if ($package.Count -ne 1) { - Write-Host "There should be exactly one matching subfolder, but found $($package.Count)." - exit 1 - } - - $MBSIGN_APPFOLDER = $package[0].FullName + '/build' - $MBSIGN_APPFOLDER | Write-Host - $SignConfigPath = $MBSIGN_APPFOLDER + '/signconfig.xml' - Copy-Item -Path "$(MBSIGN_APPFOLDER)/signconfig.xml" -Destination $SignConfigPath -Force - condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux')) diff --git a/global.json b/global.json index 166fba594..9ca490cba 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25268.1", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25271.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 7cadbaf8443963420922c7f015178f9753651857 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 2 Jun 2025 05:01:36 +0000 Subject: [PATCH 19/43] Update dependencies from https://github.com/dotnet/arcade build 20250529.3 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25279.3 --- eng/Version.Details.xml | 4 +- eng/common/build.ps1 | 3 ++ eng/common/build.sh | 14 ++++-- .../job/publish-build-assets.yml | 49 ++++++++++++++++--- eng/common/tools.ps1 | 9 ++-- eng/common/tools.sh | 11 ++--- eng/common/vmr-sync.ps1 | 6 +-- eng/common/vmr-sync.sh | 6 ++- global.json | 4 +- 9 files changed, 75 insertions(+), 31 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 913c8ff45..1e376ff3b 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 12d3a9f5d6138e22270694574e73e4c58a815795 + 69b86684b43321e96f35fa3ea34c5315836ff858 https://github.com/dotnet/wpf diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 index ae2309e31..8cfee107e 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -21,6 +21,7 @@ Param( [switch] $publish, [switch] $clean, [switch][Alias('pb')]$productBuild, + [switch]$fromVMR, [switch][Alias('bl')]$binaryLog, [switch][Alias('nobl')]$excludeCIBinarylog, [switch] $ci, @@ -74,6 +75,7 @@ function Print-Usage() { Write-Host " -nativeToolsOnMachine Sets the native tools on 
machine environment variable (indicating that the script should use native tools on machine)" Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" Write-Host " -buildCheck Sets /check msbuild parameter" + Write-Host " -fromVMR Set when building from within the VMR" Write-Host "" Write-Host "Command line arguments not listed above are passed thru to msbuild." @@ -128,6 +130,7 @@ function Build { /p:Test=$test ` /p:Pack=$pack ` /p:DotNetBuild=$productBuild ` + /p:DotNetBuildFromVMR=$fromVMR ` /p:IntegrationTest=$integrationTest ` /p:PerformanceTest=$performanceTest ` /p:Sign=$sign ` diff --git a/eng/common/build.sh b/eng/common/build.sh index da906da20..9767bb411 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -43,6 +43,7 @@ usage() echo " --nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" echo " --buildCheck Sets /check msbuild parameter" + echo " --fromVMR Set when building from within the VMR" echo "" echo "Command line arguments not listed above are passed thru to msbuild." echo "Arguments can also be passed in with a single hyphen." @@ -64,6 +65,7 @@ restore=false build=false source_build=false product_build=false +from_vmr=false rebuild=false test=false integration_test=false @@ -89,7 +91,7 @@ verbosity='minimal' runtime_source_feed='' runtime_source_feed_key='' -properties='' +properties=() while [[ $# > 0 ]]; do opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" case "$opt" in @@ -142,6 +144,9 @@ while [[ $# > 0 ]]; do restore=true pack=true ;; + -fromvmr|-from-vmr) + from_vmr=true + ;; -test|-t) test=true ;; @@ -187,7 +192,7 @@ while [[ $# > 0 ]]; do shift ;; *) - properties="$properties $1" + properties+=("$1") ;; esac @@ -221,7 +226,7 @@ function Build { InitializeCustomToolset if [[ ! 
-z "$projects" ]]; then
-    properties="$properties /p:Projects=$projects"
+    properties+=("/p:Projects=$projects")
   fi

   local bl=""
@@ -243,6 +248,7 @@ function Build {
     /p:Build=$build \
     /p:DotNetBuild=$product_build \
     /p:DotNetBuildSourceOnly=$source_build \
+    /p:DotNetBuildFromVMR=$from_vmr \
     /p:Rebuild=$rebuild \
     /p:Test=$test \
     /p:Pack=$pack \
@@ -251,7 +257,7 @@ function Build {
     /p:Sign=$sign \
     /p:Publish=$publish \
     /p:RestoreStaticGraphEnableBinaryLogger=$binary_log \
-    $properties
+    ${properties[@]+"${properties[@]}"}

   ExitWithExitCode 0
 }
diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml
index 4f1dc42e0..d5303229c 100644
--- a/eng/common/core-templates/job/publish-build-assets.yml
+++ b/eng/common/core-templates/job/publish-build-assets.yml
@@ -32,6 +32,12 @@ parameters:
   # Optional: whether or not the build has assets it wants to publish to BAR
   isAssetlessBuild: false

+  # Optional, publishing version
+  publishingVersion: 3
+
+  # Optional: A minimatch pattern for the asset manifests to publish to BAR
+  assetManifestsPattern: '*/manifests/**/*.xml'
+
 jobs:
 - job: Asset_Registry_Publish

@@ -77,13 +83,31 @@ jobs:
       clean: true

   - ${{ if eq(parameters.isAssetlessBuild, 'false') }}:
-    - task: DownloadPipelineArtifact@2
-      displayName: Download Asset Manifests
-      inputs:
-        artifactName: AssetManifests
-        targetPath: '$(Build.StagingDirectory)/AssetManifests'
-      condition: ${{ parameters.condition }}
-      continueOnError: ${{ parameters.continueOnError }}
+    - ${{ if eq(parameters.publishingVersion, 3) }}:
+      - task: DownloadPipelineArtifact@2
+        displayName: Download Asset Manifests
+        inputs:
+          artifactName: AssetManifests
+          targetPath: '$(Build.StagingDirectory)/AssetManifests'
+        condition: ${{ parameters.condition }}
+        continueOnError: ${{ parameters.continueOnError }}
+    - ${{ if eq(parameters.publishingVersion, 4) }}:
+      - task: DownloadPipelineArtifact@2
+        displayName: Download V4 asset manifests
+        inputs:
+          itemPattern: '*/manifests/**/*.xml'
+          targetPath: '$(Build.StagingDirectory)/AllAssetManifests'
+        condition: ${{ parameters.condition }}
+        continueOnError: ${{ parameters.continueOnError }}
+      - task: CopyFiles@2
+        displayName: Copy V4 asset manifests to AssetManifests
+        inputs:
+          SourceFolder: '$(Build.StagingDirectory)/AllAssetManifests'
+          Contents: ${{ parameters.assetManifestsPattern }}
+          TargetFolder: '$(Build.StagingDirectory)/AssetManifests'
+          flattenFolders: true
+        condition: ${{ parameters.condition }}
+        continueOnError: ${{ parameters.continueOnError }}

   - task: NuGetAuthenticate@1

@@ -120,6 +144,17 @@ jobs:
           Copy-Item -Path $symbolExclusionfile -Destination "$(Build.StagingDirectory)/ReleaseConfigs"
         }

+  - ${{ if eq(parameters.publishingVersion, 4) }}:
+    - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml
+      parameters:
+        is1ESPipeline: ${{ parameters.is1ESPipeline }}
+        args:
+          targetPath: '$(Build.ArtifactStagingDirectory)/MergedManifest.xml'
+          artifactName: AssetManifests
+          displayName: 'Publish Merged Manifest'
+          retryCountOnTaskFailure: 10 # for any logs being locked
+          sbomEnabled: false # we don't need SBOM for logs
+
   - template: /eng/common/core-templates/steps/publish-build-artifacts.yml
     parameters:
       is1ESPipeline: ${{ parameters.is1ESPipeline }}
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index 5f40a3f82..c9e39595b 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -65,8 +65,8 @@ $ErrorActionPreference = 'Stop'
 # Base-64 encoded SAS token that has permission to storage
container described by $runtimeSourceFeed [string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null } -# True if the build is a product build -[bool]$productBuild = if (Test-Path variable:productBuild) { $productBuild } else { $false } +# True when the build is running within the VMR. +[bool]$fromVMR = if (Test-Path variable:fromVMR) { $fromVMR } else { $false } function Create-Directory ([string[]] $path) { New-Item -Path $path -Force -ItemType 'Directory' | Out-Null @@ -644,7 +644,6 @@ function GetNuGetPackageCachePath() { $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\' } else { $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\' - $env:RESTORENOHTTPCACHE = $true } } @@ -850,8 +849,8 @@ function MSBuild-Core() { } # When running on Azure Pipelines, override the returned exit code to avoid double logging. - # Skip this when the build is a child of the VMR orchestrator build. - if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$productBuild) { + # Skip this when the build is a child of the VMR build. + if ($ci -and $env:SYSTEM_TEAMPROJECT -ne $null -and !$fromVMR) { Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed." # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error diff --git a/eng/common/tools.sh b/eng/common/tools.sh index 25f5932ee..a7e849315 100644 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -72,8 +72,8 @@ fi runtime_source_feed=${runtime_source_feed:-''} runtime_source_feed_key=${runtime_source_feed_key:-''} -# True if the build is a product build -product_build=${product_build:-false} +# True when the build is running within the VMR. +from_vmr=${from_vmr:-false} # Resolve any symlinks in the given path. function ResolvePath { @@ -345,14 +345,12 @@ function InitializeBuildTool { _InitializeBuildToolCommand="msbuild" } -# Set RestoreNoHttpCache as a workaround for https://github.com/NuGet/Home/issues/3116 function GetNuGetPackageCachePath { if [[ -z ${NUGET_PACKAGES:-} ]]; then if [[ "$use_global_nuget_cache" == true ]]; then export NUGET_PACKAGES="$HOME/.nuget/packages/" else export NUGET_PACKAGES="$repo_root/.packages/" - export RESTORENOHTTPCACHE=true fi fi @@ -506,8 +504,8 @@ function MSBuild-Core { echo "Build failed with exit code $exit_code. Check errors above." # When running on Azure Pipelines, override the returned exit code to avoid double logging. - # Skip this when the build is a child of the VMR orchestrator build. - if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$product_build" != true ]]; then + # Skip this when the build is a child of the VMR build. + if [[ "$ci" == true && -n ${SYSTEM_TEAMPROJECT:-} && "$from_vmr" != true ]]; then Write-PipelineSetResult -result "Failed" -message "msbuild execution failed." # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error @@ -530,6 +528,7 @@ function GetDarc { fi "$eng_root/common/darc-init.sh" --toolpath "$darc_path" $version + darc_tool="$darc_path/darc" } # Returns a full path to an Arcade SDK task project file. 
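[Editorial aside - not part of the patch.] This Arcade update moves the vmr-sync scripts below off
"dotnet tool restore" and onto the GetDarc/Get-Darc helpers, which install darc via darc-init.sh and
expose it through $darc_tool. A minimal sketch of the intended flow on the shell side, assuming it is
run from the repository root and that darc-init.sh succeeds:

  source ./eng/common/tools.sh    # defines GetDarc, repo paths and the logging helpers
  GetDarc                         # installs the darc CLI locally and sets $darc_tool
  "$darc_tool" vmr forwardflow --tmp "$HOME/repos/tmp" "$HOME/repos/dotnet"

The folder arguments are illustrative; the real callers (vmr-sync.sh and vmr-sync.ps1 in the hunks
that follow) pass whatever --tmp and --vmr locations were given on their command line.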
diff --git a/eng/common/vmr-sync.ps1 b/eng/common/vmr-sync.ps1 index 8c3c91ce8..97302f320 100644 --- a/eng/common/vmr-sync.ps1 +++ b/eng/common/vmr-sync.ps1 @@ -103,14 +103,14 @@ Set-StrictMode -Version Latest Highlight 'Installing .NET, preparing the tooling..' . .\eng\common\tools.ps1 $dotnetRoot = InitializeDotNetCli -install:$true +$darc = Get-Darc $dotnet = "$dotnetRoot\dotnet.exe" -& "$dotnet" tool restore Highlight "Starting the synchronization of VMR.." # Synchronize the VMR $darcArgs = ( - "darc", "vmr", "forwardflow", + "vmr", "forwardflow", "--tmp", $tmpDir, "--$verbosity", $vmrDir @@ -124,7 +124,7 @@ if ($azdevPat) { $darcArgs += ("--azdev-pat", $azdevPat) } -& "$dotnet" $darcArgs +& "$darc" $darcArgs if ($LASTEXITCODE -eq 0) { Highlight "Synchronization succeeded" diff --git a/eng/common/vmr-sync.sh b/eng/common/vmr-sync.sh index 86d77ccf5..44239e331 100644 --- a/eng/common/vmr-sync.sh +++ b/eng/common/vmr-sync.sh @@ -164,9 +164,9 @@ set -e highlight 'Installing .NET, preparing the tooling..' source "./eng/common/tools.sh" InitializeDotNetCli true +GetDarc dotnetDir=$( cd ./.dotnet/; pwd -P ) dotnet=$dotnetDir/dotnet -"$dotnet" tool restore highlight "Starting the synchronization of VMR.." set +e @@ -186,7 +186,9 @@ fi # Synchronize the VMR -"$dotnet" darc vmr forwardflow \ +export DOTNET_ROOT="$dotnetDir" + +"$darc_tool" vmr forwardflow \ --tmp "$tmp_dir" \ $azdev_pat \ --$verbosity \ diff --git a/global.json b/global.json index 9ca490cba..9ef4fe0ec 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.3.25201.16", + "dotnet": "10.0.100-preview.5.25265.106", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25271.2", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25279.3", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 7c59c0743cb1ca385fa812eaf9e88728315074d8 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 9 Jun 2025 05:02:14 +0000 Subject: [PATCH 20/43] Update dependencies from https://github.com/dotnet/arcade build 20250605.3 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25305.3 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/job/onelocbuild.yml | 5 ++--- global.json | 2 +- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 1e376ff3b..b8c5ec466 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 69b86684b43321e96f35fa3ea34c5315836ff858 + aba421eb78276b26d1a24df7772474806b27aa13 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml index 00feec8eb..8034815f4 100644 --- a/eng/common/core-templates/job/onelocbuild.yml +++ b/eng/common/core-templates/job/onelocbuild.yml @@ -86,8 +86,7 @@ jobs: isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }} ${{ if eq(parameters.CreatePr, true) }}: isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }} - ${{ if eq(parameters.RepoType, 'gitHub') }}: - isShouldReusePrSelected: ${{ parameters.ReusePr }} + isShouldReusePrSelected: ${{ parameters.ReusePr }} packageSourceAuth: patAuth patVariable: ${{ parameters.CeapexPat }} ${{ if eq(parameters.RepoType, 'gitHub') }}: @@ -118,4 +117,4 @@ jobs: pathToPublish: 
'$(Build.SourcesDirectory)/eng/Localize/' publishLocation: Container artifactName: Loc - condition: ${{ parameters.condition }} \ No newline at end of file + condition: ${{ parameters.condition }} diff --git a/global.json b/global.json index 9ef4fe0ec..8774bebcd 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25279.3", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25305.3", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From fddb63a321d37cebeb462145e2e2b5332fba9692 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 16 Jun 2025 05:01:41 +0000 Subject: [PATCH 21/43] Update dependencies from https://github.com/dotnet/arcade build 20250613.2 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25313.2 --- eng/Version.Details.xml | 4 +-- .../steps/source-index-stage1-publish.yml | 6 ++--- eng/common/cross/build-rootfs.sh | 4 +-- eng/common/dotnet.cmd | 7 +++++ eng/common/dotnet.ps1 | 11 ++++++++ eng/common/dotnet.sh | 26 +++++++++++++++++++ eng/common/templates/vmr-build-pr.yml | 11 +------- eng/common/tools.ps1 | 23 +++------------- eng/common/tools.sh | 22 +++------------- global.json | 4 +-- 10 files changed, 62 insertions(+), 56 deletions(-) create mode 100644 eng/common/dotnet.cmd create mode 100644 eng/common/dotnet.ps1 create mode 100644 eng/common/dotnet.sh diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index b8c5ec466..1cc8aab47 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - aba421eb78276b26d1a24df7772474806b27aa13 + 7a09c1144283309440a7966c664199944198c920 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml index 99c2326fc..c2917c1ef 100644 --- a/eng/common/core-templates/steps/source-index-stage1-publish.yml +++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml @@ -1,15 +1,15 @@ parameters: sourceIndexUploadPackageVersion: 2.0.0-20250425.2 - sourceIndexProcessBinlogPackageVersion: 1.0.1-20250425.2 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20250515.1 sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json binlogPath: artifacts/log/Debug/Build.binlog steps: - task: UseDotNet@2 - displayName: "Source Index: Use .NET 8 SDK" + displayName: "Source Index: Use .NET 9 SDK" inputs: packageType: sdk - version: 8.0.x + version: 9.0.x installationPath: $(Agent.TempDirectory)/dotnet workingDirectory: $(Agent.TempDirectory) diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index d6f005b5d..8abfb71f7 100644 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -295,8 +295,8 @@ while :; do ;; noble) # Ubuntu 24.04 __CodeName=noble - if [[ -n "$__LLDB_Package" ]]; then - __LLDB_Package="liblldb-18-dev" + if [[ -z "$__LLDB_Package" ]]; then + __LLDB_Package="liblldb-19-dev" fi ;; stretch) # Debian 9 diff --git a/eng/common/dotnet.cmd b/eng/common/dotnet.cmd new file mode 100644 index 000000000..527fa4bb3 --- /dev/null +++ b/eng/common/dotnet.cmd @@ -0,0 +1,7 @@ +@echo off + +:: This script is used to install the .NET SDK. +:: It will also invoke the SDK with any provided arguments. 
+ +powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet.ps1""" %*" +exit /b %ErrorLevel% diff --git a/eng/common/dotnet.ps1 b/eng/common/dotnet.ps1 new file mode 100644 index 000000000..45e5676c9 --- /dev/null +++ b/eng/common/dotnet.ps1 @@ -0,0 +1,11 @@ +# This script is used to install the .NET SDK. +# It will also invoke the SDK with any provided arguments. + +. $PSScriptRoot\tools.ps1 +$dotnetRoot = InitializeDotNetCli -install:$true + +# Invoke acquired SDK with args if they are provided +if ($args.count -gt 0) { + $env:DOTNET_NOLOGO=1 + & "$dotnetRoot\dotnet.exe" $args +} diff --git a/eng/common/dotnet.sh b/eng/common/dotnet.sh new file mode 100644 index 000000000..2ef682356 --- /dev/null +++ b/eng/common/dotnet.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +# This script is used to install the .NET SDK. +# It will also invoke the SDK with any provided arguments. + +source="${BASH_SOURCE[0]}" +# resolve $SOURCE until the file is no longer a symlink +while [[ -h $source ]]; do + scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + source="$(readlink "$source")" + + # if $source was a relative symlink, we need to resolve it relative to the path where the + # symlink file was located + [[ $source != /* ]] && source="$scriptroot/$source" +done +scriptroot="$( cd -P "$( dirname "$source" )" && pwd )" + +source $scriptroot/tools.sh +InitializeDotNetCli true # install + +# Invoke acquired SDK with args if they are provided +if [[ $# > 0 ]]; then + __dotnetDir=${_InitializeDotNetCli} + dotnetPath=${__dotnetDir}/dotnet + ${dotnetPath} "$@" +fi diff --git a/eng/common/templates/vmr-build-pr.yml b/eng/common/templates/vmr-build-pr.yml index 670cf32c3..c1b6fec45 100644 --- a/eng/common/templates/vmr-build-pr.yml +++ b/eng/common/templates/vmr-build-pr.yml @@ -1,14 +1,5 @@ trigger: none -pr: - branches: - include: - - main - - release/* - paths: - exclude: - - documentation/* - - README.md - - CODEOWNERS +pr: none variables: - template: /eng/common/templates/variables/pool-providers.yml@self diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index c9e39595b..40f0aa861 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -765,28 +765,13 @@ function MSBuild() { $toolsetBuildProject = InitializeToolset $basePath = Split-Path -parent $toolsetBuildProject - $possiblePaths = @( - # new scripts need to work with old packages, so we need to look for the old names/versions - (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')), - (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')), - - # This list doesn't need to be updated anymore and can eventually be removed. - (Join-Path $basePath (Join-Path net9.0 'Microsoft.DotNet.ArcadeLogging.dll')), - (Join-Path $basePath (Join-Path net9.0 'Microsoft.DotNet.Arcade.Sdk.dll')), - (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.ArcadeLogging.dll')), - (Join-Path $basePath (Join-Path net8.0 'Microsoft.DotNet.Arcade.Sdk.dll')) - ) - $selectedPath = $null - foreach ($path in $possiblePaths) { - if (Test-Path $path -PathType Leaf) { - $selectedPath = $path - break - } - } + $selectedPath = Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll') + if (-not $selectedPath) { - Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.' 
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unable to find arcade sdk logger assembly: $selectedPath" ExitWithExitCode 1 } + $args += "/logger:$selectedPath" } diff --git a/eng/common/tools.sh b/eng/common/tools.sh index a7e849315..3def02a63 100644 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -447,27 +447,13 @@ function MSBuild { fi local toolset_dir="${_InitializeToolset%/*}" - # new scripts need to work with old packages, so we need to look for the old names/versions - local selectedPath= - local possiblePaths=() - possiblePaths+=( "$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll" ) - possiblePaths+=( "$toolset_dir/net/Microsoft.DotNet.Arcade.Sdk.dll" ) - - # This list doesn't need to be updated anymore and can eventually be removed. - possiblePaths+=( "$toolset_dir/net9.0/Microsoft.DotNet.ArcadeLogging.dll" ) - possiblePaths+=( "$toolset_dir/net9.0/Microsoft.DotNet.Arcade.Sdk.dll" ) - possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.ArcadeLogging.dll" ) - possiblePaths+=( "$toolset_dir/net8.0/Microsoft.DotNet.Arcade.Sdk.dll" ) - for path in "${possiblePaths[@]}"; do - if [[ -f $path ]]; then - selectedPath=$path - break - fi - done + local selectedPath="$toolset_dir/net/Microsoft.DotNet.ArcadeLogging.dll" + if [[ -z "$selectedPath" ]]; then - Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly." + Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly: $selectedPath" ExitWithExitCode 1 fi + args+=( "-logger:$selectedPath" ) fi diff --git a/global.json b/global.json index 8774bebcd..2443b3ec9 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.5.25265.106", + "dotnet": "10.0.100-preview.6.25302.104", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25305.3", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25313.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 769dd9e4cf07dcc99f91ac8df138d1456e7a0f7e Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 23 Jun 2025 05:01:52 +0000 Subject: [PATCH 22/43] Update dependencies from https://github.com/dotnet/arcade build 20250620.5 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25320.5 --- eng/Version.Details.xml | 4 ++-- .../steps/install-microbuild.yml | 4 ++++ .../core-templates/steps/source-build.yml | 6 ------ eng/common/templates/vmr-build-pr.yml | 18 ++++++++++++++++++ global.json | 4 ++-- 5 files changed, 26 insertions(+), 10 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 1cc8aab47..6635a262f 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 7a09c1144283309440a7966c664199944198c920 + 066f0d1e5e1a59ce611e82f4a1146239d6253bd7 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index a3540ba00..f3064a783 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -30,6 +30,10 @@ steps: ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}: azureSubscription: 'MicroBuild Signing Task (DevDiv)' useEsrpCli: true + ${{ elseif 
eq(variables['System.TeamProject'], 'DevDiv') }}: + ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea + ${{ else }}: + ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca env: TeamName: $(_TeamName) MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index 0dde553c3..acf16ed34 100644 --- a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -38,11 +38,6 @@ steps: targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}' fi - baseRidArgs= - if [ '${{ parameters.platform.baseRID }}' != '' ]; then - baseRidArgs='/p:BaseRid=${{ parameters.platform.baseRID }}' - fi - portableBuildArgs= if [ '${{ parameters.platform.portableBuild }}' != '' ]; then portableBuildArgs='/p:PortableBuild=${{ parameters.platform.portableBuild }}' @@ -55,7 +50,6 @@ steps: ${{ parameters.platform.buildArguments }} \ $internalRuntimeDownloadArgs \ $targetRidArgs \ - $baseRidArgs \ $portableBuildArgs \ displayName: Build diff --git a/eng/common/templates/vmr-build-pr.yml b/eng/common/templates/vmr-build-pr.yml index c1b6fec45..ce3c29a62 100644 --- a/eng/common/templates/vmr-build-pr.yml +++ b/eng/common/templates/vmr-build-pr.yml @@ -1,3 +1,21 @@ +# This pipeline is used for running the VMR verification of the PR changes in repo-level PRs. +# +# It will run a full set of verification jobs defined in: +# https://github.com/dotnet/dotnet/blob/10060d128e3f470e77265f8490f5e4f72dae738e/eng/pipelines/templates/stages/vmr-build.yml#L27-L38 +# +# For repos that do not need to run the full set, you would do the following: +# +# 1. Copy this YML file to a repo-specific location, i.e. outside of eng/common. +# +# 2. 
Add `verifications` parameter to VMR template reference +# +# Examples: +# - For source-build stage 1 verification, add the following: +# verifications: [ "source-build-stage1" ] +# +# - For Windows only verifications, add the following: +# verifications: [ "unified-build-windows-x64", "unified-build-windows-x86" ] + trigger: none pr: none diff --git a/global.json b/global.json index 2443b3ec9..af0aa9183 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.6.25302.104", + "dotnet": "10.0.100-preview.6.25315.102", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25313.2", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25320.5", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 5237cd44bba5d87a78951f95fb705caaabb6e769 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 30 Jun 2025 05:02:02 +0000 Subject: [PATCH 23/43] Update dependencies from https://github.com/dotnet/arcade build 20250626.3 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25326.3 --- eng/Version.Details.xml | 4 ++-- eng/common/internal/NuGet.config | 3 +++ global.json | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 6635a262f..98cea7306 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 066f0d1e5e1a59ce611e82f4a1146239d6253bd7 + 0e335649fe2d2f98ea51e55cc1a0899af3617eba https://github.com/dotnet/wpf diff --git a/eng/common/internal/NuGet.config b/eng/common/internal/NuGet.config index 19d3d311b..f70261ed6 100644 --- a/eng/common/internal/NuGet.config +++ b/eng/common/internal/NuGet.config @@ -4,4 +4,7 @@ + + + diff --git a/global.json b/global.json index af0aa9183..f568d73e2 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25320.5", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25326.3", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From e22aefcf7bb27a9a04ec0e64ca8dadf93da5a637 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 7 Jul 2025 05:02:08 +0000 Subject: [PATCH 24/43] Update dependencies from https://github.com/dotnet/arcade build 20250703.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25353.1 --- eng/Version.Details.xml | 4 ++-- global.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 98cea7306..18522feed 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 0e335649fe2d2f98ea51e55cc1a0899af3617eba + aea743edf7c9345cfdbdf9593756973baadc6b37 https://github.com/dotnet/wpf diff --git a/global.json b/global.json index f568d73e2..b69a28742 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25326.3", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25353.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 02f41d5e119d7ce716d41125afec6ff6afbf3cff Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 14 Jul 2025 
05:02:09 +0000 Subject: [PATCH 25/43] Update dependencies from https://github.com/dotnet/arcade build 20250708.3 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25358.3 --- eng/Version.Details.xml | 4 ++-- global.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 18522feed..a48e94654 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - aea743edf7c9345cfdbdf9593756973baadc6b37 + 4e526204e83e615efe8eb5743be7fbccfa4e492a https://github.com/dotnet/wpf diff --git a/global.json b/global.json index b69a28742..8f81104c3 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25353.1", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25358.3", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From d9776ee8413b06a376bf7ed639ca9acecc673552 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 21 Jul 2025 05:02:48 +0000 Subject: [PATCH 26/43] Update dependencies from https://github.com/dotnet/arcade build 20250717.5 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25367.5 --- eng/Version.Details.xml | 4 ++-- eng/common/tools.ps1 | 2 +- global.json | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index a48e94654..93a83cd57 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 4e526204e83e615efe8eb5743be7fbccfa4e492a + d777c20040bdc2e52b372fa98dcb84141ed692d3 https://github.com/dotnet/wpf diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 40f0aa861..996a5f9c8 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -414,7 +414,7 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = # Locate Visual Studio installation or download x-copy msbuild. 
$vsInfo = LocateVisualStudio $vsRequirements - if ($vsInfo -ne $null) { + if ($vsInfo -ne $null -and $env:ForceUseXCopyMSBuild -eq $null) { # Ensure vsInstallDir has a trailing slash $vsInstallDir = Join-Path $vsInfo.installationPath "\" $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0] diff --git a/global.json b/global.json index 8f81104c3..4a6ffa34b 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.6.25315.102", + "dotnet": "10.0.100-preview.7.25322.101", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25358.3", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25367.5", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 835442666256d98a879a182c07949f42b27ddbc0 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 28 Jul 2025 05:02:31 +0000 Subject: [PATCH 27/43] Update dependencies from https://github.com/dotnet/arcade build 20250724.4 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25374.4 --- eng/Version.Details.xml | 4 +- eng/common/core-templates/job/job.yml | 2 + eng/common/core-templates/jobs/jobs.yml | 3 -- .../core-templates/jobs/source-build.yml | 16 -------- .../steps/install-microbuild.yml | 40 +++++++++++++++---- global.json | 2 +- 6 files changed, 38 insertions(+), 29 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 93a83cd57..4c9672faf 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - d777c20040bdc2e52b372fa98dcb84141ed692d3 + e2fed65f9c524d12c64876194ae4ce177b935bb3 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml index 6badecba7..d90132515 100644 --- a/eng/common/core-templates/job/job.yml +++ b/eng/common/core-templates/job/job.yml @@ -20,6 +20,7 @@ parameters: artifacts: '' enableMicrobuild: false enableMicrobuildForMacAndLinux: false + microbuildUseESRP: true enablePublishBuildArtifacts: false enablePublishBuildAssets: false enablePublishTestResults: false @@ -128,6 +129,7 @@ jobs: parameters: enableMicrobuild: ${{ parameters.enableMicrobuild }} enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} + microbuildUseESRP: ${{ parameters.microbuildUseESRP }} continueOnError: ${{ parameters.continueOnError }} - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}: diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml index bf35b78fa..2f992b2c6 100644 --- a/eng/common/core-templates/jobs/jobs.yml +++ b/eng/common/core-templates/jobs/jobs.yml @@ -83,7 +83,6 @@ jobs: - template: /eng/common/core-templates/jobs/source-build.yml parameters: is1ESPipeline: ${{ parameters.is1ESPipeline }} - allCompletedJobId: Source_Build_Complete ${{ each parameter in parameters.sourceBuildParameters }}: ${{ parameter.key }}: ${{ parameter.value }} @@ -108,8 +107,6 @@ jobs: - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}: - ${{ each job in parameters.jobs }}: - ${{ job.job }} - - ${{ if eq(parameters.enableSourceBuild, true) }}: - - Source_Build_Complete runAsPublic: ${{ parameters.runAsPublic }} publishAssetsImmediately: ${{ or(parameters.publishAssetsImmediately, parameters.isAssetlessBuild) }} diff --git 
a/eng/common/core-templates/jobs/source-build.yml b/eng/common/core-templates/jobs/source-build.yml index df24c948b..d92860cba 100644 --- a/eng/common/core-templates/jobs/source-build.yml +++ b/eng/common/core-templates/jobs/source-build.yml @@ -2,12 +2,6 @@ parameters: # This template adds arcade-powered source-build to CI. A job is created for each platform, as # well as an optional server job that completes when all platform jobs complete. - # The name of the "join" job for all source-build platforms. If set to empty string, the job is - # not included. Existing repo pipelines can use this job depend on all source-build jobs - # completing without maintaining a separate list of every single job ID: just depend on this one - # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'. - allCompletedJobId: '' - # See /eng/common/core-templates/job/source-build.yml jobNamePrefix: 'Source_Build' @@ -31,16 +25,6 @@ parameters: jobs: -- ${{ if ne(parameters.allCompletedJobId, '') }}: - - job: ${{ parameters.allCompletedJobId }} - displayName: Source-Build Complete - pool: server - dependsOn: - - ${{ each platform in parameters.platforms }}: - - ${{ parameters.jobNamePrefix }}_${{ platform.name }} - - ${{ if eq(length(parameters.platforms), 0) }}: - - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }} - - ${{ each platform in parameters.platforms }}: - template: /eng/common/core-templates/job/source-build.yml parameters: diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index f3064a783..da30e67bc 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -4,8 +4,16 @@ parameters: # Enable install tasks for MicroBuild on Mac and Linux # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' enableMicrobuildForMacAndLinux: false + # Determines whether the ESRP service connection information should be passed to the signing plugin. + # This overlaps with _SignType to some degree. We only need the service connection for real signing. + # It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place. + # Doing so will cause the service connection to be authorized for the pipeline, which isn't allowed and won't work for non-prod. + # Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real sign scenarios. The + # variable is not available in template expression. _SignType has a very large proliferation across .NET, so replacing it is tough. + microbuildUseESRP: true # Location of the MicroBuild output folder microBuildOutputFolder: '$(Build.SourcesDirectory)' + continueOnError: false steps: @@ -21,19 +29,37 @@ steps: workingDirectory: ${{ parameters.microBuildOutputFolder }} condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) + - script: | + REM Check if ESRP is disabled while SignType is real + if /I "${{ parameters.microbuildUseESRP }}"=="false" if /I "$(_SignType)"=="real" ( + echo Error: ESRP must be enabled when SignType is real. 
+ exit /b 1 + ) + displayName: 'Validate ESRP usage (Windows)' + condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT')) + - script: | + # Check if ESRP is disabled while SignType is real + if [ "${{ parameters.microbuildUseESRP }}" = "false" ] && [ "$(_SignType)" = "real" ]; then + echo "Error: ESRP must be enabled when SignType is real." + exit 1 + fi + displayName: 'Validate ESRP usage (Non-Windows)' + condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) + - task: MicroBuildSigningPlugin@4 displayName: Install MicroBuild plugin inputs: signType: $(_SignType) zipSources: false feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}: - azureSubscription: 'MicroBuild Signing Task (DevDiv)' - useEsrpCli: true - ${{ elseif eq(variables['System.TeamProject'], 'DevDiv') }}: - ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea - ${{ else }}: - ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca + ${{ if eq(parameters.microbuildUseESRP, true) }}: + ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: + azureSubscription: 'MicroBuild Signing Task (DevDiv)' + useEsrpCli: true + ${{ elseif eq(variables['System.TeamProject'], 'DevDiv') }}: + ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea + ${{ else }}: + ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca env: TeamName: $(_TeamName) MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} diff --git a/global.json b/global.json index 4a6ffa34b..cfcf78381 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25367.5", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25374.4", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From a22640e0cc19d2da0b4cf450c32b820145721028 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 4 Aug 2025 05:02:08 +0000 Subject: [PATCH 28/43] Update dependencies from https://github.com/dotnet/arcade build 20250801.2 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25401.2 --- eng/Version.Details.xml | 4 ++-- global.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 4c9672faf..0c30c620a 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - e2fed65f9c524d12c64876194ae4ce177b935bb3 + 40693ae2ee51e447f6ca96d07bc1ba779dcb9b9c https://github.com/dotnet/wpf diff --git a/global.json b/global.json index cfcf78381..476998b8f 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25374.4", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25401.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From d154d2736d2f73dadce162132bd28f2742513d60 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 11 Aug 2025 05:01:54 +0000 Subject: [PATCH 29/43] Update dependencies from https://github.com/dotnet/arcade build 20250808.3 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25408.3 --- eng/Version.Details.xml | 4 +-- eng/common/core-templates/job/onelocbuild.yml 
| 30 +++++++++---------- eng/common/tools.ps1 | 13 ++++++++ eng/common/tools.sh | 24 +++++++++++++++ global.json | 4 +-- 5 files changed, 55 insertions(+), 20 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 0c30c620a..9b1e2356c 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 40693ae2ee51e447f6ca96d07bc1ba779dcb9b9c + d27184f7cb92b4abb4b20e91ecb5c43bc43de65b https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml index 8034815f4..8bf7d2335 100644 --- a/eng/common/core-templates/job/onelocbuild.yml +++ b/eng/common/core-templates/job/onelocbuild.yml @@ -4,7 +4,7 @@ parameters: # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool pool: '' - + CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex GithubPat: $(BotAccount-dotnet-bot-repo-PAT) @@ -27,7 +27,7 @@ parameters: is1ESPipeline: '' jobs: - job: OneLocBuild${{ parameters.JobNameSuffix }} - + dependsOn: ${{ parameters.dependsOn }} displayName: OneLocBuild${{ parameters.JobNameSuffix }} @@ -99,22 +99,20 @@ jobs: mirrorBranch: ${{ parameters.MirrorBranch }} condition: ${{ parameters.condition }} - - template: /eng/common/core-templates/steps/publish-build-artifacts.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - args: - displayName: Publish Localization Files - pathToPublish: '$(Build.ArtifactStagingDirectory)/loc' - publishLocation: Container - artifactName: Loc - condition: ${{ parameters.condition }} + # Copy the locProject.json to the root of the Loc directory, then publish a pipeline artifact + - task: CopyFiles@2 + displayName: Copy LocProject.json + inputs: + SourceFolder: '$(Build.SourcesDirectory)/eng/Localize/' + Contents: 'LocProject.json' + TargetFolder: '$(Build.ArtifactStagingDirectory)/loc' + condition: ${{ parameters.condition }} - - template: /eng/common/core-templates/steps/publish-build-artifacts.yml + - template: /eng/common/core-templates/steps/publish-pipeline-artifacts.yml parameters: is1ESPipeline: ${{ parameters.is1ESPipeline }} args: - displayName: Publish LocProject.json - pathToPublish: '$(Build.SourcesDirectory)/eng/Localize/' - publishLocation: Container - artifactName: Loc + targetPath: '$(Build.ArtifactStagingDirectory)/loc' + artifactName: 'Loc' + displayName: 'Publish Localization Files' condition: ${{ parameters.condition }} diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 996a5f9c8..d4cfd9ccd 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -257,7 +257,20 @@ function Retry($downloadBlock, $maxRetries = 5) { function GetDotNetInstallScript([string] $dotnetRoot) { $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1' + $shouldDownload = $false + if (!(Test-Path $installScript)) { + $shouldDownload = $true + } else { + # Check if the script is older than 30 days + $fileAge = (Get-Date) - (Get-Item $installScript).LastWriteTime + if ($fileAge.Days -gt 30) { + Write-Host "Existing install script is too old, re-downloading..." 
+ $shouldDownload = $true + } + } + + if ($shouldDownload) { Create-Directory $dotnetRoot $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1" diff --git a/eng/common/tools.sh b/eng/common/tools.sh index 3def02a63..c1841c9df 100644 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -300,8 +300,29 @@ function GetDotNetInstallScript { local root=$1 local install_script="$root/dotnet-install.sh" local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh" + local timestamp_file="$root/.dotnet-install.timestamp" + local should_download=false if [[ ! -a "$install_script" ]]; then + should_download=true + elif [[ -f "$timestamp_file" ]]; then + # Check if the script is older than 30 days using timestamp file + local download_time=$(cat "$timestamp_file" 2>/dev/null || echo "0") + local current_time=$(date +%s) + local age_seconds=$((current_time - download_time)) + + # 30 days = 30 * 24 * 60 * 60 = 2592000 seconds + if [[ $age_seconds -gt 2592000 ]]; then + echo "Existing install script is too old, re-downloading..." + should_download=true + fi + else + # No timestamp file exists, assume script is old and re-download + echo "No timestamp found for existing install script, re-downloading..." + should_download=true + fi + + if [[ "$should_download" == true ]]; then mkdir -p "$root" echo "Downloading '$install_script_url'" @@ -328,6 +349,9 @@ function GetDotNetInstallScript { ExitWithExitCode $exit_code } fi + + # Create timestamp file to track download time in seconds from epoch + date +%s > "$timestamp_file" fi # return value _GetDotNetInstallScript="$install_script" diff --git a/global.json b/global.json index 476998b8f..42387c172 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.7.25322.101", + "dotnet": "10.0.100-preview.7.25372.107", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25401.2", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25408.3", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 7f1a32786f518b2ea08b2ab3ff4409c7a94c2b7f Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 18 Aug 2025 05:02:30 +0000 Subject: [PATCH 30/43] Update dependencies from https://github.com/dotnet/arcade build 20250815.5 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25415.5 --- eng/Version.Details.xml | 4 +- eng/common/SetupNugetSources.ps1 | 4 +- eng/common/SetupNugetSources.sh | 4 +- eng/common/core-templates/job/job.yml | 6 +-- eng/common/core-templates/job/onelocbuild.yml | 6 +-- .../job/publish-build-assets.yml | 10 ++-- .../core-templates/jobs/codeql-build.yml | 2 +- eng/common/core-templates/jobs/jobs.yml | 2 + .../core-templates/post-build/post-build.yml | 8 +-- .../post-build/setup-maestro-vars.yml | 2 +- .../steps/enable-internal-sources.yml | 12 ++--- .../core-templates/steps/generate-sbom.yml | 2 +- .../steps/install-microbuild.yml | 47 +++++++++++------- .../core-templates/steps/publish-logs.yml | 14 +++--- .../steps/source-index-stage1-publish.yml | 2 +- eng/common/generate-locproject.ps1 | 49 ++++++++++++++++--- eng/common/template-guidance.md | 2 +- eng/common/templates-official/job/job.yml | 2 +- 
.../variables/sdl-variables.yml | 2 +- eng/common/templates/job/job.yml | 4 +- eng/common/tools.ps1 | 6 ++- global.json | 2 +- 22 files changed, 123 insertions(+), 69 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 9b1e2356c..7f7ecfaff 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - d27184f7cb92b4abb4b20e91ecb5c43bc43de65b + e58b086a85a1df5762c8980308f7e4e330edd0aa https://github.com/dotnet/wpf diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index 5db4ad71e..792b60b49 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -10,8 +10,8 @@ # displayName: Setup Private Feeds Credentials # condition: eq(variables['Agent.OS'], 'Windows_NT') # inputs: -# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 -# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token +# filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1 +# arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token # env: # Token: $(dn-bot-dnceng-artifact-feeds-rw) # diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index 4604b61b0..facb415ca 100644 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -11,8 +11,8 @@ # - task: Bash@3 # displayName: Setup Internal Feeds # inputs: -# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh -# arguments: $(Build.SourcesDirectory)/NuGet.config +# filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.sh +# arguments: $(System.DefaultWorkingDirectory)/NuGet.config # condition: ne(variables['Agent.OS'], 'Windows_NT') # - task: NuGetAuthenticate@1 # diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml index d90132515..5ce518406 100644 --- a/eng/common/core-templates/job/job.yml +++ b/eng/common/core-templates/job/job.yml @@ -163,7 +163,7 @@ jobs: inputs: testResultsFormat: 'xUnit' testResultsFiles: '*.xml' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)' testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit mergeTestResults: ${{ parameters.mergeTestResults }} continueOnError: true @@ -174,7 +174,7 @@ jobs: inputs: testResultsFormat: 'VSTest' testResultsFiles: '*.trx' - searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)' + searchFolder: '$(System.DefaultWorkingDirectory)/artifacts/TestResults/$(_BuildConfig)' testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx mergeTestResults: ${{ parameters.mergeTestResults }} continueOnError: true @@ -218,7 +218,7 @@ jobs: - task: CopyFiles@2 displayName: Gather buildconfiguration for build retry inputs: - SourceFolder: '$(Build.SourcesDirectory)/eng/common/BuildConfiguration' + SourceFolder: '$(System.DefaultWorkingDirectory)/eng/common/BuildConfiguration' Contents: '**' TargetFolder: '$(Build.ArtifactStagingDirectory)/eng/common/BuildConfiguration' continueOnError: true diff --git a/eng/common/core-templates/job/onelocbuild.yml b/eng/common/core-templates/job/onelocbuild.yml index 8bf7d2335..c5788829a 100644 --- a/eng/common/core-templates/job/onelocbuild.yml +++ b/eng/common/core-templates/job/onelocbuild.yml @@ -8,7 +8,7 @@ 
parameters: CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex GithubPat: $(BotAccount-dotnet-bot-repo-PAT) - SourcesDirectory: $(Build.SourcesDirectory) + SourcesDirectory: $(System.DefaultWorkingDirectory) CreatePr: true AutoCompletePr: false ReusePr: true @@ -68,7 +68,7 @@ jobs: - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}: - task: Powershell@2 inputs: - filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1 + filePath: $(System.DefaultWorkingDirectory)/eng/common/generate-locproject.ps1 arguments: $(_GenerateLocProjectArguments) displayName: Generate LocProject.json condition: ${{ parameters.condition }} @@ -103,7 +103,7 @@ jobs: - task: CopyFiles@2 displayName: Copy LocProject.json inputs: - SourceFolder: '$(Build.SourcesDirectory)/eng/Localize/' + SourceFolder: '$(System.DefaultWorkingDirectory)/eng/Localize/' Contents: 'LocProject.json' TargetFolder: '$(Build.ArtifactStagingDirectory)/loc' condition: ${{ parameters.condition }} diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml index d5303229c..348cd1637 100644 --- a/eng/common/core-templates/job/publish-build-assets.yml +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -38,6 +38,8 @@ parameters: # Optional: A minimatch pattern for the asset manifests to publish to BAR assetManifestsPattern: '*/manifests/**/*.xml' + repositoryAlias: self + jobs: - job: Asset_Registry_Publish @@ -78,7 +80,7 @@ jobs: - 'Illegal entry point, is1ESPipeline is not defined. Repository yaml should not directly reference templates in core-templates folder.': error - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - checkout: self + - checkout: ${{ parameters.repositoryAlias }} fetchDepth: 3 clean: true @@ -117,7 +119,7 @@ jobs: azureSubscription: "Darc: Maestro Production" scriptType: ps scriptLocation: scriptPath - scriptPath: $(Build.SourcesDirectory)/eng/common/sdk-task.ps1 + scriptPath: $(System.DefaultWorkingDirectory)/eng/common/sdk-task.ps1 arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests' /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }} @@ -137,7 +139,7 @@ jobs: Add-Content -Path $filePath -Value "$(DefaultChannels)" Add-Content -Path $filePath -Value $(IsStableBuild) - $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt" + $symbolExclusionfile = "$(System.DefaultWorkingDirectory)/eng/SymbolPublishingExclusionsFile.txt" if (Test-Path -Path $symbolExclusionfile) { Write-Host "SymbolExclusionFile exists" @@ -177,7 +179,7 @@ jobs: azureSubscription: "Darc: Maestro Production" scriptType: ps scriptLocation: scriptPath - scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1 arguments: > -BuildId $(BARBuildId) -PublishingInfraVersion 3 diff --git a/eng/common/core-templates/jobs/codeql-build.yml b/eng/common/core-templates/jobs/codeql-build.yml index 693b00b37..dbc14ac58 100644 --- a/eng/common/core-templates/jobs/codeql-build.yml +++ b/eng/common/core-templates/jobs/codeql-build.yml @@ -24,7 +24,7 @@ jobs: - name: DefaultGuardianVersion value: 0.109.0 - name: GuardianPackagesConfigFile - value: 
$(Build.SourcesDirectory)\eng\common\sdl\packages.config + value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config - name: GuardianVersion value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }} diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml index 2f992b2c6..b637cb6e9 100644 --- a/eng/common/core-templates/jobs/jobs.yml +++ b/eng/common/core-templates/jobs/jobs.yml @@ -43,6 +43,7 @@ parameters: artifacts: {} is1ESPipeline: '' + repositoryAlias: self # Internal resources (telemetry, microbuild) can only be accessed from non-public projects, # and some (Microbuild) should only be applied to non-PR cases for internal builds. @@ -114,3 +115,4 @@ jobs: enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }} artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} + repositoryAlias: ${{ parameters.repositoryAlias }} diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml index a151fd811..f6f87fe5c 100644 --- a/eng/common/core-templates/post-build/post-build.yml +++ b/eng/common/core-templates/post-build/post-build.yml @@ -154,7 +154,7 @@ stages: - task: PowerShell@2 displayName: Validate inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1 + filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1 arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - job: @@ -208,7 +208,7 @@ stages: filePath: eng\common\sdk-task.ps1 arguments: -task SigningValidation -restore -msbuildEngine vs /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' - /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt' + /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt' ${{ parameters.signingValidationAdditionalParameters }} - template: /eng/common/core-templates/steps/publish-logs.yml @@ -258,7 +258,7 @@ stages: - task: PowerShell@2 displayName: Validate inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1 + filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1 arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ -ExtractPath $(Agent.BuildDirectory)/Extract/ -GHRepoName $(Build.Repository.Name) @@ -313,7 +313,7 @@ stages: azureSubscription: "Darc: Maestro Production" scriptType: ps scriptLocation: scriptPath - scriptPath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1 + scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1 arguments: > -BuildId $(BARBuildId) -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} diff --git a/eng/common/core-templates/post-build/setup-maestro-vars.yml b/eng/common/core-templates/post-build/setup-maestro-vars.yml index f7602980d..a7abd58c4 100644 --- a/eng/common/core-templates/post-build/setup-maestro-vars.yml +++ b/eng/common/core-templates/post-build/setup-maestro-vars.yml @@ -36,7 +36,7 @@ steps: $AzureDevOpsBuildId = $Env:Build_BuildId } else { - . $(Build.SourcesDirectory)\eng\common\tools.ps1 + . 
$(System.DefaultWorkingDirectory)\eng\common\tools.ps1 $darc = Get-Darc $buildInfo = & $darc get-build ` --id ${{ parameters.BARBuildId }} ` diff --git a/eng/common/core-templates/steps/enable-internal-sources.yml b/eng/common/core-templates/steps/enable-internal-sources.yml index 64f881bff..4085512b6 100644 --- a/eng/common/core-templates/steps/enable-internal-sources.yml +++ b/eng/common/core-templates/steps/enable-internal-sources.yml @@ -17,8 +17,8 @@ steps: - task: PowerShell@2 displayName: Setup Internal Feeds inputs: - filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 - arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token + filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $Env:Token env: Token: ${{ parameters.legacyCredential }} # If running on dnceng (internal project), just use the default behavior for NuGetAuthenticate. @@ -29,8 +29,8 @@ steps: - task: PowerShell@2 displayName: Setup Internal Feeds inputs: - filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 - arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config + filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config - ${{ else }}: - template: /eng/common/templates/steps/get-federated-access-token.yml parameters: @@ -39,8 +39,8 @@ steps: - task: PowerShell@2 displayName: Setup Internal Feeds inputs: - filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1 - arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token) + filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1 + arguments: -ConfigFile $(System.DefaultWorkingDirectory)/NuGet.config -Password $(dnceng-artifacts-feeds-read-access-token) # This is required in certain scenarios to install the ADO credential provider. # It installed by default in some msbuild invocations (e.g. VS msbuild), but needs to be installed for others # (e.g. dotnet msbuild). diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml index 44a9636cd..c05f65027 100644 --- a/eng/common/core-templates/steps/generate-sbom.yml +++ b/eng/common/core-templates/steps/generate-sbom.yml @@ -6,7 +6,7 @@ parameters: PackageVersion: 10.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts' PackageName: '.NET' ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom IgnoreDirectories: '' diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index da30e67bc..d6b9878f5 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -12,6 +12,7 @@ parameters: # variable is not available in template expression. _SignType has a very large proliferation across .NET, so replacing it is tough. microbuildUseESRP: true # Location of the MicroBuild output folder + # NOTE: There's something that relies on this being in the "default" source directory for tasks such as Signing to work properly. 
microBuildOutputFolder: '$(Build.SourcesDirectory)' continueOnError: false @@ -46,17 +47,19 @@ steps: displayName: 'Validate ESRP usage (Non-Windows)' condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) + # Two different MB install steps. This is due to not being able to use the agent OS during + # YAML expansion, and Windows vs. Linux/Mac uses different service connections. However, + # we can avoid including the MB install step if not enabled at all. This avoids a bunch of + # extra pipeline authorizations, since most pipelines do not sign on non-Windows. - task: MicroBuildSigningPlugin@4 - displayName: Install MicroBuild plugin + displayName: Install MicroBuild plugin (Windows) inputs: signType: $(_SignType) zipSources: false feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json ${{ if eq(parameters.microbuildUseESRP, true) }}: - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: - azureSubscription: 'MicroBuild Signing Task (DevDiv)' - useEsrpCli: true - ${{ elseif eq(variables['System.TeamProject'], 'DevDiv') }}: + ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea ${{ else }}: ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca @@ -65,16 +68,24 @@ steps: MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) continueOnError: ${{ parameters.continueOnError }} - condition: and( - succeeded(), - or( - and( - eq(variables['Agent.Os'], 'Windows_NT'), - in(variables['_SignType'], 'real', 'test') - ), - and( - ${{ eq(parameters.enableMicrobuildForMacAndLinux, true) }}, - ne(variables['Agent.Os'], 'Windows_NT'), - eq(variables['_SignType'], 'real') - ) - )) + condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test')) + + - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}: + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin (non-Windows) + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + ${{ if eq(parameters.microbuildUseESRP, true) }}: + ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39 + ${{ else }}: + ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc + env: + TeamName: $(_TeamName) + MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + continueOnError: ${{ parameters.continueOnError }} + condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real')) diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml index de24d0087..10f825e27 100644 --- a/eng/common/core-templates/steps/publish-logs.yml +++ b/eng/common/core-templates/steps/publish-logs.yml @@ -12,22 +12,22 @@ steps: inputs: targetType: inline script: | - New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ - Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + New-Item -ItemType Directory 
$(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ + Move-Item -Path $(System.DefaultWorkingDirectory)/artifacts/log/Debug/* $(System.DefaultWorkingDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/ continueOnError: true condition: always() - task: PowerShell@2 displayName: Redact Logs inputs: - filePath: $(Build.SourcesDirectory)/eng/common/post-build/redact-logs.ps1 + filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/redact-logs.ps1 # For now this needs to have explicit list of all sensitive data. Taken from eng/publishing/v3/publish.yml - # Sensitive data can as well be added to $(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + # Sensitive data can as well be added to $(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt' # If the file exists - sensitive data for redaction will be sourced from it # (single entry per line, lines starting with '# ' are considered comments and skipped) - arguments: -InputPath '$(Build.SourcesDirectory)/PostBuildLogs' + arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs' -BinlogToolVersion ${{parameters.BinlogToolVersion}} - -TokensFilePath '$(Build.SourcesDirectory)/eng/BinlogSecretsRedactionFile.txt' + -TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt' '$(publishing-dnceng-devdiv-code-r-build-re)' '$(MaestroAccessToken)' '$(dn-bot-all-orgs-artifact-feeds-rw)' @@ -44,7 +44,7 @@ steps: - task: CopyFiles@2 displayName: Gather post build logs inputs: - SourceFolder: '$(Build.SourcesDirectory)/PostBuildLogs' + SourceFolder: '$(System.DefaultWorkingDirectory)/PostBuildLogs' Contents: '**' TargetFolder: '$(Build.ArtifactStagingDirectory)/PostBuildLogs' condition: always() diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml index c2917c1ef..75600735f 100644 --- a/eng/common/core-templates/steps/source-index-stage1-publish.yml +++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml @@ -20,7 +20,7 @@ steps: # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. 
workingDirectory: $(Agent.TempDirectory) -- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output +- script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i ${{parameters.BinlogPath}} -r $(System.DefaultWorkingDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output displayName: "Source Index: Process Binlog into indexable sln" - ${{ if and(ne(parameters.runAsPublic, 'true'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1 index 524aaa57f..fa1cdc2b3 100644 --- a/eng/common/generate-locproject.ps1 +++ b/eng/common/generate-locproject.ps1 @@ -33,15 +33,27 @@ $jsonTemplateFiles | ForEach-Object { $jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern +$wxlFilesV3 = @() +$wxlFilesV5 = @() $wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them if (-not $wxlFiles) { $wxlEnFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\1033\\.+\.wxl" } # pick up en files (1033 = en) specifically so we can copy them to use as the neutral xlf files if ($wxlEnFiles) { - $wxlFiles = @() - $wxlEnFiles | ForEach-Object { - $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" - $wxlFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru - } + $wxlFiles = @() + $wxlEnFiles | ForEach-Object { + $destinationFile = "$($_.Directory.Parent.FullName)\$($_.Name)" + $content = Get-Content $_.FullName -Raw + + # Split files on schema to select different parser settings in the generated project. 
+ if ($content -like "*http://wixtoolset.org/schemas/v4/wxl*") + { + $wxlFilesV5 += Copy-Item $_.FullName -Destination $destinationFile -PassThru + } + elseif ($content -like "*http://schemas.microsoft.com/wix/2006/localization*") + { + $wxlFilesV3 += Copy-Item $_.FullName -Destination $destinationFile -PassThru + } + } } } @@ -114,7 +126,32 @@ $locJson = @{ CloneLanguageSet = "WiX_CloneLanguages" LssFiles = @( "wxl_loc.lss" ) LocItems = @( - $wxlFiles | ForEach-Object { + $wxlFilesV3 | ForEach-Object { + $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" + $continue = $true + foreach ($exclusion in $exclusions.Exclusions) { + if ($_.FullName.Contains($exclusion)) { + $continue = $false + } + } + $sourceFile = ($_.FullName | Resolve-Path -Relative) + if ($continue) + { + return @{ + SourceFile = $sourceFile + CopyOption = "LangIDOnPath" + OutputPath = $outputPath + } + } + } + ) + }, + @{ + LanguageSet = $LanguageSet + CloneLanguageSet = "WiX_CloneLanguages" + LssFiles = @( "P210WxlSchemaV4.lss" ) + LocItems = @( + $wxlFilesV5 | ForEach-Object { $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\" $continue = $true foreach ($exclusion in $exclusions.Exclusions) { diff --git a/eng/common/template-guidance.md b/eng/common/template-guidance.md index 98bbc1ded..4bf4cf41b 100644 --- a/eng/common/template-guidance.md +++ b/eng/common/template-guidance.md @@ -50,7 +50,7 @@ extends: - task: CopyFiles@2 displayName: Gather build output inputs: - SourceFolder: '$(Build.SourcesDirectory)/artifacts/marvel' + SourceFolder: '$(System.DefaultWorkingDirectory)/artifacts/marvel' Contents: '**' TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/marvel' ``` diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml index a8a943287..92a0664f5 100644 --- a/eng/common/templates-official/job/job.yml +++ b/eng/common/templates-official/job/job.yml @@ -3,7 +3,7 @@ parameters: enableSbom: true runAsPublic: false PackageVersion: 9.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts' jobs: - template: /eng/common/core-templates/job/job.yml diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml index dbdd66d4a..f1311bbb1 100644 --- a/eng/common/templates-official/variables/sdl-variables.yml +++ b/eng/common/templates-official/variables/sdl-variables.yml @@ -4,4 +4,4 @@ variables: - name: DefaultGuardianVersion value: 0.109.0 - name: GuardianPackagesConfigFile - value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config \ No newline at end of file + value: $(System.DefaultWorkingDirectory)\eng\common\sdl\packages.config \ No newline at end of file diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml index 7cbf668c2..238fa0818 100644 --- a/eng/common/templates/job/job.yml +++ b/eng/common/templates/job/job.yml @@ -6,7 +6,7 @@ parameters: enableSbom: true runAsPublic: false PackageVersion: 9.0.0 - BuildDropPath: '$(Build.SourcesDirectory)/artifacts' + BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts' jobs: - template: /eng/common/core-templates/job/job.yml @@ -77,7 +77,7 @@ jobs: parameters: is1ESPipeline: false args: - targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration' + targetPath: '$(System.DefaultWorkingDirectory)\eng\common\BuildConfiguration' artifactName: 'BuildConfiguration' displayName: 'Publish build retry configuration' 
continueOnError: true diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index d4cfd9ccd..4920464cc 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -544,7 +544,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){ if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') { $vswhereVersion = $GlobalJson.tools.vswhere } else { - $vswhereVersion = '2.5.2' + # keep this in sync with the VSWhereVersion in DefaultVersions.props + $vswhereVersion = '3.1.7' } $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion" @@ -552,7 +553,8 @@ function LocateVisualStudio([object]$vsRequirements = $null){ if (!(Test-Path $vsWhereExe)) { Create-Directory $vsWhereDir - Write-Host 'Downloading vswhere' + Write-Host 'Downloading vswhere $vswhereVersion' + $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit Retry({ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe }) diff --git a/global.json b/global.json index 42387c172..005b06173 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25408.3", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25415.5", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 51ce49b154ed77dde1baf9bb2b3fa1ed7f0dba79 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 25 Aug 2025 05:02:53 +0000 Subject: [PATCH 31/43] Update dependencies from https://github.com/dotnet/arcade build 20250822.3 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25422.3 --- eng/Version.Details.xml | 4 ++-- .../core-templates/steps/source-index-stage1-publish.yml | 4 ++-- eng/common/sdk-task.ps1 | 3 ++- eng/common/sdk-task.sh | 7 ++++++- eng/common/tools.ps1 | 2 +- global.json | 4 ++-- 6 files changed, 15 insertions(+), 9 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 7f7ecfaff..668739083 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - e58b086a85a1df5762c8980308f7e4e330edd0aa + f2cdf946c00a12a2c283835bb41ddc2255832055 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml index 75600735f..e9a694afa 100644 --- a/eng/common/core-templates/steps/source-index-stage1-publish.yml +++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml @@ -1,6 +1,6 @@ parameters: - sourceIndexUploadPackageVersion: 2.0.0-20250425.2 - sourceIndexProcessBinlogPackageVersion: 1.0.1-20250515.1 + sourceIndexUploadPackageVersion: 2.0.0-20250818.1 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20250818.1 sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json binlogPath: artifacts/log/Debug/Build.binlog diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index a9d2a2d26..b62e132d3 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -7,13 +7,14 @@ Param( [switch] $restore, [switch] $prepareMachine, [switch][Alias('nobl')]$excludeCIBinaryLog, + [switch]$noWarnAsError, [switch] $help, [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties ) $ci = $true $binaryLog = if ($excludeCIBinaryLog) { $false } else { 
$true } -$warnAsError = $true +$warnAsError = if ($noWarnAsError) { $false } else { $true } . $PSScriptRoot\tools.ps1 diff --git a/eng/common/sdk-task.sh b/eng/common/sdk-task.sh index 2f83adc02..0c92f81d7 100644 --- a/eng/common/sdk-task.sh +++ b/eng/common/sdk-task.sh @@ -10,6 +10,7 @@ show_usage() { echo "Advanced settings:" echo " --excludeCIBinarylog Don't output binary log (short: -nobl)" + echo " --noWarnAsError Do not warn as error echo "" echo "Command line arguments not listed above are passed thru to msbuild." } @@ -52,6 +53,7 @@ exclude_ci_binary_log=false restore=false help=false properties='' +warnAsError=true while (($# > 0)); do lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")" @@ -73,6 +75,10 @@ while (($# > 0)); do exclude_ci_binary_log=true shift 1 ;; + --noWarnAsError) + warnAsError=false + shift 1 + ;; --help) help=true shift 1 @@ -85,7 +91,6 @@ while (($# > 0)); do done ci=true -warnAsError=true if $help; then show_usage diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 4920464cc..06b44de78 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -553,7 +553,7 @@ function LocateVisualStudio([object]$vsRequirements = $null){ if (!(Test-Path $vsWhereExe)) { Create-Directory $vsWhereDir - Write-Host 'Downloading vswhere $vswhereVersion' + Write-Host "Downloading vswhere $vswhereVersion" $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit Retry({ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe diff --git a/global.json b/global.json index 005b06173..7597123d0 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-preview.7.25372.107", + "dotnet": "10.0.100-rc.1.25411.109", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25415.5", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25422.3", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 9220bcd96454e38110cccb7adf1504463af6fea0 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 1 Sep 2025 05:02:20 +0000 Subject: [PATCH 32/43] Update dependencies from https://github.com/dotnet/arcade build 20250830.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 10.0.0-beta.25430.1 --- eng/Version.Details.xml | 4 ++-- eng/common/sdk-task.sh | 2 +- global.json | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 668739083..0c3fbbbd2 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - f2cdf946c00a12a2c283835bb41ddc2255832055 + 6122520523105dacdd702177a76c9f6b982fd759 https://github.com/dotnet/wpf diff --git a/eng/common/sdk-task.sh b/eng/common/sdk-task.sh index 0c92f81d7..3270f83fa 100644 --- a/eng/common/sdk-task.sh +++ b/eng/common/sdk-task.sh @@ -10,7 +10,7 @@ show_usage() { echo "Advanced settings:" echo " --excludeCIBinarylog Don't output binary log (short: -nobl)" - echo " --noWarnAsError Do not warn as error + echo " --noWarnAsError Do not warn as error" echo "" echo "Command line arguments not listed above are passed thru to msbuild." 
} diff --git a/global.json b/global.json index 7597123d0..6355bb6a4 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25422.3", + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25430.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From dc750e59d9a762c791b2fb30adb4e35afe002ed2 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 8 Sep 2025 05:02:10 +0000 Subject: [PATCH 33/43] Update dependencies from https://github.com/dotnet/arcade build 20250907.1 Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25457.1 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/steps/generate-sbom.yml | 2 +- global.json | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 0c3fbbbd2..78921b1f6 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 6122520523105dacdd702177a76c9f6b982fd759 + d7585e79b1397fe8783ac30c81bf5be4d250bcb1 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml index c05f65027..003f7eae0 100644 --- a/eng/common/core-templates/steps/generate-sbom.yml +++ b/eng/common/core-templates/steps/generate-sbom.yml @@ -5,7 +5,7 @@ # IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. parameters: - PackageVersion: 10.0.0 + PackageVersion: 11.0.0 BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts' PackageName: '.NET' ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom diff --git a/global.json b/global.json index 6355bb6a4..1d81bf136 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-rc.1.25411.109", + "dotnet": "10.0.100-rc.1.25420.111", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25430.1", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25457.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From c9b26b5da198e9a7011bc53f3255154b4e32ff4a Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 15 Sep 2025 05:01:57 +0000 Subject: [PATCH 34/43] Update dependencies from https://github.com/dotnet/arcade build 20250912.2 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25462.2 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/job/publish-build-assets.yml | 9 ++++++++- eng/common/core-templates/jobs/jobs.yml | 2 ++ .../core-templates/steps/source-index-stage1-publish.yml | 4 ++-- global.json | 2 +- 5 files changed, 15 insertions(+), 6 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 78921b1f6..0b5dc800c 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - d7585e79b1397fe8783ac30c81bf5be4d250bcb1 + c32cd132a730a7b9f947498b2ae75dbdc6785456 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml index 348cd1637..37dff559f 100644 --- a/eng/common/core-templates/job/publish-build-assets.yml +++ 
b/eng/common/core-templates/job/publish-build-assets.yml @@ -40,6 +40,8 @@ parameters: repositoryAlias: self + officialBuildId: '' + jobs: - job: Asset_Registry_Publish @@ -62,6 +64,11 @@ jobs: value: false # unconditional - needed for logs publishing (redactor tool version) - template: /eng/common/core-templates/post-build/common-variables.yml + - name: OfficialBuildId + ${{ if ne(parameters.officialBuildId, '') }}: + value: ${{ parameters.officialBuildId }} + ${{ else }}: + value: $(Build.BuildNumber) pool: # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com) @@ -124,7 +131,7 @@ jobs: /p:ManifestsPath='$(Build.StagingDirectory)/AssetManifests' /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }} /p:MaestroApiEndpoint=https://maestro.dot.net - /p:OfficialBuildId=$(Build.BuildNumber) + /p:OfficialBuildId=$(OfficialBuildId) condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/core-templates/jobs/jobs.yml b/eng/common/core-templates/jobs/jobs.yml index b637cb6e9..01ada7476 100644 --- a/eng/common/core-templates/jobs/jobs.yml +++ b/eng/common/core-templates/jobs/jobs.yml @@ -44,6 +44,7 @@ parameters: artifacts: {} is1ESPipeline: '' repositoryAlias: self + officialBuildId: '' # Internal resources (telemetry, microbuild) can only be accessed from non-public projects, # and some (Microbuild) should only be applied to non-PR cases for internal builds. @@ -116,3 +117,4 @@ jobs: artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }} signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }} repositoryAlias: ${{ parameters.repositoryAlias }} + officialBuildId: ${{ parameters.officialBuildId }} diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml index e9a694afa..eff4573c6 100644 --- a/eng/common/core-templates/steps/source-index-stage1-publish.yml +++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml @@ -1,6 +1,6 @@ parameters: - sourceIndexUploadPackageVersion: 2.0.0-20250818.1 - sourceIndexProcessBinlogPackageVersion: 1.0.1-20250818.1 + sourceIndexUploadPackageVersion: 2.0.0-20250906.1 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20250906.1 sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json binlogPath: artifacts/log/Debug/Build.binlog diff --git a/global.json b/global.json index 1d81bf136..6c89212d3 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25457.1", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25462.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From a9679a52e1939686e7cc75dba9e27656afb6a44a Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 22 Sep 2025 05:02:18 +0000 Subject: [PATCH 35/43] Update dependencies from https://github.com/dotnet/arcade build 20250919.3 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25469.3 --- eng/Version.Details.xml | 4 ++-- eng/common/SetupNugetSources.ps1 | 2 +- eng/common/SetupNugetSources.sh | 2 +- global.json | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 0b5dc800c..8e78ec231 100644 --- 
a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - c32cd132a730a7b9f947498b2ae75dbdc6785456 + dbc4dce57f6a2087f13d86e89dfb0334be3c42e2 https://github.com/dotnet/wpf diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index 792b60b49..9445c3143 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -157,7 +157,7 @@ if ($dotnet31Source -ne $null) { AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password } -$dotnetVersions = @('5','6','7','8','9') +$dotnetVersions = @('5','6','7','8','9','10') foreach ($dotnetVersion in $dotnetVersions) { $feedPrefix = "dotnet" + $dotnetVersion; diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index facb415ca..ddf4efc81 100644 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -99,7 +99,7 @@ if [ "$?" == "0" ]; then PackageSources+=('dotnet3.1-internal-transport') fi -DotNetVersions=('5' '6' '7' '8' '9') +DotNetVersions=('5' '6' '7' '8' '9' '10') for DotNetVersion in ${DotNetVersions[@]} ; do FeedPrefix="dotnet${DotNetVersion}"; diff --git a/global.json b/global.json index 6c89212d3..c9634bb7e 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25462.2", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25469.3", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 2a41ed747a5323a64043202ae2859bde1d9b8a2d Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 29 Sep 2025 05:02:41 +0000 Subject: [PATCH 36/43] Update dependencies from https://github.com/dotnet/arcade build 20250927.2 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25477.2 --- eng/Version.Details.xml | 4 ++-- global.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 8e78ec231..ce28f9d30 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - dbc4dce57f6a2087f13d86e89dfb0334be3c42e2 + e19df003785d0b81e2e3e1bf6e588bf8d913e95e https://github.com/dotnet/wpf diff --git a/global.json b/global.json index c9634bb7e..4fd798c2f 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25469.3", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25477.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From f83229f0ffc361bf7494509750f7da41ac1fad1d Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 6 Oct 2025 05:02:26 +0000 Subject: [PATCH 37/43] Update dependencies from https://github.com/dotnet/arcade build 20251004.3 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25504.3 --- eng/Version.Details.xml | 4 ++-- eng/common/build.sh | 2 +- eng/common/darc-init.sh | 2 +- eng/common/dotnet-install.sh | 2 +- eng/common/dotnet.sh | 2 +- eng/common/internal-feed-operations.sh | 2 +- eng/common/post-build/nuget-verification.ps1 | 2 +- eng/common/sdk-task.ps1 | 2 +- eng/common/tools.ps1 | 
4 ++-- global.json | 2 +- 10 files changed, 12 insertions(+), 12 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index ce28f9d30..2182ef5ce 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - e19df003785d0b81e2e3e1bf6e588bf8d913e95e + db534d10fc5ab858568b2f27d9b1383099a8019c https://github.com/dotnet/wpf diff --git a/eng/common/build.sh b/eng/common/build.sh index 9767bb411..ec3e80d18 100644 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -92,7 +92,7 @@ runtime_source_feed='' runtime_source_feed_key='' properties=() -while [[ $# > 0 ]]; do +while [[ $# -gt 0 ]]; do opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" case "$opt" in -help|-h) diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh index e889f439b..9f5ad6b76 100644 --- a/eng/common/darc-init.sh +++ b/eng/common/darc-init.sh @@ -5,7 +5,7 @@ darcVersion='' versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20' verbosity='minimal' -while [[ $# > 0 ]]; do +while [[ $# -gt 0 ]]; do opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in --darcversion) diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh index 7b9d97e3b..61f302bb6 100644 --- a/eng/common/dotnet-install.sh +++ b/eng/common/dotnet-install.sh @@ -18,7 +18,7 @@ architecture='' runtime='dotnet' runtimeSourceFeed='' runtimeSourceFeedKey='' -while [[ $# > 0 ]]; do +while [[ $# -gt 0 ]]; do opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in -version|-v) diff --git a/eng/common/dotnet.sh b/eng/common/dotnet.sh index 2ef682356..f6d24871c 100644 --- a/eng/common/dotnet.sh +++ b/eng/common/dotnet.sh @@ -19,7 +19,7 @@ source $scriptroot/tools.sh InitializeDotNetCli true # install # Invoke acquired SDK with args if they are provided -if [[ $# > 0 ]]; then +if [[ $# -gt 0 ]]; then __dotnetDir=${_InitializeDotNetCli} dotnetPath=${__dotnetDir}/dotnet ${dotnetPath} "$@" diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh index 9378223ba..6299e7eff 100644 --- a/eng/common/internal-feed-operations.sh +++ b/eng/common/internal-feed-operations.sh @@ -100,7 +100,7 @@ operation='' authToken='' repoName='' -while [[ $# > 0 ]]; do +while [[ $# -gt 0 ]]; do opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in --operation) diff --git a/eng/common/post-build/nuget-verification.ps1 b/eng/common/post-build/nuget-verification.ps1 index a365194a9..ac5c69ffc 100644 --- a/eng/common/post-build/nuget-verification.ps1 +++ b/eng/common/post-build/nuget-verification.ps1 @@ -30,7 +30,7 @@ [CmdletBinding(PositionalBinding = $false)] param( [string]$NuGetExePath, - [string]$PackageSource = "https://api.nuget.org/v3/index.json", + [string]$PackageSource = "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json", [string]$DownloadPath, [Parameter(ValueFromRemainingArguments = $true)] [string[]]$args diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index b62e132d3..4655af7a2 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -68,7 +68,7 @@ try { $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty } if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) { - $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.13.0" -MemberType NoteProperty + $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value 
"17.14.16" -MemberType NoteProperty } if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") { $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 06b44de78..4bc50bd56 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -394,8 +394,8 @@ function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = # If the version of msbuild is going to be xcopied, # use this version. Version matches a package here: - # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.13.0 - $defaultXCopyMSBuildVersion = '17.13.0' + # https://dev.azure.com/dnceng/public/_artifacts/feed/dotnet-eng/NuGet/Microsoft.DotNet.Arcade.MSBuild.Xcopy/versions/17.14.16 + $defaultXCopyMSBuildVersion = '17.14.16' if (!$vsRequirements) { if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') { diff --git a/global.json b/global.json index 4fd798c2f..95a4ea2e3 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25477.2", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25504.3", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From bc77c815665b275e0080de44f95a5cbfc2240cab Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 13 Oct 2025 05:02:28 +0000 Subject: [PATCH 38/43] Update dependencies from https://github.com/dotnet/arcade build 20251009.1 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25509.1 --- eng/Version.Details.xml | 4 +- eng/common/SetupNugetSources.ps1 | 71 +++---- eng/common/SetupNugetSources.sh | 175 +++++++++++------- eng/common/core-templates/job/job.yml | 6 + .../steps/install-microbuild-impl.yml | 34 ++++ .../steps/install-microbuild.yml | 62 ++++--- eng/common/native/install-dependencies.sh | 2 + global.json | 2 +- 8 files changed, 221 insertions(+), 135 deletions(-) create mode 100644 eng/common/core-templates/steps/install-microbuild-impl.yml diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 2182ef5ce..27e808e40 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - db534d10fc5ab858568b2f27d9b1383099a8019c + 488413fe104056170673a048a07906314e101e5d https://github.com/dotnet/wpf diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index 9445c3143..fc8d61801 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -7,7 +7,7 @@ # See example call for this script below. # # - task: PowerShell@2 -# displayName: Setup Private Feeds Credentials +# displayName: Setup internal Feeds Credentials # condition: eq(variables['Agent.OS'], 'Windows_NT') # inputs: # filePath: $(System.DefaultWorkingDirectory)/eng/common/SetupNugetSources.ps1 @@ -34,19 +34,28 @@ Set-StrictMode -Version 2.0 . 
$PSScriptRoot\tools.ps1 +# Adds or enables the package source with the given name +function AddOrEnablePackageSource($sources, $disabledPackageSources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) { + if ($disabledPackageSources -eq $null -or -not (EnableInternalPackageSource -DisabledPackageSources $disabledPackageSources -Creds $creds -PackageSourceName $SourceName)) { + AddPackageSource -Sources $sources -SourceName $SourceName -SourceEndPoint $SourceEndPoint -Creds $creds -Username $userName -pwd $Password + } +} + # Add source entry to PackageSources function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) { $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']") if ($packageSource -eq $null) { + Write-Host "Adding package source $SourceName" + $packageSource = $doc.CreateElement("add") $packageSource.SetAttribute("key", $SourceName) $packageSource.SetAttribute("value", $SourceEndPoint) $sources.AppendChild($packageSource) | Out-Null } else { - Write-Host "Package source $SourceName already present." + Write-Host "Package source $SourceName already present and enabled." } AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd @@ -59,6 +68,8 @@ function AddCredential($creds, $source, $username, $pwd) { return; } + Write-Host "Inserting credential for feed: " $source + # Looks for credential configuration for the given SourceName. Create it if none is found. $sourceElement = $creds.SelectSingleNode($Source) if ($sourceElement -eq $null) @@ -91,24 +102,27 @@ function AddCredential($creds, $source, $username, $pwd) { $passwordElement.SetAttribute("value", $pwd) } -function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) { - $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]") - - Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds." - - ForEach ($PackageSource in $maestroPrivateSources) { - Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key - AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd +# Enable all darc-int package sources. +function EnableMaestroInternalPackageSources($DisabledPackageSources, $Creds) { + $maestroInternalSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]") + ForEach ($DisabledPackageSource in $maestroInternalSources) { + EnableInternalPackageSource -DisabledPackageSources $DisabledPackageSources -Creds $Creds -PackageSourceName $DisabledPackageSource.key } } -function EnablePrivatePackageSources($DisabledPackageSources) { - $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]") - ForEach ($DisabledPackageSource in $maestroPrivateSources) { - Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource" +# Enables an internal package source by name, if found. Returns true if the package source was found and enabled, false otherwise. +function EnableInternalPackageSource($DisabledPackageSources, $Creds, $PackageSourceName) { + $DisabledPackageSource = $DisabledPackageSources.SelectSingleNode("add[@key='$PackageSourceName']") + if ($DisabledPackageSource) { + Write-Host "Enabling internal source '$($DisabledPackageSource.key)'." 
+ # Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries $DisabledPackageSources.RemoveChild($DisabledPackageSource) + + AddCredential -Creds $creds -Source $DisabledPackageSource.Key -Username $userName -pwd $Password + return $true } + return $false } if (!(Test-Path $ConfigFile -PathType Leaf)) { @@ -121,15 +135,17 @@ $doc = New-Object System.Xml.XmlDocument $filename = (Get-Item $ConfigFile).FullName $doc.Load($filename) -# Get reference to or create one if none exist already +# Get reference to - fail if none exist $sources = $doc.DocumentElement.SelectSingleNode("packageSources") if ($sources -eq $null) { - $sources = $doc.CreateElement("packageSources") - $doc.DocumentElement.AppendChild($sources) | Out-Null + Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile" + ExitWithExitCode 1 } $creds = $null +$feedSuffix = "v3/index.json" if ($Password) { + $feedSuffix = "v2" # Looks for a node. Create it if none is found. $creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials") if ($creds -eq $null) { @@ -138,33 +154,22 @@ if ($Password) { } } +$userName = "dn-bot" + # Check for disabledPackageSources; we'll enable any darc-int ones we find there $disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources") if ($disabledSources -ne $null) { Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node" - EnablePrivatePackageSources -DisabledPackageSources $disabledSources -} - -$userName = "dn-bot" - -# Insert credential nodes for Maestro's private feeds -InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password - -# 3.1 uses a different feed url format so it's handled differently here -$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']") -if ($dotnet31Source -ne $null) { - AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password - AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password + EnableMaestroInternalPackageSources -DisabledPackageSources $disabledSources -Creds $creds } - $dotnetVersions = @('5','6','7','8','9','10') foreach ($dotnetVersion in $dotnetVersions) { $feedPrefix = "dotnet" + $dotnetVersion; $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']") if ($dotnetSource -ne $null) { - AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password - AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password + AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password + 
AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password } } diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index ddf4efc81..dd2564aef 100644 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -52,78 +52,126 @@ if [[ `uname -s` == "Darwin" ]]; then TB='' fi -# Ensure there is a ... section. -grep -i "" $ConfigFile -if [ "$?" != "0" ]; then - echo "Adding ... section." - ConfigNodeHeader="" - PackageSourcesTemplate="${TB}${NL}${TB}" +# Enables an internal package source by name, if found. Returns 0 if found and enabled, 1 if not found. +EnableInternalPackageSource() { + local PackageSourceName="$1" + + # Check if disabledPackageSources section exists + grep -i "" "$ConfigFile" > /dev/null + if [ "$?" != "0" ]; then + return 1 # No disabled sources section + fi + + # Check if this source name is disabled + grep -i " /dev/null + if [ "$?" == "0" ]; then + echo "Enabling internal source '$PackageSourceName'." + # Remove the disabled entry + local OldDisableValue="" + local NewDisableValue="" + sed -i.bak "s|$OldDisableValue|$NewDisableValue|" "$ConfigFile" + + # Add the source name to PackageSources for credential handling + PackageSources+=("$PackageSourceName") + return 0 # Found and enabled + fi + + return 1 # Not found in disabled sources +} + +# Add source entry to PackageSources +AddPackageSource() { + local SourceName="$1" + local SourceEndPoint="$2" + + # Check if source already exists + grep -i " /dev/null + if [ "$?" == "0" ]; then + echo "Package source $SourceName already present and enabled." + PackageSources+=("$SourceName") + return + fi + + echo "Adding package source $SourceName" + PackageSourcesNodeFooter="" + PackageSourceTemplate="${TB}" + + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" "$ConfigFile" + PackageSources+=("$SourceName") +} + +# Adds or enables the package source with the given name +AddOrEnablePackageSource() { + local SourceName="$1" + local SourceEndPoint="$2" + + # Try to enable if disabled, if not found then add new source + EnableInternalPackageSource "$SourceName" + if [ "$?" != "0" ]; then + AddPackageSource "$SourceName" "$SourceEndPoint" + fi +} - sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile -fi +# Enable all darc-int package sources +EnableMaestroInternalPackageSources() { + # Check if disabledPackageSources section exists + grep -i "" "$ConfigFile" > /dev/null + if [ "$?" != "0" ]; then + return # No disabled sources section + fi + + # Find all darc-int disabled sources + local DisabledDarcIntSources=() + DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' "$ConfigFile" | tr -d '"') + + for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do + if [[ $DisabledSourceName == darc-int* ]]; then + EnableInternalPackageSource "$DisabledSourceName" + fi + done +} -# Ensure there is a ... section. -grep -i "" $ConfigFile +# Ensure there is a ... section. +grep -i "" $ConfigFile if [ "$?" != "0" ]; then - echo "Adding ... section." 
- - PackageSourcesNodeFooter="" - PackageSourceCredentialsTemplate="${TB}${NL}${TB}" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile + Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. NuGet config file must contain a packageSources section: $ConfigFile" + ExitWithExitCode 1 fi PackageSources=() -# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present -grep -i "... section. + grep -i "" $ConfigFile if [ "$?" != "0" ]; then - echo "Adding dotnet3.1-internal to the packageSources." - PackageSourcesNodeFooter="" - PackageSourceTemplate="${TB}" + echo "Adding ... section." - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=('dotnet3.1-internal') - - grep -i "" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding dotnet3.1-internal-transport to the packageSources." PackageSourcesNodeFooter="" - PackageSourceTemplate="${TB}" + PackageSourceCredentialsTemplate="${TB}${NL}${TB}" - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile + sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile fi - PackageSources+=('dotnet3.1-internal-transport') +fi + +# Check for disabledPackageSources; we'll enable any darc-int ones we find there +grep -i "" $ConfigFile > /dev/null +if [ "$?" == "0" ]; then + echo "Checking for any darc-int disabled package sources in the disabledPackageSources node" + EnableMaestroInternalPackageSources fi DotNetVersions=('5' '6' '7' '8' '9' '10') for DotNetVersion in ${DotNetVersions[@]} ; do FeedPrefix="dotnet${DotNetVersion}"; - grep -i " /dev/null if [ "$?" == "0" ]; then - grep -i "" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=("$FeedPrefix-internal") - - grep -i "" $ConfigFile - if [ "$?" != "0" ]; then - echo "Adding $FeedPrefix-internal-transport to the packageSources." - PackageSourcesNodeFooter="" - PackageSourceTemplate="${TB}" - - sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile - fi - PackageSources+=("$FeedPrefix-internal-transport") + AddOrEnablePackageSource "$FeedPrefix-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/$FeedSuffix" + AddOrEnablePackageSource "$FeedPrefix-internal-transport" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/$FeedSuffix" fi done @@ -139,29 +187,12 @@ if [ "$CredToken" ]; then # Check if there is no existing credential for this FeedName grep -i "<$FeedName>" $ConfigFile if [ "$?" != "0" ]; then - echo "Adding credentials for $FeedName." + echo " Inserting credential for feed: $FeedName" PackageSourceCredentialsNodeFooter="" - NewCredential="${TB}${TB}<$FeedName>${NL}${NL}${NL}" + NewCredential="${TB}${TB}<$FeedName>${NL}${TB}${NL}${TB}${TB}${NL}${TB}${TB}" sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile fi done fi - -# Re-enable any entries in disabledPackageSources where the feed name contains darc-int -grep -i "" $ConfigFile -if [ "$?" 
== "0" ]; then - DisabledDarcIntSources=() - echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile" - DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"') - for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do - if [[ $DisabledSourceName == darc-int* ]] - then - OldDisableValue="" - NewDisableValue="" - sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile - echo "Neutralized disablePackageSources entry for '$DisabledSourceName'" - fi - done -fi diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml index 5ce518406..cb4ccc023 100644 --- a/eng/common/core-templates/job/job.yml +++ b/eng/common/core-templates/job/job.yml @@ -19,6 +19,8 @@ parameters: # publishing defaults artifacts: '' enableMicrobuild: false + enablePreviewMicrobuild: false + microbuildPluginVersion: 'latest' enableMicrobuildForMacAndLinux: false microbuildUseESRP: true enablePublishBuildArtifacts: false @@ -128,6 +130,8 @@ jobs: - template: /eng/common/core-templates/steps/install-microbuild.yml parameters: enableMicrobuild: ${{ parameters.enableMicrobuild }} + enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }} + microbuildPluginVersion: ${{ parameters.microbuildPluginVersion }} enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} microbuildUseESRP: ${{ parameters.microbuildUseESRP }} continueOnError: ${{ parameters.continueOnError }} @@ -153,6 +157,8 @@ jobs: - template: /eng/common/core-templates/steps/cleanup-microbuild.yml parameters: enableMicrobuild: ${{ parameters.enableMicrobuild }} + enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }} + microbuildPluginVersion: ${{ parameters.microbuildPluginVersion }} enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/core-templates/steps/install-microbuild-impl.yml b/eng/common/core-templates/steps/install-microbuild-impl.yml new file mode 100644 index 000000000..9fdf3a116 --- /dev/null +++ b/eng/common/core-templates/steps/install-microbuild-impl.yml @@ -0,0 +1,34 @@ +parameters: + - name: microbuildTaskInputs + type: object + default: {} + + - name: microbuildEnv + type: object + default: {} + + - name: enablePreviewMicrobuild + type: boolean + default: false + + - name: condition + type: string + + - name: continueOnError + type: boolean + +steps: +- ${{ if eq(parameters.enablePreviewMicrobuild, 'true') }}: + - task: MicroBuildSigningPluginPreview@4 + displayName: Install Preview MicroBuild plugin (Windows) + inputs: ${{ parameters.microbuildTaskInputs }} + env: ${{ parameters.microbuildEnv }} + continueOnError: ${{ parameters.continueOnError }} + condition: ${{ parameters.condition }} +- ${{ else }}: + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin (Windows) + inputs: ${{ parameters.microbuildTaskInputs }} + env: ${{ parameters.microbuildEnv }} + continueOnError: ${{ parameters.continueOnError }} + condition: ${{ parameters.condition }} \ No newline at end of file diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index d6b9878f5..bdebec0ea 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -4,6 +4,8 @@ parameters: # Enable install tasks for MicroBuild on Mac and Linux # Will be ignored if 'enableMicrobuild' is false or 
'Agent.Os' is 'Windows_NT' enableMicrobuildForMacAndLinux: false + # Enable preview version of MB signing plugin + enablePreviewMicrobuild: false # Determines whether the ESRP service connection information should be passed to the signing plugin. # This overlaps with _SignType to some degree. We only need the service connection for real signing. # It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place. @@ -14,6 +16,8 @@ parameters: # Location of the MicroBuild output folder # NOTE: There's something that relies on this being in the "default" source directory for tasks such as Signing to work properly. microBuildOutputFolder: '$(Build.SourcesDirectory)' + # Microbuild version + microbuildPluginVersion: 'latest' continueOnError: false @@ -51,41 +55,45 @@ steps: # YAML expansion, and Windows vs. Linux/Mac uses different service connections. However, # we can avoid including the MB install step if not enabled at all. This avoids a bunch of # extra pipeline authorizations, since most pipelines do not sign on non-Windows. - - task: MicroBuildSigningPlugin@4 - displayName: Install MicroBuild plugin (Windows) - inputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - ${{ if eq(parameters.microbuildUseESRP, true) }}: - ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea - ${{ else }}: - ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca - env: - TeamName: $(_TeamName) - MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} - SYSTEM_ACCESSTOKEN: $(System.AccessToken) - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test')) - - - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}: - - task: MicroBuildSigningPlugin@4 - displayName: Install MicroBuild plugin (non-Windows) - inputs: + - template: /eng/common/core-templates/steps/install-microbuild-impl.yml@self + parameters: + enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }} + microbuildTaskInputs: signType: $(_SignType) zipSources: false feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + version: ${{ parameters.microbuildPluginVersion }} ${{ if eq(parameters.microbuildUseESRP, true) }}: ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39 + ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea ${{ else }}: - ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc - env: + ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca + microbuildEnv: TeamName: $(_TeamName) MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real')) + condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test')) + + - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}: + - template: /eng/common/core-templates/steps/install-microbuild-impl.yml@self + parameters: + 
enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }} + microbuildTaskInputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + version: ${{ parameters.microbuildPluginVersion }} + ${{ if eq(parameters.microbuildUseESRP, true) }}: + ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39 + ${{ else }}: + ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc + microbuildEnv: + TeamName: $(_TeamName) + MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + continueOnError: ${{ parameters.continueOnError }} + condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real')) diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh index 477a44f33..f7bd4af0c 100644 --- a/eng/common/native/install-dependencies.sh +++ b/eng/common/native/install-dependencies.sh @@ -30,6 +30,8 @@ case "$os" in elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)" $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio + elif [ "$ID" = "amzn" ]; then + dnf install -y cmake llvm lld lldb clang python libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio elif [ "$ID" = "alpine" ]; then apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio else diff --git a/global.json b/global.json index 95a4ea2e3..3e7c5b7d2 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25504.3", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25509.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 31f5c9b707ef29f2e4c79419ab59a2329114f1fd Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 27 Oct 2025 05:02:02 +0000 Subject: [PATCH 39/43] Update dependencies from https://github.com/dotnet/arcade build 20251024.1 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25524.1 --- eng/Version.Details.xml | 4 +- eng/common/SetupNugetSources.sh | 6 +- .../job/publish-build-assets.yml | 24 +- .../core-templates/post-build/post-build.yml | 450 +++++++++--------- .../steps/install-microbuild.yml | 15 +- eng/common/post-build/publish-using-darc.ps1 | 4 +- eng/common/sdk-task.ps1 | 2 + global.json | 2 +- 8 files changed, 264 insertions(+), 243 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 27e808e40..8f9c09c61 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 488413fe104056170673a048a07906314e101e5d + 904bfd153de2a88471c00a7cdd3450948e758db8 https://github.com/dotnet/wpf diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index dd2564aef..b97cc5363 100644 --- a/eng/common/SetupNugetSources.sh +++ b/eng/common/SetupNugetSources.sh @@ -66,10 +66,8 @@ EnableInternalPackageSource() { grep -i " /dev/null if [ "$?" == "0" ]; then echo "Enabling internal source '$PackageSourceName'." 
- # Remove the disabled entry - local OldDisableValue="" - local NewDisableValue="" - sed -i.bak "s|$OldDisableValue|$NewDisableValue|" "$ConfigFile" + # Remove the disabled entry (including any surrounding comments or whitespace on the same line) + sed -i.bak "//d" "$ConfigFile" # Add the source name to PackageSources for credential handling PackageSources+=("$PackageSourceName") diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml index 37dff559f..721a55666 100644 --- a/eng/common/core-templates/job/publish-build-assets.yml +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -91,8 +91,8 @@ jobs: fetchDepth: 3 clean: true - - ${{ if eq(parameters.isAssetlessBuild, 'false') }}: - - ${{ if eq(parameters.publishingVersion, 3) }}: + - ${{ if eq(parameters.isAssetlessBuild, 'false') }}: + - ${{ if eq(parameters.publishingVersion, 3) }}: - task: DownloadPipelineArtifact@2 displayName: Download Asset Manifests inputs: @@ -117,9 +117,16 @@ jobs: flattenFolders: true condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} - + - task: NuGetAuthenticate@1 + # Populate internal runtime variables. + - template: /eng/common/templates/steps/enable-internal-sources.yml + parameters: + legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw) + + - template: /eng/common/templates/steps/enable-internal-runtimes.yml + - task: AzureCLI@2 displayName: Publish Build Assets inputs: @@ -132,9 +139,12 @@ jobs: /p:IsAssetlessBuild=${{ parameters.isAssetlessBuild }} /p:MaestroApiEndpoint=https://maestro.dot.net /p:OfficialBuildId=$(OfficialBuildId) + -runtimeSourceFeed https://ci.dot.net/internal + -runtimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) + condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} - + - task: powershell@2 displayName: Create ReleaseConfigs Artifact inputs: @@ -162,7 +172,7 @@ jobs: artifactName: AssetManifests displayName: 'Publish Merged Manifest' retryCountOnTaskFailure: 10 # for any logs being locked - sbomEnabled: false # we don't need SBOM for logs + sbomEnabled: false # we don't need SBOM for logs - template: /eng/common/core-templates/steps/publish-build-artifacts.yml parameters: @@ -195,9 +205,11 @@ jobs: -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' + -runtimeSourceFeed https://ci.dot.net/internal + -runtimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - template: /eng/common/core-templates/steps/publish-logs.yml parameters: is1ESPipeline: ${{ parameters.is1ESPipeline }} - JobLabel: 'Publish_Artifacts_Logs' + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml index f6f87fe5c..0cea81c74 100644 --- a/eng/common/core-templates/post-build/post-build.yml +++ b/eng/common/core-templates/post-build/post-build.yml @@ -1,106 +1,106 @@ parameters: - # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. 
- # Publishing V1 is no longer supported - # Publishing V2 is no longer supported - # Publishing V3 is the default - - name: publishingInfraVersion - displayName: Which version of publishing should be used to promote the build definition? - type: number - default: 3 - values: - - 3 - - - name: BARBuildId - displayName: BAR Build Id - type: number - default: 0 - - - name: PromoteToChannelIds - displayName: Channel to promote BARBuildId to - type: string - default: '' - - - name: enableSourceLinkValidation - displayName: Enable SourceLink validation - type: boolean - default: false - - - name: enableSigningValidation - displayName: Enable signing validation - type: boolean - default: true - - - name: enableSymbolValidation - displayName: Enable symbol validation - type: boolean - default: false - - - name: enableNugetValidation - displayName: Enable NuGet validation - type: boolean - default: true - - - name: publishInstallersAndChecksums - displayName: Publish installers and checksums - type: boolean - default: true - - - name: requireDefaultChannels - displayName: Fail the build if there are no default channel(s) registrations for the current build - type: boolean - default: false - - - name: SDLValidationParameters - type: object - default: - enable: false - publishGdn: false - continueOnError: false - params: '' - artifactNames: '' - downloadArtifacts: true - - - name: isAssetlessBuild - type: boolean - displayName: Is Assetless Build - default: false - - # These parameters let the user customize the call to sdk-task.ps1 for publishing - # symbols & general artifacts as well as for signing validation - - name: symbolPublishingAdditionalParameters - displayName: Symbol publishing additional parameters - type: string - default: '' - - - name: artifactsPublishingAdditionalParameters - displayName: Artifact publishing additional parameters - type: string - default: '' - - - name: signingValidationAdditionalParameters - displayName: Signing validation additional parameters - type: string - default: '' - - # Which stages should finish execution before post-build stages start - - name: validateDependsOn - type: object - default: - - build - - - name: publishDependsOn - type: object - default: - - Validate - - # Optional: Call asset publishing rather than running in a separate stage - - name: publishAssetsImmediately - type: boolean - default: false - - - name: is1ESPipeline - type: boolean - default: false +# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. +# Publishing V1 is no longer supported +# Publishing V2 is no longer supported +# Publishing V3 is the default +- name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? 
+ type: number + default: 3 + values: + - 3 + +- name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + +- name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + +- name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + +- name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + +- name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + +- name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + +- name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + +- name: requireDefaultChannels + displayName: Fail the build if there are no default channel(s) registrations for the current build + type: boolean + default: false + +- name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + +- name: isAssetlessBuild + type: boolean + displayName: Is Assetless Build + default: false + +# These parameters let the user customize the call to sdk-task.ps1 for publishing +# symbols & general artifacts as well as for signing validation +- name: symbolPublishingAdditionalParameters + displayName: Symbol publishing additional parameters + type: string + default: '' + +- name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + +- name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + +# Which stages should finish execution before post-build stages start +- name: validateDependsOn + type: object + default: + - build + +- name: publishDependsOn + type: object + default: + - Validate + +# Optional: Call asset publishing rather than running in a separate stage +- name: publishAssetsImmediately + type: boolean + default: false + +- name: is1ESPipeline + type: boolean + default: false stages: - ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: @@ -108,10 +108,10 @@ stages: dependsOn: ${{ parameters.validateDependsOn }} displayName: Validate Build Assets variables: - - template: /eng/common/core-templates/post-build/common-variables.yml - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} jobs: - job: displayName: NuGet Validation @@ -134,28 +134,28 @@ stages: demands: ImageOverride -equals windows.vs2022.amd64 steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - 
artifactName: PackageArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - job: displayName: Signing Validation @@ -169,54 +169,54 @@ stages: os: windows # If it's not devdiv, it's dnceng ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 os: windows ${{ else }}: name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2022.amd64 + demands: ImageOverride -equals windows.vs2022.amd64 steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@1 - displayName: 'Authenticate to AzDO Feeds' - - # Signing validation will optionally work with the buildmanifest file which is downloaded from - # Azure DevOps above. 
- - task: PowerShell@2 - displayName: Validate - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task SigningValidation -restore -msbuildEngine vs - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' - /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt' - ${{ parameters.signingValidationAdditionalParameters }} - - - template: /eng/common/core-templates/steps/publish-logs.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - StageLabel: 'Validation' - JobLabel: 'Signing' - BinlogToolVersion: $(BinlogToolVersion) + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. + - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + StageLabel: 'Validation' + JobLabel: 'Signing' + BinlogToolVersion: $(BinlogToolVersion) - job: displayName: SourceLink Validation @@ -230,41 +230,41 @@ stages: os: windows # If it's not devdiv, it's dnceng ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 os: windows ${{ else }}: name: $(DncEngInternalBuildPool) - demands: ImageOverride -equals windows.vs2022.amd64 + demands: ImageOverride -equals windows.vs2022.amd64 steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: BlobArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - -ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) - -GHCommit $(Build.SourceVersion) - -SourcelinkCliVersion 
$(SourceLinkCLIVersion) - continueOnError: true + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true - ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: - stage: publish_using_darc @@ -274,10 +274,10 @@ stages: dependsOn: ${{ parameters.validateDependsOn }} displayName: Publish using Darc variables: - - template: /eng/common/core-templates/post-build/common-variables.yml - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} jobs: - job: displayName: Publish Using Darc @@ -291,30 +291,36 @@ stages: os: windows # If it's not devdiv, it's dnceng ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: name: NetCore1ESPool-Publishing-Internal image: windows.vs2019.amd64 os: windows ${{ else }}: name: NetCore1ESPool-Publishing-Internal - demands: ImageOverride -equals windows.vs2019.amd64 + demands: ImageOverride -equals windows.vs2019.amd64 steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: NuGetAuthenticate@1 - - - task: AzureCLI@2 - displayName: Publish Using Darc - inputs: - azureSubscription: "Darc: Maestro Production" - scriptType: ps - scriptLocation: scriptPath - scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: > + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: NuGetAuthenticate@1 # Populate internal runtime variables. 
+ + - template: /eng/common/templates/steps/enable-internal-sources.yml + parameters: + legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw) + + - template: /eng/common/templates/steps/enable-internal-runtimes.yml + + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: > -BuildId $(BARBuildId) -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} -AzdoToken '$(System.AccessToken)' @@ -323,3 +329,5 @@ stages: -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' + -runtimeSourceFeed https://ci.dot.net/internal + -runtimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index bdebec0ea..3d42d9a56 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -13,9 +13,6 @@ parameters: # Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real sign scenarios. The # variable is not available in template expression. _SignType has a very large proliferation across .NET, so replacing it is tough. microbuildUseESRP: true - # Location of the MicroBuild output folder - # NOTE: There's something that relies on this being in the "default" source directory for tasks such as Signing to work properly. - microBuildOutputFolder: '$(Build.SourcesDirectory)' # Microbuild version microbuildPluginVersion: 'latest' @@ -24,14 +21,16 @@ parameters: steps: - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: - # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable + # Installing .NET 8 is required to use the MicroBuild signing plugin on non-Windows platforms - task: UseDotNet@2 displayName: Install .NET 8.0 SDK for MicroBuild Plugin inputs: packageType: sdk version: 8.0.x - installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet - workingDirectory: ${{ parameters.microBuildOutputFolder }} + # Installing the SDK in a '.dotnet-microbuild' directory is required for signing. + # See target FindDotNetPathForMicroBuild in arcade/src/Microsoft.DotNet.Arcade.Sdk/tools/Sign.proj + # Do not remove '.dotnet-microbuild' from the path without changing the corresponding logic. 
+ installationPath: $(Agent.TempDirectory)/.dotnet-microbuild condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) - script: | @@ -71,7 +70,7 @@ steps: ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca microbuildEnv: TeamName: $(_TeamName) - MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} + MicroBuildOutputFolderOverride: $(Agent.TempDirectory)/MicroBuild SYSTEM_ACCESSTOKEN: $(System.AccessToken) continueOnError: ${{ parameters.continueOnError }} condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test')) @@ -93,7 +92,7 @@ steps: ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc microbuildEnv: TeamName: $(_TeamName) - MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} + MicroBuildOutputFolderOverride: $(Agent.TempDirectory)/MicroBuild SYSTEM_ACCESSTOKEN: $(System.AccessToken) continueOnError: ${{ parameters.continueOnError }} condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real')) diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1 index 1eda208a3..48e55598b 100644 --- a/eng/common/post-build/publish-using-darc.ps1 +++ b/eng/common/post-build/publish-using-darc.ps1 @@ -7,7 +7,9 @@ param( [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters, [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters, [Parameter(Mandatory=$false)][string] $RequireDefaultChannels, - [Parameter(Mandatory=$false)][string] $SkipAssetsPublishing + [Parameter(Mandatory=$false)][string] $SkipAssetsPublishing, + [Parameter(Mandatory=$false)][string] $runtimeSourceFeed, + [Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey ) try { diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1 index 4655af7a2..9ae443f1c 100644 --- a/eng/common/sdk-task.ps1 +++ b/eng/common/sdk-task.ps1 @@ -9,6 +9,8 @@ Param( [switch][Alias('nobl')]$excludeCIBinaryLog, [switch]$noWarnAsError, [switch] $help, + [string] $runtimeSourceFeed = '', + [string] $runtimeSourceFeedKey = '', [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties ) diff --git a/global.json b/global.json index 3e7c5b7d2..2b85f4178 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25509.1", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25524.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 885d4ac98f004e690a329476dba232314bb47edd Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 3 Nov 2025 05:02:25 +0000 Subject: [PATCH 40/43] Update dependencies from https://github.com/dotnet/arcade build 20251031.1 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25531.1 --- eng/Version.Details.xml | 4 ++-- eng/common/core-templates/steps/publish-logs.yml | 2 ++ eng/common/post-build/redact-logs.ps1 | 4 +++- global.json | 4 ++-- 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 8f9c09c61..26f09ba11 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 904bfd153de2a88471c00a7cdd3450948e758db8 + d8816877a6b0e6cb39cb57d92557ad4952715a2e https://github.com/dotnet/wpf diff --git 
a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml index 10f825e27..0664c343b 100644 --- a/eng/common/core-templates/steps/publish-logs.yml +++ b/eng/common/core-templates/steps/publish-logs.yml @@ -28,6 +28,8 @@ steps: arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs' -BinlogToolVersion ${{parameters.BinlogToolVersion}} -TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt' + -runtimeSourceFeed https://ci.dot.net/internal + -runtimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) '$(publishing-dnceng-devdiv-code-r-build-re)' '$(MaestroAccessToken)' '$(dn-bot-all-orgs-artifact-feeds-rw)' diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1 index b7fc19591..fc0218a01 100644 --- a/eng/common/post-build/redact-logs.ps1 +++ b/eng/common/post-build/redact-logs.ps1 @@ -7,7 +7,9 @@ param( # File with strings to redact - separated by newlines. # For comments start the line with '# ' - such lines are ignored [Parameter(Mandatory=$false)][string] $TokensFilePath, - [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact + [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact, + [Parameter(Mandatory=$false)][string] $runtimeSourceFeed, + [Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey ) try { diff --git a/global.json b/global.json index 2b85f4178..041a2924f 100644 --- a/global.json +++ b/global.json @@ -1,6 +1,6 @@ { "tools": { - "dotnet": "10.0.100-rc.1.25420.111", + "dotnet": "10.0.100-rc.2.25502.107", "runtimes": { "dotnet": [ "2.1.7", @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25524.1", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25531.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 741a533d9786c0c700a0e2af477c9937cc0f93c4 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 10 Nov 2025 05:02:22 +0000 Subject: [PATCH 41/43] Update dependencies from https://github.com/dotnet/arcade build 20251107.2 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25557.2 --- eng/Version.Details.xml | 4 ++-- .../core-templates/job/publish-build-assets.yml | 11 ++++++++--- .../core-templates/post-build/post-build.yml | 6 +++++- .../steps/install-microbuild-impl.yml | 6 +++--- .../core-templates/steps/install-microbuild.yml | 15 ++++++++------- eng/common/core-templates/steps/publish-logs.yml | 4 ++-- .../steps/source-index-stage1-publish.yml | 4 ++-- eng/common/cross/x64/tizen/tizen.patch | 9 +++++++++ eng/common/cross/x86/tizen/tizen.patch | 9 +++++++++ eng/common/native/install-dependencies.sh | 2 +- global.json | 2 +- 11 files changed, 50 insertions(+), 22 deletions(-) create mode 100644 eng/common/cross/x64/tizen/tizen.patch create mode 100644 eng/common/cross/x86/tizen/tizen.patch diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 26f09ba11..fa696fe2d 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - d8816877a6b0e6cb39cb57d92557ad4952715a2e + 7d717a49d570577936361c14de38bf61271aa274 https://github.com/dotnet/wpf diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml index 721a55666..4a417e003 100644 --- 
a/eng/common/core-templates/job/publish-build-assets.yml +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -140,7 +140,7 @@ jobs: /p:MaestroApiEndpoint=https://maestro.dot.net /p:OfficialBuildId=$(OfficialBuildId) -runtimeSourceFeed https://ci.dot.net/internal - -runtimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) + -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)' condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} @@ -189,6 +189,11 @@ jobs: BARBuildId: ${{ parameters.BARBuildId }} PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} is1ESPipeline: ${{ parameters.is1ESPipeline }} + + # Darc is targeting 8.0, so make sure it's installed + - task: UseDotNet@2 + inputs: + version: 8.0.x - task: AzureCLI@2 displayName: Publish Using Darc @@ -205,8 +210,8 @@ jobs: -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}' -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' - -runtimeSourceFeed https://ci.dot.net/internal - -runtimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) + -runtimeSourceFeed https://ci.dot.net/internal + -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)' - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}: - template: /eng/common/core-templates/steps/publish-logs.yml diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml index 0cea81c74..27763a825 100644 --- a/eng/common/core-templates/post-build/post-build.yml +++ b/eng/common/core-templates/post-build/post-build.yml @@ -313,6 +313,10 @@ stages: - template: /eng/common/templates/steps/enable-internal-runtimes.yml + - task: UseDotNet@2 + inputs: + version: 8.0.x + - task: AzureCLI@2 displayName: Publish Using Darc inputs: @@ -330,4 +334,4 @@ stages: -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' -SkipAssetsPublishing '${{ parameters.isAssetlessBuild }}' -runtimeSourceFeed https://ci.dot.net/internal - -runtimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) + -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)' diff --git a/eng/common/core-templates/steps/install-microbuild-impl.yml b/eng/common/core-templates/steps/install-microbuild-impl.yml index 9fdf3a116..b9e0143ee 100644 --- a/eng/common/core-templates/steps/install-microbuild-impl.yml +++ b/eng/common/core-templates/steps/install-microbuild-impl.yml @@ -20,15 +20,15 @@ parameters: steps: - ${{ if eq(parameters.enablePreviewMicrobuild, 'true') }}: - task: MicroBuildSigningPluginPreview@4 - displayName: Install Preview MicroBuild plugin (Windows) + displayName: Install Preview MicroBuild plugin inputs: ${{ parameters.microbuildTaskInputs }} env: ${{ parameters.microbuildEnv }} continueOnError: ${{ parameters.continueOnError }} condition: ${{ parameters.condition }} - ${{ else }}: - task: MicroBuildSigningPlugin@4 - displayName: Install MicroBuild plugin (Windows) + displayName: Install MicroBuild plugin inputs: ${{ parameters.microbuildTaskInputs }} env: ${{ parameters.microbuildEnv }} continueOnError: ${{ parameters.continueOnError }} - condition: ${{ parameters.condition }} \ No newline at end of file + condition: ${{ parameters.condition }} diff --git a/eng/common/core-templates/steps/install-microbuild.yml 
b/eng/common/core-templates/steps/install-microbuild.yml index 3d42d9a56..bdebec0ea 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -13,6 +13,9 @@ parameters: # Unfortunately, _SignType can't be used to exclude the use of the service connection in non-real sign scenarios. The # variable is not available in template expression. _SignType has a very large proliferation across .NET, so replacing it is tough. microbuildUseESRP: true + # Location of the MicroBuild output folder + # NOTE: There's something that relies on this being in the "default" source directory for tasks such as Signing to work properly. + microBuildOutputFolder: '$(Build.SourcesDirectory)' # Microbuild version microbuildPluginVersion: 'latest' @@ -21,16 +24,14 @@ parameters: steps: - ${{ if eq(parameters.enableMicrobuild, 'true') }}: - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}: - # Installing .NET 8 is required to use the MicroBuild signing plugin on non-Windows platforms + # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable - task: UseDotNet@2 displayName: Install .NET 8.0 SDK for MicroBuild Plugin inputs: packageType: sdk version: 8.0.x - # Installing the SDK in a '.dotnet-microbuild' directory is required for signing. - # See target FindDotNetPathForMicroBuild in arcade/src/Microsoft.DotNet.Arcade.Sdk/tools/Sign.proj - # Do not remove '.dotnet-microbuild' from the path without changing the corresponding logic. - installationPath: $(Agent.TempDirectory)/.dotnet-microbuild + installationPath: ${{ parameters.microBuildOutputFolder }}/.dotnet + workingDirectory: ${{ parameters.microBuildOutputFolder }} condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT')) - script: | @@ -70,7 +71,7 @@ steps: ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca microbuildEnv: TeamName: $(_TeamName) - MicroBuildOutputFolderOverride: $(Agent.TempDirectory)/MicroBuild + MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) continueOnError: ${{ parameters.continueOnError }} condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test')) @@ -92,7 +93,7 @@ steps: ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc microbuildEnv: TeamName: $(_TeamName) - MicroBuildOutputFolderOverride: $(Agent.TempDirectory)/MicroBuild + MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) continueOnError: ${{ parameters.continueOnError }} condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real')) diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml index 0664c343b..5a927b4c7 100644 --- a/eng/common/core-templates/steps/publish-logs.yml +++ b/eng/common/core-templates/steps/publish-logs.yml @@ -26,10 +26,10 @@ steps: # If the file exists - sensitive data for redaction will be sourced from it # (single entry per line, lines starting with '# ' are considered comments and skipped) arguments: -InputPath '$(System.DefaultWorkingDirectory)/PostBuildLogs' - -BinlogToolVersion ${{parameters.BinlogToolVersion}} + -BinlogToolVersion '${{parameters.BinlogToolVersion}}' -TokensFilePath '$(System.DefaultWorkingDirectory)/eng/BinlogSecretsRedactionFile.txt' -runtimeSourceFeed https://ci.dot.net/internal - 
-runtimeSourceFeedKey $(dotnetbuilds-internal-container-read-token-base64) + -runtimeSourceFeedKey '$(dotnetbuilds-internal-container-read-token-base64)' '$(publishing-dnceng-devdiv-code-r-build-re)' '$(MaestroAccessToken)' '$(dn-bot-all-orgs-artifact-feeds-rw)' diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml index eff4573c6..ac019e2d0 100644 --- a/eng/common/core-templates/steps/source-index-stage1-publish.yml +++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml @@ -14,8 +14,8 @@ steps: workingDirectory: $(Agent.TempDirectory) - script: | - $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools - $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools displayName: "Source Index: Download netsourceindex Tools" # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. workingDirectory: $(Agent.TempDirectory) diff --git a/eng/common/cross/x64/tizen/tizen.patch b/eng/common/cross/x64/tizen/tizen.patch new file mode 100644 index 000000000..56fbc8810 --- /dev/null +++ b/eng/common/cross/x64/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib64/libc.so b/usr/lib64/libc.so +--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. */ + OUTPUT_FORMAT(elf64-x86-64) +-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib64/ld-linux-x86-64.so.2 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-x86-64.so.2 ) ) diff --git a/eng/common/cross/x86/tizen/tizen.patch b/eng/common/cross/x86/tizen/tizen.patch new file mode 100644 index 000000000..f4fe8838a --- /dev/null +++ b/eng/common/cross/x86/tizen/tizen.patch @@ -0,0 +1,9 @@ +diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so +--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900 ++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900 +@@ -2,4 +2,4 @@ + Use the shared library, but some functions are only in + the static library, so try that secondarily. 
*/ + OUTPUT_FORMAT(elf32-i386) +-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.2 ) ) ++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.2 ) ) diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh index f7bd4af0c..64b87d0bc 100644 --- a/eng/common/native/install-dependencies.sh +++ b/eng/common/native/install-dependencies.sh @@ -27,7 +27,7 @@ case "$os" in libssl-dev libkrb5-dev pigz cpio localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 - elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then + elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ] || [ "$ID" = "centos"]; then pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)" $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio elif [ "$ID" = "amzn" ]; then diff --git a/global.json b/global.json index 041a2924f..f822fc615 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25531.1", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25557.2", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From 08458b56020410d80bb31f8f999fa0b509026713 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 10 Nov 2025 13:25:44 +0000 Subject: [PATCH 42/43] Update dependencies from https://github.com/dotnet/arcade build 20251110.1 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25560.1 --- eng/Version.Details.xml | 4 ++-- global.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index fa696fe2d..5d86dc7d1 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 7d717a49d570577936361c14de38bf61271aa274 + 3a7f017be1c42a4ecc516446977cf55d429cf390 https://github.com/dotnet/wpf diff --git a/global.json b/global.json index f822fc615..ebc7e2ff3 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25557.2", + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25560.1", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" }, From cfa80bc2fb33e3c9cd7690a72a9dc2eb72234750 Mon Sep 17 00:00:00 2001 From: "dotnet-maestro[bot]" Date: Mon, 17 Nov 2025 05:02:13 +0000 Subject: [PATCH 43/43] Update dependencies from https://github.com/dotnet/arcade build 20251112.6 On relative base path root Microsoft.DotNet.Arcade.Sdk From Version 10.0.0-beta.24510.1 -> To Version 11.0.0-beta.25562.6 --- eng/Version.Details.xml | 4 ++-- global.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 5d86dc7d1..15a01d10b 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -1,9 +1,9 @@ - + https://github.com/dotnet/arcade - 3a7f017be1c42a4ecc516446977cf55d429cf390 + 4bb60deca10193c27f5bdf0a6afc5878ef558455 https://github.com/dotnet/wpf diff --git a/global.json b/global.json index ebc7e2ff3..67a2feda4 100644 --- a/global.json +++ b/global.json @@ -12,7 +12,7 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25560.1", + "Microsoft.DotNet.Arcade.Sdk": 
"11.0.0-beta.25562.6", "Microsoft.DotNet.Arcade.Wpf.Sdk": "6.0.0-alpha.1.21071.6", "Microsoft.NET.Sdk.WindowsDesktop": "6.0.0-preview.6.21276.7" },