github.com/mono/cecil.git
author    Tlakaelel Axayakatl Ceja <tlakaelel.ceja@microsoft.com>  2022-10-06 22:25:27 +0300
committer GitHub <noreply@github.com>  2022-10-06 22:25:27 +0300
commit    a12da51e26be8df07e43a09bc56dc4db85df6204 (patch)
tree      053a40c2d667c61623addf0258f0b903ac3a6939
parent    1a88006d339e21a684b28efc62fe7b587b464fc3 (diff)
Add arcade infrastructure to dotnet/cecil (#37)
- Adds the eng/common folder from arcade
- Adds a NuGet.config file with the dotnet eng/tools/public package sources
- Adds build scripts at the root level that execute the eng/common/build scripts
- Adds eng/Versions.props and eng/Version.Details.xml files with minimal dependencies, since Cecil doesn't require anything; this mostly just sets up the package version
- Adds global.json
- Adds arcade artifacts to the .gitignore file
- Adds the Microsoft.DotNet.Arcade.Sdk
- Makes Mono.Cecil a package
- Removes frameworks that are not compatible with the Arcade SDK
- Makes the package non-shippable so it isn't published to NuGet
- Suppresses license validation, since Cecil's license differs from the MIT one
- Works around arcade's PublicKey and PublicKeyToken generation
- Modifies the version to match Cecil version 0.11.4.0
- Removes the Helix SDK, since Helix testing isn't used
- Adds the pipeline YAML file
- Adds support for NetCoreAppTestsCurrent (net7.0) and ToolsFramework (netstandard2.0)
- Adds a variable to find the resources folder, given the new structure of the artifacts directory
- Uses the VS test runner to avoid running tests through xunit
- Adds signing properties for 3rd-party libraries
-rw-r--r--  .gitignore | 5
-rw-r--r--  Directory.Build.props | 23
-rw-r--r--  Directory.Build.targets | 4
-rw-r--r--  Mono.Cecil.Tests.props | 3
-rw-r--r--  Mono.Cecil.csproj | 6
-rw-r--r--  Mono.Cecil.nuspec | 42
-rw-r--r--  NuGet.config | 10
-rw-r--r--  Test/Mono.Cecil.Tests.csproj | 11
-rw-r--r--  Test/Mono.Cecil.Tests/BaseTestFixture.cs | 10
-rw-r--r--  build.cmd | 2
-rw-r--r--  build.sh | 16
-rw-r--r--  eng/Signing.props | 9
-rw-r--r--  eng/Versions.props | 11
-rw-r--r--  eng/azure-pipelines.yml | 138
-rw-r--r--  eng/common/CIBuild.cmd | 2
-rw-r--r--  eng/common/PSScriptAnalyzerSettings.psd1 | 11
-rw-r--r--  eng/common/README.md | 28
-rw-r--r--  eng/common/SetupNugetSources.ps1 | 167
-rw-r--r--  eng/common/SetupNugetSources.sh | 171
-rw-r--r--  eng/common/build.ps1 | 161
-rw-r--r--  eng/common/build.sh | 247
-rw-r--r--  eng/common/cibuild.sh | 16
-rw-r--r--  eng/common/cross/arm/sources.list.bionic | 11
-rw-r--r--  eng/common/cross/arm/sources.list.focal | 11
-rw-r--r--  eng/common/cross/arm/sources.list.jammy | 11
-rw-r--r--  eng/common/cross/arm/sources.list.jessie | 3
-rw-r--r--  eng/common/cross/arm/sources.list.xenial | 11
-rw-r--r--  eng/common/cross/arm/sources.list.zesty | 11
-rw-r--r--  eng/common/cross/arm/tizen-build-rootfs.sh | 35
-rw-r--r--  eng/common/cross/arm/tizen-fetch.sh | 170
-rw-r--r--  eng/common/cross/arm/tizen/tizen.patch | 9
-rw-r--r--  eng/common/cross/arm64/sources.list.bionic | 11
-rw-r--r--  eng/common/cross/arm64/sources.list.buster | 11
-rw-r--r--  eng/common/cross/arm64/sources.list.focal | 11
-rw-r--r--  eng/common/cross/arm64/sources.list.jammy | 11
-rw-r--r--  eng/common/cross/arm64/sources.list.stretch | 12
-rw-r--r--  eng/common/cross/arm64/sources.list.xenial | 11
-rw-r--r--  eng/common/cross/arm64/sources.list.zesty | 11
-rw-r--r--  eng/common/cross/arm64/tizen-build-rootfs.sh | 35
-rw-r--r--  eng/common/cross/arm64/tizen-fetch.sh | 170
-rw-r--r--  eng/common/cross/arm64/tizen/tizen.patch | 9
-rw-r--r--  eng/common/cross/armel/armel.jessie.patch | 43
-rw-r--r--  eng/common/cross/armel/sources.list.jessie | 3
-rw-r--r--  eng/common/cross/armel/tizen-build-rootfs.sh | 35
-rw-r--r--  eng/common/cross/armel/tizen-fetch.sh | 170
-rw-r--r--  eng/common/cross/armel/tizen/tizen-dotnet.ks | 50
-rw-r--r--  eng/common/cross/armel/tizen/tizen.patch | 9
-rw-r--r--  eng/common/cross/armv6/sources.list.buster | 2
-rw-r--r--  eng/common/cross/build-android-rootfs.sh | 131
-rw-r--r--  eng/common/cross/build-rootfs.sh | 508
-rw-r--r--  eng/common/cross/ppc64le/sources.list.bionic | 11
-rw-r--r--  eng/common/cross/riscv64/sources.list.sid | 1
-rw-r--r--  eng/common/cross/s390x/sources.list.bionic | 11
-rw-r--r--  eng/common/cross/toolchain.cmake | 336
-rw-r--r--  eng/common/cross/x86/sources.list.bionic | 11
-rw-r--r--  eng/common/cross/x86/sources.list.focal | 11
-rw-r--r--  eng/common/cross/x86/sources.list.jammy | 11
-rw-r--r--  eng/common/cross/x86/sources.list.xenial | 11
-rw-r--r--  eng/common/cross/x86/tizen-build-rootfs.sh | 35
-rw-r--r--  eng/common/cross/x86/tizen-fetch.sh | 170
-rw-r--r--  eng/common/cross/x86/tizen/tizen.patch | 9
-rw-r--r--  eng/common/darc-init.ps1 | 47
-rw-r--r--  eng/common/darc-init.sh | 82
-rw-r--r--  eng/common/dotnet-install.cmd | 2
-rw-r--r--  eng/common/dotnet-install.ps1 | 28
-rw-r--r--  eng/common/dotnet-install.sh | 87
-rw-r--r--  eng/common/enable-cross-org-publishing.ps1 | 13
-rw-r--r--  eng/common/generate-locproject.ps1 | 142
-rw-r--r--  eng/common/generate-sbom-prep.ps1 | 21
-rw-r--r--  eng/common/generate-sbom-prep.sh | 34
-rw-r--r--  eng/common/helixpublish.proj | 26
-rw-r--r--  eng/common/init-tools-native.cmd | 3
-rw-r--r--  eng/common/init-tools-native.ps1 | 205
-rw-r--r--  eng/common/init-tools-native.sh | 238
-rw-r--r--  eng/common/internal-feed-operations.ps1 | 132
-rw-r--r--  eng/common/internal-feed-operations.sh | 141
-rw-r--r--  eng/common/internal/Directory.Build.props | 4
-rw-r--r--  eng/common/internal/NuGet.config | 7
-rw-r--r--  eng/common/internal/Tools.csproj | 30
-rw-r--r--  eng/common/msbuild.ps1 | 28
-rw-r--r--  eng/common/msbuild.sh | 58
-rw-r--r--  eng/common/native/CommonLibrary.psm1 | 400
-rw-r--r--  eng/common/native/common-library.sh | 172
-rw-r--r--  eng/common/native/init-compiler.sh | 144
-rw-r--r--  eng/common/native/install-cmake-test.sh | 117
-rw-r--r--  eng/common/native/install-cmake.sh | 117
-rw-r--r--  eng/common/native/install-tool.ps1 | 132
-rw-r--r--  eng/common/pipeline-logging-functions.ps1 | 260
-rw-r--r--  eng/common/pipeline-logging-functions.sh | 206
-rw-r--r--  eng/common/post-build/add-build-to-channel.ps1 | 48
-rw-r--r--  eng/common/post-build/check-channel-consistency.ps1 | 40
-rw-r--r--  eng/common/post-build/nuget-validation.ps1 | 24
-rw-r--r--  eng/common/post-build/post-build-utils.ps1 | 91
-rw-r--r--  eng/common/post-build/publish-using-darc.ps1 | 54
-rw-r--r--  eng/common/post-build/sourcelink-validation.ps1 | 319
-rw-r--r--  eng/common/post-build/symbols-validation.ps1 | 339
-rw-r--r--  eng/common/post-build/trigger-subscriptions.ps1 | 64
-rw-r--r--  eng/common/retain-build.ps1 | 45
-rw-r--r--  eng/common/sdk-task.ps1 | 97
-rw-r--r--  eng/common/sdl/NuGet.config | 18
-rw-r--r--  eng/common/sdl/configure-sdl-tool.ps1 | 116
-rw-r--r--  eng/common/sdl/execute-all-sdl-tools.ps1 | 165
-rw-r--r--  eng/common/sdl/extract-artifact-archives.ps1 | 63
-rw-r--r--  eng/common/sdl/extract-artifact-packages.ps1 | 80
-rw-r--r--  eng/common/sdl/init-sdl.ps1 | 55
-rw-r--r--  eng/common/sdl/packages.config | 4
-rw-r--r--  eng/common/sdl/run-sdl.ps1 | 49
-rw-r--r--  eng/common/sdl/sdl.ps1 | 38
-rw-r--r--  eng/common/templates/job/execute-sdl.yml | 133
-rw-r--r--  eng/common/templates/job/job.yml | 226
-rw-r--r--  eng/common/templates/job/onelocbuild.yml | 106
-rw-r--r--  eng/common/templates/job/publish-build-assets.yml | 147
-rw-r--r--  eng/common/templates/job/source-build.yml | 63
-rw-r--r--  eng/common/templates/job/source-index-stage1.yml | 66
-rw-r--r--  eng/common/templates/jobs/codeql-build.yml | 31
-rw-r--r--  eng/common/templates/jobs/jobs.yml | 106
-rw-r--r--  eng/common/templates/jobs/source-build.yml | 46
-rw-r--r--  eng/common/templates/post-build/common-variables.yml | 26
-rw-r--r--  eng/common/templates/post-build/post-build.yml | 285
-rw-r--r--  eng/common/templates/post-build/setup-maestro-vars.yml | 70
-rw-r--r--  eng/common/templates/post-build/trigger-subscription.yml | 13
-rw-r--r--  eng/common/templates/steps/add-build-to-channel.yml | 13
-rw-r--r--  eng/common/templates/steps/build-reason.yml | 12
-rw-r--r--  eng/common/templates/steps/execute-codeql.yml | 32
-rw-r--r--  eng/common/templates/steps/execute-sdl.yml | 88
-rw-r--r--  eng/common/templates/steps/generate-sbom.yml | 44
-rw-r--r--  eng/common/templates/steps/publish-logs.yml | 23
-rw-r--r--  eng/common/templates/steps/retain-build.yml | 28
-rw-r--r--  eng/common/templates/steps/run-on-unix.yml | 7
-rw-r--r--  eng/common/templates/steps/run-on-windows.yml | 7
-rw-r--r--  eng/common/templates/steps/run-script-ifequalelse.yml | 33
-rw-r--r--  eng/common/templates/steps/send-to-helix.yml | 91
-rw-r--r--  eng/common/templates/steps/source-build.yml | 102
-rw-r--r--  eng/common/templates/steps/telemetry-end.yml | 102
-rw-r--r--  eng/common/templates/steps/telemetry-start.yml | 241
-rw-r--r--  eng/common/templates/variables/sdl-variables.yml | 7
-rw-r--r--  eng/common/tools.ps1 | 919
-rw-r--r--  eng/common/tools.sh | 550
-rw-r--r--  global.json | 8
-rw-r--r--  rocks/Mono.Cecil.Rocks.csproj | 4
-rw-r--r--  rocks/Test/Mono.Cecil.Rocks.Tests.csproj | 9
-rw-r--r--  symbols/mdb/Mono.Cecil.Mdb.csproj | 4
-rw-r--r--  symbols/mdb/Test/Mono.Cecil.Mdb.Tests.csproj | 7
-rw-r--r--  symbols/pdb/Microsoft.Cci.Pdb/IntHashTable.cs | 16
-rw-r--r--  symbols/pdb/Mono.Cecil.Pdb.csproj | 4
-rw-r--r--  symbols/pdb/Mono.Cecil.Pdb/SymWriter.cs | 2
-rw-r--r--  symbols/pdb/Test/Mono.Cecil.Pdb.Tests.csproj | 7
147 files changed, 11411 insertions(+), 77 deletions(-)
diff --git a/.gitignore b/.gitignore
index 956178e..e31eccc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,3 +16,8 @@ project.lock.json
.idea/
packages/*
nuget.exe
+
+# arcade artifacts
+.dotnet
+.packages
+artifacts
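[Note: the three new entries are the directories Arcade's bootstrap creates at the repository root — the local SDK, the restored packages, and all build output. As a sketch, assuming a POSIX shell, they can be wiped without touching tracked files, since git clean -X deletes only ignored files:

    # Sketch: remove the ignored Arcade outputs only
    git clean -Xdf .dotnet .packages artifacts
]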
diff --git a/Directory.Build.props b/Directory.Build.props
index 06f38c4..1175095 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -1,13 +1,21 @@
<Project>
+ <Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
<PropertyGroup>
+ <!-- The TFM for the product -->
+ <ToolsFramework>netstandard2.0</ToolsFramework>
+ <!-- The TFM for the test projects -->
+ <NetCoreAppTestsCurrent>net7.0</NetCoreAppTestsCurrent>
<EnableDefaultItems>false</EnableDefaultItems>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
<IsPackable>false</IsPackable>
+ <IsShipping>false</IsShipping>
+ <SuppressLicenseValidation>true</SuppressLicenseValidation>
<Configurations>Debug;Release</Configurations>
<PublicSign Condition=" '$(OS)' != 'Windows_NT' ">true</PublicSign>
<SignAssembly>true</SignAssembly>
+ <PackageLicenseExpression>MIT</PackageLicenseExpression>
<AssemblyOriginatorKeyFile>$(MSBuildThisFileDirectory)\cecil.snk</AssemblyOriginatorKeyFile>
- <DefineConstants Condition=" '$(TargetFramework)' == 'netstandard2.0' OR '$(TargetFramework)' == 'netcoreapp3.1' ">$(DefineConstants);NET_CORE</DefineConstants>
+ <DefineConstants>$(DefineConstants);NET_CORE</DefineConstants>
<RootNamespace></RootNamespace>
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net40' ">
@@ -27,4 +35,17 @@
<CecilOverrides Condition="'$(CecilOverrides)' == ''">$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildProjectDirectory), Mono.Cecil.overrides))\Mono.Cecil.overrides</CecilOverrides>
</PropertyGroup>
<Import Project="$(CecilOverrides)" Condition="Exists('$(CecilOverrides)')" />
+ <PropertyGroup>
+ <!-- Work around
+ https://github.com/dotnet/arcade/issues/2321. This disables
+ arcade's selection of AssemblyOriginatorKeyFile based on the
+ StrongNameKeyId, since we want to use cecil's key file. -->
+ <StrongNameKeyId>CecilStrongNameKey</StrongNameKeyId>
+ <!-- PublicKey and PublicKeyToken are only consumed by arcade's
+ InternalsVisibleTo generator. We don't use this, but we set
+ these properties anyway to avoid hitting an arcade check. -->
+ <PublicKey>00240000048000009400000006020000002400005253413100040000010001002b5c9f7f04346c324a3176f8d3ee823bbf2d60efdbc35f86fd9e65ea3e6cd11bcdcba3a353e55133c8ac5c4caaba581b2c6dfff2cc2d0edc43959ddb86b973300a479a82419ef489c3225f1fe429a708507bd515835160e10bc743d20ca33ab9570cfd68d479fcf0bc797a763bec5d1000f0159ef619e709d915975e87beebaf</PublicKey>
+ <PublicKeyToken>50cebf1cceb9d05e</PublicKeyToken>
+ <NoWarn>$(NoWarn);0618</NoWarn>
+ </PropertyGroup>
</Project>
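[Note: the hard-coded PublicKey/PublicKeyToken pair above corresponds to cecil.snk. As a hedged sketch — assuming the .NET Framework SDK's sn.exe strong-name tool is available on PATH — values like these can be regenerated from the key pair:

    # Sketch: derive the public key blob and its token from the key pair
    sn -p cecil.snk cecil.pub    # extract the public key into cecil.pub
    sn -tp cecil.pub             # print the public key and its token
]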
diff --git a/Directory.Build.targets b/Directory.Build.targets
new file mode 100644
index 0000000..177d64c
--- /dev/null
+++ b/Directory.Build.targets
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project>
+ <Import Project="Sdk.targets" Sdk="Microsoft.DotNet.Arcade.Sdk" />
+</Project>
diff --git a/Mono.Cecil.Tests.props b/Mono.Cecil.Tests.props
index bd6df1f..35cbbfc 100644
--- a/Mono.Cecil.Tests.props
+++ b/Mono.Cecil.Tests.props
@@ -6,9 +6,6 @@
<PackageReference Include="NUnit">
<Version>3.11.0</Version>
</PackageReference>
- <PackageReference Include="Microsoft.NET.Test.Sdk">
- <Version>15.9.0</Version>
- </PackageReference>
<PackageReference Include="NUnit3TestAdapter">
<Version>3.12.0</Version>
</PackageReference>
diff --git a/Mono.Cecil.csproj b/Mono.Cecil.csproj
index d374551..b1c5a14 100644
--- a/Mono.Cecil.csproj
+++ b/Mono.Cecil.csproj
@@ -1,6 +1,8 @@
-<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="Current">
+<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
- <TargetFrameworks>netstandard2.0;net40</TargetFrameworks>
+ <TargetFramework>$(ToolsFramework)</TargetFramework>
+ <PackageId>Mono.Cecil</PackageId>
+ <IsPackable>true</IsPackable>
</PropertyGroup>
<ItemGroup>
<Compile Include="ProjectInfo.cs" />
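[Note: with the repo-wide IsPackable default overridden and a PackageId set, the package is now produced from the project itself rather than from the deleted nuspec. A minimal sketch of packing it directly, assuming the Arcade SDK can be restored from the configured feeds:

    # Sketch: produce the Mono.Cecil .nupkg straight from the project file
    dotnet pack Mono.Cecil.csproj -c Release
]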
diff --git a/Mono.Cecil.nuspec b/Mono.Cecil.nuspec
deleted file mode 100644
index 626c5ac..0000000
--- a/Mono.Cecil.nuspec
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0"?>
-<package xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <metadata xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">
- <id>Mono.Cecil</id>
- <version>0.11.4.0</version>
- <title>Mono.Cecil</title>
- <authors>Jb Evain</authors>
- <owners>Jb Evain</owners>
- <license type="expression">MIT</license>
- <requireLicenseAcceptance>false</requireLicenseAcceptance>
- <projectUrl>http://github.com/jbevain/cecil/</projectUrl>
- <summary>Cecil is a library written by Jb Evain to generate and inspect programs and libraries in the ECMA CIL format.</summary>
- <description>Cecil is a library written by Jb Evain to generate and inspect programs and libraries in the ECMA CIL format. It has full support for generics, and support some debugging symbol format. In simple English, with Cecil, you can load existing managed assemblies, browse all the contained types, modify them on the fly and save back to the disk the modified assembly.</description>
- <language>en-US</language>
- <tags>assembly assemblies module modules il cil msil bytecode reflection injection cecil mono aop</tags>
- <dependencies>
- <group targetFramework=".NETFramework4.0" />
- <group targetFramework=".NETStandard2.0" />
- </dependencies>
- </metadata>
- <files>
- <file src="bin\Release\net40\Mono.Cecil.dll" target="lib/net40" />
- <file src="bin\Release\net40\Mono.Cecil.pdb" target="lib/net40" />
- <file src="bin\Release\netstandard2.0\Mono.Cecil.dll" target="lib/netstandard2.0" />
- <file src="bin\Release\netstandard2.0\Mono.Cecil.pdb" target="lib/netstandard2.0" />
-
- <file src="rocks\bin\Release\net40\Mono.Cecil.Rocks.dll" target="lib/net40" />
- <file src="rocks\bin\Release\net40\Mono.Cecil.Rocks.pdb" target="lib/net40" />
- <file src="rocks\bin\Release\netstandard2.0\Mono.Cecil.Rocks.dll" target="lib/netstandard2.0" />
- <file src="rocks\bin\Release\netstandard2.0\Mono.Cecil.Rocks.pdb" target="lib/netstandard2.0" />
-
- <file src="symbols\mdb\bin\Release\net40\Mono.Cecil.Mdb.dll" target="lib/net40" />
- <file src="symbols\mdb\bin\Release\net40\Mono.Cecil.Mdb.pdb" target="lib/net40" />
- <file src="symbols\mdb\bin\Release\netstandard2.0\Mono.Cecil.Mdb.dll" target="lib/netstandard2.0" />
- <file src="symbols\mdb\bin\Release\netstandard2.0\Mono.Cecil.Mdb.pdb" target="lib/netstandard2.0" />
-
- <file src="symbols\pdb\bin\Release\net40\Mono.Cecil.Pdb.dll" target="lib/net40" />
- <file src="symbols\pdb\bin\Release\net40\Mono.Cecil.Pdb.pdb" target="lib/net40" />
- <file src="symbols\pdb\bin\Release\netstandard2.0\Mono.Cecil.Pdb.dll" target="lib/netstandard2.0" />
- <file src="symbols\pdb\bin\Release\netstandard2.0\Mono.Cecil.Pdb.pdb" target="lib/netstandard2.0" />
- </files>
-</package>
diff --git a/NuGet.config b/NuGet.config
new file mode 100644
index 0000000..8c51fcf
--- /dev/null
+++ b/NuGet.config
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<configuration>
+ <packageSources>
+ <clear />
+ <add key="dotnet-eng" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json" />
+ <add key="dotnet-tools" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json" />
+ <add key="dotnet-public" value="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json" />
+ </packageSources>
+ <disabledPackageSources />
+</configuration>
\ No newline at end of file
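[Note: the <clear /> element drops any package sources inherited from user- or machine-level NuGet configs, so restore sees exactly the three dnceng feeds above. A quick sketch of verifying which sources a config resolves to, using the standard dotnet CLI:

    # Sketch: list the package sources NuGet will use for this config
    dotnet nuget list source --configfile NuGet.config
]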
diff --git a/Test/Mono.Cecil.Tests.csproj b/Test/Mono.Cecil.Tests.csproj
index f7762e3..2383bc2 100644
--- a/Test/Mono.Cecil.Tests.csproj
+++ b/Test/Mono.Cecil.Tests.csproj
@@ -1,9 +1,9 @@
-<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="Current">
+<Project Sdk="Microsoft.NET.Sdk">
<Import Project="..\Mono.Cecil.Tests.props" />
<PropertyGroup>
- <TargetFrameworks>netcoreapp3.1;net40</TargetFrameworks>
+ <TargetFramework>$(NetCoreAppTestsCurrent)</TargetFramework>
</PropertyGroup>
- <ItemGroup Condition=" '$(TargetFramework)' == 'netcoreapp3.1' ">
+ <ItemGroup Condition=" '$(TargetFramework)' == '$(NetCoreAppTestsCurrent)' ">
<PackageReference Include="Microsoft.CodeAnalysis.CSharp">
<Version>2.10.0</Version>
</PackageReference>
@@ -26,4 +26,9 @@
<ItemGroup>
<None Include="Resources\**\*" />
</ItemGroup>
+ <ItemGroup>
+ <RuntimeHostConfigurationOption Include="Mono.Cecil.ResourcesDirectory">
+ <Value>$(MSBuildThisFileDirectory)Resources</Value>
+ </RuntimeHostConfigurationOption>
+ </ItemGroup>
</Project>
diff --git a/Test/Mono.Cecil.Tests/BaseTestFixture.cs b/Test/Mono.Cecil.Tests/BaseTestFixture.cs
index 046791b..6504249 100644
--- a/Test/Mono.Cecil.Tests/BaseTestFixture.cs
+++ b/Test/Mono.Cecil.Tests/BaseTestFixture.cs
@@ -99,14 +99,8 @@ namespace Mono.Cecil.Tests {
public static string FindResourcesDirectory (string sourceFilePath)
{
- var path = Path.GetDirectoryName (sourceFilePath);
- while (!Directory.Exists (Path.Combine (path, "Resources"))) {
- var old = path;
- path = Path.GetDirectoryName (path);
- Assert.AreNotEqual (old, path);
- }
-
- return Path.Combine (path, "Resources");
+ var resourcesDirectory = (string)AppContext.GetData ("Mono.Cecil.ResourcesDirectory")!;
+ return Path.GetFullPath (resourcesDirectory);
}
public static void AssertCode (string expected, MethodDefinition method)
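[Note: the RuntimeHostConfigurationOption item added to the test project is emitted into the generated runtimeconfig.json as a configProperties entry, which is what the AppContext.GetData call above reads at run time. A sketch of inspecting it with jq; the output path is illustrative and assumes Arcade's default artifacts layout:

    # Sketch: confirm the resources path baked into the test runtime config (path is illustrative)
    jq '.runtimeOptions.configProperties["Mono.Cecil.ResourcesDirectory"]' \
      artifacts/bin/Mono.Cecil.Tests/Debug/net7.0/Mono.Cecil.Tests.runtimeconfig.json
]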
diff --git a/build.cmd b/build.cmd
new file mode 100644
index 0000000..675fdf8
--- /dev/null
+++ b/build.cmd
@@ -0,0 +1,2 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0eng\common\Build.ps1""" -restore -build %*"
diff --git a/build.sh b/build.sh
new file mode 100644
index 0000000..8477d5a
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+"$scriptroot/eng/common/build.sh" --build --restore $@
diff --git a/eng/Signing.props b/eng/Signing.props
new file mode 100644
index 0000000..8d2bc3a
--- /dev/null
+++ b/eng/Signing.props
@@ -0,0 +1,9 @@
+<Project>
+ <ItemGroup>
+ <!-- These libraries are 3rd-party based on the copyright info. -->
+ <FileSignInfo Include="Mono.Cecil.dll" CertificateName="3PartySHA2" />
+ <FileSignInfo Include="Mono.Cecil.Rocks.dll" CertificateName="3PartySHA2" />
+ <FileSignInfo Include="Mono.Cecil.Mdb.dll" CertificateName="3PartySHA2" />
+ <FileSignInfo Include="Mono.Cecil.Pdb.dll" CertificateName="3PartySHA2" />
+ </ItemGroup>
+</Project>
diff --git a/eng/Versions.props b/eng/Versions.props
new file mode 100644
index 0000000..20d0206
--- /dev/null
+++ b/eng/Versions.props
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project>
+ <PropertyGroup>
+ <!-- This repo version -->
+ <!-- We need to modify the version here manually every time the version changes in the upstream branch jbevain/cecil -->
+ <VersionPrefix>0.11.4.0</VersionPrefix>
+ <PreReleaseVersionLabel>alpha</PreReleaseVersionLabel>
+ <!-- Use VSTestRunner instead of allowing Arcade to use xunit, since Cecil tests are nunit based -->
+ <UseVSTestRunner>true</UseVSTestRunner>
+ </PropertyGroup>
+</Project>
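[Note: Arcade derives the full package version from VersionPrefix plus PreReleaseVersionLabel and a date-based build number, and because this repo sets IsShipping=false, packed output lands in a NonShippable folder. A hedged sketch — the path and the version shape below are Arcade defaults, shown for illustration:

    # Sketch: pack locally and inspect the resulting non-shippable package
    ./build.sh --pack --configuration Release
    ls artifacts/packages/Release/NonShippable/   # e.g. Mono.Cecil.0.11.4.0-alpha.<build>.<rev>.nupkg
]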
diff --git a/eng/azure-pipelines.yml b/eng/azure-pipelines.yml
new file mode 100644
index 0000000..e826b9f
--- /dev/null
+++ b/eng/azure-pipelines.yml
@@ -0,0 +1,138 @@
+trigger:
+ batch: true
+ branches:
+ include:
+ - main
+
+pr:
+ branches:
+ include:
+ - main
+
+variables:
+- name: officialBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}
+- name: _BuildConfig
+ value: Release
+- name: _BuildArgs
+ value:
+- name: _DotNetArtifactsCategory
+ value: .NETCore
+- ${{ if eq(variables.officialBuild, 'true') }}:
+ - name: _BuildArgs
+ value: ${{ format('{0} /p:OfficialBuildId=$(Build.BuildNumber) /p:Test=false /p:IntegrationTest=false', variables['_BuildArgs']) }}
+ # Provide HelixApiAccessToken for telemetry
+ - group: DotNet-HelixApi-Access
+
+stages:
+- stage: build
+ displayName: Build
+ jobs:
+ - template: /eng/common/templates/jobs/jobs.yml
+ parameters:
+
+ enableTelemetry: true # send helix telemetry
+ enablePublishUsingPipelines: true
+ enablePublishBuildArtifacts: true # publish build logs to pipeline storage
+ ${{ if eq(variables.officialBuild, 'false') }}:
+ enablePublishTestResults: true
+ testResultsFormat: vstest
+ enablePublishBuildAssets: true # generate build manifests and publish to BAR in internal builds
+ enableMicrobuild: true # only affects internal builds
+ enableSourceBuild: true
+
+ jobs:
+
+ - job: Windows_NT
+ pool:
+ ${{ if eq(variables.officialBuild, 'false') }}:
+ vmImage: windows-latest
+ ${{ if eq(variables.officialBuild, 'true') }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals 1es-windows-2019
+ variables:
+ - ${{ if eq(variables.officialBuild, 'false') }}:
+ - _SignType: test
+ - _PublishArgs: ''
+ - ${{ if eq(variables.officialBuild, 'true') }}:
+ - group: DotNet-Blob-Feed
+ - _TeamName: .NET # required by microbuild install step
+ - _SignType: real # used in the arcade templates that install microbuild.
+ - _DotNetPublishToBlobFeed: true # used by arcade templates that gather build asset manifests
+ - _PublishArgs: /p:DotNetPublishToBlobFeed=$(_DotNetPublishToBlobFeed)
+ /p:DotNetPublishBlobFeedUrl=https://dotnetfeed.blob.core.windows.net/dotnet-core/index.json
+ /p:DotNetPublishBlobFeedKey=$(dotnetfeed-storage-access-key-1)
+ /p:DotNetArtifactsCategory=$(_DotNetArtifactsCategory)
+ /p:DotNetPublishUsingPipelines=true
+ - DotNetSignType: ${{ format('{0}', variables._SignType) }} # DotNetSignType defaults to real if not specified
+ steps:
+ - checkout: self
+ submodules: false
+ - script: eng\common\cibuild.cmd -projects $(Build.SourcesDirectory)\Mono.Cecil.sln
+ -configuration $(_BuildConfig) $(_BuildArgs) $(_PublishArgs)
+ -integrationTest
+ -nodeReuse "$false" # https://github.com/Microsoft/vstest/issues/1503
+ env:
+ # https://github.com/Microsoft/vstest/issues/1503#issuecomment-410732193
+ MSBUILDENSURESTDOUTFORTASKPROCESSES: 1
+ ${{ if eq(variables.officialBuild, 'false') }}:
+ displayName: Build Mono.Cecil.sln $(_BuildConfig)
+ ${{ if eq(variables.officialBuild, 'true') }}:
+ displayName: Build and publish Mono.Cecil.sln $(_BuildConfig)
+
+ - job: SourceBuild_Managed
+ displayName: Source-Build (Managed)
+ pool:
+ ${{ if eq(variables.officialBuild, 'false' )}}:
+ vmImage: ubuntu-latest
+ ${{ if eq(variables.officialBuild, 'true' )}}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-7-3e800f1-20190501005343'
+ workspace:
+ clean: all
+ steps:
+ - checkout: self
+ submodules: false
+ - template: /eng/common/templates/steps/source-build.yml
+ parameters:
+ enablePublishTestResults: true
+
+ - ${{ if eq(variables.officialBuild, 'false') }}:
+ - job: Linux
+ condition: eq(variables.officialBuild, 'false')
+ pool:
+ vmImage: ubuntu-latest
+ steps:
+ - checkout: self
+ submodules: false
+ - script: eng/common/cibuild.sh --projects $(Build.SourcesDirectory)/Mono.Cecil.sln
+ --configuration $(_BuildConfig) $(_BuildArgs)
+ --integrationTest
+ --nodeReuse false # https://github.com/Microsoft/vstest/issues/1503
+ env:
+ # https://github.com/Microsoft/vstest/issues/1503#issuecomment-410732193
+ MSBUILDENSURESTDOUTFORTASKPROCESSES: 1
+ displayName: Build Mono.Cecil.sln $(_BuildConfig)
+
+ - ${{ if eq(variables.officialBuild, 'false') }}:
+ - job: macOS
+ pool:
+ vmImage: macOS-latest
+ steps:
+ - checkout: self
+ submodules: false
+ - script: eng/common/cibuild.sh --projects $(Build.SourcesDirectory)/Mono.Cecil.sln
+ --configuration $(_BuildConfig) $(_BuildArgs)
+ --integrationTest
+ --nodeReuse false # https://github.com/Microsoft/vstest/issues/1503
+ env:
+ # https://github.com/Microsoft/vstest/issues/1503#issuecomment-410732193
+ MSBUILDENSURESTDOUTFORTASKPROCESSES: 1
+ displayName: Build Mono.Cecil.sln $(_BuildConfig)
+
+# Post-Build Arcade logic
+- ${{ if eq(variables.officialBuild, 'true') }}:
+ - template: /eng/common/templates/post-build/post-build.yml
+ parameters:
+ publishingInfraVersion: 3
diff --git a/eng/common/CIBuild.cmd b/eng/common/CIBuild.cmd
new file mode 100644
index 0000000..56c2f25
--- /dev/null
+++ b/eng/common/CIBuild.cmd
@@ -0,0 +1,2 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0Build.ps1""" -restore -build -test -sign -pack -publish -ci %*"
\ No newline at end of file
diff --git a/eng/common/PSScriptAnalyzerSettings.psd1 b/eng/common/PSScriptAnalyzerSettings.psd1
new file mode 100644
index 0000000..4c1ea7c
--- /dev/null
+++ b/eng/common/PSScriptAnalyzerSettings.psd1
@@ -0,0 +1,11 @@
+@{
+ IncludeRules=@('PSAvoidUsingCmdletAliases',
+ 'PSAvoidUsingWMICmdlet',
+ 'PSAvoidUsingPositionalParameters',
+ 'PSAvoidUsingInvokeExpression',
+ 'PSUseDeclaredVarsMoreThanAssignments',
+ 'PSUseCmdletCorrectly',
+ 'PSStandardDSCFunctionsInResource',
+ 'PSUseIdenticalMandatoryParametersForDSC',
+ 'PSUseIdenticalParametersForDSC')
+}
\ No newline at end of file
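[Note: this settings file enables a fixed set of PSScriptAnalyzer rules. A sketch of linting the scripts against it, assuming pwsh with the PSScriptAnalyzer module installed:

    # Sketch: run the analyzer over eng/common with the checked-in settings
    pwsh -Command "Invoke-ScriptAnalyzer -Path eng/common -Recurse -Settings eng/common/PSScriptAnalyzerSettings.psd1"
]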
diff --git a/eng/common/README.md b/eng/common/README.md
new file mode 100644
index 0000000..ff49c37
--- /dev/null
+++ b/eng/common/README.md
@@ -0,0 +1,28 @@
+# Don't touch this folder
+
+ uuuuuuuuuuuuuuuuuuuu
+ u" uuuuuuuuuuuuuuuuuu "u
+ u" u$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ $ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
+ $ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
+ $ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
+ $ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
+ $ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
+ $ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
+ "u "$$$$$$$$$$$$$$$$$$$$" u"
+ "u """""""""""""""""" u"
+ """"""""""""""""""""
+
+!!! Changes made in this directory are subject to being overwritten by automation !!!
+
+The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
new file mode 100644
index 0000000..6e99723
--- /dev/null
+++ b/eng/common/SetupNugetSources.ps1
@@ -0,0 +1,167 @@
+# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
+# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+#
+# What the script does is iterate over all package sources in the given NuGet.config and add a credential entry
+# under <packageSourceCredentials> for each Maestro-managed private feed. Two additional credential
+# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
+#
+# This script needs to be called in every job that will restore packages when the base repo has
+# private AzDO feeds in its NuGet.config.
+#
+# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
+# from the AzureDevOps-Artifact-Feeds-Pats variable group.
+#
+# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing
+#
+# - task: PowerShell@2
+# displayName: Setup Private Feeds Credentials
+# condition: eq(variables['Agent.OS'], 'Windows_NT')
+# inputs:
+# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.ps1
+# arguments: -ConfigFile $(Build.SourcesDirectory)/NuGet.config -Password $Env:Token
+# env:
+# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+
+[CmdletBinding()]
+param (
+ [Parameter(Mandatory = $true)][string]$ConfigFile,
+ [Parameter(Mandatory = $true)][string]$Password
+)
+
+$ErrorActionPreference = "Stop"
+Set-StrictMode -Version 2.0
+[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
+
+. $PSScriptRoot\tools.ps1
+
+# Add source entry to PackageSources
+function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $Password) {
+ $packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
+
+ if ($packageSource -eq $null)
+ {
+ $packageSource = $doc.CreateElement("add")
+ $packageSource.SetAttribute("key", $SourceName)
+ $packageSource.SetAttribute("value", $SourceEndPoint)
+ $sources.AppendChild($packageSource) | Out-Null
+ }
+ else {
+ Write-Host "Package source $SourceName already present."
+ }
+
+ AddCredential -Creds $creds -Source $SourceName -Username $Username -Password $Password
+}
+
+# Add a credential node for the specified source
+function AddCredential($creds, $source, $username, $password) {
+ # Looks for credential configuration for the given SourceName. Create it if none is found.
+ $sourceElement = $creds.SelectSingleNode($Source)
+ if ($sourceElement -eq $null)
+ {
+ $sourceElement = $doc.CreateElement($Source)
+ $creds.AppendChild($sourceElement) | Out-Null
+ }
+
+ # Add the <Username> node to the credential if none is found.
+ $usernameElement = $sourceElement.SelectSingleNode("add[@key='Username']")
+ if ($usernameElement -eq $null)
+ {
+ $usernameElement = $doc.CreateElement("add")
+ $usernameElement.SetAttribute("key", "Username")
+ $sourceElement.AppendChild($usernameElement) | Out-Null
+ }
+ $usernameElement.SetAttribute("value", $Username)
+
+ # Add the <ClearTextPassword> to the credential if none is found.
+ # Add it as a clear text because there is no support for encrypted ones in non-windows .Net SDKs.
+ # -> https://github.com/NuGet/Home/issues/5526
+ $passwordElement = $sourceElement.SelectSingleNode("add[@key='ClearTextPassword']")
+ if ($passwordElement -eq $null)
+ {
+ $passwordElement = $doc.CreateElement("add")
+ $passwordElement.SetAttribute("key", "ClearTextPassword")
+ $sourceElement.AppendChild($passwordElement) | Out-Null
+ }
+ $passwordElement.SetAttribute("value", $Password)
+}
+
+function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Password) {
+ $maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
+
+ Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds."
+
+ ForEach ($PackageSource in $maestroPrivateSources) {
+ Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
+ AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -Password $Password
+ }
+}
+
+function EnablePrivatePackageSources($DisabledPackageSources) {
+ $maestroPrivateSources = $DisabledPackageSources.SelectNodes("add[contains(@key,'darc-int')]")
+ ForEach ($DisabledPackageSource in $maestroPrivateSources) {
+ Write-Host "`tEnsuring private source '$($DisabledPackageSource.key)' is enabled by deleting it from disabledPackageSource"
+ # Due to https://github.com/NuGet/Home/issues/10291, we must actually remove the disabled entries
+ $DisabledPackageSources.RemoveChild($DisabledPackageSource)
+ }
+}
+
+if (!(Test-Path $ConfigFile -PathType Leaf)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile"
+ ExitWithExitCode 1
+}
+
+if (!$Password) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Eng/common/SetupNugetSources.ps1 returned a non-zero exit code. Please supply a valid PAT'
+ ExitWithExitCode 1
+}
+
+# Load NuGet.config
+$doc = New-Object System.Xml.XmlDocument
+$filename = (Get-Item $ConfigFile).FullName
+$doc.Load($filename)
+
+# Get reference to <PackageSources> or create one if none exist already
+$sources = $doc.DocumentElement.SelectSingleNode("packageSources")
+if ($sources -eq $null) {
+ $sources = $doc.CreateElement("packageSources")
+ $doc.DocumentElement.AppendChild($sources) | Out-Null
+}
+
+# Looks for a <PackageSourceCredentials> node. Create it if none is found.
+$creds = $doc.DocumentElement.SelectSingleNode("packageSourceCredentials")
+if ($creds -eq $null) {
+ $creds = $doc.CreateElement("packageSourceCredentials")
+ $doc.DocumentElement.AppendChild($creds) | Out-Null
+}
+
+# Check for disabledPackageSources; we'll enable any darc-int ones we find there
+$disabledSources = $doc.DocumentElement.SelectSingleNode("disabledPackageSources")
+if ($disabledSources -ne $null) {
+ Write-Host "Checking for any darc-int disabled package sources in the disabledPackageSources node"
+ EnablePrivatePackageSources -DisabledPackageSources $disabledSources
+}
+
+$userName = "dn-bot"
+
+# Insert credential nodes for Maestro's private feeds
+InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password
+
+# 3.1 uses a different feed url format so it's handled differently here
+$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
+if ($dotnet31Source -ne $null) {
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
+}
+
+$dotnetVersions = @('5','6','7')
+
+foreach ($dotnetVersion in $dotnetVersions) {
+ $feedPrefix = "dotnet" + $dotnetVersion;
+ $dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
+ if ($dotnetSource -ne $null) {
+ AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ }
+}
+
+$doc.Save($filename)
diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh
new file mode 100644
index 0000000..8af7d89
--- /dev/null
+++ b/eng/common/SetupNugetSources.sh
@@ -0,0 +1,171 @@
+#!/usr/bin/env bash
+
+# This file is a temporary workaround for internal builds to be able to restore from private AzDO feeds.
+# This file should be removed as part of this issue: https://github.com/dotnet/arcade/issues/4080
+#
+# What the script does is iterate over all package sources in the given NuGet.config and add a credential entry
+# under <packageSourceCredentials> for each Maestro-managed private feed. Two additional credential
+# entries are also added for the two private static internal feeds: dotnet3-internal and dotnet3-internal-transport.
+#
+# This script needs to be called in every job that will restore packages when the base repo has
+# private AzDO feeds in its NuGet.config.
+#
+# See example YAML call for this script below. Note the use of the variable `$(dn-bot-dnceng-artifact-feeds-rw)`
+# from the AzureDevOps-Artifact-Feeds-Pats variable group.
+#
+# Any disabledPackageSources entries which start with "darc-int" will be re-enabled as part of this script executing.
+#
+# - task: Bash@3
+# displayName: Setup Private Feeds Credentials
+# inputs:
+# filePath: $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+# arguments: $(Build.SourcesDirectory)/NuGet.config $Token
+# condition: ne(variables['Agent.OS'], 'Windows_NT')
+# env:
+# Token: $(dn-bot-dnceng-artifact-feeds-rw)
+
+ConfigFile=$1
+CredToken=$2
+NL='\n'
+TB=' '
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+if [ ! -f "$ConfigFile" ]; then
+ Write-PipelineTelemetryError -Category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Couldn't find the NuGet config file: $ConfigFile"
+ ExitWithExitCode 1
+fi
+
+if [ -z "$CredToken" ]; then
+ Write-PipelineTelemetryError -category 'Build' "Error: Eng/common/SetupNugetSources.sh returned a non-zero exit code. Please supply a valid PAT"
+ ExitWithExitCode 1
+fi
+
+if [[ `uname -s` == "Darwin" ]]; then
+ NL=$'\\\n'
+ TB=''
+fi
+
+# Ensure there is a <packageSources>...</packageSources> section.
+grep -i "<packageSources>" $ConfigFile
+if [ "$?" != "0" ]; then
+ echo "Adding <packageSources>...</packageSources> section."
+ ConfigNodeHeader="<configuration>"
+ PackageSourcesTemplate="${TB}<packageSources>${NL}${TB}</packageSources>"
+
+ sed -i.bak "s|$ConfigNodeHeader|$ConfigNodeHeader${NL}$PackageSourcesTemplate|" $ConfigFile
+fi
+
+# Ensure there is a <packageSourceCredentials>...</packageSourceCredentials> section.
+grep -i "<packageSourceCredentials>" $ConfigFile
+if [ "$?" != "0" ]; then
+ echo "Adding <packageSourceCredentials>...</packageSourceCredentials> section."
+
+ PackageSourcesNodeFooter="</packageSources>"
+ PackageSourceCredentialsTemplate="${TB}<packageSourceCredentials>${NL}${TB}</packageSourceCredentials>"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourcesNodeFooter${NL}$PackageSourceCredentialsTemplate|" $ConfigFile
+fi
+
+PackageSources=()
+
+# Ensure dotnet3.1-internal and dotnet3.1-internal-transport are in the packageSources if the public dotnet3.1 feeds are present
+grep -i "<add key=\"dotnet3.1\"" $ConfigFile
+if [ "$?" == "0" ]; then
+ grep -i "<add key=\"dotnet3.1-internal\"" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding dotnet3.1-internal to the packageSources."
+ PackageSourcesNodeFooter="</packageSources>"
+ PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2\" />"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ fi
+ PackageSources+=('dotnet3.1-internal')
+
+ grep -i "<add key=\"dotnet3.1-internal-transport\">" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding dotnet3.1-internal-transport to the packageSources."
+ PackageSourcesNodeFooter="</packageSources>"
+ PackageSourceTemplate="${TB}<add key=\"dotnet3.1-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2\" />"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ fi
+ PackageSources+=('dotnet3.1-internal-transport')
+fi
+
+DotNetVersions=('5' '6' '7')
+
+for DotNetVersion in ${DotNetVersions[@]} ; do
+ FeedPrefix="dotnet${DotNetVersion}";
+ grep -i "<add key=\"$FeedPrefix\"" $ConfigFile
+ if [ "$?" == "0" ]; then
+ grep -i "<add key=\"$FeedPrefix-internal\"" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding $FeedPrefix-internal to the packageSources."
+ PackageSourcesNodeFooter="</packageSources>"
+ PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal/nuget/v2\" />"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ fi
+ PackageSources+=("$FeedPrefix-internal")
+
+ grep -i "<add key=\"$FeedPrefix-internal-transport\">" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding $FeedPrefix-internal-transport to the packageSources."
+ PackageSourcesNodeFooter="</packageSources>"
+ PackageSourceTemplate="${TB}<add key=\"$FeedPrefix-internal-transport\" value=\"https://pkgs.dev.azure.com/dnceng/internal/_packaging/$FeedPrefix-internal-transport/nuget/v2\" />"
+
+ sed -i.bak "s|$PackageSourcesNodeFooter|$PackageSourceTemplate${NL}$PackageSourcesNodeFooter|" $ConfigFile
+ fi
+ PackageSources+=("$FeedPrefix-internal-transport")
+ fi
+done
+
+# I want things split line by line
+PrevIFS=$IFS
+IFS=$'\n'
+PackageSources+="$IFS"
+PackageSources+=$(grep -oh '"darc-int-[^"]*"' $ConfigFile | tr -d '"')
+IFS=$PrevIFS
+
+for FeedName in ${PackageSources[@]} ; do
+ # Check if there is no existing credential for this FeedName
+ grep -i "<$FeedName>" $ConfigFile
+ if [ "$?" != "0" ]; then
+ echo "Adding credentials for $FeedName."
+
+ PackageSourceCredentialsNodeFooter="</packageSourceCredentials>"
+ NewCredential="${TB}${TB}<$FeedName>${NL}<add key=\"Username\" value=\"dn-bot\" />${NL}<add key=\"ClearTextPassword\" value=\"$CredToken\" />${NL}</$FeedName>"
+
+ sed -i.bak "s|$PackageSourceCredentialsNodeFooter|$NewCredential${NL}$PackageSourceCredentialsNodeFooter|" $ConfigFile
+ fi
+done
+
+# Re-enable any entries in disabledPackageSources where the feed name contains darc-int
+grep -i "<disabledPackageSources>" $ConfigFile
+if [ "$?" == "0" ]; then
+ DisabledDarcIntSources=()
+ echo "Re-enabling any disabled \"darc-int\" package sources in $ConfigFile"
+ DisabledDarcIntSources+=$(grep -oh '"darc-int-[^"]*" value="true"' $ConfigFile | tr -d '"')
+ for DisabledSourceName in ${DisabledDarcIntSources[@]} ; do
+ if [[ $DisabledSourceName == darc-int* ]]
+ then
+ OldDisableValue="<add key=\"$DisabledSourceName\" value=\"true\" />"
+ NewDisableValue="<!-- Reenabled for build : $DisabledSourceName -->"
+ sed -i.bak "s|$OldDisableValue|$NewDisableValue|" $ConfigFile
+ echo "Neutralized disablePackageSources entry for '$DisabledSourceName'"
+ fi
+ done
+fi
diff --git a/eng/common/build.ps1 b/eng/common/build.ps1
new file mode 100644
index 0000000..8943da2
--- /dev/null
+++ b/eng/common/build.ps1
@@ -0,0 +1,161 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string][Alias('c')]$configuration = "Debug",
+ [string]$platform = $null,
+ [string] $projects,
+ [string][Alias('v')]$verbosity = "minimal",
+ [string] $msbuildEngine = $null,
+ [bool] $warnAsError = $true,
+ [bool] $nodeReuse = $true,
+ [switch][Alias('r')]$restore,
+ [switch] $deployDeps,
+ [switch][Alias('b')]$build,
+ [switch] $rebuild,
+ [switch] $deploy,
+ [switch][Alias('t')]$test,
+ [switch] $integrationTest,
+ [switch] $performanceTest,
+ [switch] $sign,
+ [switch] $pack,
+ [switch] $publish,
+ [switch] $clean,
+ [switch][Alias('bl')]$binaryLog,
+ [switch][Alias('nobl')]$excludeCIBinarylog,
+ [switch] $ci,
+ [switch] $prepareMachine,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '',
+ [switch] $excludePrereleaseVS,
+ [switch] $help,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
+)
+
+# Unset the 'Platform' environment variable to avoid an unwanted collision in the InstallDotNetCore.targets file;
+# some machines have this env var defined out of the box (e.g. some HP computers)
+if($env:Platform) {
+ $env:Platform=""
+}
+function Print-Usage() {
+ Write-Host "Common settings:"
+ Write-Host " -configuration <value> Build configuration: 'Debug' or 'Release' (short: -c)"
+ Write-Host " -platform <value> Platform configuration: 'x86', 'x64' or any valid Platform value to pass to msbuild"
+ Write-Host " -verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
+ Write-Host " -binaryLog Output binary log (short: -bl)"
+ Write-Host " -help Print help and exit"
+ Write-Host ""
+
+ Write-Host "Actions:"
+ Write-Host " -restore Restore dependencies (short: -r)"
+ Write-Host " -build Build solution (short: -b)"
+ Write-Host " -rebuild Rebuild solution"
+ Write-Host " -deploy Deploy built VSIXes"
+ Write-Host " -deployDeps Deploy dependencies (e.g. VSIXes for integration tests)"
+ Write-Host " -test Run all unit tests in the solution (short: -t)"
+ Write-Host " -integrationTest Run all integration tests in the solution"
+ Write-Host " -performanceTest Run all performance tests in the solution"
+ Write-Host " -pack Package build outputs into NuGet packages and Willow components"
+ Write-Host " -sign Sign build outputs"
+ Write-Host " -publish Publish artifacts (e.g. symbols)"
+ Write-Host " -clean Clean the solution"
+ Write-Host ""
+
+ Write-Host "Advanced settings:"
+ Write-Host " -projects <value> Semi-colon delimited list of sln/proj's to build. Globbing is supported (*.sln)"
+ Write-Host " -ci Set when running on CI server"
+ Write-Host " -excludeCIBinarylog Don't output binary log (short: -nobl)"
+ Write-Host " -prepareMachine Prepare machine for CI run, clean up processes after build"
+ Write-Host " -warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
+ Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host " -excludePrereleaseVS Set to exclude build engines in prerelease versions of Visual Studio"
+ Write-Host ""
+
+ Write-Host "Command line arguments not listed above are passed thru to msbuild."
+ Write-Host "The above arguments can be shortened as much as to be unambiguous (e.g. -co for configuration, -t for test, etc.)."
+}
+
+. $PSScriptRoot\tools.ps1
+
+function InitializeCustomToolset {
+ if (-not $restore) {
+ return
+ }
+
+ $script = Join-Path $EngRoot 'restore-toolset.ps1'
+
+ if (Test-Path $script) {
+ . $script
+ }
+}
+
+function Build {
+ $toolsetBuildProj = InitializeToolset
+ InitializeCustomToolset
+
+ $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'Build.binlog') } else { '' }
+ $platformArg = if ($platform) { "/p:Platform=$platform" } else { '' }
+
+ if ($projects) {
+ # Re-assign properties to a new variable because PowerShell doesn't let us append properties directly for unclear reasons.
+ # Explicitly set the type as string[] because otherwise PowerShell would make this char[] if $properties is empty.
+ [string[]] $msbuildArgs = $properties
+
+ # Resolve relative project paths into full paths
+ $projects = ($projects.Split(';').ForEach({Resolve-Path $_}) -join ';')
+
+ $msbuildArgs += "/p:Projects=$projects"
+ $properties = $msbuildArgs
+ }
+
+ MSBuild $toolsetBuildProj `
+ $bl `
+ $platformArg `
+ /p:Configuration=$configuration `
+ /p:RepoRoot=$RepoRoot `
+ /p:Restore=$restore `
+ /p:DeployDeps=$deployDeps `
+ /p:Build=$build `
+ /p:Rebuild=$rebuild `
+ /p:Deploy=$deploy `
+ /p:Test=$test `
+ /p:Pack=$pack `
+ /p:IntegrationTest=$integrationTest `
+ /p:PerformanceTest=$performanceTest `
+ /p:Sign=$sign `
+ /p:Publish=$publish `
+ @properties
+}
+
+try {
+ if ($clean) {
+ if (Test-Path $ArtifactsDir) {
+ Remove-Item -Recurse -Force $ArtifactsDir
+ Write-Host 'Artifacts directory deleted.'
+ }
+ exit 0
+ }
+
+ if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
+ Print-Usage
+ exit 0
+ }
+
+ if ($ci) {
+ if (-not $excludeCIBinarylog) {
+ $binaryLog = $true
+ }
+ $nodeReuse = $false
+ }
+
+ if ($restore) {
+ InitializeNativeTools
+ }
+
+ Build
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/build.sh b/eng/common/build.sh
new file mode 100644
index 0000000..50af40c
--- /dev/null
+++ b/eng/common/build.sh
@@ -0,0 +1,247 @@
+#!/usr/bin/env bash
+
+# Stop script if unbound variable found (use ${var:-} if intentional)
+set -u
+
+# Stop script if command returns non-zero exit code.
+# Prevents hidden errors caused by missing error code propagation.
+set -e
+
+usage()
+{
+ echo "Common settings:"
+ echo " --configuration <value> Build configuration: 'Debug' or 'Release' (short: -c)"
+ echo " --verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic] (short: -v)"
+ echo " --binaryLog Create MSBuild binary log (short: -bl)"
+ echo " --help Print help and exit (short: -h)"
+ echo ""
+
+ echo "Actions:"
+ echo " --restore Restore dependencies (short: -r)"
+ echo " --build Build solution (short: -b)"
+ echo " --sourceBuild Source-build the solution (short: -sb)"
+ echo " Will additionally trigger the following actions: --restore, --build, --pack"
+ echo " If --configuration is not set explicitly, will also set it to 'Release'"
+ echo " --rebuild Rebuild solution"
+ echo " --test Run all unit tests in the solution (short: -t)"
+ echo " --integrationTest Run all integration tests in the solution"
+ echo " --performanceTest Run all performance tests in the solution"
+ echo " --pack Package build outputs into NuGet packages and Willow components"
+ echo " --sign Sign build outputs"
+ echo " --publish Publish artifacts (e.g. symbols)"
+ echo " --clean Clean the solution"
+ echo ""
+
+ echo "Advanced settings:"
+ echo " --projects <value> Project or solution file(s) to build"
+ echo " --ci Set when running on CI server"
+ echo " --excludeCIBinarylog Don't output binary log (short: -nobl)"
+ echo " --prepareMachine Prepare machine for CI run, clean up processes after build"
+ echo " --nodeReuse <value> Sets nodereuse msbuild parameter ('true' or 'false')"
+ echo " --warnAsError <value> Sets warnaserror msbuild parameter ('true' or 'false')"
+ echo ""
+ echo "Command line arguments not listed above are passed thru to msbuild."
+ echo "Arguments can also be passed in with a single hyphen."
+}
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+restore=false
+build=false
+source_build=false
+rebuild=false
+test=false
+integration_test=false
+performance_test=false
+pack=false
+publish=false
+sign=false
+public=false
+ci=false
+clean=false
+
+warn_as_error=true
+node_reuse=true
+binary_log=false
+exclude_ci_binary_log=false
+pipelines_log=false
+
+projects=''
+configuration=''
+prepare_machine=false
+verbosity='minimal'
+runtime_source_feed=''
+runtime_source_feed_key=''
+
+properties=''
+while [[ $# > 0 ]]; do
+ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -help|-h)
+ usage
+ exit 0
+ ;;
+ -clean)
+ clean=true
+ ;;
+ -configuration|-c)
+ configuration=$2
+ shift
+ ;;
+ -verbosity|-v)
+ verbosity=$2
+ shift
+ ;;
+ -binarylog|-bl)
+ binary_log=true
+ ;;
+ -excludeCIBinarylog|-nobl)
+ exclude_ci_binary_log=true
+ ;;
+ -pipelineslog|-pl)
+ pipelines_log=true
+ ;;
+ -restore|-r)
+ restore=true
+ ;;
+ -build|-b)
+ build=true
+ ;;
+ -rebuild)
+ rebuild=true
+ ;;
+ -pack)
+ pack=true
+ ;;
+ -sourcebuild|-sb)
+ build=true
+ source_build=true
+ restore=true
+ pack=true
+ ;;
+ -test|-t)
+ test=true
+ ;;
+ -integrationtest)
+ integration_test=true
+ ;;
+ -performancetest)
+ performance_test=true
+ ;;
+ -sign)
+ sign=true
+ ;;
+ -publish)
+ publish=true
+ ;;
+ -preparemachine)
+ prepare_machine=true
+ ;;
+ -projects)
+ projects=$2
+ shift
+ ;;
+ -ci)
+ ci=true
+ ;;
+ -warnaserror)
+ warn_as_error=$2
+ shift
+ ;;
+ -nodereuse)
+ node_reuse=$2
+ shift
+ ;;
+ -runtimesourcefeed)
+ runtime_source_feed=$2
+ shift
+ ;;
+ -runtimesourcefeedkey)
+ runtime_source_feed_key=$2
+ shift
+ ;;
+ *)
+ properties="$properties $1"
+ ;;
+ esac
+
+ shift
+done
+
+if [[ -z "$configuration" ]]; then
+ if [[ "$source_build" = true ]]; then configuration="Release"; else configuration="Debug"; fi
+fi
+
+if [[ "$ci" == true ]]; then
+ pipelines_log=true
+ node_reuse=false
+ if [[ "$exclude_ci_binary_log" == false ]]; then
+ binary_log=true
+ fi
+fi
+
+. "$scriptroot/tools.sh"
+
+function InitializeCustomToolset {
+ local script="$eng_root/restore-toolset.sh"
+
+ if [[ -a "$script" ]]; then
+ . "$script"
+ fi
+}
+
+function Build {
+ InitializeToolset
+ InitializeCustomToolset
+
+ if [[ ! -z "$projects" ]]; then
+ properties="$properties /p:Projects=$projects"
+ fi
+
+ local bl=""
+ if [[ "$binary_log" == true ]]; then
+ bl="/bl:\"$log_dir/Build.binlog\""
+ fi
+
+ MSBuild $_InitializeToolset \
+ $bl \
+ /p:Configuration=$configuration \
+ /p:RepoRoot="$repo_root" \
+ /p:Restore=$restore \
+ /p:Build=$build \
+ /p:ArcadeBuildFromSource=$source_build \
+ /p:Rebuild=$rebuild \
+ /p:Test=$test \
+ /p:Pack=$pack \
+ /p:IntegrationTest=$integration_test \
+ /p:PerformanceTest=$performance_test \
+ /p:Sign=$sign \
+ /p:Publish=$publish \
+ $properties
+
+ ExitWithExitCode 0
+}
+
+if [[ "$clean" == true ]]; then
+ if [ -d "$artifacts_dir" ]; then
+ rm -rf $artifacts_dir
+ echo "Artifacts directory deleted."
+ fi
+ exit 0
+fi
+
+if [[ "$restore" == true ]]; then
+ InitializeNativeTools
+fi
+
+Build
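[Note: anything the option parser above doesn't recognize is accumulated into $properties and handed to MSBuild verbatim, so arbitrary /p: switches pass straight through. An illustrative invocation; the property shown is just an example of pass-through:

    # Sketch: build a specific solution with an extra MSBuild property passed through
    ./eng/common/build.sh --restore --build --projects "$PWD/Mono.Cecil.sln" /p:TreatWarningsAsErrors=false
]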
diff --git a/eng/common/cibuild.sh b/eng/common/cibuild.sh
new file mode 100644
index 0000000..1a02c0d
--- /dev/null
+++ b/eng/common/cibuild.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where
+ # the symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/build.sh" --restore --build --test --pack --publish --ci $@ \ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.bionic b/eng/common/cross/arm/sources.list.bionic
new file mode 100644
index 0000000..2109557
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.focal b/eng/common/cross/arm/sources.list.focal
new file mode 100644
index 0000000..4de2600
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.focal
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.jammy b/eng/common/cross/arm/sources.list.jammy
new file mode 100644
index 0000000..6bb0453
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.jammy
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/sources.list.jessie b/eng/common/cross/arm/sources.list.jessie
new file mode 100644
index 0000000..4d142ac
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.jessie
@@ -0,0 +1,3 @@
+# Debian (sid) # UNSTABLE
+deb http://ftp.debian.org/debian/ sid main contrib non-free
+deb-src http://ftp.debian.org/debian/ sid main contrib non-free
diff --git a/eng/common/cross/arm/sources.list.xenial b/eng/common/cross/arm/sources.list.xenial
new file mode 100644
index 0000000..eacd86b
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.xenial
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm/sources.list.zesty b/eng/common/cross/arm/sources.list.zesty
new file mode 100644
index 0000000..ea2c14a
--- /dev/null
+++ b/eng/common/cross/arm/sources.list.zesty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/arm/tizen-build-rootfs.sh b/eng/common/cross/arm/tizen-build-rootfs.sh
new file mode 100644
index 0000000..9fdb32e
--- /dev/null
+++ b/eng/common/cross/arm/tizen-build-rootfs.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+set -e
+
+__ARM_HARDFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__TIZEN_CROSSDIR="$__ARM_HARDFP_CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+ echo "ROOTFS_DIR is not defined."
+ exit 1;
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__ARM_HARDFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<<Finish downloading files"
+
+echo ">>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+ rpm2cpio $f | cpio -idm --quiet
+done
+echo "<<Finish constructing Tizen rootfs"
+
+# Cleanup tmp
+rm -rf $TIZEN_TMP_DIR
+
+# Configure Tizen rootfs
+echo ">>Start configuring Tizen rootfs"
+ln -sfn asm-arm ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "<<Finish configuring Tizen rootfs"
diff --git a/eng/common/cross/arm/tizen-fetch.sh b/eng/common/cross/arm/tizen-fetch.sh
new file mode 100644
index 0000000..eabd06c
--- /dev/null
+++ b/eng/common/cross/arm/tizen-fetch.sh
@@ -0,0 +1,170 @@
+#!/usr/bin/env bash
+set -e
+
+if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
+ VERBOSE=0
+fi
+
+Log()
+{
+ if [ $VERBOSE -ge $1 ]; then
+ echo ${@:2}
+ fi
+}
+
+Inform()
+{
+ Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+ Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+ >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+ URL=$1
+ FILE=$2
+ PROGRESS=$3
+ if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+ CURL_OPT="--progress-bar"
+ else
+ CURL_OPT="--silent"
+ fi
+ curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "'curl' is required. Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "'xmllint' is required. Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "'sha256sum' is required. Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+ TMPDIR=./tizen_tmp
+ Debug "Create temporary directory : $TMPDIR"
+ mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/snapshots/tizen
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+ XPATH_RESULT=''
+ XPATH=$1
+ XML_FILE=$2
+ RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+ if [[ -z ${RESULT// } ]]; then
+ Error "Can not find target from $XML_FILE"
+ Debug "Xpath = $XPATH"
+ exit 1
+ fi
+ XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+ TARGET=$1
+ PROFILE=$2
+ Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+ TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+ if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+ mkdir -p $TMP_PKG_DIR
+
+ PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+ BUILD_XML_URL=$PKG_URL/$BUILD_XML
+ TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+ TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+ TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+ TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+ Fetch $BUILD_XML_URL $TMP_BUILD
+
+ Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+ TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+ Xpath_get $TARGET_XPATH $TMP_BUILD
+ TARGET_PATH=$XPATH_RESULT
+ TARGET_URL=$PKG_URL/$TARGET_PATH
+
+ REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+ PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+ Fetch $REPOMD_URL $TMP_REPOMD
+
+ Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+ Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+ PRIMARY_XML_PATH=$XPATH_RESULT
+ PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+ Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+ Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+ gunzip $TMP_PRIMARYGZ
+
+ Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+ ARCH=$1
+ PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+ PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+ for pkg in ${@:2}
+ do
+ Inform "Fetching... $pkg"
+ XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ PKG_PATH=$XPATH_RESULT
+
+ XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ CHECKSUM=$XPATH_RESULT
+
+ PKG_URL=$TARGET_URL/$PKG_PATH
+ PKG_FILE=$(basename $PKG_PATH)
+ PKG_PATH=$TMPDIR/$PKG_FILE
+
+ Debug "Download $PKG_URL to $PKG_PATH"
+ Fetch $PKG_URL $PKG_PATH true
+
+ echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
+ if [ $? -ne 0 ]; then
+ Error "Fail to fetch $PKG_URL to $PKG_PATH"
+ Debug "Checksum = $CHECKSUM"
+ exit 1
+ fi
+ done
+}
+
+Inform "Initialize arm base"
+fetch_tizen_pkgs_init standard base
+Inform "fetch common packages"
+fetch_tizen_pkgs armv7hl gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
+Inform "fetch coreclr packages"
+fetch_tizen_pkgs armv7hl lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+Inform "fetch corefx packages"
+fetch_tizen_pkgs armv7hl libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
+
+Inform "Initialize standard unified"
+fetch_tizen_pkgs_init standard unified
+Inform "fetch corefx packages"
+fetch_tizen_pkgs armv7hl gssdp gssdp-devel tizen-release
+
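The VERBOSE guard at the top (as adjusted above) relies on test's arithmetic comparison: [ x -eq x ] succeeds only when x parses as an integer, so silencing its error output yields a compact "is this a number?" check. A standalone sketch of the idiom:

    # numeric-guard idiom: the arithmetic self-comparison succeeds only for integers
    is_int() { [ "$1" -eq "$1" ] 2>/dev/null; }
    is_int 3   && echo "numeric"
    is_int abc || echo "not numeric"
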
diff --git a/eng/common/cross/arm/tizen/tizen.patch b/eng/common/cross/arm/tizen/tizen.patch
new file mode 100644
index 0000000..fb12ade
--- /dev/null
+++ b/eng/common/cross/arm/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf32-littlearm)
+-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-armhf.so.3 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-armhf.so.3 ) )
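
On glibc, /usr/lib/libc.so is a linker script, and its stock GROUP entries use absolute paths that would resolve on the build machine rather than inside the Tizen rootfs. Rewriting them as bare names lets a cross linker find them through its sysroot search paths. A hedged sketch of the cross link this enables (the toolchain prefix is illustrative):

    # link against the patched rootfs; the relative GROUP entries now resolve under the sysroot
    arm-linux-gnueabihf-gcc --sysroot="$ROOTFS_DIR" hello.c -o hello
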
diff --git a/eng/common/cross/arm64/sources.list.bionic b/eng/common/cross/arm64/sources.list.bionic
new file mode 100644
index 0000000..2109557
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.buster b/eng/common/cross/arm64/sources.list.buster
new file mode 100644
index 0000000..7194ac6
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.buster
@@ -0,0 +1,11 @@
+deb http://deb.debian.org/debian buster main
+deb-src http://deb.debian.org/debian buster main
+
+deb http://deb.debian.org/debian-security/ buster/updates main
+deb-src http://deb.debian.org/debian-security/ buster/updates main
+
+deb http://deb.debian.org/debian buster-updates main
+deb-src http://deb.debian.org/debian buster-updates main
+
+deb http://deb.debian.org/debian buster-backports main contrib non-free
+deb-src http://deb.debian.org/debian buster-backports main contrib non-free
diff --git a/eng/common/cross/arm64/sources.list.focal b/eng/common/cross/arm64/sources.list.focal
new file mode 100644
index 0000000..4de2600
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.focal
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ focal main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ focal-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.jammy b/eng/common/cross/arm64/sources.list.jammy
new file mode 100644
index 0000000..6bb0453
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.jammy
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ jammy-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/sources.list.stretch b/eng/common/cross/arm64/sources.list.stretch
new file mode 100644
index 0000000..0e12157
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.stretch
@@ -0,0 +1,12 @@
+deb http://deb.debian.org/debian stretch main
+deb-src http://deb.debian.org/debian stretch main
+
+deb http://deb.debian.org/debian-security/ stretch/updates main
+deb-src http://deb.debian.org/debian-security/ stretch/updates main
+
+deb http://deb.debian.org/debian stretch-updates main
+deb-src http://deb.debian.org/debian stretch-updates main
+
+deb http://deb.debian.org/debian stretch-backports main contrib non-free
+deb-src http://deb.debian.org/debian stretch-backports main contrib non-free
+
diff --git a/eng/common/cross/arm64/sources.list.xenial b/eng/common/cross/arm64/sources.list.xenial
new file mode 100644
index 0000000..eacd86b
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.xenial
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ xenial-security main restricted universe multiverse
\ No newline at end of file
diff --git a/eng/common/cross/arm64/sources.list.zesty b/eng/common/cross/arm64/sources.list.zesty
new file mode 100644
index 0000000..ea2c14a
--- /dev/null
+++ b/eng/common/cross/arm64/sources.list.zesty
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ zesty-security main restricted universe multiverse
diff --git a/eng/common/cross/arm64/tizen-build-rootfs.sh b/eng/common/cross/arm64/tizen-build-rootfs.sh
new file mode 100644
index 0000000..13bfddb
--- /dev/null
+++ b/eng/common/cross/arm64/tizen-build-rootfs.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+set -e
+
+__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__TIZEN_CROSSDIR="$__CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+ echo "ROOTFS_DIR is not defined."
+ exit 1;
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<<Finish downloading files"
+
+echo ">>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+ rpm2cpio $f | cpio -idm --quiet
+done
+echo "<<Finish constructing Tizen rootfs"
+
+# Cleanup tmp
+rm -rf $TIZEN_TMP_DIR
+
+# Configure Tizen rootfs
+echo ">>Start configuring Tizen rootfs"
+ln -sfn asm-arm64 ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "<<Finish configuring Tizen rootfs"
diff --git a/eng/common/cross/arm64/tizen-fetch.sh b/eng/common/cross/arm64/tizen-fetch.sh
new file mode 100644
index 0000000..16d1301
--- /dev/null
+++ b/eng/common/cross/arm64/tizen-fetch.sh
@@ -0,0 +1,170 @@
+#!/usr/bin/env bash
+set -e
+
+if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
+ VERBOSE=0
+fi
+
+Log()
+{
+ if [ $VERBOSE -ge $1 ]; then
+ echo ${@:2}
+ fi
+}
+
+Inform()
+{
+ Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+ Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+ >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+ URL=$1
+ FILE=$2
+ PROGRESS=$3
+ if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+ CURL_OPT="--progress-bar"
+ else
+ CURL_OPT="--silent"
+ fi
+ curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "'curl' is required. Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "'xmllint' is required. Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "'sha256sum' is required. Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+ TMPDIR=./tizen_tmp
+ Debug "Create temporary directory : $TMPDIR"
+ mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/snapshots/tizen
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+ XPATH_RESULT=''
+ XPATH=$1
+ XML_FILE=$2
+ RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+ if [[ -z ${RESULT// } ]]; then
+ Error "Can not find target from $XML_FILE"
+ Debug "Xpath = $XPATH"
+ exit 1
+ fi
+ XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+ TARGET=$1
+ PROFILE=$2
+ Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+ TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+ if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+ mkdir -p $TMP_PKG_DIR
+
+ PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+ BUILD_XML_URL=$PKG_URL/$BUILD_XML
+ TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+ TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+ TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+ TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+ Fetch $BUILD_XML_URL $TMP_BUILD
+
+ Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+ TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+ Xpath_get $TARGET_XPATH $TMP_BUILD
+ TARGET_PATH=$XPATH_RESULT
+ TARGET_URL=$PKG_URL/$TARGET_PATH
+
+ REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+ PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+ Fetch $REPOMD_URL $TMP_REPOMD
+
+ Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+ Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+ PRIMARY_XML_PATH=$XPATH_RESULT
+ PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+ Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+ Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+ gunzip $TMP_PRIMARYGZ
+
+ Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+ ARCH=$1
+ PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+ PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+ for pkg in ${@:2}
+ do
+ Inform "Fetching... $pkg"
+ XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ PKG_PATH=$XPATH_RESULT
+
+ XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ CHECKSUM=$XPATH_RESULT
+
+ PKG_URL=$TARGET_URL/$PKG_PATH
+ PKG_FILE=$(basename $PKG_PATH)
+ PKG_PATH=$TMPDIR/$PKG_FILE
+
+ Debug "Download $PKG_URL to $PKG_PATH"
+ Fetch $PKG_URL $PKG_PATH true
+
+ echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
+ if [ $? -ne 0 ]; then
+ Error "Fail to fetch $PKG_URL to $PKG_PATH"
+ Debug "Checksum = $CHECKSUM"
+ exit 1
+ fi
+ done
+}
+
+Inform "Initialize arm base"
+fetch_tizen_pkgs_init standard base
+Inform "fetch common packages"
+fetch_tizen_pkgs aarch64 gcc glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
+Inform "fetch coreclr packages"
+fetch_tizen_pkgs aarch64 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+Inform "fetch corefx packages"
+fetch_tizen_pkgs aarch64 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
+
+Inform "Initialize standard unified"
+fetch_tizen_pkgs_init standard unified
+Inform "fetch corefx packages"
+fetch_tizen_pkgs aarch64 gssdp gssdp-devel tizen-release
+
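Each downloaded package is verified by piping a checksum line into sha256sum -c, which recomputes the digest and compares it against the value taken from the repository metadata. A standalone sketch of the pattern, using the well-known digest of empty input:

    printf '' > empty.bin   # empty file; its sha256 is the constant below
    expected="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    echo "$expected  empty.bin" | sha256sum -c - || { echo "checksum mismatch" >&2; exit 1; }
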
diff --git a/eng/common/cross/arm64/tizen/tizen.patch b/eng/common/cross/arm64/tizen/tizen.patch
new file mode 100644
index 0000000..af7c8be
--- /dev/null
+++ b/eng/common/cross/arm64/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib64/libc.so b/usr/lib64/libc.so
+--- a/usr/lib64/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib64/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf64-littleaarch64)
+-GROUP ( /lib64/libc.so.6 /usr/lib64/libc_nonshared.a AS_NEEDED ( /lib/ld-linux-aarch64.so.1 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux-aarch64.so.1 ) )
diff --git a/eng/common/cross/armel/armel.jessie.patch b/eng/common/cross/armel/armel.jessie.patch
new file mode 100644
index 0000000..2d26156
--- /dev/null
+++ b/eng/common/cross/armel/armel.jessie.patch
@@ -0,0 +1,43 @@
+diff -u -r a/usr/include/urcu/uatomic/generic.h b/usr/include/urcu/uatomic/generic.h
+--- a/usr/include/urcu/uatomic/generic.h 2014-10-22 15:00:58.000000000 -0700
++++ b/usr/include/urcu/uatomic/generic.h 2020-10-30 21:38:28.550000000 -0700
+@@ -69,10 +69,10 @@
+ #endif
+ #ifdef UATOMIC_HAS_ATOMIC_SHORT
+ case 2:
+- return __sync_val_compare_and_swap_2(addr, old, _new);
++ return __sync_val_compare_and_swap_2((uint16_t*) addr, old, _new);
+ #endif
+ case 4:
+- return __sync_val_compare_and_swap_4(addr, old, _new);
++ return __sync_val_compare_and_swap_4((uint32_t*) addr, old, _new);
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+ return __sync_val_compare_and_swap_8(addr, old, _new);
+@@ -109,7 +109,7 @@
+ return;
+ #endif
+ case 4:
+- __sync_and_and_fetch_4(addr, val);
++ __sync_and_and_fetch_4((uint32_t*) addr, val);
+ return;
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+@@ -148,7 +148,7 @@
+ return;
+ #endif
+ case 4:
+- __sync_or_and_fetch_4(addr, val);
++ __sync_or_and_fetch_4((uint32_t*) addr, val);
+ return;
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+@@ -187,7 +187,7 @@
+ return __sync_add_and_fetch_2(addr, val);
+ #endif
+ case 4:
+- return __sync_add_and_fetch_4(addr, val);
++ return __sync_add_and_fetch_4((uint32_t*) addr, val);
+ #if (CAA_BITS_PER_LONG == 64)
+ case 8:
+ return __sync_add_and_fetch_8(addr, val);
diff --git a/eng/common/cross/armel/sources.list.jessie b/eng/common/cross/armel/sources.list.jessie
new file mode 100644
index 0000000..3d9c305
--- /dev/null
+++ b/eng/common/cross/armel/sources.list.jessie
@@ -0,0 +1,3 @@
+# Debian (jessie) # Stable
+deb http://ftp.debian.org/debian/ jessie main contrib non-free
+deb-src http://ftp.debian.org/debian/ jessie main contrib non-free
diff --git a/eng/common/cross/armel/tizen-build-rootfs.sh b/eng/common/cross/armel/tizen-build-rootfs.sh
new file mode 100644
index 0000000..9a4438a
--- /dev/null
+++ b/eng/common/cross/armel/tizen-build-rootfs.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+set -e
+
+__ARM_SOFTFP_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__TIZEN_CROSSDIR="$__ARM_SOFTFP_CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+ echo "ROOTFS_DIR is not defined."
+ exit 1;
+fi
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__ARM_SOFTFP_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<<Finish downloading files"
+
+echo ">>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+ rpm2cpio $f | cpio -idm --quiet
+done
+echo "<<Finish constructing Tizen rootfs"
+
+# Cleanup tmp
+rm -rf $TIZEN_TMP_DIR
+
+# Configure Tizen rootfs
+echo ">>Start configuring Tizen rootfs"
+ln -sfn asm-arm ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "<<Finish configuring Tizen rootfs"
diff --git a/eng/common/cross/armel/tizen-fetch.sh b/eng/common/cross/armel/tizen-fetch.sh
new file mode 100644
index 0000000..64f0187
--- /dev/null
+++ b/eng/common/cross/armel/tizen-fetch.sh
@@ -0,0 +1,170 @@
+#!/usr/bin/env bash
+set -e
+
+if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
+ VERBOSE=0
+fi
+
+Log()
+{
+ if [ $VERBOSE -ge $1 ]; then
+ echo ${@:2}
+ fi
+}
+
+Inform()
+{
+ Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+ Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+ >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+ URL=$1
+ FILE=$2
+ PROGRESS=$3
+ if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+ CURL_OPT="--progress-bar"
+ else
+ CURL_OPT="--silent"
+ fi
+ curl $CURL_OPT $URL > $FILE
+}
+
+hash curl 2> /dev/null || { Error "'curl' is required. Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "'xmllint' is required. Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "'sha256sum' is required. Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+ TMPDIR=./tizen_tmp
+ Debug "Create temporary directory : $TMPDIR"
+ mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/snapshots/tizen
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+ XPATH_RESULT=''
+ XPATH=$1
+ XML_FILE=$2
+ RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+ if [[ -z ${RESULT// } ]]; then
+ Error "Can not find target from $XML_FILE"
+ Debug "Xpath = $XPATH"
+ exit 1
+ fi
+ XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+ TARGET=$1
+ PROFILE=$2
+ Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+ TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+ if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+ mkdir -p $TMP_PKG_DIR
+
+ PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+ BUILD_XML_URL=$PKG_URL/$BUILD_XML
+ TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+ TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+ TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+ TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+ Fetch $BUILD_XML_URL $TMP_BUILD
+
+ Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+ TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+ Xpath_get $TARGET_XPATH $TMP_BUILD
+ TARGET_PATH=$XPATH_RESULT
+ TARGET_URL=$PKG_URL/$TARGET_PATH
+
+ REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+ PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+ Fetch $REPOMD_URL $TMP_REPOMD
+
+ Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+ Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+ PRIMARY_XML_PATH=$XPATH_RESULT
+ PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+ Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+ Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+ gunzip $TMP_PRIMARYGZ
+
+ Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
+
+fetch_tizen_pkgs()
+{
+ ARCH=$1
+ PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+ PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+ for pkg in ${@:2}
+ do
+ Inform "Fetching... $pkg"
+ XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ PKG_PATH=$XPATH_RESULT
+
+ XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ CHECKSUM=$XPATH_RESULT
+
+ PKG_URL=$TARGET_URL/$PKG_PATH
+ PKG_FILE=$(basename $PKG_PATH)
+ PKG_PATH=$TMPDIR/$PKG_FILE
+
+ Debug "Download $PKG_URL to $PKG_PATH"
+ Fetch $PKG_URL $PKG_PATH true
+
+ echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
+ if [ $? -ne 0 ]; then
+ Error "Fail to fetch $PKG_URL to $PKG_PATH"
+ Debug "Checksum = $CHECKSUM"
+ exit 1
+ fi
+ done
+}
+
+Inform "Initialize arm base"
+fetch_tizen_pkgs_init standard base
+Inform "fetch common packages"
+fetch_tizen_pkgs armv7l gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
+Inform "fetch coreclr packages"
+fetch_tizen_pkgs armv7l lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+Inform "fetch corefx packages"
+fetch_tizen_pkgs armv7l libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
+
+Inform "Initialize standard unified"
+fetch_tizen_pkgs_init standard unified
+Inform "fetch corefx packages"
+fetch_tizen_pkgs armv7l gssdp gssdp-devel tizen-release
+
diff --git a/eng/common/cross/armel/tizen/tizen-dotnet.ks b/eng/common/cross/armel/tizen/tizen-dotnet.ks
new file mode 100644
index 0000000..506d455
--- /dev/null
+++ b/eng/common/cross/armel/tizen/tizen-dotnet.ks
@@ -0,0 +1,50 @@
+lang en_US.UTF-8
+keyboard us
+timezone --utc Asia/Seoul
+
+part / --fstype="ext4" --size=3500 --ondisk=mmcblk0 --label rootfs --fsoptions=defaults,noatime
+
+rootpw tizen
+desktop --autologinuser=root
+user --name root --groups audio,video --password 'tizen'
+
+repo --name=standard --baseurl=http://download.tizen.org/releases/milestone/tizen/unified/latest/repos/standard/packages/ --ssl_verify=no
+repo --name=base --baseurl=http://download.tizen.org/releases/milestone/tizen/base/latest/repos/standard/packages/ --ssl_verify=no
+
+%packages
+tar
+gzip
+
+sed
+grep
+gawk
+perl
+
+binutils
+findutils
+util-linux
+lttng-ust
+userspace-rcu
+procps-ng
+tzdata
+ca-certificates
+
+
+### Core FX
+libicu
+libunwind
+iputils
+zlib
+krb5
+libcurl
+libopenssl
+
+%end
+
+%post
+
+### Update /tmp privilege
+chmod 777 /tmp
+####################################
+
+%end
diff --git a/eng/common/cross/armel/tizen/tizen.patch b/eng/common/cross/armel/tizen/tizen.patch
new file mode 100644
index 0000000..ca7c7c1
--- /dev/null
+++ b/eng/common/cross/armel/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf32-littlearm)
+-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.3 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.3 ) )
diff --git a/eng/common/cross/armv6/sources.list.buster b/eng/common/cross/armv6/sources.list.buster
new file mode 100644
index 0000000..f27fc4f
--- /dev/null
+++ b/eng/common/cross/armv6/sources.list.buster
@@ -0,0 +1,2 @@
+deb http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
+deb-src http://raspbian.raspberrypi.org/raspbian/ buster main contrib non-free rpi
diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh
new file mode 100644
index 0000000..42516bb
--- /dev/null
+++ b/eng/common/cross/build-android-rootfs.sh
@@ -0,0 +1,131 @@
+#!/usr/bin/env bash
+set -e
+__NDK_Version=r21
+
+usage()
+{
+ echo "Creates a toolchain and sysroot used for cross-compiling for Android."
+    echo ""
+ echo "Usage: $0 [BuildArch] [ApiLevel]"
+    echo ""
+ echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
+ echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
+    echo ""
+ echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
+ echo "by setting the TOOLCHAIN_DIR environment variable"
+    echo ""
+ echo "By default, the NDK will be downloaded into the cross/android-rootfs/android-ndk-$__NDK_Version directory. If you already have an NDK installation,"
+ echo "you can set the NDK_DIR environment variable to have this script use that installation of the NDK."
+ echo "By default, this script will generate a file, android_platform, in the root of the ROOTFS_DIR directory that contains the RID for the supported and tested Android build: android.28-arm64. This file is to replace '/etc/os-release', which is not available for Android."
+ exit 1
+}
+
+__ApiLevel=28 # The minimum platform for arm64 is API level 21, but the minimum version that supports glob(3) is 28. See $ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include/glob.h
+__BuildArch=arm64
+__AndroidArch=aarch64
+__AndroidToolchain=aarch64-linux-android
+
+for i in "$@"
+ do
+ lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ -?|-h|--help)
+ usage
+ exit 1
+ ;;
+ arm64)
+ __BuildArch=arm64
+ __AndroidArch=aarch64
+ __AndroidToolchain=aarch64-linux-android
+ ;;
+ arm)
+ __BuildArch=arm
+ __AndroidArch=arm
+ __AndroidToolchain=arm-linux-androideabi
+ ;;
+ *[0-9])
+ __ApiLevel=$i
+ ;;
+ *)
+ __UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
+ ;;
+ esac
+done
+
+# Obtain the location of the bash script to figure out where the root of the repo is.
+__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+__CrossDir="$__ScriptBaseDir/../../../.tools/android-rootfs"
+
+if [[ ! -f "$__CrossDir" ]]; then
+ mkdir -p "$__CrossDir"
+fi
+
+# Resolve absolute path to avoid `../` in build logs
+__CrossDir="$( cd "$__CrossDir" && pwd )"
+
+__NDK_Dir="$__CrossDir/android-ndk-$__NDK_Version"
+__lldb_Dir="$__CrossDir/lldb"
+__ToolchainDir="$__CrossDir/android-ndk-$__NDK_Version"
+
+if [[ -n "$TOOLCHAIN_DIR" ]]; then
+ __ToolchainDir=$TOOLCHAIN_DIR
+fi
+
+if [[ -n "$NDK_DIR" ]]; then
+ __NDK_Dir=$NDK_DIR
+fi
+
+echo "Target API level: $__ApiLevel"
+echo "Target architecture: $__BuildArch"
+echo "NDK location: $__NDK_Dir"
+echo "Target Toolchain location: $__ToolchainDir"
+
+# Download the NDK if required
+if [ ! -d $__NDK_Dir ]; then
+ echo Downloading the NDK into $__NDK_Dir
+ mkdir -p $__NDK_Dir
+ wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip
+ unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir
+fi
+
+if [ ! -d $__lldb_Dir ]; then
+ mkdir -p $__lldb_Dir
+ echo Downloading LLDB into $__lldb_Dir
+ wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/lldb-2.3.3614996-linux-x86_64.zip -O $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip
+ unzip -q $__CrossDir/lldb-2.3.3614996-linux-x86_64.zip -d $__lldb_Dir
+fi
+
+echo "Download dependencies..."
+__TmpDir=$__CrossDir/tmp/$__BuildArch/
+mkdir -p "$__TmpDir"
+
+# combined dependencies for coreclr, installer and libraries
+__AndroidPackages="libicu"
+__AndroidPackages+=" libandroid-glob"
+__AndroidPackages+=" liblzma"
+__AndroidPackages+=" krb5"
+__AndroidPackages+=" openssl"
+
+for path in $(wget -qO- http://termux.net/dists/stable/main/binary-$__AndroidArch/Packages |\
+ grep -A15 "Package: \(${__AndroidPackages// /\\|}\)" | grep -v "static\|tool" | grep Filename); do
+
+ if [[ "$path" != "Filename:" ]]; then
+ echo "Working on: $path"
+ wget -qO- http://termux.net/$path | dpkg -x - "$__TmpDir"
+ fi
+done
+
+cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/"
+
+# Generate platform file for build.sh script to assign to __DistroRid
+echo "Generating platform file..."
+echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform
+
+echo "Now to build coreclr, libraries and installers; run:"
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory coreclr
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory libraries
+echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
+ --subsetCategory installer
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
new file mode 100644
index 0000000..5680980
--- /dev/null
+++ b/eng/common/cross/build-rootfs.sh
@@ -0,0 +1,508 @@
+#!/usr/bin/env bash
+
+set -e
+
+usage()
+{
+    echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] [--rootfsdir <directory>]"
+ echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
+    echo "CodeName - optional, code name for Linux, can be: xenial(default), zesty, bionic, focal, jammy, stretch, buster, alpine, alpine3.13 or alpine3.14. If BuildArch is armel, CodeName is jessie(default) or tizen."
+ echo " for FreeBSD can be: freebsd12, freebsd13"
+ echo " for illumos can be: illumos"
+ echo " for Haiku can be: haiku."
+    echo "lldbx.y - optional, LLDB version, can be: lldb3.9(default), lldb4.0, lldb5.0, lldb6.0, or no-lldb. Ignored for alpine and FreeBSD"
+    echo "llvmx[.y] - optional, LLVM version for LLVM-related packages."
+ echo "--skipunmount - optional, will skip the unmount of rootfs folder."
+ echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
+    echo "--use-jobs N - optional, restrict to N jobs."
+ exit 1
+}
+
+__CodeName=xenial
+__CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__BuildArch=arm
+__AlpineArch=armv7
+__FreeBSDArch=arm
+__FreeBSDMachineArch=armv7
+__illumosArch=arm7
+__QEMUArch=arm
+__UbuntuArch=armhf
+__UbuntuRepo="http://ports.ubuntu.com/"
+__LLDB_Package="liblldb-3.9-dev"
+__SkipUnmount=0
+
+# base development support
+__UbuntuPackages="build-essential"
+
+__AlpinePackages="alpine-base"
+__AlpinePackages+=" build-base"
+__AlpinePackages+=" linux-headers"
+__AlpinePackages+=" lldb-dev"
+__AlpinePackages+=" python3"
+__AlpinePackages+=" libedit"
+
+# symlinks fixer
+__UbuntuPackages+=" symlinks"
+
+# runtime dependencies
+__UbuntuPackages+=" libicu-dev"
+__UbuntuPackages+=" liblttng-ust-dev"
+__UbuntuPackages+=" libunwind8-dev"
+
+__AlpinePackages+=" gettext-dev"
+__AlpinePackages+=" icu-dev"
+__AlpinePackages+=" libunwind-dev"
+__AlpinePackages+=" lttng-ust-dev"
+__AlpinePackages+=" compiler-rt-static"
+
+# runtime libraries' dependencies
+__UbuntuPackages+=" libcurl4-openssl-dev"
+__UbuntuPackages+=" libkrb5-dev"
+__UbuntuPackages+=" libssl-dev"
+__UbuntuPackages+=" zlib1g-dev"
+
+__AlpinePackages+=" curl-dev"
+__AlpinePackages+=" krb5-dev"
+__AlpinePackages+=" openssl-dev"
+__AlpinePackages+=" zlib-dev"
+
+__FreeBSDBase="12.3-RELEASE"
+__FreeBSDPkg="1.17.0"
+__FreeBSDABI="12"
+__FreeBSDPackages="libunwind"
+__FreeBSDPackages+=" icu"
+__FreeBSDPackages+=" libinotify"
+__FreeBSDPackages+=" openssl"
+__FreeBSDPackages+=" krb5"
+__FreeBSDPackages+=" terminfo-db"
+
+__IllumosPackages="icu"
+__IllumosPackages+=" mit-krb5"
+__IllumosPackages+=" openssl"
+__IllumosPackages+=" zlib"
+
+__HaikuPackages="gmp"
+__HaikuPackages+=" gmp_devel"
+__HaikuPackages+=" krb5"
+__HaikuPackages+=" krb5_devel"
+__HaikuPackages+=" libiconv"
+__HaikuPackages+=" libiconv_devel"
+__HaikuPackages+=" llvm12_libunwind"
+__HaikuPackages+=" llvm12_libunwind_devel"
+__HaikuPackages+=" mpfr"
+__HaikuPackages+=" mpfr_devel"
+
+# ML.NET dependencies
+__UbuntuPackages+=" libomp5"
+__UbuntuPackages+=" libomp-dev"
+
+__Keyring=
+__UseMirror=0
+
+__UnprocessedBuildArgs=
+while :; do
+ if [[ "$#" -le 0 ]]; then
+ break
+ fi
+
+ lowerI="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ -\?|-h|--help)
+ usage
+ exit 1
+ ;;
+ arm)
+ __BuildArch=arm
+ __UbuntuArch=armhf
+ __AlpineArch=armv7
+ __QEMUArch=arm
+ ;;
+ arm64)
+ __BuildArch=arm64
+ __UbuntuArch=arm64
+ __AlpineArch=aarch64
+ __QEMUArch=aarch64
+ __FreeBSDArch=arm64
+ __FreeBSDMachineArch=aarch64
+ ;;
+ armel)
+ __BuildArch=armel
+ __UbuntuArch=armel
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __CodeName=jessie
+ ;;
+ armv6)
+ __BuildArch=armv6
+ __UbuntuArch=armhf
+ __QEMUArch=arm
+ __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/"
+ __CodeName=buster
+ __LLDB_Package="liblldb-6.0-dev"
+
+ if [[ -e "/usr/share/keyrings/raspbian-archive-keyring.gpg" ]]; then
+ __Keyring="--keyring /usr/share/keyrings/raspbian-archive-keyring.gpg"
+ fi
+ ;;
+ ppc64le)
+ __BuildArch=ppc64le
+ __UbuntuArch=ppc64el
+ __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp-dev//')
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//')
+ unset __LLDB_Package
+ ;;
+ riscv64)
+ __BuildArch=riscv64
+ __UbuntuArch=riscv64
+ __UbuntuRepo="http://deb.debian.org/debian-ports"
+ __CodeName=sid
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
+ unset __LLDB_Package
+
+ if [[ -e "/usr/share/keyrings/debian-ports-archive-keyring.gpg" ]]; then
+ __Keyring="--keyring /usr/share/keyrings/debian-ports-archive-keyring.gpg --include=debian-ports-archive-keyring"
+ fi
+ ;;
+ s390x)
+ __BuildArch=s390x
+ __UbuntuArch=s390x
+ __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/"
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libunwind8-dev//')
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp-dev//')
+ __UbuntuPackages=$(echo ${__UbuntuPackages} | sed 's/ libomp5//')
+ unset __LLDB_Package
+ ;;
+ x64)
+ __BuildArch=x64
+ __UbuntuArch=amd64
+ __FreeBSDArch=amd64
+ __FreeBSDMachineArch=amd64
+ __illumosArch=x86_64
+ __UbuntuRepo=
+ ;;
+ x86)
+ __BuildArch=x86
+ __UbuntuArch=i386
+ __UbuntuRepo="http://archive.ubuntu.com/ubuntu/"
+ ;;
+ lldb*)
+ version="${lowerI/lldb/}"
+ parts=(${version//./ })
+
+ # for versions > 6.0, lldb has dropped the minor version
+ if [[ "${parts[0]}" -gt 6 ]]; then
+ version="${parts[0]}"
+ fi
+
+ __LLDB_Package="liblldb-${version}-dev"
+ ;;
+ no-lldb)
+ unset __LLDB_Package
+ ;;
+ llvm*)
+ version="${lowerI/llvm/}"
+ parts=(${version//./ })
+ __LLVM_MajorVersion="${parts[0]}"
+ __LLVM_MinorVersion="${parts[1]}"
+
+ # for versions > 6.0, llvm has dropped the minor version
+ if [[ -z "$__LLVM_MinorVersion" && "$__LLVM_MajorVersion" -le 6 ]]; then
+ __LLVM_MinorVersion=0;
+ fi
+ ;;
+ xenial) # Ubuntu 16.04
+ if [[ "$__CodeName" != "jessie" ]]; then
+ __CodeName=xenial
+ fi
+ ;;
+ zesty) # Ubuntu 17.04
+ if [[ "$__CodeName" != "jessie" ]]; then
+ __CodeName=zesty
+ fi
+ ;;
+ bionic) # Ubuntu 18.04
+ if [[ "$__CodeName" != "jessie" ]]; then
+ __CodeName=bionic
+ fi
+ ;;
+ focal) # Ubuntu 20.04
+ if [[ "$__CodeName" != "jessie" ]]; then
+ __CodeName=focal
+ fi
+ ;;
+ jammy) # Ubuntu 22.04
+ if [[ "$__CodeName" != "jessie" ]]; then
+ __CodeName=jammy
+ fi
+ ;;
+ jessie) # Debian 8
+ __CodeName=jessie
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ ;;
+ stretch) # Debian 9
+ __CodeName=stretch
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __LLDB_Package="liblldb-6.0-dev"
+ ;;
+ buster) # Debian 10
+ __CodeName=buster
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ __LLDB_Package="liblldb-6.0-dev"
+ ;;
+ tizen)
+ __CodeName=
+ __UbuntuRepo=
+ __Tizen=tizen
+ ;;
+ alpine|alpine3.13)
+ __CodeName=alpine
+ __UbuntuRepo=
+ __AlpineVersion=3.13
+ __AlpinePackages+=" llvm10-libs"
+ ;;
+ alpine3.14)
+ __CodeName=alpine
+ __UbuntuRepo=
+ __AlpineVersion=3.14
+ __AlpinePackages+=" llvm11-libs"
+ ;;
+ freebsd12)
+ __CodeName=freebsd
+ __SkipUnmount=1
+ ;;
+ freebsd13)
+ __CodeName=freebsd
+ __FreeBSDBase="13.0-RELEASE"
+ __FreeBSDABI="13"
+ __SkipUnmount=1
+ ;;
+ illumos)
+ __CodeName=illumos
+ __SkipUnmount=1
+ ;;
+ haiku)
+ __CodeName=haiku
+ __BuildArch=x64
+ __SkipUnmount=1
+ ;;
+ --skipunmount)
+ __SkipUnmount=1
+ ;;
+ --rootfsdir|-rootfsdir)
+ shift
+ __RootfsDir="$1"
+ ;;
+ --use-mirror)
+ __UseMirror=1
+ ;;
+ --use-jobs)
+ shift
+ MAXJOBS=$1
+ ;;
+ *)
+ __UnprocessedBuildArgs="$__UnprocessedBuildArgs $1"
+ ;;
+ esac
+
+ shift
+done
+
+if [[ "$__BuildArch" == "armel" ]]; then
+ __LLDB_Package="lldb-3.5-dev"
+fi
+
+__UbuntuPackages+=" ${__LLDB_Package:-}"
+
+if [[ -n "$__LLVM_MajorVersion" ]]; then
+ __UbuntuPackages+=" libclang-common-${__LLVM_MajorVersion}${__LLVM_MinorVersion:+.$__LLVM_MinorVersion}-dev"
+fi
+
+if [[ -z "$__RootfsDir" && -n "$ROOTFS_DIR" ]]; then
+ __RootfsDir="$ROOTFS_DIR"
+fi
+
+if [[ -z "$__RootfsDir" ]]; then
+ __RootfsDir="$__CrossDir/../../../.tools/rootfs/$__BuildArch"
+fi
+
+if [[ -d "$__RootfsDir" ]]; then
+ if [[ "$__SkipUnmount" == "0" ]]; then
+ umount "$__RootfsDir"/* || true
+ fi
+ rm -rf "$__RootfsDir"
+fi
+
+mkdir -p "$__RootfsDir"
+__RootfsDir="$( cd "$__RootfsDir" && pwd )"
+
+if [[ "$__CodeName" == "alpine" ]]; then
+ __ApkToolsVersion=2.9.1
+ __ApkToolsDir="$(mktemp -d)"
+ wget "https://github.com/alpinelinux/apk-tools/releases/download/v$__ApkToolsVersion/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz" -P "$__ApkToolsDir"
+ tar -xf "$__ApkToolsDir/apk-tools-$__ApkToolsVersion-x86_64-linux.tar.gz" -C "$__ApkToolsDir"
+ mkdir -p "$__RootfsDir"/usr/bin
+ cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin"
+
+ "$__ApkToolsDir/apk-tools-$__ApkToolsVersion/apk" \
+ -X "http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/main" \
+ -X "http://dl-cdn.alpinelinux.org/alpine/v$__AlpineVersion/community" \
+ -U --allow-untrusted --root "$__RootfsDir" --arch "$__AlpineArch" --initdb \
+ add $__AlpinePackages
+
+ rm -r "$__ApkToolsDir"
+elif [[ "$__CodeName" == "freebsd" ]]; then
+ mkdir -p "$__RootfsDir"/usr/local/etc
+ JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
+ wget -O - "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version
+ echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf
+ echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"${__RootfsDir}/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf
+ mkdir -p "$__RootfsDir"/tmp
+ # get and build package manager
+ wget -O - "https://github.com/freebsd/pkg/archive/${__FreeBSDPkg}.tar.gz" | tar -C "$__RootfsDir"/tmp -zxf -
+ cd "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
+ # needed for install to succeed
+ mkdir -p "$__RootfsDir"/host/etc
+ ./autogen.sh && ./configure --prefix="$__RootfsDir"/host && make -j "$JOBS" && make install
+ rm -rf "$__RootfsDir/tmp/pkg-${__FreeBSDPkg}"
+ # install packages we need.
+ INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf update
+ INSTALL_AS_USER=$(whoami) "$__RootfsDir"/host/sbin/pkg -r "$__RootfsDir" -C "$__RootfsDir"/usr/local/etc/pkg.conf install --yes $__FreeBSDPackages
+elif [[ "$__CodeName" == "illumos" ]]; then
+ mkdir "$__RootfsDir/tmp"
+ pushd "$__RootfsDir/tmp"
+ JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
+ echo "Downloading sysroot."
+ wget -O - https://github.com/illumos/sysroot/releases/download/20181213-de6af22ae73b-v1/illumos-sysroot-i386-20181213-de6af22ae73b-v1.tar.gz | tar -C "$__RootfsDir" -xzf -
+    echo "Building binutils. Please wait..."
+ wget -O - https://ftp.gnu.org/gnu/binutils/binutils-2.33.1.tar.bz2 | tar -xjf -
+ mkdir build-binutils && cd build-binutils
+ ../binutils-2.33.1/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir"
+ make -j "$JOBS" && make install && cd ..
+    echo "Building gcc. Please wait..."
+ wget -O - https://ftp.gnu.org/gnu/gcc/gcc-8.4.0/gcc-8.4.0.tar.xz | tar -xJf -
+ CFLAGS="-fPIC"
+ CXXFLAGS="-fPIC"
+ CXXFLAGS_FOR_TARGET="-fPIC"
+ CFLAGS_FOR_TARGET="-fPIC"
+ export CFLAGS CXXFLAGS CXXFLAGS_FOR_TARGET CFLAGS_FOR_TARGET
+ mkdir build-gcc && cd build-gcc
+ ../gcc-8.4.0/configure --prefix="$__RootfsDir" --target="${__illumosArch}-sun-solaris2.10" --program-prefix="${__illumosArch}-illumos-" --with-sysroot="$__RootfsDir" --with-gnu-as \
+ --with-gnu-ld --disable-nls --disable-libgomp --disable-libquadmath --disable-libssp --disable-libvtv --disable-libcilkrts --disable-libada --disable-libsanitizer \
+ --disable-libquadmath-support --disable-shared --enable-tls
+ make -j "$JOBS" && make install && cd ..
+ BaseUrl=https://pkgsrc.joyent.com
+ if [[ "$__UseMirror" == 1 ]]; then
+ BaseUrl=http://pkgsrc.smartos.skylime.net
+ fi
+ BaseUrl="$BaseUrl/packages/SmartOS/trunk/${__illumosArch}/All"
+ echo "Downloading manifest"
+ wget "$BaseUrl"
+ echo "Downloading dependencies."
+ read -ra array <<<"$__IllumosPackages"
+ for package in "${array[@]}"; do
+ echo "Installing '$package'"
+ package="$(grep ">$package-[0-9]" All | sed -En 's/.*href="(.*)\.tgz".*/\1/p')"
+ echo "Resolved name '$package'"
+ wget "$BaseUrl"/"$package".tgz
+ ar -x "$package".tgz
+ tar --skip-old-files -xzf "$package".tmp.tg* -C "$__RootfsDir" 2>/dev/null
+ done
+ echo "Cleaning up temporary files."
+ popd
+ rm -rf "$__RootfsDir"/{tmp,+*}
+ mkdir -p "$__RootfsDir"/usr/include/net
+ mkdir -p "$__RootfsDir"/usr/include/netpacket
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/bpf.h
+ wget -P "$__RootfsDir"/usr/include/net https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/io/bpf/net/dlt.h
+ wget -P "$__RootfsDir"/usr/include/netpacket https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/inet/sockmods/netpacket/packet.h
+ wget -P "$__RootfsDir"/usr/include/sys https://raw.githubusercontent.com/illumos/illumos-gate/master/usr/src/uts/common/sys/sdt.h
+elif [[ "$__CodeName" == "haiku" ]]; then
+ JOBS=${MAXJOBS:="$(getconf _NPROCESSORS_ONLN)"}
+
+ echo "Building Haiku sysroot for x86_64"
+ mkdir -p "$__RootfsDir/tmp"
+ cd "$__RootfsDir/tmp"
+ git clone -b hrev56235 https://review.haiku-os.org/haiku
+ git clone -b btrev43195 https://review.haiku-os.org/buildtools
+ cd "$__RootfsDir/tmp/buildtools" && git checkout 7487388f5110021d400b9f3b88e1a7f310dc066d
+
+ # Fetch some unmerged patches
+ cd "$__RootfsDir/tmp/haiku"
+ ## Add development build profile (slimmer than nightly)
+ git fetch origin refs/changes/64/4164/1 && git -c commit.gpgsign=false cherry-pick FETCH_HEAD
+
+ # Build jam
+ cd "$__RootfsDir/tmp/buildtools/jam"
+ make
+
+ # Configure cross tools
+ echo "Building cross-compiler"
+ mkdir -p "$__RootfsDir/generated"
+ cd "$__RootfsDir/generated"
+ "$__RootfsDir/tmp/haiku/configure" -j"$JOBS" --sysroot "$__RootfsDir" --cross-tools-source "$__RootfsDir/tmp/buildtools" --build-cross-tools x86_64
+
+ # Build Haiku packages
+ echo "Building Haiku"
+ echo 'HAIKU_BUILD_PROFILE = "development-raw" ;' > UserProfileConfig
+ "$__RootfsDir/tmp/buildtools/jam/jam0" -j"$JOBS" -q '<build>package' '<repository>Haiku'
+
+ BaseUrl="https://depot.haiku-os.org/__api/v2/pkg/get-pkg"
+
+ # Download additional packages
+ echo "Downloading additional required packages"
+ read -ra array <<<"$__HaikuPackages"
+ for package in "${array[@]}"; do
+ echo "Downloading $package..."
+ # API documented here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L60
+ # The schema here: https://github.com/haiku/haikudepotserver/blob/master/haikudepotserver-api2/src/main/resources/api2/pkg.yaml#L598
+ hpkgDownloadUrl="$(wget -qO- --post-data='{"name":"'"$package"'","repositorySourceCode":"haikuports_x86_64","versionType":"LATEST","naturalLanguageCode":"en"}' \
+ --header='Content-Type:application/json' "$BaseUrl" | jq -r '.result.versions[].hpkgDownloadURL')"
+ wget -P "$__RootfsDir/generated/download" "$hpkgDownloadUrl"
+ done
+
+ # Setup the sysroot
+ echo "Setting up sysroot and extracting needed packages"
+ mkdir -p "$__RootfsDir/boot/system"
+ for file in "$__RootfsDir/generated/objects/haiku/x86_64/packaging/packages/"*.hpkg; do
+ "$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
+ done
+ for file in "$__RootfsDir/generated/download/"*.hpkg; do
+ "$__RootfsDir/generated/objects/linux/x86_64/release/tools/package/package" extract -C "$__RootfsDir/boot/system" "$file"
+ done
+
+ # Cleaning up temporary files
+ echo "Cleaning up temporary files"
+ rm -rf "$__RootfsDir/tmp"
+ for name in "$__RootfsDir/generated/"*; do
+ if [[ "$name" =~ "cross-tools-" ]]; then
+ : # Keep the cross-compiler
+ else
+ rm -rf "$name"
+ fi
+ done
+elif [[ -n "$__CodeName" ]]; then
+ qemu-debootstrap $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
+ cp "$__CrossDir/$__BuildArch/sources.list.$__CodeName" "$__RootfsDir/etc/apt/sources.list"
+ chroot "$__RootfsDir" apt-get update
+ chroot "$__RootfsDir" apt-get -f -y install
+ chroot "$__RootfsDir" apt-get -y install $__UbuntuPackages
+ chroot "$__RootfsDir" symlinks -cr /usr
+ chroot "$__RootfsDir" apt-get clean
+
+ if [[ "$__SkipUnmount" == "0" ]]; then
+ umount "$__RootfsDir"/* || true
+ fi
+
+ if [[ "$__BuildArch" == "armel" && "$__CodeName" == "jessie" ]]; then
+ pushd "$__RootfsDir"
+ patch -p1 < "$__CrossDir/$__BuildArch/armel.jessie.patch"
+ popd
+ fi
+elif [[ "$__Tizen" == "tizen" ]]; then
+ ROOTFS_DIR="$__RootfsDir" "$__CrossDir/$__BuildArch/tizen-build-rootfs.sh"
+else
+ echo "Unsupported target platform."
+ usage;
+ exit 1
+fi
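
Tying the pieces together: the target directory comes from --rootfsdir or the ROOTFS_DIR environment variable, and the Ubuntu/Debian path shells out to qemu-debootstrap plus chroot, so the script generally needs root. An illustrative invocation (the directory is hypothetical):

    # arm64 Ubuntu bionic rootfs; debootstrap and chroot require root privileges
    sudo ROOTFS_DIR=/opt/cross/rootfs-arm64 ./eng/common/cross/build-rootfs.sh arm64 bionic
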
diff --git a/eng/common/cross/ppc64le/sources.list.bionic b/eng/common/cross/ppc64le/sources.list.bionic
new file mode 100644
index 0000000..2109557
--- /dev/null
+++ b/eng/common/cross/ppc64le/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/riscv64/sources.list.sid b/eng/common/cross/riscv64/sources.list.sid
new file mode 100644
index 0000000..65f730d
--- /dev/null
+++ b/eng/common/cross/riscv64/sources.list.sid
@@ -0,0 +1 @@
+deb http://deb.debian.org/debian-ports sid main
diff --git a/eng/common/cross/s390x/sources.list.bionic b/eng/common/cross/s390x/sources.list.bionic
new file mode 100644
index 0000000..2109557
--- /dev/null
+++ b/eng/common/cross/s390x/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-updates main restricted universe
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-backports main restricted
+
+deb http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
+deb-src http://ports.ubuntu.com/ubuntu-ports/ bionic-security main restricted universe multiverse
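
The toolchain.cmake file that follows reads ROOTFS_DIR and TARGET_BUILD_ARCH from the environment and maps them onto a CMake cross-compilation setup. A sketch of how a configure step would consume it (paths illustrative):

    # cross-configure a CMake project against a previously built rootfs
    ROOTFS_DIR=/opt/cross/rootfs-arm64 TARGET_BUILD_ARCH=arm64 \
      cmake -S . -B out -DCMAKE_TOOLCHAIN_FILE=eng/common/cross/toolchain.cmake
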
diff --git a/eng/common/cross/toolchain.cmake b/eng/common/cross/toolchain.cmake
new file mode 100644
index 0000000..561576b
--- /dev/null
+++ b/eng/common/cross/toolchain.cmake
@@ -0,0 +1,336 @@
+set(CROSS_ROOTFS $ENV{ROOTFS_DIR})
+
+set(TARGET_ARCH_NAME $ENV{TARGET_BUILD_ARCH})
+if(EXISTS ${CROSS_ROOTFS}/bin/freebsd-version)
+ set(CMAKE_SYSTEM_NAME FreeBSD)
+ set(FREEBSD 1)
+elseif(EXISTS ${CROSS_ROOTFS}/usr/platform/i86pc)
+ set(CMAKE_SYSTEM_NAME SunOS)
+ set(ILLUMOS 1)
+elseif(EXISTS ${CROSS_ROOTFS}/boot/system/develop/headers/config/HaikuConfig.h)
+  set(CMAKE_SYSTEM_NAME Haiku)
+  set(HAIKU 1)
+else()
+ set(CMAKE_SYSTEM_NAME Linux)
+ set(LINUX 1)
+endif()
+set(CMAKE_SYSTEM_VERSION 1)
+
+if(EXISTS ${CROSS_ROOTFS}/etc/tizen-release)
+ set(TIZEN 1)
+elseif(EXISTS ${CROSS_ROOTFS}/android_platform)
+ set(ANDROID 1)
+endif()
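+
+# The target OS and flavor are inferred purely from marker files inside the rootfs
+# (bin/freebsd-version, /etc/tizen-release, android_platform, ...), so retargeting
+# only requires pointing ROOTFS_DIR at a different rootfs.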
+
+if(TARGET_ARCH_NAME STREQUAL "arm")
+ set(CMAKE_SYSTEM_PROCESSOR armv7l)
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv7-alpine-linux-musleabihf)
+ set(TOOLCHAIN "armv7-alpine-linux-musleabihf")
+ elseif(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
+ set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
+ else()
+ set(TOOLCHAIN "arm-linux-gnueabihf")
+ endif()
+ if(TIZEN)
+ set(TIZEN_TOOLCHAIN "armv7hl-tizen-linux-gnueabihf/9.2.0")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ set(CMAKE_SYSTEM_PROCESSOR aarch64)
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/aarch64-alpine-linux-musl)
+ set(TOOLCHAIN "aarch64-alpine-linux-musl")
+ elseif(LINUX)
+ set(TOOLCHAIN "aarch64-linux-gnu")
+ if(TIZEN)
+ set(TIZEN_TOOLCHAIN "aarch64-tizen-linux-gnu/9.2.0")
+ endif()
+ elseif(FREEBSD)
+ set(triple "aarch64-unknown-freebsd12")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "armel")
+ set(CMAKE_SYSTEM_PROCESSOR armv7l)
+ set(TOOLCHAIN "arm-linux-gnueabi")
+ if(TIZEN)
+ set(TIZEN_TOOLCHAIN "armv7l-tizen-linux-gnueabi/9.2.0")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "armv6")
+ set(CMAKE_SYSTEM_PROCESSOR armv6l)
+ if(EXISTS ${CROSS_ROOTFS}/usr/lib/gcc/armv6-alpine-linux-musleabihf)
+ set(TOOLCHAIN "armv6-alpine-linux-musleabihf")
+ else()
+ set(TOOLCHAIN "arm-linux-gnueabihf")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "ppc64le")
+ set(CMAKE_SYSTEM_PROCESSOR ppc64le)
+ set(TOOLCHAIN "powerpc64le-linux-gnu")
+elseif(TARGET_ARCH_NAME STREQUAL "riscv64")
+ set(CMAKE_SYSTEM_PROCESSOR riscv64)
+ set(TOOLCHAIN "riscv64-linux-gnu")
+elseif(TARGET_ARCH_NAME STREQUAL "s390x")
+ set(CMAKE_SYSTEM_PROCESSOR s390x)
+ set(TOOLCHAIN "s390x-linux-gnu")
+elseif(TARGET_ARCH_NAME STREQUAL "x64")
+ set(CMAKE_SYSTEM_PROCESSOR x86_64)
+ if(LINUX)
+ set(TOOLCHAIN "x86_64-linux-gnu")
+ if(TIZEN)
+ set(TIZEN_TOOLCHAIN "x86_64-tizen-linux-gnu/9.2.0")
+ endif()
+ elseif(FREEBSD)
+ set(triple "x86_64-unknown-freebsd12")
+ elseif(ILLUMOS)
+ set(TOOLCHAIN "x86_64-illumos")
+ elseif(HAIKU)
+ set(TOOLCHAIN "x64_64-unknown-haiku")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ set(CMAKE_SYSTEM_PROCESSOR i686)
+ set(TOOLCHAIN "i686-linux-gnu")
+ if(TIZEN)
+ set(TIZEN_TOOLCHAIN "i586-tizen-linux-gnu/9.2.0")
+ endif()
+else()
+ message(FATAL_ERROR "Arch is ${TARGET_ARCH_NAME}. Only arm, arm64, armel, armv6, ppc64le, riscv64, s390x, x64 and x86 are supported!")
+endif()
+
+if(DEFINED ENV{TOOLCHAIN})
+ set(TOOLCHAIN $ENV{TOOLCHAIN})
+endif()
+
+# Specify include paths
+if(TIZEN)
+ if(TARGET_ARCH_NAME STREQUAL "arm")
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7hl-tizen-linux-gnueabihf)
+ endif()
+ if(TARGET_ARCH_NAME STREQUAL "armel")
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/armv7l-tizen-linux-gnueabi)
+ endif()
+ if(TARGET_ARCH_NAME STREQUAL "arm64")
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}/include/c++/aarch64-tizen-linux-gnu)
+ endif()
+ if(TARGET_ARCH_NAME STREQUAL "x86")
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/)
+ include_directories(SYSTEM ${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}/include/c++/i586-tizen-linux-gnu)
+ endif()
+endif()
+
+if(ANDROID)
+ if(TARGET_ARCH_NAME STREQUAL "arm")
+ set(ANDROID_ABI armeabi-v7a)
+ elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ set(ANDROID_ABI arm64-v8a)
+ endif()
+
+ # extract platform number required by the NDK's toolchain
+ file(READ "${CROSS_ROOTFS}/android_platform" RID_FILE_CONTENTS)
+ string(REPLACE "RID=" "" ANDROID_RID "${RID_FILE_CONTENTS}")
+ string(REGEX REPLACE ".*\\.([0-9]+)-.*" "\\1" ANDROID_PLATFORM "${ANDROID_RID}")
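+  # e.g. (illustrative) an android_platform file containing "RID=android.28-arm64"
+  # yields ANDROID_RID=android.28-arm64 and ANDROID_PLATFORM=28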
+
+ set(ANDROID_TOOLCHAIN clang)
+ set(FEATURE_EVENT_TRACE 0) # disable event trace as there is no lttng-ust package in termux repository
+ set(CMAKE_SYSTEM_LIBRARY_PATH "${CROSS_ROOTFS}/usr/lib")
+ set(CMAKE_SYSTEM_INCLUDE_PATH "${CROSS_ROOTFS}/usr/include")
+
+ # include official NDK toolchain script
+ include(${CROSS_ROOTFS}/../build/cmake/android.toolchain.cmake)
+elseif(FREEBSD)
+  # cross-compile by passing an explicit target triple to clang
+ set(CMAKE_C_COMPILER_TARGET ${triple})
+ set(CMAKE_CXX_COMPILER_TARGET ${triple})
+ set(CMAKE_ASM_COMPILER_TARGET ${triple})
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+ set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=lld")
+ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=lld")
+ set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fuse-ld=lld")
+elseif(ILLUMOS)
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+
+ include_directories(SYSTEM ${CROSS_ROOTFS}/include)
+
+ set(TOOLSET_PREFIX ${TOOLCHAIN}-)
+ function(locate_toolchain_exec exec var)
+ string(TOUPPER ${exec} EXEC_UPPERCASE)
+ if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
+ set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
+ return()
+ endif()
+
+ find_program(EXEC_LOCATION_${exec}
+ NAMES
+ "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
+ "${TOOLSET_PREFIX}${exec}")
+
+ if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
+ message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
+ endif()
+ set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
+ endfunction()
+
+ set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
+
+ locate_toolchain_exec(gcc CMAKE_C_COMPILER)
+ locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
+
+ set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
+ set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
+elseif(HAIKU)
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+
+ set(TOOLSET_PREFIX ${TOOLCHAIN}-)
+ function(locate_toolchain_exec exec var)
+ string(TOUPPER ${exec} EXEC_UPPERCASE)
+ if(NOT "$ENV{CLR_${EXEC_UPPERCASE}}" STREQUAL "")
+ set(${var} "$ENV{CLR_${EXEC_UPPERCASE}}" PARENT_SCOPE)
+ return()
+ endif()
+
+ set(SEARCH_PATH "${CROSS_ROOTFS}/generated/cross-tools-x86_64/bin")
+
+ find_program(EXEC_LOCATION_${exec}
+ PATHS ${SEARCH_PATH}
+ NAMES
+ "${TOOLSET_PREFIX}${exec}${CLR_CMAKE_COMPILER_FILE_NAME_VERSION}"
+ "${TOOLSET_PREFIX}${exec}")
+
+ if (EXEC_LOCATION_${exec} STREQUAL "EXEC_LOCATION_${exec}-NOTFOUND")
+ message(FATAL_ERROR "Unable to find toolchain executable. Name: ${exec}, Prefix: ${TOOLSET_PREFIX}.")
+ endif()
+ set(${var} ${EXEC_LOCATION_${exec}} PARENT_SCOPE)
+ endfunction()
+
+ set(CMAKE_SYSTEM_PREFIX_PATH "${CROSS_ROOTFS}")
+
+ locate_toolchain_exec(gcc CMAKE_C_COMPILER)
+ locate_toolchain_exec(g++ CMAKE_CXX_COMPILER)
+
+ set(CMAKE_C_STANDARD_LIBRARIES "${CMAKE_C_STANDARD_LIBRARIES} -lssp")
+ set(CMAKE_CXX_STANDARD_LIBRARIES "${CMAKE_CXX_STANDARD_LIBRARIES} -lssp")
+
+ # let CMake set up the correct search paths
+ include(Platform/Haiku)
+else()
+ set(CMAKE_SYSROOT "${CROSS_ROOTFS}")
+
+ set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+ set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+ set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${CROSS_ROOTFS}/usr")
+endif()
+
+# Specify link flags
+
+function(add_toolchain_linker_flag Flag)
+ set(Config "${ARGV1}")
+ set(CONFIG_SUFFIX "")
+ if (NOT Config STREQUAL "")
+ set(CONFIG_SUFFIX "_${Config}")
+ endif()
+ set("CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_EXE_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE)
+ set("CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT" "${CMAKE_SHARED_LINKER_FLAGS${CONFIG_SUFFIX}_INIT} ${Flag}" PARENT_SCOPE)
+endfunction()
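+
+# For example, add_toolchain_linker_flag("-m32" DEBUG) appends -m32 to
+# CMAKE_EXE_LINKER_FLAGS_DEBUG_INIT and CMAKE_SHARED_LINKER_FLAGS_DEBUG_INIT;
+# without a config argument the un-suffixed *_INIT variables are amended.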
+
+if(LINUX)
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib/${TOOLCHAIN}")
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}")
+endif()
+
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
+ if(TIZEN)
+ add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "arm64")
+ if(TIZEN)
+ add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/lib64")
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64")
+ add_toolchain_linker_flag("-Wl,--rpath-link=${CROSS_ROOTFS}/usr/lib64/gcc/${TIZEN_TOOLCHAIN}")
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ add_toolchain_linker_flag(-m32)
+
+ if(TIZEN)
+ add_toolchain_linker_flag("-B${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/lib/gcc/${TIZEN_TOOLCHAIN}")
+ endif()
+elseif(ILLUMOS)
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/lib/amd64")
+ add_toolchain_linker_flag("-L${CROSS_ROOTFS}/usr/amd64/lib")
+endif()
+
+# Specify compile options
+
+if((TARGET_ARCH_NAME MATCHES "^(arm|arm64|armel|armv6|ppc64le|riscv64|s390x)$" AND NOT ANDROID AND NOT FREEBSD) OR ILLUMOS OR HAIKU)
+ set(CMAKE_C_COMPILER_TARGET ${TOOLCHAIN})
+ set(CMAKE_CXX_COMPILER_TARGET ${TOOLCHAIN})
+ set(CMAKE_ASM_COMPILER_TARGET ${TOOLCHAIN})
+endif()
+
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel)$")
+ add_compile_options(-mthumb)
+  if(NOT DEFINED CLR_ARM_FPU_TYPE)
+    set(CLR_ARM_FPU_TYPE vfpv3)
+  endif()
+
+  add_compile_options(-mfpu=${CLR_ARM_FPU_TYPE})
+  if(NOT DEFINED CLR_ARM_FPU_CAPABILITY)
+    set(CLR_ARM_FPU_CAPABILITY 0x7)
+  endif()
+
+ add_definitions (-DCLR_ARM_FPU_CAPABILITY=${CLR_ARM_FPU_CAPABILITY})
+
+ if(TARGET_ARCH_NAME STREQUAL "armel")
+ add_compile_options(-mfloat-abi=softfp)
+ endif()
+elseif(TARGET_ARCH_NAME STREQUAL "x86")
+ add_compile_options(-m32)
+ add_compile_options(-Wno-error=unused-command-line-argument)
+endif()
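+
+# The FPU defaults above can be overridden when configuring, e.g. (illustrative):
+#   cmake -DCLR_ARM_FPU_TYPE=vfpv3-d16 -DCLR_ARM_FPU_CAPABILITY=0x3 ...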
+
+if(TIZEN)
+ if(TARGET_ARCH_NAME MATCHES "^(arm|armel|arm64|x86)$")
+ add_compile_options(-Wno-deprecated-declarations) # compile-time option
+ add_compile_options(-D__extern_always_inline=inline) # compile-time option
+ endif()
+endif()
+
+# Set LLDB include and library paths for builds that need lldb.
+if(TARGET_ARCH_NAME MATCHES "^(arm|armel|x86)$")
+ if(TARGET_ARCH_NAME STREQUAL "x86")
+ set(LLVM_CROSS_DIR "$ENV{LLVM_CROSS_HOME}")
+ else() # arm/armel case
+ set(LLVM_CROSS_DIR "$ENV{LLVM_ARM_HOME}")
+ endif()
+ if(LLVM_CROSS_DIR)
+ set(WITH_LLDB_LIBS "${LLVM_CROSS_DIR}/lib/" CACHE STRING "")
+ set(WITH_LLDB_INCLUDES "${LLVM_CROSS_DIR}/include" CACHE STRING "")
+ set(LLDB_H "${WITH_LLDB_INCLUDES}" CACHE STRING "")
+ set(LLDB "${LLVM_CROSS_DIR}/lib/liblldb.so" CACHE STRING "")
+ else()
+ if(TARGET_ARCH_NAME STREQUAL "x86")
+ set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/i386-linux-gnu" CACHE STRING "")
+ set(CHECK_LLVM_DIR "${CROSS_ROOTFS}/usr/lib/llvm-3.8/include")
+ if(EXISTS "${CHECK_LLVM_DIR}" AND IS_DIRECTORY "${CHECK_LLVM_DIR}")
+ set(WITH_LLDB_INCLUDES "${CHECK_LLVM_DIR}")
+ else()
+ set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include")
+ endif()
+ else() # arm/armel case
+ set(WITH_LLDB_LIBS "${CROSS_ROOTFS}/usr/lib/${TOOLCHAIN}" CACHE STRING "")
+ set(WITH_LLDB_INCLUDES "${CROSS_ROOTFS}/usr/lib/llvm-3.6/include" CACHE STRING "")
+ endif()
+ endif()
+endif()
+
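+# Programs (compilers, tools) are looked up on the host (NEVER); libraries, headers
+# and CMake packages resolve only inside the sysroot (ONLY), so host artifacts
+# cannot leak into the cross build.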
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
diff --git a/eng/common/cross/x86/sources.list.bionic b/eng/common/cross/x86/sources.list.bionic
new file mode 100644
index 0000000..a71ccad
--- /dev/null
+++ b/eng/common/cross/x86/sources.list.bionic
@@ -0,0 +1,11 @@
+deb http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
+deb-src http://archive.ubuntu.com/ubuntu/ bionic main restricted universe
+
+deb http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
+deb-src http://archive.ubuntu.com/ubuntu/ bionic-updates main restricted universe
+
+deb http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
+deb-src http://archive.ubuntu.com/ubuntu/ bionic-backports main restricted
+
+deb http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
+deb-src http://archive.ubuntu.com/ubuntu/ bionic-security main restricted universe multiverse
diff --git a/eng/common/cross/x86/sources.list.focal b/eng/common/cross/x86/sources.list.focal
new file mode 100644
index 0000000..99d5731
--- /dev/null
+++ b/eng/common/cross/x86/sources.list.focal
@@ -0,0 +1,11 @@
+deb http://archive.ubuntu.com/ubuntu/ focal main restricted universe
+deb-src http://archive.ubuntu.com/ubuntu/ focal main restricted universe
+
+deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe
+deb-src http://archive.ubuntu.com/ubuntu/ focal-updates main restricted universe
+
+deb http://archive.ubuntu.com/ubuntu/ focal-backports main restricted
+deb-src http://archive.ubuntu.com/ubuntu/ focal-backports main restricted
+
+deb http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse
+deb-src http://archive.ubuntu.com/ubuntu/ focal-security main restricted universe multiverse
diff --git a/eng/common/cross/x86/sources.list.jammy b/eng/common/cross/x86/sources.list.jammy
new file mode 100644
index 0000000..af1c1fe
--- /dev/null
+++ b/eng/common/cross/x86/sources.list.jammy
@@ -0,0 +1,11 @@
+deb http://archive.ubuntu.com/ubuntu/ jammy main restricted universe
+deb-src http://archive.ubuntu.com/ubuntu/ jammy main restricted universe
+
+deb http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe
+deb-src http://archive.ubuntu.com/ubuntu/ jammy-updates main restricted universe
+
+deb http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted
+deb-src http://archive.ubuntu.com/ubuntu/ jammy-backports main restricted
+
+deb http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse
+deb-src http://archive.ubuntu.com/ubuntu/ jammy-security main restricted universe multiverse
diff --git a/eng/common/cross/x86/sources.list.xenial b/eng/common/cross/x86/sources.list.xenial
new file mode 100644
index 0000000..ad9c5a0
--- /dev/null
+++ b/eng/common/cross/x86/sources.list.xenial
@@ -0,0 +1,11 @@
+deb http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
+deb-src http://archive.ubuntu.com/ubuntu/ xenial main restricted universe
+
+deb http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
+deb-src http://archive.ubuntu.com/ubuntu/ xenial-updates main restricted universe
+
+deb http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
+deb-src http://archive.ubuntu.com/ubuntu/ xenial-backports main restricted
+
+deb http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
+deb-src http://archive.ubuntu.com/ubuntu/ xenial-security main restricted universe multiverse
diff --git a/eng/common/cross/x86/tizen-build-rootfs.sh b/eng/common/cross/x86/tizen-build-rootfs.sh
new file mode 100644
index 0000000..f5f955d
--- /dev/null
+++ b/eng/common/cross/x86/tizen-build-rootfs.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+set -e
+
+__X86_CrossDir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+__TIZEN_CROSSDIR="$__X86_CrossDir/tizen"
+
+if [[ -z "$ROOTFS_DIR" ]]; then
+ echo "ROOTFS_DIR is not defined."
+ exit 1;
+fi
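+
+# Example invocation (assumption; the rootfs path is illustrative):
+#   ROOTFS_DIR=/crossrootfs/x86 ./eng/common/cross/x86/tizen-build-rootfs.sh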
+
+TIZEN_TMP_DIR=$ROOTFS_DIR/tizen_tmp
+mkdir -p $TIZEN_TMP_DIR
+
+# Download files
+echo ">>Start downloading files"
+VERBOSE=1 $__X86_CrossDir/tizen-fetch.sh $TIZEN_TMP_DIR
+echo "<<Finish downloading files"
+
+echo ">>Start constructing Tizen rootfs"
+TIZEN_RPM_FILES=`ls $TIZEN_TMP_DIR/*.rpm`
+cd $ROOTFS_DIR
+for f in $TIZEN_RPM_FILES; do
+ rpm2cpio $f | cpio -idm --quiet
+done
+echo "<<Finish constructing Tizen rootfs"
+
+# Cleanup tmp
+rm -rf $TIZEN_TMP_DIR
+
+# Configure Tizen rootfs
+echo ">>Start configuring Tizen rootfs"
+ln -sfn asm-x86 ./usr/include/asm
+patch -p1 < $__TIZEN_CROSSDIR/tizen.patch
+echo "<<Finish configuring Tizen rootfs"
diff --git a/eng/common/cross/x86/tizen-fetch.sh b/eng/common/cross/x86/tizen-fetch.sh
new file mode 100644
index 0000000..fa5f88b
--- /dev/null
+++ b/eng/common/cross/x86/tizen-fetch.sh
@@ -0,0 +1,170 @@
+#!/usr/bin/env bash
+set -e
+
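+# Normalize VERBOSE: default to 0 when it is unset or blank. The numeric
+# self-comparison is an attempt to also catch non-integer values; its error
+# output is silenced by the 2>/dev/null.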
+if [[ -z "${VERBOSE// }" ]] || [ "$VERBOSE" -ne "$VERBOSE" ] 2>/dev/null; then
+ VERBOSE=0
+fi
+
+Log()
+{
+ if [ $VERBOSE -ge $1 ]; then
+ echo ${@:2}
+ fi
+}
+
+Inform()
+{
+ Log 1 -e "\x1B[0;34m$@\x1B[m"
+}
+
+Debug()
+{
+ Log 2 -e "\x1B[0;32m$@\x1B[m"
+}
+
+Error()
+{
+ >&2 Log 0 -e "\x1B[0;31m$@\x1B[m"
+}
+
+Fetch()
+{
+ URL=$1
+ FILE=$2
+ PROGRESS=$3
+ if [ $VERBOSE -ge 1 ] && [ $PROGRESS ]; then
+ CURL_OPT="--progress-bar"
+ else
+ CURL_OPT="--silent"
+ fi
+ curl $CURL_OPT $URL > $FILE
+}
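+
+# Example: Fetch "$TIZEN_URL/build.xml" "$TMPDIR/build.xml" true
+# (the third argument enables curl's progress bar when VERBOSE >= 1)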
+
+hash curl 2> /dev/null || { Error "'curl' is required. Aborting."; exit 1; }
+hash xmllint 2> /dev/null || { Error "'xmllint' is required. Aborting."; exit 1; }
+hash sha256sum 2> /dev/null || { Error "'sha256sum' is required. Aborting."; exit 1; }
+
+TMPDIR=$1
+if [ ! -d $TMPDIR ]; then
+ TMPDIR=./tizen_tmp
+ Debug "Create temporary directory : $TMPDIR"
+ mkdir -p $TMPDIR
+fi
+
+TIZEN_URL=http://download.tizen.org/snapshots/tizen
+BUILD_XML=build.xml
+REPOMD_XML=repomd.xml
+PRIMARY_XML=primary.xml
+TARGET_URL="http://__not_initialized"
+
+Xpath_get()
+{
+ XPATH_RESULT=''
+ XPATH=$1
+ XML_FILE=$2
+ RESULT=$(xmllint --xpath $XPATH $XML_FILE)
+ if [[ -z ${RESULT// } ]]; then
+ Error "Can not find target from $XML_FILE"
+ Debug "Xpath = $XPATH"
+ exit 1
+ fi
+ XPATH_RESULT=$RESULT
+}
+
+fetch_tizen_pkgs_init()
+{
+ TARGET=$1
+ PROFILE=$2
+ Debug "Initialize TARGET=$TARGET, PROFILE=$PROFILE"
+
+ TMP_PKG_DIR=$TMPDIR/tizen_${PROFILE}_pkgs
+ if [ -d $TMP_PKG_DIR ]; then rm -rf $TMP_PKG_DIR; fi
+ mkdir -p $TMP_PKG_DIR
+
+ PKG_URL=$TIZEN_URL/$PROFILE/latest
+
+ BUILD_XML_URL=$PKG_URL/$BUILD_XML
+ TMP_BUILD=$TMP_PKG_DIR/$BUILD_XML
+ TMP_REPOMD=$TMP_PKG_DIR/$REPOMD_XML
+ TMP_PRIMARY=$TMP_PKG_DIR/$PRIMARY_XML
+ TMP_PRIMARYGZ=${TMP_PRIMARY}.gz
+
+ Fetch $BUILD_XML_URL $TMP_BUILD
+
+ Debug "fetch $BUILD_XML_URL to $TMP_BUILD"
+
+ TARGET_XPATH="//build/buildtargets/buildtarget[@name=\"$TARGET\"]/repo[@type=\"binary\"]/text()"
+ Xpath_get $TARGET_XPATH $TMP_BUILD
+ TARGET_PATH=$XPATH_RESULT
+ TARGET_URL=$PKG_URL/$TARGET_PATH
+
+ REPOMD_URL=$TARGET_URL/repodata/repomd.xml
+ PRIMARY_XPATH='string(//*[local-name()="data"][@type="primary"]/*[local-name()="location"]/@href)'
+
+ Fetch $REPOMD_URL $TMP_REPOMD
+
+ Debug "fetch $REPOMD_URL to $TMP_REPOMD"
+
+ Xpath_get $PRIMARY_XPATH $TMP_REPOMD
+ PRIMARY_XML_PATH=$XPATH_RESULT
+ PRIMARY_URL=$TARGET_URL/$PRIMARY_XML_PATH
+
+ Fetch $PRIMARY_URL $TMP_PRIMARYGZ
+
+ Debug "fetch $PRIMARY_URL to $TMP_PRIMARYGZ"
+
+ gunzip $TMP_PRIMARYGZ
+
+ Debug "unzip $TMP_PRIMARYGZ to $TMP_PRIMARY"
+}
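+
+# After init, TARGET_URL points at the binary repo of the requested build target and
+# $TMP_PRIMARY holds the unpacked primary.xml package index queried by fetch_tizen_pkgs below.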
+
+fetch_tizen_pkgs()
+{
+ ARCH=$1
+ PACKAGE_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="location"]/@href)'
+
+ PACKAGE_CHECKSUM_XPATH_TPL='string(//*[local-name()="metadata"]/*[local-name()="package"][*[local-name()="name"][text()="_PKG_"]][*[local-name()="arch"][text()="_ARCH_"]]/*[local-name()="checksum"]/text())'
+
+ for pkg in ${@:2}
+ do
+ Inform "Fetching... $pkg"
+ XPATH=${PACKAGE_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ PKG_PATH=$XPATH_RESULT
+
+ XPATH=${PACKAGE_CHECKSUM_XPATH_TPL/_PKG_/$pkg}
+ XPATH=${XPATH/_ARCH_/$ARCH}
+ Xpath_get $XPATH $TMP_PRIMARY
+ CHECKSUM=$XPATH_RESULT
+
+ PKG_URL=$TARGET_URL/$PKG_PATH
+ PKG_FILE=$(basename $PKG_PATH)
+ PKG_PATH=$TMPDIR/$PKG_FILE
+
+ Debug "Download $PKG_URL to $PKG_PATH"
+ Fetch $PKG_URL $PKG_PATH true
+
+ echo "$CHECKSUM $PKG_PATH" | sha256sum -c - > /dev/null
+ if [ $? -ne 0 ]; then
+ Error "Fail to fetch $PKG_URL to $PKG_PATH"
+ Debug "Checksum = $CHECKSUM"
+ exit 1
+ fi
+ done
+}
+
+Inform "Initialize i686 base"
+fetch_tizen_pkgs_init standard base
+Inform "fetch common packages"
+fetch_tizen_pkgs i686 gcc gcc-devel-static glibc glibc-devel libicu libicu-devel libatomic linux-glibc-devel keyutils keyutils-devel libkeyutils
+Inform "fetch coreclr packages"
+fetch_tizen_pkgs i686 lldb lldb-devel libgcc libstdc++ libstdc++-devel libunwind libunwind-devel lttng-ust-devel lttng-ust userspace-rcu-devel userspace-rcu
+Inform "fetch corefx packages"
+fetch_tizen_pkgs i686 libcom_err libcom_err-devel zlib zlib-devel libopenssl11 libopenssl1.1-devel krb5 krb5-devel
+
+Inform "Initialize standard unified"
+fetch_tizen_pkgs_init standard unified
+Inform "fetch corefx packages"
+fetch_tizen_pkgs i686 gssdp gssdp-devel tizen-release
+
diff --git a/eng/common/cross/x86/tizen/tizen.patch b/eng/common/cross/x86/tizen/tizen.patch
new file mode 100644
index 0000000..f4fe883
--- /dev/null
+++ b/eng/common/cross/x86/tizen/tizen.patch
@@ -0,0 +1,9 @@
+diff -u -r a/usr/lib/libc.so b/usr/lib/libc.so
+--- a/usr/lib/libc.so 2016-12-30 23:00:08.284951863 +0900
++++ b/usr/lib/libc.so 2016-12-30 23:00:32.140951815 +0900
+@@ -2,4 +2,4 @@
+ Use the shared library, but some functions are only in
+ the static library, so try that secondarily. */
+ OUTPUT_FORMAT(elf32-i386)
+-GROUP ( /lib/libc.so.6 /usr/lib/libc_nonshared.a AS_NEEDED ( /lib/ld-linux.so.2 ) )
++GROUP ( libc.so.6 libc_nonshared.a AS_NEEDED ( ld-linux.so.2 ) )
diff --git a/eng/common/darc-init.ps1 b/eng/common/darc-init.ps1
new file mode 100644
index 0000000..435e764
--- /dev/null
+++ b/eng/common/darc-init.ps1
@@ -0,0 +1,47 @@
+param (
+ $darcVersion = $null,
+ $versionEndpoint = 'https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16',
+ $verbosity = 'minimal',
+ $toolpath = $null
+)
+
+. $PSScriptRoot\tools.ps1
+
+function InstallDarcCli ($darcVersion, $toolpath) {
+ $darcCliPackageName = 'microsoft.dotnet.darc'
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list -g
+
+ if ($toolList -like "*$darcCliPackageName*") {
+ & "$dotnet" tool uninstall $darcCliPackageName -g
+ }
+
+ # If the user didn't explicitly specify the darc version,
+ # query the Maestro API for the correct version of darc to install.
+ if (-not $darcVersion) {
+ $darcVersion = $(Invoke-WebRequest -Uri $versionEndpoint -UseBasicParsing).Content
+ }
+
+ $arcadeServicesSource = 'https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/index.json'
+
+ Write-Host "Installing Darc CLI version $darcVersion..."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
+ if (-not $toolpath) {
+ Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity -g"
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g
+  } else {
+ Write-Host "'$dotnet' tool install $darcCliPackageName --version $darcVersion --add-source '$arcadeServicesSource' -v $verbosity --tool-path '$toolpath'"
+ & "$dotnet" tool install $darcCliPackageName --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath"
+ }
+}
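+
+# Example invocation (the version number is illustrative):
+#   .\darc-init.ps1 -darcVersion '1.1.0-beta.20074.1' -toolpath 'C:\tools\darc'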
+
+try {
+ InstallDarcCli $darcVersion $toolpath
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Darc' -Message $_
+ ExitWithExitCode 1
+}
\ No newline at end of file
diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh
new file mode 100644
index 0000000..84c1d0c
--- /dev/null
+++ b/eng/common/darc-init.sh
@@ -0,0 +1,82 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+darcVersion=''
+versionEndpoint='https://maestro-prod.westus2.cloudapp.azure.com/api/assets/darc-version?api-version=2019-01-16'
+verbosity='minimal'
+
+while [[ $# > 0 ]]; do
+ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ --darcversion)
+ darcVersion=$2
+ shift
+ ;;
+ --versionendpoint)
+ versionEndpoint=$2
+ shift
+ ;;
+ --verbosity)
+ verbosity=$2
+ shift
+ ;;
+ --toolpath)
+ toolpath=$2
+ shift
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ usage
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+if [ -z "$darcVersion" ]; then
+ darcVersion=$(curl -X GET "$versionEndpoint" -H "accept: text/plain")
+fi
+
+function InstallDarcCli {
+ local darc_cli_package_name="microsoft.dotnet.darc"
+
+ InitializeDotNetCli true
+ local dotnet_root=$_InitializeDotNetCli
+
+ if [ -z "$toolpath" ]; then
+ local tool_list=$($dotnet_root/dotnet tool list -g)
+ if [[ $tool_list = *$darc_cli_package_name* ]]; then
+ echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name -g)
+ fi
+ else
+ local tool_list=$($dotnet_root/dotnet tool list --tool-path "$toolpath")
+ if [[ $tool_list = *$darc_cli_package_name* ]]; then
+ echo $($dotnet_root/dotnet tool uninstall $darc_cli_package_name --tool-path "$toolpath")
+ fi
+ fi
+
+ local arcadeServicesSource="https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json"
+
+ echo "Installing Darc CLI version $darcVersion..."
+ echo "You may need to restart your command shell if this is the first dotnet tool you have installed."
+ if [ -z "$toolpath" ]; then
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity -g)
+ else
+ echo $($dotnet_root/dotnet tool install $darc_cli_package_name --version $darcVersion --add-source "$arcadeServicesSource" -v $verbosity --tool-path "$toolpath")
+ fi
+}
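+
+# Example invocation (the version number is illustrative):
+#   ./eng/common/darc-init.sh --darcversion 1.1.0-beta.20074.1 --toolpath ~/tools/darc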
+
+InstallDarcCli
diff --git a/eng/common/dotnet-install.cmd b/eng/common/dotnet-install.cmd
new file mode 100644
index 0000000..b1c2642
--- /dev/null
+++ b/eng/common/dotnet-install.cmd
@@ -0,0 +1,2 @@
+@echo off
+powershell -ExecutionPolicy ByPass -NoProfile -command "& """%~dp0dotnet-install.ps1""" %*"
\ No newline at end of file
diff --git a/eng/common/dotnet-install.ps1 b/eng/common/dotnet-install.ps1
new file mode 100644
index 0000000..811f0f7
--- /dev/null
+++ b/eng/common/dotnet-install.ps1
@@ -0,0 +1,28 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $verbosity = 'minimal',
+ [string] $architecture = '',
+ [string] $version = 'Latest',
+ [string] $runtime = 'dotnet',
+ [string] $RuntimeSourceFeed = '',
+ [string] $RuntimeSourceFeedKey = ''
+)
+
+. $PSScriptRoot\tools.ps1
+
+$dotnetRoot = Join-Path $RepoRoot '.dotnet'
+
+$installdir = $dotnetRoot
+try {
+ if ($architecture -and $architecture.Trim() -eq 'x86') {
+ $installdir = Join-Path $installdir 'x86'
+ }
+ InstallDotNet $installdir $version $architecture $runtime $true -RuntimeSourceFeed $RuntimeSourceFeed -RuntimeSourceFeedKey $RuntimeSourceFeedKey
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh
new file mode 100644
index 0000000..abd045a
--- /dev/null
+++ b/eng/common/dotnet-install.sh
@@ -0,0 +1,87 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+version='Latest'
+architecture=''
+runtime='dotnet'
+runtimeSourceFeed=''
+runtimeSourceFeedKey=''
+while [[ $# > 0 ]]; do
+ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -version|-v)
+ shift
+ version="$1"
+ ;;
+ -architecture|-a)
+ shift
+ architecture="$1"
+ ;;
+ -runtime|-r)
+ shift
+ runtime="$1"
+ ;;
+ -runtimesourcefeed)
+ shift
+ runtimeSourceFeed="$1"
+ ;;
+ -runtimesourcefeedkey)
+ shift
+ runtimeSourceFeedKey="$1"
+ ;;
+ *)
+ Write-PipelineTelemetryError -Category 'Build' -Message "Invalid argument: $1"
+ exit 1
+ ;;
+ esac
+ shift
+done
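+
+# Example invocation (the version number is illustrative):
+#   ./eng/common/dotnet-install.sh -runtime dotnet -version 6.0.5 -architecture x64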
+
+# Use uname to determine what the CPU is, see https://en.wikipedia.org/wiki/Uname#Examples
+cpuname=$(uname -m)
+case $cpuname in
+ arm64|aarch64)
+ buildarch=arm64
+ ;;
+ loongarch64)
+ buildarch=loongarch64
+ ;;
+ amd64|x86_64)
+ buildarch=x64
+ ;;
+ armv*l)
+ buildarch=arm
+ ;;
+ i[3-6]86)
+ buildarch=x86
+ ;;
+ *)
+ echo "Unknown CPU $cpuname detected, treating it as x64"
+ buildarch=x64
+ ;;
+esac
+
+dotnetRoot="${repo_root}.dotnet"
+if [[ $architecture != "" ]] && [[ $architecture != $buildarch ]]; then
+ dotnetRoot="$dotnetRoot/$architecture"
+fi
+
+InstallDotNet $dotnetRoot $version "$architecture" $runtime true $runtimeSourceFeed $runtimeSourceFeedKey || {
+  exit_code=$?
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "dotnet-install.sh failed (exit code '$exit_code')." >&2
+ ExitWithExitCode $exit_code
+}
+
+ExitWithExitCode 0
diff --git a/eng/common/enable-cross-org-publishing.ps1 b/eng/common/enable-cross-org-publishing.ps1
new file mode 100644
index 0000000..da09da4
--- /dev/null
+++ b/eng/common/enable-cross-org-publishing.ps1
@@ -0,0 +1,13 @@
+param(
+ [string] $token
+)
+
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+
+# Write-PipelineSetVariable will no-op if a variable named $ci is not defined
+# Since this script is only ever called in AzDO builds, just universally set it
+$ci = $true
+
+Write-PipelineSetVariable -Name 'VSS_NUGET_ACCESSTOKEN' -Value $token -IsMultiJobVariable $false
+Write-PipelineSetVariable -Name 'VSS_NUGET_URI_PREFIXES' -Value 'https://dnceng.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/dnceng/;https://devdiv.pkgs.visualstudio.com/;https://pkgs.dev.azure.com/devdiv/' -IsMultiJobVariable $false
diff --git a/eng/common/generate-locproject.ps1 b/eng/common/generate-locproject.ps1
new file mode 100644
index 0000000..dbf2ab4
--- /dev/null
+++ b/eng/common/generate-locproject.ps1
@@ -0,0 +1,142 @@
+Param(
+ [Parameter(Mandatory=$true)][string] $SourcesDirectory, # Directory where source files live; if using a Localize directory it should live in here
+ [string] $LanguageSet = 'VS_Main_Languages', # Language set to be used in the LocProject.json
+ [switch] $UseCheckedInLocProjectJson, # When set, generates a LocProject.json and compares it to one that already exists in the repo; otherwise just generates one
+ [switch] $CreateNeutralXlfs # Creates neutral xlf files. Only set to false when running locally
+)
+
+# Generates LocProject.json files for the OneLocBuild task. OneLocBuildTask is described here:
+# https://ceapex.visualstudio.com/CEINTL/_wiki/wikis/CEINTL.wiki/107/Localization-with-OneLocBuild-Task
+
+Set-StrictMode -Version 2.0
+$ErrorActionPreference = "Stop"
+. $PSScriptRoot\pipeline-logging-functions.ps1
+
+$exclusionsFilePath = "$SourcesDirectory\eng\Localize\LocExclusions.json"
+$exclusions = @{ Exclusions = @() }
+if (Test-Path -Path $exclusionsFilePath)
+{
+ $exclusions = Get-Content "$exclusionsFilePath" | ConvertFrom-Json
+}
+
+Push-Location "$SourcesDirectory" # push location for Resolve-Path -Relative to work
+
+# Template files
+$jsonFiles = @()
+$jsonTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\.template\.config\\localize\\.+\.en\.json" } # .NET templating pattern
+$jsonTemplateFiles | ForEach-Object {
+    $null = $_.Name -Match "(.+)\.[\w-]+\.json" # matches '[filename].[langcode].json'
+
+ $destinationFile = "$($_.Directory.FullName)\$($Matches.1).json"
+ $jsonFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
+}
+
+$jsonWinformsTemplateFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "en\\strings\.json" } # current winforms pattern
+
+$wxlFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory" | Where-Object { $_.FullName -Match "\\.+\.wxl" -And -Not( $_.Directory.Name -Match "\d{4}" ) } # localized files live in four digit lang ID directories; this excludes them
+
+$xlfFiles = @()
+
+$allXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.xlf"
+$langXlfFiles = @()
+if ($allXlfFiles) {
+ $null = $allXlfFiles[0].FullName -Match "\.([\w-]+)\.xlf" # matches '[langcode].xlf'
+ $firstLangCode = $Matches.1
+ $langXlfFiles = Get-ChildItem -Recurse -Path "$SourcesDirectory\*\*.$firstLangCode.xlf"
+}
+$langXlfFiles | ForEach-Object {
+    $null = $_.Name -Match "(.+)\.[\w-]+\.xlf" # matches '[filename].[langcode].xlf'
+
+ $destinationFile = "$($_.Directory.FullName)\$($Matches.1).xlf"
+ $xlfFiles += Copy-Item "$($_.FullName)" -Destination $destinationFile -PassThru
+}
+
+$locFiles = $jsonFiles + $jsonWinformsTemplateFiles + $xlfFiles
+
+$locJson = @{
+ Projects = @(
+ @{
+ LanguageSet = $LanguageSet
+ LocItems = @(
+ $locFiles | ForEach-Object {
+ $outputPath = "$(($_.DirectoryName | Resolve-Path -Relative) + "\")"
+ $continue = $true
+ foreach ($exclusion in $exclusions.Exclusions) {
+ if ($_.FullName.Contains($exclusion))
+ {
+ $continue = $false
+ }
+ }
+ $sourceFile = ($_.FullName | Resolve-Path -Relative)
+ if (!$CreateNeutralXlfs -and $_.Extension -eq '.xlf') {
+ Remove-Item -Path $sourceFile
+ }
+ if ($continue)
+ {
+ if ($_.Directory.Name -eq 'en' -and $_.Extension -eq '.json') {
+ return @{
+ SourceFile = $sourceFile
+ CopyOption = "LangIDOnPath"
+ OutputPath = "$($_.Directory.Parent.FullName | Resolve-Path -Relative)\"
+ }
+ } else {
+ return @{
+ SourceFile = $sourceFile
+ CopyOption = "LangIDOnName"
+ OutputPath = $outputPath
+ }
+ }
+ }
+ }
+ )
+ },
+ @{
+ LanguageSet = $LanguageSet
+ CloneLanguageSet = "WiX_CloneLanguages"
+ LssFiles = @( "wxl_loc.lss" )
+ LocItems = @(
+ $wxlFiles | ForEach-Object {
+ $outputPath = "$($_.Directory.FullName | Resolve-Path -Relative)\"
+ $continue = $true
+ foreach ($exclusion in $exclusions.Exclusions) {
+ if ($_.FullName.Contains($exclusion))
+ {
+ $continue = $false
+ }
+ }
+ $sourceFile = ($_.FullName | Resolve-Path -Relative)
+ if ($continue)
+ {
+ return @{
+ SourceFile = $sourceFile
+ CopyOption = "LangIDOnPath"
+ OutputPath = $outputPath
+ }
+ }
+ }
+ )
+ }
+ )
+}
+
+$json = ConvertTo-Json $locJson -Depth 5
+Write-Host "LocProject.json generated:`n`n$json`n`n"
+Pop-Location
+
+if (!$UseCheckedInLocProjectJson) {
+ New-Item "$SourcesDirectory\eng\Localize\LocProject.json" -Force # Need this to make sure the Localize directory is created
+ Set-Content "$SourcesDirectory\eng\Localize\LocProject.json" $json
+}
+else {
+ New-Item "$SourcesDirectory\eng\Localize\LocProject-generated.json" -Force # Need this to make sure the Localize directory is created
+ Set-Content "$SourcesDirectory\eng\Localize\LocProject-generated.json" $json
+
+ if ((Get-FileHash "$SourcesDirectory\eng\Localize\LocProject-generated.json").Hash -ne (Get-FileHash "$SourcesDirectory\eng\Localize\LocProject.json").Hash) {
+ Write-PipelineTelemetryError -Category "OneLocBuild" -Message "Existing LocProject.json differs from generated LocProject.json. Download LocProject-generated.json and compare them."
+
+ exit 1
+ }
+ else {
+ Write-Host "Generated LocProject.json and current LocProject.json are identical."
+ }
+}
diff --git a/eng/common/generate-sbom-prep.ps1 b/eng/common/generate-sbom-prep.ps1
new file mode 100644
index 0000000..3e5c1c7
--- /dev/null
+++ b/eng/common/generate-sbom-prep.ps1
@@ -0,0 +1,21 @@
+Param(
+ [Parameter(Mandatory=$true)][string] $ManifestDirPath # Manifest directory where sbom will be placed
+)
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+
+Write-Host "Creating dir $ManifestDirPath"
+# create directory for sbom manifest to be placed
+if (!(Test-Path -path $ManifestDirPath))
+{
+ New-Item -ItemType Directory -path $ManifestDirPath
+ Write-Host "Successfully created directory $ManifestDirPath"
+}
+else{
+ Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
+}
+
+Write-Host "Updating artifact name"
+$artifact_name = "${env:SYSTEM_STAGENAME}_${env:AGENT_JOBNAME}_SBOM" -replace '["/:<>\\|?@*"() ]', '_'
+Write-Host "Artifact name $artifact_name"
+Write-Host "##vso[task.setvariable variable=ARTIFACT_NAME]$artifact_name"
diff --git a/eng/common/generate-sbom-prep.sh b/eng/common/generate-sbom-prep.sh
new file mode 100644
index 0000000..d5c76dc
--- /dev/null
+++ b/eng/common/generate-sbom-prep.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h $source ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+. $scriptroot/pipeline-logging-functions.sh
+
+manifest_dir=$1
+
+if [ ! -d "$manifest_dir" ] ; then
+ mkdir -p "$manifest_dir"
+ echo "Sbom directory created." $manifest_dir
+else
+ Write-PipelineTelemetryError -category 'Build' "Unable to create sbom folder."
+fi
+
+artifact_name="${SYSTEM_STAGENAME}_${AGENT_JOBNAME}_SBOM"
+echo "Artifact name before: $artifact_name"
+# Replace all special characters with _. Some builds use characters like ':' in Agent.JobName, which is not permitted in artifact names when uploading.
+safe_artifact_name="${artifact_name//["/:<>\\|?@*$" ]/_}"
+echo "Artifact name after: $safe_artifact_name"
+export ARTIFACT_NAME=$safe_artifact_name
+echo "##vso[task.setvariable variable=ARTIFACT_NAME]$safe_artifact_name"
+
+exit 0
diff --git a/eng/common/helixpublish.proj b/eng/common/helixpublish.proj
new file mode 100644
index 0000000..d7f1858
--- /dev/null
+++ b/eng/common/helixpublish.proj
@@ -0,0 +1,26 @@
+<Project Sdk="Microsoft.DotNet.Helix.Sdk" DefaultTargets="Test">
+
+ <PropertyGroup>
+ <Language>msbuild</Language>
+ </PropertyGroup>
+
+ <ItemGroup>
+ <HelixCorrelationPayload Include="$(CorrelationPayloadDirectory)">
+ <PayloadDirectory>%(Identity)</PayloadDirectory>
+ </HelixCorrelationPayload>
+ </ItemGroup>
+
+ <ItemGroup>
+ <HelixWorkItem Include="WorkItem" Condition="'$(WorkItemDirectory)' != ''">
+ <PayloadDirectory>$(WorkItemDirectory)</PayloadDirectory>
+ <Command>$(WorkItemCommand)</Command>
+ <Timeout Condition="'$(WorkItemTimeout)' != ''">$(WorkItemTimeout)</Timeout>
+ </HelixWorkItem>
+ </ItemGroup>
+
+ <ItemGroup>
+ <XUnitProject Include="$(XUnitProjects.Split(';'))">
+ <Arguments />
+ </XUnitProject>
+ </ItemGroup>
+</Project>
diff --git a/eng/common/init-tools-native.cmd b/eng/common/init-tools-native.cmd
new file mode 100644
index 0000000..438cd54
--- /dev/null
+++ b/eng/common/init-tools-native.cmd
@@ -0,0 +1,3 @@
+@echo off
+powershell -NoProfile -NoLogo -ExecutionPolicy ByPass -command "& """%~dp0init-tools-native.ps1""" %*"
+exit /b %ErrorLevel% \ No newline at end of file
diff --git a/eng/common/init-tools-native.ps1 b/eng/common/init-tools-native.ps1
new file mode 100644
index 0000000..ac42f04
--- /dev/null
+++ b/eng/common/init-tools-native.ps1
@@ -0,0 +1,205 @@
+<#
+.SYNOPSIS
+Entry point script for installing native tools
+
+.DESCRIPTION
+Reads $RepoRoot\global.json file to determine native assets to install
+and executes installers for those tools
+
+.PARAMETER BaseUri
+Base file directory or Url from which to acquire tool archives
+
+.PARAMETER InstallDirectory
+Directory to install native toolset. This is a command-line override for the default
+Install directory precedence order:
+- InstallDirectory command-line override
+- NETCOREENG_INSTALL_DIRECTORY environment variable
+- (default) %USERPROFILE%/.netcoreeng/native
+
+.PARAMETER Clean
+Switch specifying not to install anything, but to clean up native asset folders
+
+.PARAMETER Force
+Clean and then install tools
+
+.PARAMETER DownloadRetries
+Total number of retry attempts
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds
+
+.PARAMETER GlobalJsonFile
+File path to global.json file
+
+.PARAMETER PathPromotion
+Optional switch that either promotes the native tools specified in global.json to the path (in Azure Pipelines)
+or breaks the build if a native tool is not found on the path (on a local dev machine)
+
+.NOTES
+#>
+[CmdletBinding(PositionalBinding=$false)]
+Param (
+ [string] $BaseUri = 'https://netcorenativeassets.blob.core.windows.net/resource-packages/external',
+ [string] $InstallDirectory,
+ [switch] $Clean = $False,
+ [switch] $Force = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30,
+ [string] $GlobalJsonFile,
+ [switch] $PathPromotion
+)
+
+if (!$GlobalJsonFile) {
+ $GlobalJsonFile = Join-Path (Get-Item $PSScriptRoot).Parent.Parent.FullName 'global.json'
+}
+
+Set-StrictMode -version 2.0
+$ErrorActionPreference='Stop'
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+Import-Module -Name (Join-Path $PSScriptRoot 'native\CommonLibrary.psm1')
+
+try {
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq 'Continue'
+
+ $EngCommonBaseDir = Join-Path $PSScriptRoot 'native\'
+ $NativeBaseDir = $InstallDirectory
+ if (!$NativeBaseDir) {
+ $NativeBaseDir = CommonLibrary\Get-NativeInstallDirectory
+ }
+ $Env:CommonLibrary_NativeInstallDir = $NativeBaseDir
+ $InstallBin = Join-Path $NativeBaseDir 'bin'
+ $InstallerPath = Join-Path $EngCommonBaseDir 'install-tool.ps1'
+
+ # Process tools list
+ Write-Host "Processing $GlobalJsonFile"
+ If (-Not (Test-Path $GlobalJsonFile)) {
+ Write-Host "Unable to find '$GlobalJsonFile'"
+ exit 0
+ }
+ $NativeTools = Get-Content($GlobalJsonFile) -Raw |
+ ConvertFrom-Json |
+ Select-Object -Expand 'native-tools' -ErrorAction SilentlyContinue
+ if ($NativeTools) {
+ if ($PathPromotion -eq $True) {
+ if ($env:SYSTEM_TEAMPROJECT) { # check to see if we're in an Azure pipelines build
+ $NativeTools.PSObject.Properties | ForEach-Object {
+ $ToolName = $_.Name
+ $ToolVersion = $_.Value
+ $InstalledTools = @{}
+
+ if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) {
+ if ($ToolVersion -eq "latest") {
+ $ToolVersion = ""
+ }
+ $ArcadeToolsDirectory = "C:\arcade-tools"
+ if (-not (Test-Path $ArcadeToolsDirectory)) {
+ Write-Error "Arcade tools directory '$ArcadeToolsDirectory' was not found; artifacts were not properly installed."
+ exit 1
+ }
+ $ToolDirectories = (Get-ChildItem -Path "$ArcadeToolsDirectory" -Filter "$ToolName-$ToolVersion*" | Sort-Object -Descending)
+ if ($ToolDirectories -eq $null) {
+ Write-Error "Unable to find directory for $ToolName $ToolVersion; please make sure the tool is installed on this image."
+ exit 1
+ }
+ $ToolDirectory = $ToolDirectories[0]
+ $BinPathFile = "$($ToolDirectory.FullName)\binpath.txt"
+ if (-not (Test-Path -Path "$BinPathFile")) {
+ Write-Error "Unable to find binpath.txt in '$($ToolDirectory.FullName)' ($ToolName $ToolVersion); artifact is either installed incorrectly or is not a bootstrappable tool."
+ exit 1
+ }
+ $BinPath = Get-Content "$BinPathFile"
+ $ToolPath = Convert-Path -Path $BinPath
+ Write-Host "Adding $ToolName to the path ($ToolPath)..."
+ Write-Host "##vso[task.prependpath]$ToolPath"
+ $InstalledTools += @{ $ToolName = $ToolDirectory.FullName }
+ }
+ }
+ return $InstalledTools
+ } else {
+ $NativeTools.PSObject.Properties | ForEach-Object {
+ $ToolName = $_.Name
+ $ToolVersion = $_.Value
+
+ if ((Get-Command "$ToolName" -ErrorAction SilentlyContinue) -eq $null) {
+ Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message "$ToolName not found on path. Please install $ToolName $ToolVersion before proceeding."
+ }
+ }
+ exit 0
+ }
+ } else {
+ $NativeTools.PSObject.Properties | ForEach-Object {
+ $ToolName = $_.Name
+ $ToolVersion = $_.Value
+ $LocalInstallerArguments = @{ ToolName = "$ToolName" }
+ $LocalInstallerArguments += @{ InstallPath = "$InstallBin" }
+ $LocalInstallerArguments += @{ BaseUri = "$BaseUri" }
+ $LocalInstallerArguments += @{ CommonLibraryDirectory = "$EngCommonBaseDir" }
+ $LocalInstallerArguments += @{ Version = "$ToolVersion" }
+
+ if ($Verbose) {
+ $LocalInstallerArguments += @{ Verbose = $True }
+ }
+ if (Get-Variable 'Force' -ErrorAction 'SilentlyContinue') {
+ if($Force) {
+ $LocalInstallerArguments += @{ Force = $True }
+ }
+ }
+ if ($Clean) {
+ $LocalInstallerArguments += @{ Clean = $True }
+ }
+
+ Write-Verbose "Installing $ToolName version $ToolVersion"
+ Write-Verbose "Executing '$InstallerPath $($LocalInstallerArguments.Keys.ForEach({"-$_ '$($LocalInstallerArguments.$_)'"}) -join ' ')'"
+ & $InstallerPath @LocalInstallerArguments
+ if ($LASTEXITCODE -Ne "0") {
+ $errMsg = "$ToolName installation failed"
+ if ((Get-Variable 'DoNotAbortNativeToolsInstallationOnFailure' -ErrorAction 'SilentlyContinue') -and $DoNotAbortNativeToolsInstallationOnFailure) {
+ $showNativeToolsWarning = $true
+ if ((Get-Variable 'DoNotDisplayNativeToolsInstallationWarnings' -ErrorAction 'SilentlyContinue') -and $DoNotDisplayNativeToolsInstallationWarnings) {
+ $showNativeToolsWarning = $false
+ }
+ if ($showNativeToolsWarning) {
+ Write-Warning $errMsg
+ }
+ $toolInstallationFailure = $true
+ } else {
+ # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
+ Write-Host $errMsg
+ exit 1
+ }
+ }
+ }
+
+ if ((Get-Variable 'toolInstallationFailure' -ErrorAction 'SilentlyContinue') -and $toolInstallationFailure) {
+ # We cannot change this to Write-PipelineTelemetryError because of https://github.com/dotnet/arcade/issues/4482
+ Write-Host 'Native tools bootstrap failed'
+ exit 1
+ }
+ }
+ }
+ else {
+ Write-Host 'No native tools defined in global.json'
+ exit 0
+ }
+
+ if ($Clean) {
+ exit 0
+ }
+ if (Test-Path $InstallBin) {
+ Write-Host 'Native tools are available from ' (Convert-Path -Path $InstallBin)
+ Write-Host "##vso[task.prependpath]$(Convert-Path -Path $InstallBin)"
+ return $InstallBin
+ }
+ elseif (-not ($PathPromotion)) {
+ Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message 'Native tools install directory does not exist, installation failed'
+ exit 1
+ }
+ exit 0
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'NativeToolsBootstrap' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/init-tools-native.sh b/eng/common/init-tools-native.sh
new file mode 100644
index 0000000..3e6a8d6
--- /dev/null
+++ b/eng/common/init-tools-native.sh
@@ -0,0 +1,238 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+base_uri='https://netcorenativeassets.blob.core.windows.net/resource-packages/external'
+install_directory=''
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+global_json_file="$(dirname "$(dirname "${scriptroot}")")/global.json"
+declare -a native_assets
+
+. $scriptroot/pipeline-logging-functions.sh
+. $scriptroot/native/common-library.sh
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installdirectory)
+ install_directory=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --donotabortonfailure)
+ donotabortonfailure=true
+ shift 1
+ ;;
+ --donotdisplaywarnings)
+ donotdisplaywarnings=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --installdirectory Directory to install native toolset."
+ echo " This is a command-line override for the default"
+ echo " Install directory precedence order:"
+ echo " - InstallDirectory command-line override"
+ echo " - NETCOREENG_INSTALL_DIRECTORY environment variable"
+ echo " - (default) %USERPROFILE%/.netcoreeng/native"
+ echo ""
+      echo "  --clean                    Switch specifying not to install anything, but to clean up native asset folders"
+      echo "  --donotabortonfailure      Switch specifying whether to abort native tools installation on failure"
+      echo "  --donotdisplaywarnings     Switch specifying whether to display warnings during native tools installation on failure"
+ echo " --force Clean and then install tools"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --baseuri <value> Base URI for where to download native tools from"
+ echo " --downloadretries <value> Number of times a download should be attempted"
+ echo " --retrywaittimeseconds <value> Wait time between download attempts"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
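+# ReadGlobalJsonNativeTools fills the native_assets array from a global.json
+# fragment of the form (illustrative):
+#   { "native-tools": { "cmake": "3.21.0", "python3": "3.7.1" } }
+# producing entries such as: KEY='cmake' VALUE='3.21.0'
+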
+function ReadGlobalJsonNativeTools {
+ # happy path: we have a proper JSON parsing tool `jq(1)` in PATH!
+ if command -v jq &> /dev/null; then
+
+ # jq: read each key/value pair under "native-tools" entry and emit:
+ # KEY="<entry-key>" VALUE="<entry-value>"
+ # followed by a null byte.
+ #
+  #   bash: read line with null byte delimiter and push to array (for later `eval`uation).
+
+ while IFS= read -rd '' line; do
+ native_assets+=("$line")
+ done < <(jq -r '. |
+ select(has("native-tools")) |
+ ."native-tools" |
+ keys[] as $k |
+ @sh "KEY=\($k) VALUE=\(.[$k])\u0000"' "$global_json_file")
+
+ return
+ fi
+
+ # Warning: falling back to manually parsing JSON, which is not recommended.
+
+ # Following routine matches the output and escaping logic of jq(1)'s @sh formatter used above.
+ # It has been tested with several weird strings with escaped characters in entries (key and value)
+ # and results were compared with the output of jq(1) in binary representation using xxd(1);
+ # just before the assignment to 'native_assets' array (above and below).
+
+ # try to capture the section under "native-tools".
+ if [[ ! "$(cat "$global_json_file")" =~ \"native-tools\"[[:space:]\:\{]*([^\}]+) ]]; then
+ return
+ fi
+
+ section="${BASH_REMATCH[1]}"
+
+ parseStarted=0
+ possibleEnd=0
+ escaping=0
+ escaped=0
+ isKey=1
+
+ for (( i=0; i<${#section}; i++ )); do
+ char="${section:$i:1}"
+ if ! ((parseStarted)) && [[ "$char" =~ [[:space:],:] ]]; then continue; fi
+
+ if ! ((escaping)) && [[ "$char" == "\\" ]]; then
+ escaping=1
+ elif ((escaping)) && ! ((escaped)); then
+ escaped=1
+ fi
+
+ if ! ((parseStarted)) && [[ "$char" == "\"" ]]; then
+ parseStarted=1
+ possibleEnd=0
+ elif [[ "$char" == "'" ]]; then
+ token="$token'\\\''"
+ possibleEnd=0
+ elif ((escaping)) || [[ "$char" != "\"" ]]; then
+ token="$token$char"
+ possibleEnd=1
+ fi
+
+ if ((possibleEnd)) && ! ((escaping)) && [[ "$char" == "\"" ]]; then
+ # Use printf to unescape token to match jq(1)'s @sh formatting rules.
+ # do not use 'token="$(printf "$token")"' syntax, as $() eats the trailing linefeed.
+ printf -v token "'$token'"
+
+ if ((isKey)); then
+ KEY="$token"
+ isKey=0
+ else
+ line="KEY=$KEY VALUE=$token"
+ native_assets+=("$line")
+ isKey=1
+ fi
+
+ # reset for next token
+ parseStarted=0
+ token=
+ elif ((escaping)) && ((escaped)); then
+ escaping=0
+ escaped=0
+ fi
+ done
+}
+
+native_base_dir=$install_directory
+if [[ -z $install_directory ]]; then
+ native_base_dir=$(GetNativeInstallDirectory)
+fi
+
+install_bin="${native_base_dir}/bin"
+installed_any=false
+
+ReadGlobalJsonNativeTools
+
+if [[ ${#native_assets[@]} -eq 0 ]]; then
+ echo "No native tools defined in global.json"
+ exit 0;
+else
+ native_installer_dir="$scriptroot/native"
+ for index in "${!native_assets[@]}"; do
+ eval "${native_assets["$index"]}"
+
+ installer_path="$native_installer_dir/install-$KEY.sh"
+ installer_command="$installer_path"
+ installer_command+=" --baseuri $base_uri"
+ installer_command+=" --installpath $install_bin"
+ installer_command+=" --version $VALUE"
+ echo $installer_command
+
+ if [[ $force = true ]]; then
+ installer_command+=" --force"
+ fi
+
+ if [[ $clean = true ]]; then
+ installer_command+=" --clean"
+ fi
+
+ if [[ -a $installer_path ]]; then
+ $installer_command
+ if [[ $? != 0 ]]; then
+ if [[ $donotabortonfailure = true ]]; then
+ if [[ $donotdisplaywarnings != true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
+ fi
+ else
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed"
+ exit 1
+ fi
+ else
+        installed_any=true
+ fi
+ else
+ if [[ $donotabortonfailure == true ]]; then
+ if [[ $donotdisplaywarnings != true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
+ fi
+ else
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Execution Failed: no install script"
+ exit 1
+ fi
+ fi
+ done
+fi
+
+if [[ $clean = true ]]; then
+ exit 0
+fi
+
+if [[ -d $install_bin ]]; then
+ echo "Native tools are available from $install_bin"
+ echo "##vso[task.prependpath]$install_bin"
+else
+ if [[ $installed_any = true ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Native tools install directory does not exist, installation failed"
+ exit 1
+ fi
+fi
+
+exit 0
diff --git a/eng/common/internal-feed-operations.ps1 b/eng/common/internal-feed-operations.ps1
new file mode 100644
index 0000000..92b7734
--- /dev/null
+++ b/eng/common/internal-feed-operations.ps1
@@ -0,0 +1,132 @@
+param(
+ [Parameter(Mandatory=$true)][string] $Operation,
+ [string] $AuthToken,
+ [string] $CommitSha,
+ [string] $RepoName,
+ [switch] $IsFeedPrivate
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+. $PSScriptRoot\tools.ps1
+
+# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
+# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
+# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables. This should ONLY be called from identified
+# internal builds
+function SetupCredProvider {
+ param(
+ [string] $AuthToken
+ )
+
+ # Install the Cred Provider NuGet plugin
+ Write-Host 'Setting up Cred Provider NuGet plugin in the agent...'
+ Write-Host "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."
+
+ $url = 'https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.ps1'
+
+ Write-Host "Writing the contents of 'installcredprovider.ps1' locally..."
+ Invoke-WebRequest $url -OutFile installcredprovider.ps1
+
+ Write-Host 'Installing plugin...'
+ .\installcredprovider.ps1 -Force
+
+ Write-Host "Deleting local copy of 'installcredprovider.ps1'..."
+ Remove-Item .\installcredprovider.ps1
+
+ if (-Not("$env:USERPROFILE\.nuget\plugins\netcore")) {
+ Write-PipelineTelemetryError -Category 'Arcade' -Message 'CredProvider plugin was not installed correctly!'
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host 'CredProvider plugin was installed correctly!'
+ }
+
+ # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
+ # feeds successfully
+
+ $nugetConfigPath = Join-Path $RepoRoot "NuGet.config"
+
+ if (-Not (Test-Path -Path $nugetConfigPath)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'NuGet.config file not found in repo root!'
+ ExitWithExitCode 1
+ }
+
+ $endpoints = New-Object System.Collections.ArrayList
+ $nugetConfigPackageSources = Select-Xml -Path $nugetConfigPath -XPath "//packageSources/add[contains(@key, 'darc-int-')]/@value" | foreach{$_.Node.Value}
+
+ if (($nugetConfigPackageSources | Measure-Object).Count -gt 0 ) {
+ foreach ($stableRestoreResource in $nugetConfigPackageSources) {
+ $trimmedResource = ([string]$stableRestoreResource).Trim()
+ [void]$endpoints.Add(@{endpoint="$trimmedResource"; password="$AuthToken"})
+ }
+ }
+
+ if (($endpoints | Measure-Object).Count -gt 0) {
+ $endpointCredentials = @{endpointCredentials=$endpoints} | ConvertTo-Json -Compress
+
+ # Create the environment variables the AzDo way
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $endpointCredentials -Properties @{
+ 'variable' = 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS'
+ 'issecret' = 'false'
+ }
+
+ # We don't want sessions cached since we will be updating the endpoints quite frequently
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data 'False' -Properties @{
+ 'variable' = 'NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED'
+ 'issecret' = 'false'
+ }
+ }
+ else
+ {
+ Write-Host 'No internal endpoints found in NuGet.config'
+ }
+}
+
+#Workaround for https://github.com/microsoft/msbuild/issues/4430
+function InstallDotNetSdkAndRestoreArcade {
+ $dotnetTempDir = Join-Path $RepoRoot "dotnet"
+ $dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
+ $dotnet = "$dotnetTempDir\dotnet.exe"
+ $restoreProjPath = "$PSScriptRoot\restore.proj"
+
+ Write-Host "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
+ InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"
+
+ '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Out-File "$restoreProjPath"
+
+ & $dotnet restore $restoreProjPath
+
+ Write-Host 'Arcade SDK restored!'
+
+ if (Test-Path -Path $restoreProjPath) {
+ Remove-Item $restoreProjPath
+ }
+
+ if (Test-Path -Path $dotnetTempDir) {
+ Remove-Item $dotnetTempDir -Recurse
+ }
+}
+
+try {
+ Push-Location $PSScriptRoot
+
+ if ($Operation -like 'setup') {
+ SetupCredProvider $AuthToken
+ }
+ elseif ($Operation -like 'install-restore') {
+ InstallDotNetSdkAndRestoreArcade
+ }
+ else {
+ Write-PipelineTelemetryError -Category 'Arcade' -Message "Unknown operation '$Operation'!"
+ ExitWithExitCode 1
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Arcade' -Message $_
+ ExitWithExitCode 1
+}
+finally {
+ Pop-Location
+}
diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh
new file mode 100644
index 0000000..9378223
--- /dev/null
+++ b/eng/common/internal-feed-operations.sh
@@ -0,0 +1,141 @@
+#!/usr/bin/env bash
+
+set -e
+
+# Sets VSS_NUGET_EXTERNAL_FEED_ENDPOINTS based on the "darc-int-*" feeds defined in NuGet.config. This is needed
+# in build agents by CredProvider to authenticate the restore requests to internal feeds as specified in
+# https://github.com/microsoft/artifacts-credprovider/blob/0f53327cd12fd893d8627d7b08a2171bf5852a41/README.md#environment-variables.
+# This should ONLY be called from identified internal builds
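+#
+# For reference, the variable ends up holding JSON shaped roughly like (illustrative values only):
+#   {"endpointCredentials": [{"endpoint": "https://.../darc-int-.../nuget/v3/index.json", "password": "<token>"}]}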
+function SetupCredProvider {
+ local authToken=$1
+
+ # Install the Cred Provider NuGet plugin
+ echo "Setting up Cred Provider NuGet plugin in the agent..."...
+ echo "Getting 'installcredprovider.ps1' from 'https://github.com/microsoft/artifacts-credprovider'..."
+
+ local url="https://raw.githubusercontent.com/microsoft/artifacts-credprovider/master/helpers/installcredprovider.sh"
+
+ echo "Writing the contents of 'installcredprovider.ps1' locally..."
+ local installcredproviderPath="installcredprovider.sh"
+ if command -v curl > /dev/null; then
+ curl $url > "$installcredproviderPath"
+ else
+ wget -q -O "$installcredproviderPath" "$url"
+ fi
+
+ echo "Installing plugin..."
+ . "$installcredproviderPath"
+
+ echo "Deleting local copy of 'installcredprovider.sh'..."
+ rm installcredprovider.sh
+
+ if [ ! -d "$HOME/.nuget/plugins" ]; then
+ Write-PipelineTelemetryError -category 'Build' 'CredProvider plugin was not installed correctly!'
+ ExitWithExitCode 1
+ else
+ echo "CredProvider plugin was installed correctly!"
+ fi
+
+ # Then, we set the 'VSS_NUGET_EXTERNAL_FEED_ENDPOINTS' environment variable to restore from the stable
+ # feeds successfully
+
+ local nugetConfigPath="$repo_root/NuGet.config"
+
+ if [ ! -f "$nugetConfigPath" ]; then
+ Write-PipelineTelemetryError -category 'Build' "NuGet.config file not found in repo's root!"
+ ExitWithExitCode 1
+ fi
+
+ local endpoints='['
+ local nugetConfigPackageValues=`cat "$nugetConfigPath" | grep "key=\"darc-int-"`
+ local pattern="value=\"(.*)\""
+
+ for value in $nugetConfigPackageValues
+ do
+ if [[ $value =~ $pattern ]]; then
+ local endpoint="${BASH_REMATCH[1]}"
+ endpoints+="{\"endpoint\": \"$endpoint\", \"password\": \"$authToken\"},"
+ fi
+ done
+
+ endpoints=${endpoints%?}
+ endpoints+=']'
+
+ if [ ${#endpoints} -gt 2 ]; then
+ local endpointCredentials="{\"endpointCredentials\": "$endpoints"}"
+
+ echo "##vso[task.setvariable variable=VSS_NUGET_EXTERNAL_FEED_ENDPOINTS]$endpointCredentials"
+ echo "##vso[task.setvariable variable=NUGET_CREDENTIALPROVIDER_SESSIONTOKENCACHE_ENABLED]False"
+ else
+ echo "No internal endpoints found in NuGet.config"
+ fi
+}
+
+# Workaround for https://github.com/microsoft/msbuild/issues/4430
+function InstallDotNetSdkAndRestoreArcade {
+ local dotnetTempDir="$repo_root/dotnet"
+ local dotnetSdkVersion="2.1.507" # After experimentation we know this version works when restoring the SDK (compared to 3.0.*)
+ local restoreProjPath="$repo_root/eng/common/restore.proj"
+
+ echo "Installing dotnet SDK version $dotnetSdkVersion to restore Arcade SDK..."
+ echo "<Project Sdk=\"Microsoft.DotNet.Arcade.Sdk\"/>" > "$restoreProjPath"
+
+ InstallDotNetSdk "$dotnetTempDir" "$dotnetSdkVersion"
+
+ local res=`$dotnetTempDir/dotnet restore $restoreProjPath`
+ echo "Arcade SDK restored!"
+
+ # Cleanup
+ if [ "$restoreProjPath" ]; then
+ rm "$restoreProjPath"
+ fi
+
+ if [ "$dotnetTempDir" ]; then
+ rm -r $dotnetTempDir
+ fi
+}
+
+source="${BASH_SOURCE[0]}"
+operation=''
+authToken=''
+repoName=''
+
+while [[ $# > 0 ]]; do
+ opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ --operation)
+ operation=$2
+ shift
+ ;;
+ --authtoken)
+ authToken=$2
+ shift
+ ;;
+ *)
+ echo "Invalid argument: $1"
+ echo "Usage: internal-feed-operations.sh --operation <setup|install-restore> [--authtoken <token>]"
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. "$scriptroot/tools.sh"
+
+if [ "$operation" = "setup" ]; then
+ SetupCredProvider $authToken
+elif [ "$operation" = "install-restore" ]; then
+ InstallDotNetSdkAndRestoreArcade
+else
+ echo "Unknown operation '$operation'!"
+fi
diff --git a/eng/common/internal/Directory.Build.props b/eng/common/internal/Directory.Build.props
new file mode 100644
index 0000000..dbf99d8
--- /dev/null
+++ b/eng/common/internal/Directory.Build.props
@@ -0,0 +1,4 @@
+<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
+<Project>
+ <Import Project="Sdk.props" Sdk="Microsoft.DotNet.Arcade.Sdk" />
+</Project>
diff --git a/eng/common/internal/NuGet.config b/eng/common/internal/NuGet.config
new file mode 100644
index 0000000..19d3d31
--- /dev/null
+++ b/eng/common/internal/NuGet.config
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<configuration>
+ <packageSources>
+ <clear />
+ <add key="dotnet-core-internal-tooling" value="https://pkgs.dev.azure.com/devdiv/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json" />
+ </packageSources>
+</configuration>
diff --git a/eng/common/internal/Tools.csproj b/eng/common/internal/Tools.csproj
new file mode 100644
index 0000000..7f5ce6d
--- /dev/null
+++ b/eng/common/internal/Tools.csproj
@@ -0,0 +1,30 @@
+<!-- Licensed to the .NET Foundation under one or more agreements. The .NET Foundation licenses this file to you under the MIT license. -->
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFramework>net472</TargetFramework>
+ <ImportDirectoryBuildTargets>false</ImportDirectoryBuildTargets>
+ <AutomaticallyUseReferenceAssemblyPackages>false</AutomaticallyUseReferenceAssemblyPackages>
+ </PropertyGroup>
+ <ItemGroup>
+ <!-- Clear references, the SDK may add some depending on UsingToolXxx settings, but we only want to restore the following -->
+ <PackageReference Remove="@(PackageReference)"/>
+ <PackageReference Include="Microsoft.ManifestTool.CrossPlatform" Version="$(MicrosoftManifestToolCrossPlatformVersion)" />
+ <PackageReference Include="Microsoft.VisualStudioEng.MicroBuild.Core" Version="$(MicrosoftVisualStudioEngMicroBuildCoreVersion)" />
+ <PackageReference Include="Microsoft.VisualStudioEng.MicroBuild.Plugins.SwixBuild" Version="$(MicrosoftVisualStudioEngMicroBuildPluginsSwixBuildVersion)" />
+ <PackageReference Include="Microsoft.DotNet.IBCMerge" Version="$(MicrosoftDotNetIBCMergeVersion)" Condition="'$(UsingToolIbcOptimization)' == 'true'" />
+ <PackageReference Include="Drop.App" Version="$(DropAppVersion)" ExcludeAssets="all" Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'"/>
+ </ItemGroup>
+ <PropertyGroup>
+ <RestoreSources></RestoreSources>
+ <RestoreSources Condition="'$(UsingToolIbcOptimization)' == 'true'">
+ https://devdiv.pkgs.visualstudio.com/_packaging/dotnet-core-internal-tooling/nuget/v3/index.json;
+ </RestoreSources>
+ <RestoreSources Condition="'$(UsingToolVisualStudioIbcTraining)' == 'true'">
+ $(RestoreSources);
+ https://devdiv.pkgs.visualstudio.com/_packaging/VS/nuget/v3/index.json;
+ </RestoreSources>
+ </PropertyGroup>
+
+ <!-- Repository extensibility point -->
+ <Import Project="$(RepositoryEngineeringDir)InternalTools.props" Condition="Exists('$(RepositoryEngineeringDir)InternalTools.props')" />
+</Project>
diff --git a/eng/common/msbuild.ps1 b/eng/common/msbuild.ps1
new file mode 100644
index 0000000..f041e5d
--- /dev/null
+++ b/eng/common/msbuild.ps1
@@ -0,0 +1,28 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $verbosity = 'minimal',
+ [bool] $warnAsError = $true,
+ [bool] $nodeReuse = $true,
+ [switch] $ci,
+ [switch] $prepareMachine,
+ [switch] $excludePrereleaseVS,
+ [string] $msbuildEngine = $null,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$extraArgs
+)
+
+. $PSScriptRoot\tools.ps1
+
+try {
+ if ($ci) {
+ $nodeReuse = $false
+ }
+
+ MSBuild @extraArgs
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Build' -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
\ No newline at end of file
diff --git a/eng/common/msbuild.sh b/eng/common/msbuild.sh
new file mode 100644
index 0000000..20d3dad
--- /dev/null
+++ b/eng/common/msbuild.sh
@@ -0,0 +1,58 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+
+# resolve $source until the file is no longer a symlink
+while [[ -h "$source" ]]; do
+ scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+ source="$(readlink "$source")"
+ # if $source was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $source != /* ]] && source="$scriptroot/$source"
+done
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+verbosity='minimal'
+warn_as_error=true
+node_reuse=true
+prepare_machine=false
+extra_args=''
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --verbosity)
+ verbosity=$2
+ shift 2
+ ;;
+ --warnaserror)
+ warn_as_error=$2
+ shift 2
+ ;;
+ --nodereuse)
+ node_reuse=$2
+ shift 2
+ ;;
+ --ci)
+ ci=true
+ shift 1
+ ;;
+ --preparemachine)
+ prepare_machine=true
+ shift 1
+ ;;
+ *)
+ extra_args="$extra_args $1"
+ shift 1
+ ;;
+ esac
+done
+
+. "$scriptroot/tools.sh"
+
+if [[ "$ci" == true ]]; then
+ node_reuse=false
+fi
+
+MSBuild $extra_args
+ExitWithExitCode 0
diff --git a/eng/common/native/CommonLibrary.psm1 b/eng/common/native/CommonLibrary.psm1
new file mode 100644
index 0000000..ca38268
--- /dev/null
+++ b/eng/common/native/CommonLibrary.psm1
@@ -0,0 +1,400 @@
+<#
+.SYNOPSIS
+Helper module to install an archive to a directory
+
+.DESCRIPTION
+Helper module to download and extract an archive to a specified directory
+
+.PARAMETER Uri
+Uri of artifact to download
+
+.PARAMETER InstallDirectory
+Directory to extract artifact contents to
+
+.PARAMETER Force
+Force download / extraction if file or contents already exist. Default = False
+
+.PARAMETER DownloadRetries
+Total number of retry attempts. Default = 5
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds. Default = 30
+
+.NOTES
+Returns False if download or extraction fail, True otherwise
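+
+.EXAMPLE
+# A minimal sketch; the URI and install directory are placeholder values
+DownloadAndExtract -Uri "https://example.com/tools/mytool.zip" -InstallDirectory "C:\tools\mytool" -Force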
+#>
+function DownloadAndExtract {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Uri,
+ [Parameter(Mandatory=$True)]
+ [string] $InstallDirectory,
+ [switch] $Force = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30
+ )
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq "Continue"
+
+ $TempToolPath = CommonLibrary\Get-TempPathFilename -Path $Uri
+
+ # Download native tool
+ $DownloadStatus = CommonLibrary\Get-File -Uri $Uri `
+ -Path $TempToolPath `
+ -DownloadRetries $DownloadRetries `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($DownloadStatus -Eq $False) {
+ Write-Error "Download failed from $Uri"
+ return $False
+ }
+
+ # Extract native tool
+ $UnzipStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
+ -OutputDirectory $InstallDirectory `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($UnzipStatus -Eq $False) {
+ # Retry Download one more time with Force=true
+ $DownloadRetryStatus = CommonLibrary\Get-File -Uri $Uri `
+ -Path $TempToolPath `
+ -DownloadRetries 1 `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Force:$True `
+ -Verbose:$Verbose
+
+ if ($DownloadRetryStatus -Eq $False) {
+ Write-Error "Last attempt of download failed as well"
+ return $False
+ }
+
+ # Retry unzip again one more time with Force=true
+ $UnzipRetryStatus = CommonLibrary\Expand-Zip -ZipPath $TempToolPath `
+ -OutputDirectory $InstallDirectory `
+ -Force:$True `
+ -Verbose:$Verbose
+ if ($UnzipRetryStatus -Eq $False)
+ {
+ Write-Error "Last attempt of unzip failed as well"
+ # Clean up partial zips and extracts
+ if (Test-Path $TempToolPath) {
+ Remove-Item $TempToolPath -Force
+ }
+ if (Test-Path $InstallDirectory) {
+ Remove-Item $InstallDirectory -Force -Recurse
+ }
+ return $False
+ }
+ }
+
+ return $True
+}
+
+<#
+.SYNOPSIS
+Download a file, retry on failure
+
+.DESCRIPTION
+Download specified file and retry if attempt fails
+
+.PARAMETER Uri
+Uri of file to download. If Uri is a local path, the file will be copied instead of downloaded
+
+.PARAMETER Path
+Path to download or copy uri file to
+
+.PARAMETER Force
+Overwrite existing file if present. Default = False
+
+.PARAMETER DownloadRetries
+Total number of retry attempts. Default = 5
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds. Default = 30
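+
+.EXAMPLE
+# A minimal sketch; the URI and destination path are placeholder values
+Get-File -Uri "https://example.com/archive.zip" -Path "C:\temp\archive.zip" -DownloadRetries 3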
+
+#>
+function Get-File {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Uri,
+ [Parameter(Mandatory=$True)]
+ [string] $Path,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30,
+ [switch] $Force = $False
+ )
+ $Attempt = 0
+
+ if ($Force) {
+ if (Test-Path $Path) {
+ Remove-Item $Path -Force
+ }
+ }
+ if (Test-Path $Path) {
+ Write-Host "File '$Path' already exists, skipping download"
+ return $True
+ }
+
+ $DownloadDirectory = Split-Path -ErrorAction Ignore -Path "$Path" -Parent
+ if (-Not (Test-Path $DownloadDirectory)) {
+ New-Item -path $DownloadDirectory -force -itemType "Directory" | Out-Null
+ }
+
+ $TempPath = "$Path.tmp"
+ if (Test-Path -IsValid -Path $Uri) {
+ Write-Verbose "'$Uri' is a file path, copying temporarily to '$TempPath'"
+ Copy-Item -Path $Uri -Destination $TempPath
+ Write-Verbose "Moving temporary file to '$Path'"
+ Move-Item -Path $TempPath -Destination $Path
+ return $?
+ }
+ else {
+ Write-Verbose "Downloading $Uri"
+ # Don't display the console progress UI - it's a huge perf hit
+ $ProgressPreference = 'SilentlyContinue'
+ while($Attempt -Lt $DownloadRetries)
+ {
+ try {
+ Invoke-WebRequest -UseBasicParsing -Uri $Uri -OutFile $TempPath
+ Write-Verbose "Downloaded to temporary location '$TempPath'"
+ Move-Item -Path $TempPath -Destination $Path
+ Write-Verbose "Moved temporary file to '$Path'"
+ return $True
+ }
+ catch {
+ $Attempt++
+ if ($Attempt -Lt $DownloadRetries) {
+ $AttemptsLeft = $DownloadRetries - $Attempt
+ Write-Warning "Download failed, $AttemptsLeft attempts remaining, will retry in $RetryWaitTimeInSeconds seconds"
+ Start-Sleep -Seconds $RetryWaitTimeInSeconds
+ }
+ else {
+ Write-Error $_
+ Write-Error $_.Exception
+ }
+ }
+ }
+ }
+
+ return $False
+}
+
+<#
+.SYNOPSIS
+Generate a shim for a native tool
+
+.DESCRIPTION
+Creates a wrapper script (shim) that passes arguments forward to native tool assembly
+
+.PARAMETER ShimName
+The name of the shim
+
+.PARAMETER ShimDirectory
+The directory where shims are stored
+
+.PARAMETER ToolFilePath
+Path to file that shim forwards to
+
+.PARAMETER Force
+Replace shim if already present. Default = False
+
+.NOTES
+Returns $True if generating shim succeeds, $False otherwise
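+
+.EXAMPLE
+# A minimal sketch; all paths and the base URI are placeholder values
+New-ScriptShim -ShimName "cmake" -ShimDirectory "C:\tools" -ToolFilePath "C:\tools\cmake\bin\cmake.exe" -BaseUri "https://example.com/assets"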
+#>
+function New-ScriptShim {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ShimName,
+ [Parameter(Mandatory=$True)]
+ [string] $ShimDirectory,
+ [Parameter(Mandatory=$True)]
+ [string] $ToolFilePath,
+ [Parameter(Mandatory=$True)]
+ [string] $BaseUri,
+ [switch] $Force
+ )
+ try {
+ Write-Verbose "Generating '$ShimName' shim"
+
+ if (-Not (Test-Path $ToolFilePath)){
+ Write-Error "Specified tool file path '$ToolFilePath' does not exist"
+ return $False
+ }
+
+ # WinShimmer is a small .NET Framework program that creates .exe shims to bootstrapped programs
+ # Many of the checks for installed programs expect a .exe extension for Windows tools, rather
+ # than a .bat or .cmd file.
+ # Source: https://github.com/dotnet/arcade/tree/master/src/WinShimmer
+ if (-Not (Test-Path "$ShimDirectory\WinShimmer\winshimmer.exe")) {
+ $InstallStatus = DownloadAndExtract -Uri "$BaseUri/windows/winshimmer/WinShimmer.zip" `
+ -InstallDirectory $ShimDirectory\WinShimmer `
+ -Force:$Force `
+ -DownloadRetries 2 `
+ -RetryWaitTimeInSeconds 5 `
+ -Verbose:$Verbose
+ }
+
+ if ((Test-Path (Join-Path $ShimDirectory "$ShimName.exe"))) {
+ Write-Host "$ShimName.exe already exists; replacing..."
+ Remove-Item (Join-Path $ShimDirectory "$ShimName.exe")
+ }
+
+ & "$ShimDirectory\WinShimmer\winshimmer.exe" $ShimName $ToolFilePath $ShimDirectory
+ return $True
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ return $False
+ }
+}
+
+<#
+.SYNOPSIS
+Returns the machine architecture of the host machine
+
+.NOTES
+Returns 'x64' on 64 bit machines
+ Returns 'x86' on 32 bit machines
+#>
+function Get-MachineArchitecture {
+ $ProcessorArchitecture = $Env:PROCESSOR_ARCHITECTURE
+ $ProcessorArchitectureW6432 = $Env:PROCESSOR_ARCHITEW6432
+ if($ProcessorArchitecture -Eq "X86")
+ {
+ if(($ProcessorArchitectureW6432 -Eq "") -Or
+ ($ProcessorArchitectureW6432 -Eq "X86")) {
+ return "x86"
+ }
+ $ProcessorArchitecture = $ProcessorArchitectureW6432
+ }
+ if (($ProcessorArchitecture -Eq "AMD64") -Or
+ ($ProcessorArchitecture -Eq "IA64") -Or
+ ($ProcessorArchitecture -Eq "ARM64") -Or
+ ($ProcessorArchitecture -Eq "LOONGARCH64")) {
+ return "x64"
+ }
+ return "x86"
+}
+
+<#
+.SYNOPSIS
+Get the name of a temporary folder under the native install directory
+#>
+function Get-TempDirectory {
+ return Join-Path (Get-NativeInstallDirectory) "temp/"
+}
+
+function Get-TempPathFilename {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $Path
+ )
+ $TempDir = CommonLibrary\Get-TempDirectory
+ $TempFilename = Split-Path $Path -leaf
+ $TempPath = Join-Path $TempDir $TempFilename
+ return $TempPath
+}
+
+<#
+.SYNOPSIS
+Returns the base directory to use for native tool installation
+
+.NOTES
+Returns the value of the NETCOREENG_INSTALL_DIRECTORY if that environment variable
+is set, or otherwise returns an install directory under the %USERPROFILE%
+#>
+function Get-NativeInstallDirectory {
+ $InstallDir = $Env:NETCOREENG_INSTALL_DIRECTORY
+ if (!$InstallDir) {
+ $InstallDir = Join-Path $Env:USERPROFILE ".netcoreeng/native/"
+ }
+ return $InstallDir
+}
+
+<#
+.SYNOPSIS
+Unzip an archive
+
+.DESCRIPTION
+Powershell module to unzip an archive to a specified directory
+
+.PARAMETER ZipPath (Required)
+Path to archive to unzip
+
+.PARAMETER OutputDirectory (Required)
+Output directory for archive contents
+
+.PARAMETER Force
+Overwrite output directory contents if they already exist
+
+.NOTES
+- Returns True and does not perform an extraction if output directory already exists but Force is not set.
+- Returns True if unzip operation is successful
+- Returns False if Force is set and it is unable to remove contents of OutputDirectory
+- Returns False if unable to extract zip archive
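+
+.EXAMPLE
+# A minimal sketch; paths are placeholder values
+Expand-Zip -ZipPath "C:\temp\tool.zip" -OutputDirectory "C:\tools\tool" -Force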
+#>
+function Expand-Zip {
+ [CmdletBinding(PositionalBinding=$false)]
+ Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ZipPath,
+ [Parameter(Mandatory=$True)]
+ [string] $OutputDirectory,
+ [switch] $Force
+ )
+
+ Write-Verbose "Extracting '$ZipPath' to '$OutputDirectory'"
+ try {
+ if ((Test-Path $OutputDirectory) -And (-Not $Force)) {
+ Write-Host "Directory '$OutputDirectory' already exists, skipping extract"
+ return $True
+ }
+ if (Test-Path $OutputDirectory) {
+ Write-Verbose "'Force' is 'True', but '$OutputDirectory' exists, removing directory"
+ Remove-Item $OutputDirectory -Force -Recurse
+ if ($? -Eq $False) {
+ Write-Error "Unable to remove '$OutputDirectory'"
+ return $False
+ }
+ }
+
+ $TempOutputDirectory = Join-Path "$(Split-Path -Parent $OutputDirectory)" "$(Split-Path -Leaf $OutputDirectory).tmp"
+ if (Test-Path $TempOutputDirectory) {
+ Remove-Item $TempOutputDirectory -Force -Recurse
+ }
+ New-Item -Path $TempOutputDirectory -Force -ItemType "Directory" | Out-Null
+
+ Add-Type -assembly "system.io.compression.filesystem"
+ [io.compression.zipfile]::ExtractToDirectory("$ZipPath", "$TempOutputDirectory")
+ if ($? -Eq $False) {
+ Write-Error "Unable to extract '$ZipPath'"
+ return $False
+ }
+
+ Move-Item -Path $TempOutputDirectory -Destination $OutputDirectory
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+
+ return $False
+ }
+ return $True
+}
+
+export-modulemember -function DownloadAndExtract
+export-modulemember -function Expand-Zip
+export-modulemember -function Get-File
+export-modulemember -function Get-MachineArchitecture
+export-modulemember -function Get-NativeInstallDirectory
+export-modulemember -function Get-TempDirectory
+export-modulemember -function Get-TempPathFilename
+export-modulemember -function New-ScriptShim
diff --git a/eng/common/native/common-library.sh b/eng/common/native/common-library.sh
new file mode 100644
index 0000000..080c2c2
--- /dev/null
+++ b/eng/common/native/common-library.sh
@@ -0,0 +1,172 @@
+#!/usr/bin/env bash
+
+function GetNativeInstallDirectory {
+ local install_dir
+
+ if [[ -z $NETCOREENG_INSTALL_DIRECTORY ]]; then
+ install_dir=$HOME/.netcoreeng/native/
+ else
+ install_dir=$NETCOREENG_INSTALL_DIRECTORY
+ fi
+
+ echo $install_dir
+ return 0
+}
+
+function GetTempDirectory {
+
+ echo $(GetNativeInstallDirectory)temp/
+ return 0
+}
+
+function ExpandZip {
+ local zip_path=$1
+ local output_directory=$2
+ local force=${3:-false}
+
+ echo "Extracting $zip_path to $output_directory"
+ if [[ -d $output_directory ]] && [[ $force = false ]]; then
+ echo "Directory '$output_directory' already exists, skipping extract"
+ return 0
+ fi
+
+ if [[ -d $output_directory ]]; then
+ echo "'Force flag enabled, but '$output_directory' exists. Removing directory"
+ rm -rf $output_directory
+ if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to remove '$output_directory'"
+ return 1
+ fi
+ fi
+
+ echo "Creating directory: '$output_directory'"
+ mkdir -p $output_directory
+
+ echo "Extracting archive"
+ tar -xf $zip_path -C $output_directory
+ if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Unable to extract '$zip_path'"
+ return 1
+ fi
+
+ return 0
+}
+
+function GetCurrentOS {
+ local unameOut="$(uname -s)"
+ case $unameOut in
+ Linux*) echo "Linux";;
+ Darwin*) echo "MacOS";;
+ esac
+ return 0
+}
+
+function GetFile {
+ local uri=$1
+ local path=$2
+ local force=${3:-false}
+ local download_retries=${4:-5}
+ local retry_wait_time_seconds=${5:-30}
+
+ if [[ -f $path ]]; then
+ if [[ $force = false ]]; then
+ echo "File '$path' already exists. Skipping download"
+ return 0
+ else
+ rm -rf $path
+ fi
+ fi
+
+ if [[ -f $uri ]]; then
+ echo "'$uri' is a file path, copying file to '$path'"
+ cp $uri $path
+ return $?
+ fi
+
+ echo "Downloading $uri"
+ # Use curl if available, otherwise use wget
+ if command -v curl > /dev/null; then
+ curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail
+ else
+ wget -q -O "$path" "$uri" --tries="$download_retries"
+ fi
+
+ return $?
+}
+
+function GetTempPathFileName {
+ local path=$1
+
+ local temp_dir=$(GetTempDirectory)
+ local temp_file_name=$(basename $path)
+ echo $temp_dir$temp_file_name
+ return 0
+}
+
+function DownloadAndExtract {
+ local uri=$1
+ local installDir=$2
+ local force=${3:-false}
+ local download_retries=${4:-5}
+ local retry_wait_time_seconds=${5:-30}
+
+ local temp_tool_path=$(GetTempPathFileName $uri)
+
+ echo "downloading to: $temp_tool_path"
+
+ # Download file
+ GetFile "$uri" "$temp_tool_path" $force $download_retries $retry_wait_time_seconds
+ if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to download '$uri' to '$temp_tool_path'."
+ return 1
+ fi
+
+ # Extract File
+ echo "extracting from $temp_tool_path to $installDir"
+ ExpandZip "$temp_tool_path" "$installDir" $force $download_retries $retry_wait_time_seconds
+ if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Failed to extract '$temp_tool_path' to '$installDir'."
+ return 1
+ fi
+
+ return 0
+}
+
+function NewScriptShim {
+ local shimpath=$1
+ local tool_file_path=$2
+ local force=${3:-false}
+
+ echo "Generating '$shimpath' shim"
+ if [[ -f $shimpath ]]; then
+ if [[ $force = false ]]; then
+ echo "File '$shimpath' already exists." >&2
+ return 1
+ else
+ rm -rf $shimpath
+ fi
+ fi
+
+ if [[ ! -f $tool_file_path ]]; then
+ # try to see if the path is lower cased
+ tool_file_path="$(echo $tool_file_path | tr "[:upper:]" "[:lower:]")"
+ if [[ ! -f $tool_file_path ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' "Specified tool file path:'$tool_file_path' does not exist"
+ return 1
+ fi
+ fi
+
+ local shim_contents=$'#!/usr/bin/env bash\n'
+ shim_contents+="SHIMARGS="$'$1\n'
+ shim_contents+="$tool_file_path"$' $SHIMARGS\n'
+
+ # Write shim file
+ echo "$shim_contents" > $shimpath
+
+ chmod +x $shimpath
+
+ echo "Finished generating shim '$shimpath'"
+
+ return $?
+}
+
diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh
new file mode 100644
index 0000000..41a26d8
--- /dev/null
+++ b/eng/common/native/init-compiler.sh
@@ -0,0 +1,144 @@
+#!/usr/bin/env bash
+#
+# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables
+#
+# NOTE: some scripts source this file and rely on stdout being empty, make sure to not output anything here!
+
+if [[ "$#" -lt 3 ]]; then
+ echo "Usage..."
+ echo "init-compiler.sh <script directory> <Architecture> <compiler>"
+ echo "Specify the script directory."
+ echo "Specify the target architecture."
+ echo "Specify the name of compiler (clang or gcc)."
+ exit 1
+fi
+
+nativescriptroot="$1"
+build_arch="$2"
+compiler="$3"
+
+case "$compiler" in
+ clang*|-clang*|--clang*)
+ # clangx.y or clang-x.y
+ version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
+ parts=(${version//./ })
+ majorVersion="${parts[0]}"
+ minorVersion="${parts[1]}"
+ if [[ -z "$minorVersion" && "$majorVersion" -le 6 ]]; then
+ minorVersion=0;
+ fi
+ compiler=clang
+ ;;
+
+ gcc*|-gcc*|--gcc*)
+ # gccx.y or gcc-x.y
+ version="$(echo "$compiler" | tr -d '[:alpha:]-=')"
+ parts=(${version//./ })
+ majorVersion="${parts[0]}"
+ minorVersion="${parts[1]}"
+ compiler=gcc
+ ;;
+esac
+
+cxxCompiler="$compiler++"
+
+. "$nativescriptroot"/../pipeline-logging-functions.sh
+
+# clear the existing CC and CXX from environment
+CC=
+CXX=
+LDFLAGS=
+
+if [[ "$compiler" == "gcc" ]]; then cxxCompiler="g++"; fi
+
+check_version_exists() {
+ desired_version=-1
+
+ # Set up the environment to be used for building with the desired compiler.
+ if command -v "$compiler-$1.$2" > /dev/null; then
+ desired_version="-$1.$2"
+ elif command -v "$compiler$1$2" > /dev/null; then
+ desired_version="$1$2"
+ elif command -v "$compiler-$1$2" > /dev/null; then
+ desired_version="-$1$2"
+ fi
+
+ echo "$desired_version"
+}
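+
+# For example, with compiler=clang, `check_version_exists 15 ""` echoes "-15" when clang-15 is on
+# PATH, and "-1" when no matching binary exists (illustrative).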
+
+if [[ -z "$CLR_CC" ]]; then
+
+ # Set default versions
+ if [[ -z "$majorVersion" ]]; then
+ # note: gcc (all versions) and clang versions higher than 6 do not include the minor version in the file name when it is zero.
+ if [[ "$compiler" == "clang" ]]; then versions=( 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
+ elif [[ "$compiler" == "gcc" ]]; then versions=( 12 11 10 9 8 7 6 5 4.9 ); fi
+
+ for version in "${versions[@]}"; do
+ parts=(${version//./ })
+ desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")"
+ if [[ "$desired_version" != "-1" ]]; then majorVersion="${parts[0]}"; break; fi
+ done
+
+ if [[ -z "$majorVersion" ]]; then
+ if command -v "$compiler" > /dev/null; then
+ if [[ "$(uname)" != "Darwin" ]]; then
+ Write-PipelineTelemetryError -category "Build" -type "warning" "Specific version of $compiler not found, falling back to use the one in PATH."
+ fi
+ CC="$(command -v "$compiler")"
+ CXX="$(command -v "$cxxCompiler")"
+ else
+ Write-PipelineTelemetryError -category "Build" "No usable version of $compiler found."
+ exit 1
+ fi
+ else
+ if [[ "$compiler" == "clang" && "$majorVersion" -lt 5 ]]; then
+ if [[ "$build_arch" == "arm" || "$build_arch" == "armel" ]]; then
+ if command -v "$compiler" > /dev/null; then
+ Write-PipelineTelemetryError -category "Build" -type "warning" "Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
+ CC="$(command -v "$compiler")"
+ CXX="$(command -v "$cxxCompiler")"
+ else
+ Write-PipelineTelemetryError -category "Build" "Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
+ exit 1
+ fi
+ fi
+ fi
+ fi
+ else
+ desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
+ if [[ "$desired_version" == "-1" ]]; then
+ Write-PipelineTelemetryError -category "Build" "Could not find specific version of $compiler: $majorVersion $minorVersion."
+ exit 1
+ fi
+ fi
+
+ if [[ -z "$CC" ]]; then
+ CC="$(command -v "$compiler$desired_version")"
+ CXX="$(command -v "$cxxCompiler$desired_version")"
+ if [[ -z "$CXX" ]]; then CXX="$(command -v "$cxxCompiler")"; fi
+ fi
+else
+ if [[ ! -f "$CLR_CC" ]]; then
+ Write-PipelineTelemetryError -category "Build" "CLR_CC is set but path '$CLR_CC' does not exist"
+ exit 1
+ fi
+ CC="$CLR_CC"
+ CXX="$CLR_CXX"
+fi
+
+if [[ -z "$CC" ]]; then
+ Write-PipelineTelemetryError -category "Build" "Unable to find $compiler."
+ exit 1
+fi
+
+# Only lld version >= 9 can be considered stable
+if [[ "$compiler" == "clang" && "$majorVersion" -ge 9 ]]; then
+ if "$CC" -fuse-ld=lld -Wl,--version >/dev/null 2>&1; then
+ LDFLAGS="-fuse-ld=lld"
+ fi
+fi
+
+SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"
+
+export CC CXX LDFLAGS SCAN_BUILD_COMMAND
diff --git a/eng/common/native/install-cmake-test.sh b/eng/common/native/install-cmake-test.sh
new file mode 100644
index 0000000..8a5e7cf
--- /dev/null
+++ b/eng/common/native/install-cmake-test.sh
@@ -0,0 +1,117 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. $scriptroot/common-library.sh
+
+base_uri=
+install_path=
+version=
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installpath)
+ install_path=$2
+ shift 2
+ ;;
+ --version)
+ version=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --baseuri <value> Base file directory or Url wrom which to acquire tool archives"
+ echo " --installpath <value> Base directory to install native tool to"
+ echo " --clean Don't install the tool, just clean up the current install of the tool"
+ echo " --force Force install of tools even if they previously exist"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --downloadretries Total number of retry attempts"
+ echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+tool_name="cmake-test"
+tool_os=$(GetCurrentOS)
+tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
+tool_arch="x86_64"
+tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
+tool_install_directory="$install_path/$tool_name/$version"
+tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
+shim_path="$install_path/$tool_name.sh"
+uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"
+
+# Clean up tool and installers
+if [[ $clean = true ]]; then
+ echo "Cleaning $tool_install_directory"
+ if [[ -d $tool_install_directory ]]; then
+ rm -rf $tool_install_directory
+ fi
+
+ echo "Cleaning $shim_path"
+ if [[ -f $shim_path ]]; then
+ rm -rf $shim_path
+ fi
+
+ tool_temp_path=$(GetTempPathFileName $uri)
+ echo "Cleaning $tool_temp_path"
+ if [[ -f $tool_temp_path ]]; then
+ rm -rf $tool_temp_path
+ fi
+
+ exit 0
+fi
+
+# Install tool
+if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
+ echo "$tool_name ($version) already exists, skipping install"
+ exit 0
+fi
+
+DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
+
+if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
+ exit 1
+fi
+
+# Generate Shim
+# Always rewrite shims so that we are referencing the expected version
+NewScriptShim $shim_path $tool_file_path true
+
+if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
+ exit 1
+fi
+
+exit 0
diff --git a/eng/common/native/install-cmake.sh b/eng/common/native/install-cmake.sh
new file mode 100644
index 0000000..de496be
--- /dev/null
+++ b/eng/common/native/install-cmake.sh
@@ -0,0 +1,117 @@
+#!/usr/bin/env bash
+
+source="${BASH_SOURCE[0]}"
+scriptroot="$( cd -P "$( dirname "$source" )" && pwd )"
+
+. $scriptroot/common-library.sh
+
+base_uri=
+install_path=
+version=
+clean=false
+force=false
+download_retries=5
+retry_wait_time_seconds=30
+
+while (($# > 0)); do
+ lowerI="$(echo $1 | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
+ --baseuri)
+ base_uri=$2
+ shift 2
+ ;;
+ --installpath)
+ install_path=$2
+ shift 2
+ ;;
+ --version)
+ version=$2
+ shift 2
+ ;;
+ --clean)
+ clean=true
+ shift 1
+ ;;
+ --force)
+ force=true
+ shift 1
+ ;;
+ --downloadretries)
+ download_retries=$2
+ shift 2
+ ;;
+ --retrywaittimeseconds)
+ retry_wait_time_seconds=$2
+ shift 2
+ ;;
+ --help)
+ echo "Common settings:"
+ echo " --baseuri <value> Base file directory or Url wrom which to acquire tool archives"
+ echo " --installpath <value> Base directory to install native tool to"
+ echo " --clean Don't install the tool, just clean up the current install of the tool"
+ echo " --force Force install of tools even if they previously exist"
+ echo " --help Print help and exit"
+ echo ""
+ echo "Advanced settings:"
+ echo " --downloadretries Total number of retry attempts"
+ echo " --retrywaittimeseconds Wait time between retry attempts in seconds"
+ echo ""
+ exit 0
+ ;;
+ esac
+done
+
+tool_name="cmake"
+tool_os=$(GetCurrentOS)
+tool_folder="$(echo $tool_os | tr "[:upper:]" "[:lower:]")"
+tool_arch="x86_64"
+tool_name_moniker="$tool_name-$version-$tool_os-$tool_arch"
+tool_install_directory="$install_path/$tool_name/$version"
+tool_file_path="$tool_install_directory/$tool_name_moniker/bin/$tool_name"
+shim_path="$install_path/$tool_name.sh"
+uri="${base_uri}/$tool_folder/$tool_name/$tool_name_moniker.tar.gz"
+
+# Clean up tool and installers
+if [[ $clean = true ]]; then
+ echo "Cleaning $tool_install_directory"
+ if [[ -d $tool_install_directory ]]; then
+ rm -rf $tool_install_directory
+ fi
+
+ echo "Cleaning $shim_path"
+ if [[ -f $shim_path ]]; then
+ rm -rf $shim_path
+ fi
+
+ tool_temp_path=$(GetTempPathFileName $uri)
+ echo "Cleaning $tool_temp_path"
+ if [[ -f $tool_temp_path ]]; then
+ rm -rf $tool_temp_path
+ fi
+
+ exit 0
+fi
+
+# Install tool
+if [[ -f $tool_file_path ]] && [[ $force = false ]]; then
+ echo "$tool_name ($version) already exists, skipping install"
+ exit 0
+fi
+
+DownloadAndExtract $uri $tool_install_directory $force $download_retries $retry_wait_time_seconds
+
+if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Installation failed'
+ exit 1
+fi
+
+# Generate Shim
+# Always rewrite shims so that we are referencing the expected version
+NewScriptShim $shim_path $tool_file_path true
+
+if [[ $? != 0 ]]; then
+ Write-PipelineTelemetryError -category 'NativeToolsBootstrap' 'Shim generation failed'
+ exit 1
+fi
+
+exit 0
diff --git a/eng/common/native/install-tool.ps1 b/eng/common/native/install-tool.ps1
new file mode 100644
index 0000000..78f2d84
--- /dev/null
+++ b/eng/common/native/install-tool.ps1
@@ -0,0 +1,132 @@
+<#
+.SYNOPSIS
+Install native tool
+
+.DESCRIPTION
+Install cmake native tool from Azure blob storage
+
+.PARAMETER InstallPath
+Base directory to install native tool to
+
+.PARAMETER BaseUri
+Base file directory or Url from which to acquire tool archives
+
+.PARAMETER CommonLibraryDirectory
+Path to folder containing common library modules
+
+.PARAMETER Force
+Force install of tools even if they previously exist
+
+.PARAMETER Clean
+Don't install the tool, just clean up the current install of the tool
+
+.PARAMETER DownloadRetries
+Total number of retry attempts
+
+.PARAMETER RetryWaitTimeInSeconds
+Wait time between retry attempts in seconds
+
+.NOTES
+Returns 0 if install succeeds, 1 otherwise
+#>
+[CmdletBinding(PositionalBinding=$false)]
+Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ToolName,
+ [Parameter(Mandatory=$True)]
+ [string] $InstallPath,
+ [Parameter(Mandatory=$True)]
+ [string] $BaseUri,
+ [Parameter(Mandatory=$True)]
+ [string] $Version,
+ [string] $CommonLibraryDirectory = $PSScriptRoot,
+ [switch] $Force = $False,
+ [switch] $Clean = $False,
+ [int] $DownloadRetries = 5,
+ [int] $RetryWaitTimeInSeconds = 30
+)
+
+. $PSScriptRoot\..\pipeline-logging-functions.ps1
+
+# Import common library modules
+Import-Module -Name (Join-Path $CommonLibraryDirectory "CommonLibrary.psm1")
+
+try {
+ # Define verbose switch if undefined
+ $Verbose = $VerbosePreference -Eq "Continue"
+
+ $Arch = CommonLibrary\Get-MachineArchitecture
+ $ToolOs = "win64"
+ if($Arch -Eq "x32") {
+ $ToolOs = "win32"
+ }
+ $ToolNameMoniker = "$ToolName-$Version-$ToolOs-$Arch"
+ $ToolInstallDirectory = Join-Path $InstallPath "$ToolName\$Version\"
+ $Uri = "$BaseUri/windows/$ToolName/$ToolNameMoniker.zip"
+ $ShimPath = Join-Path $InstallPath "$ToolName.exe"
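+ # For example, ToolName 'cmake' with Version '3.21.0' on an x64 machine downloads from
+ # "$BaseUri/windows/cmake/cmake-3.21.0-win64-x64.zip" (illustrative version number).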
+
+ if ($Clean) {
+ Write-Host "Cleaning $ToolInstallDirectory"
+ if (Test-Path $ToolInstallDirectory) {
+ Remove-Item $ToolInstallDirectory -Force -Recurse
+ }
+ Write-Host "Cleaning $ShimPath"
+ if (Test-Path $ShimPath) {
+ Remove-Item $ShimPath -Force
+ }
+ $ToolTempPath = CommonLibrary\Get-TempPathFilename -Path $Uri
+ Write-Host "Cleaning $ToolTempPath"
+ if (Test-Path $ToolTempPath) {
+ Remove-Item $ToolTempPath -Force
+ }
+ exit 0
+ }
+
+ # Install tool
+ if ((Test-Path $ToolInstallDirectory) -And (-Not $Force)) {
+ Write-Verbose "$ToolName ($Version) already exists, skipping install"
+ }
+ else {
+ $InstallStatus = CommonLibrary\DownloadAndExtract -Uri $Uri `
+ -InstallDirectory $ToolInstallDirectory `
+ -Force:$Force `
+ -DownloadRetries $DownloadRetries `
+ -RetryWaitTimeInSeconds $RetryWaitTimeInSeconds `
+ -Verbose:$Verbose
+
+ if ($InstallStatus -Eq $False) {
+ Write-PipelineTelemetryError "Installation failed" -Category "NativeToolsetBootstrapping"
+ exit 1
+ }
+ }
+
+ $ToolFilePath = Get-ChildItem $ToolInstallDirectory -Recurse -Filter "$ToolName.exe" | % { $_.FullName }
+ if (@($ToolFilePath).Length -Gt 1) {
+ Write-Error "There are multiple copies of $ToolName in $($ToolInstallDirectory): `n$(@($ToolFilePath | out-string))"
+ exit 1
+ } elseif (@($ToolFilePath).Length -Lt 1) {
+ Write-Host "$ToolName was not found in $ToolInstallDirectory."
+ exit 1
+ }
+
+ # Generate shim
+ # Always rewrite shims so that we are referencing the expected version
+ $GenerateShimStatus = CommonLibrary\New-ScriptShim -ShimName $ToolName `
+ -ShimDirectory $InstallPath `
+ -ToolFilePath "$ToolFilePath" `
+ -BaseUri $BaseUri `
+ -Force:$Force `
+ -Verbose:$Verbose
+
+ if ($GenerateShimStatus -Eq $False) {
+ Write-PipelineTelemetryError "Generate shim failed" -Category "NativeToolsetBootstrapping"
+ exit 1
+ }
+
+ exit 0
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category "NativeToolsetBootstrapping" -Message $_
+ exit 1
+}
diff --git a/eng/common/pipeline-logging-functions.ps1 b/eng/common/pipeline-logging-functions.ps1
new file mode 100644
index 0000000..8e422c5
--- /dev/null
+++ b/eng/common/pipeline-logging-functions.ps1
@@ -0,0 +1,260 @@
+# Source for this file was taken from https://github.com/microsoft/azure-pipelines-task-lib/blob/11c9439d4af17e6475d9fe058e6b2e03914d17e6/powershell/VstsTaskSdk/LoggingCommandFunctions.ps1 and modified.
+
+# NOTE: You should not be calling these methods directly, as they are likely to change. Instead you should be calling the Write-Pipeline* functions defined in tools.ps1
+
+$script:loggingCommandPrefix = '##vso['
+$script:loggingCommandEscapeMappings = @( # TODO: WHAT ABOUT "="? WHAT ABOUT "%"?
+ New-Object psobject -Property @{ Token = ';' ; Replacement = '%3B' }
+ New-Object psobject -Property @{ Token = "`r" ; Replacement = '%0D' }
+ New-Object psobject -Property @{ Token = "`n" ; Replacement = '%0A' }
+ New-Object psobject -Property @{ Token = "]" ; Replacement = '%5D' }
+)
+# TODO: BUG: Escape % ???
+# TODO: Add test to verify don't need to escape "=".
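+
+# For reference, a telemetry error emitted under CI renders as a single logging command, e.g.
+# (illustrative): ##vso[task.logissue type=error](NETCORE_ENGINEERING_TELEMETRY=Build) Restore failed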
+
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
+function Write-PipelineTelemetryError {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Category,
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput,
+ [switch]$Force)
+
+ $PSBoundParameters.Remove('Category') | Out-Null
+
+ if ($Force -Or ((Test-Path variable:ci) -And $ci)) {
+ $Message = "(NETCORE_ENGINEERING_TELEMETRY=$Category) $Message"
+ }
+ $PSBoundParameters.Remove('Message') | Out-Null
+ $PSBoundParameters.Add('Message', $Message)
+ Write-PipelineTaskError @PSBoundParameters
+}
+
+# Specify "-Force" to force pipeline formatted output even if "$ci" is false or not set
+function Write-PipelineTaskError {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Message,
+ [Parameter(Mandatory = $false)]
+ [string]$Type = 'error',
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput,
+ [switch]$Force
+ )
+
+ if (!$Force -And (-Not (Test-Path variable:ci) -Or !$ci)) {
+ if ($Type -eq 'error') {
+ Write-Host $Message -ForegroundColor Red
+ return
+ }
+ elseif ($Type -eq 'warning') {
+ Write-Host $Message -ForegroundColor Yellow
+ return
+ }
+ }
+
+ if (($Type -ne 'error') -and ($Type -ne 'warning')) {
+ Write-Host $Message
+ return
+ }
+ $PSBoundParameters.Remove('Force') | Out-Null
+ if (-not $PSBoundParameters.ContainsKey('Type')) {
+ $PSBoundParameters.Add('Type', 'error')
+ }
+ Write-LogIssue @PSBoundParameters
+}
+
+function Write-PipelineSetVariable {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Name,
+ [string]$Value,
+ [switch]$Secret,
+ [switch]$AsOutput,
+ [bool]$IsMultiJobVariable = $true)
+
+ if ((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'setvariable' -Data $Value -Properties @{
+ 'variable' = $Name
+ 'isSecret' = $Secret
+ 'isOutput' = $IsMultiJobVariable
+ } -AsOutput:$AsOutput
+ }
+}
+
+function Write-PipelinePrependPath {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Path,
+ [switch]$AsOutput)
+
+ if ((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'prependpath' -Data $Path -AsOutput:$AsOutput
+ }
+}
+
+function Write-PipelineSetResult {
+ [CmdletBinding()]
+ param(
+ [ValidateSet("Succeeded", "SucceededWithIssues", "Failed", "Cancelled", "Skipped")]
+ [Parameter(Mandatory = $true)]
+ [string]$Result,
+ [string]$Message)
+ if ((Test-Path variable:ci) -And $ci) {
+ Write-LoggingCommand -Area 'task' -Event 'complete' -Data $Message -Properties @{
+ 'result' = $Result
+ }
+ }
+}
+
+<########################################
+# Private functions.
+########################################>
+function Format-LoggingCommandData {
+ [CmdletBinding()]
+ param([string]$Value, [switch]$Reverse)
+
+ if (!$Value) {
+ return ''
+ }
+
+ if (!$Reverse) {
+ foreach ($mapping in $script:loggingCommandEscapeMappings) {
+ $Value = $Value.Replace($mapping.Token, $mapping.Replacement)
+ }
+ }
+ else {
+ for ($i = $script:loggingCommandEscapeMappings.Length - 1 ; $i -ge 0 ; $i--) {
+ $mapping = $script:loggingCommandEscapeMappings[$i]
+ $Value = $Value.Replace($mapping.Replacement, $mapping.Token)
+ }
+ }
+
+ return $Value
+}
+
+function Format-LoggingCommand {
+ [CmdletBinding()]
+ param(
+ [Parameter(Mandatory = $true)]
+ [string]$Area,
+ [Parameter(Mandatory = $true)]
+ [string]$Event,
+ [string]$Data,
+ [hashtable]$Properties)
+
+ # Append the preamble.
+ [System.Text.StringBuilder]$sb = New-Object -TypeName System.Text.StringBuilder
+ $null = $sb.Append($script:loggingCommandPrefix).Append($Area).Append('.').Append($Event)
+
+ # Append the properties.
+ if ($Properties) {
+ $first = $true
+ foreach ($key in $Properties.Keys) {
+ [string]$value = Format-LoggingCommandData $Properties[$key]
+ if ($value) {
+ if ($first) {
+ $null = $sb.Append(' ')
+ $first = $false
+ }
+ else {
+ $null = $sb.Append(';')
+ }
+
+ $null = $sb.Append("$key=$value")
+ }
+ }
+ }
+
+ # Append the tail and output the value.
+ $Data = Format-LoggingCommandData $Data
+ $sb.Append(']').Append($Data).ToString()
+}
+
+function Write-LoggingCommand {
+ [CmdletBinding(DefaultParameterSetName = 'Parameters')]
+ param(
+ [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
+ [string]$Area,
+ [Parameter(Mandatory = $true, ParameterSetName = 'Parameters')]
+ [string]$Event,
+ [Parameter(ParameterSetName = 'Parameters')]
+ [string]$Data,
+ [Parameter(ParameterSetName = 'Parameters')]
+ [hashtable]$Properties,
+ [Parameter(Mandatory = $true, ParameterSetName = 'Object')]
+ $Command,
+ [switch]$AsOutput)
+
+ if ($PSCmdlet.ParameterSetName -eq 'Object') {
+ Write-LoggingCommand -Area $Command.Area -Event $Command.Event -Data $Command.Data -Properties $Command.Properties -AsOutput:$AsOutput
+ return
+ }
+
+ $command = Format-LoggingCommand -Area $Area -Event $Event -Data $Data -Properties $Properties
+ if ($AsOutput) {
+ $command
+ }
+ else {
+ Write-Host $command
+ }
+}
+
+function Write-LogIssue {
+ [CmdletBinding()]
+ param(
+ [ValidateSet('warning', 'error')]
+ [Parameter(Mandatory = $true)]
+ [string]$Type,
+ [string]$Message,
+ [string]$ErrCode,
+ [string]$SourcePath,
+ [string]$LineNumber,
+ [string]$ColumnNumber,
+ [switch]$AsOutput)
+
+ $command = Format-LoggingCommand -Area 'task' -Event 'logissue' -Data $Message -Properties @{
+ 'type' = $Type
+ 'code' = $ErrCode
+ 'sourcepath' = $SourcePath
+ 'linenumber' = $LineNumber
+ 'columnnumber' = $ColumnNumber
+ }
+ if ($AsOutput) {
+ return $command
+ }
+
+ if ($Type -eq 'error') {
+ $foregroundColor = $host.PrivateData.ErrorForegroundColor
+ $backgroundColor = $host.PrivateData.ErrorBackgroundColor
+ if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
+ $foregroundColor = [System.ConsoleColor]::Red
+ $backgroundColor = [System.ConsoleColor]::Black
+ }
+ }
+ else {
+ $foregroundColor = $host.PrivateData.WarningForegroundColor
+ $backgroundColor = $host.PrivateData.WarningBackgroundColor
+ if ($foregroundColor -isnot [System.ConsoleColor] -or $backgroundColor -isnot [System.ConsoleColor]) {
+ $foregroundColor = [System.ConsoleColor]::Yellow
+ $backgroundColor = [System.ConsoleColor]::Black
+ }
+ }
+
+ Write-Host $command -ForegroundColor $foregroundColor -BackgroundColor $backgroundColor
+}
diff --git a/eng/common/pipeline-logging-functions.sh b/eng/common/pipeline-logging-functions.sh
new file mode 100644
index 0000000..6a0b225
--- /dev/null
+++ b/eng/common/pipeline-logging-functions.sh
@@ -0,0 +1,206 @@
+#!/usr/bin/env bash
+
+function Write-PipelineTelemetryError {
+ local telemetry_category=''
+ local force=false
+ local function_args=()
+ local message=''
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -category|-c)
+ telemetry_category=$2
+ shift
+ ;;
+ -force|-f)
+ force=true
+ ;;
+ -*)
+ function_args+=("$1 $2")
+ shift
+ ;;
+ *)
+ message=$*
+ ;;
+ esac
+ shift
+ done
+
+ if [[ $force != true ]] && [[ "$ci" != true ]]; then
+ echo "$message" >&2
+ return
+ fi
+
+ if [[ $force == true ]]; then
+ function_args+=("-force")
+ fi
+ message="(NETCORE_ENGINEERING_TELEMETRY=$telemetry_category) $message"
+ function_args+=("$message")
+ Write-PipelineTaskError ${function_args[@]}
+}
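+
+# Example usage, as seen in other scripts in this folder (illustrative message):
+#   Write-PipelineTelemetryError -category 'Build' "Restore failed"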
+
+function Write-PipelineTaskError {
+ local message_type="error"
+ local sourcepath=''
+ local linenumber=''
+ local columnnumber=''
+ local error_code=''
+ local force=false
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -type|-t)
+ message_type=$2
+ shift
+ ;;
+ -sourcepath|-s)
+ sourcepath=$2
+ shift
+ ;;
+ -linenumber|-ln)
+ linenumber=$2
+ shift
+ ;;
+ -columnnumber|-cn)
+ columnnumber=$2
+ shift
+ ;;
+ -errcode|-e)
+ error_code=$2
+ shift
+ ;;
+ -force|-f)
+ force=true
+ ;;
+ *)
+ break
+ ;;
+ esac
+
+ shift
+ done
+
+ if [[ $force != true ]] && [[ "$ci" != true ]]; then
+ echo "$@" >&2
+ return
+ fi
+
+ local message="##vso[task.logissue"
+
+ message="$message type=$message_type"
+
+ if [ -n "$sourcepath" ]; then
+ message="$message;sourcepath=$sourcepath"
+ fi
+
+ if [ -n "$linenumber" ]; then
+ message="$message;linenumber=$linenumber"
+ fi
+
+ if [ -n "$columnnumber" ]; then
+ message="$message;columnnumber=$columnnumber"
+ fi
+
+ if [ -n "$error_code" ]; then
+ message="$message;code=$error_code"
+ fi
+
+ message="$message]$*"
+ echo "$message"
+}
+
+function Write-PipelineSetVariable {
+ if [[ "$ci" != true ]]; then
+ return
+ fi
+
+ local name=''
+ local value=''
+ local secret=false
+ local as_output=false
+ local is_multi_job_variable=true
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -name|-n)
+ name=$2
+ shift
+ ;;
+ -value|-v)
+ value=$2
+ shift
+ ;;
+ -secret|-s)
+ secret=true
+ ;;
+ -as_output|-a)
+ as_output=true
+ ;;
+ -is_multi_job_variable|-i)
+ is_multi_job_variable=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
+ value=${value/;/%3B}
+ value=${value/\\r/%0D}
+ value=${value/\\n/%0A}
+ value=${value/]/%5D}
+
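+ # For example, name=Foo with value="a;b" produces (illustrative):
+ #   ##vso[task.setvariable variable=Foo;isSecret=false;isOutput=true]a%3Bb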
+ local message="##vso[task.setvariable variable=$name;isSecret=$secret;isOutput=$is_multi_job_variable]$value"
+
+ if [[ "$as_output" == true ]]; then
+ echo "$message"
+ else
+ echo "$message"
+ fi
+}
+
+function Write-PipelinePrependPath {
+ local prepend_path=''
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -path|-p)
+ prepend_path=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
+ export PATH="$prepend_path:$PATH"
+
+ if [[ "$ci" == true ]]; then
+ echo "##vso[task.prependpath]$prepend_path"
+ fi
+}
+
+function Write-PipelineSetResult {
+ local result=''
+ local message=''
+
+ while [[ $# -gt 0 ]]; do
+ opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")"
+ case "$opt" in
+ -result|-r)
+ result=$2
+ shift
+ ;;
+ -message|-m)
+ message=$2
+ shift
+ ;;
+ esac
+ shift
+ done
+
+ if [[ "$ci" == true ]]; then
+ echo "##vso[task.complete result=$result;]$message"
+ fi
+}
diff --git a/eng/common/post-build/add-build-to-channel.ps1 b/eng/common/post-build/add-build-to-channel.ps1
new file mode 100644
index 0000000..de2d957
--- /dev/null
+++ b/eng/common/post-build/add-build-to-channel.ps1
@@ -0,0 +1,48 @@
+param(
+ [Parameter(Mandatory=$true)][int] $BuildId,
+ [Parameter(Mandatory=$true)][int] $ChannelId,
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ # Check that the channel we are going to promote the build to exist
+ $channelInfo = Get-MaestroChannel -ChannelId $ChannelId
+
+ if (!$channelInfo) {
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Channel with BAR ID $ChannelId was not found in BAR!"
+ ExitWithExitCode 1
+ }
+
+ # Get info about which channel(s) the build has already been promoted to
+ $buildInfo = Get-MaestroBuild -BuildId $BuildId
+
+ if (!$buildInfo) {
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "Build with BAR ID $BuildId was not found in BAR!"
+ ExitWithExitCode 1
+ }
+
+ # Find whether the build is already assigned to the channel or not
+ if ($buildInfo.channels) {
+ foreach ($channel in $buildInfo.channels) {
+ if ($channel.Id -eq $ChannelId) {
+ Write-Host "The build with BAR ID $BuildId is already on channel $ChannelId!"
+ ExitWithExitCode 0
+ }
+ }
+ }
+
+ Write-Host "Promoting build '$BuildId' to channel '$ChannelId'."
+
+ Assign-BuildToChannel -BuildId $BuildId -ChannelId $ChannelId
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to promote build '$BuildId' to channel '$ChannelId'"
+ ExitWithExitCode 1
+}
diff --git a/eng/common/post-build/check-channel-consistency.ps1 b/eng/common/post-build/check-channel-consistency.ps1
new file mode 100644
index 0000000..63f3464
--- /dev/null
+++ b/eng/common/post-build/check-channel-consistency.ps1
@@ -0,0 +1,40 @@
+param(
+ [Parameter(Mandatory=$true)][string] $PromoteToChannels, # List of channels that the build should be promoted to
+ [Parameter(Mandatory=$true)][array] $AvailableChannelIds # List of channel IDs available in the YAML implementation
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ if ($PromoteToChannels -eq "") {
+ Write-PipelineTaskError -Type 'warning' -Message "This build won't publish assets as it's not configured to any Maestro channel. If that wasn't intended use Darc to configure a default channel using add-default-channel for this branch or to promote it to a channel using add-build-to-channel. See https://github.com/dotnet/arcade/blob/master/Documentation/Darc.md#assigning-an-individual-build-to-a-channel for more info."
+ ExitWithExitCode 0
+ }
+
+ # Check that every channel that Maestro told to promote the build to
+ # is available in YAML
+ $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ }
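+ # e.g. both "529;530" and "529 530" yield @('529','530') (illustrative channel IDs)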
+
+ $hasErrors = $false
+
+ foreach ($id in $PromoteToChannelsIds) {
+ if (($id -ne 0) -and ($id -notin $AvailableChannelIds)) {
+ Write-PipelineTaskError -Message "Channel $id is not present in the post-build YAML configuration! This is an error scenario. Please contact @dnceng."
+ $hasErrors = $true
+ }
+ }
+
+ # The `Write-PipelineTaskError` doesn't error the script and we might report several errors
+ # in the previous lines. The check below makes sure that we return an error state from the
+ # script if we reported any validation error
+ if ($hasErrors) {
+ ExitWithExitCode 1
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'CheckChannelConsistency' -Message "There was an error while trying to check consistency of Maestro default channels for the build and post-build YAML configuration."
+ ExitWithExitCode 1
+}
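
To make the ID parsing above concrete: -split "\D" splits the input on every non-digit character and the Where-Object filter drops the empty fragments, so a delimited list reduces to its numeric IDs (input value hypothetical):

    $PromoteToChannels = '123,456'
    $PromoteToChannelsIds = $PromoteToChannels -split "\D" | Where-Object { $_ }
    # $PromoteToChannelsIds now holds '123' and '456'
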
diff --git a/eng/common/post-build/nuget-validation.ps1 b/eng/common/post-build/nuget-validation.ps1
new file mode 100644
index 0000000..dab3534
--- /dev/null
+++ b/eng/common/post-build/nuget-validation.ps1
@@ -0,0 +1,24 @@
+# This script validates NuGet package metadata information using this
+# tool: https://github.com/NuGet/NuGetGallery/tree/jver-verify/src/VerifyMicrosoftPackage
+
+param(
+ [Parameter(Mandatory=$true)][string] $PackagesPath, # Path to where the packages to be validated are
+ [Parameter(Mandatory=$true)][string] $ToolDestinationPath # Where the validation tool should be downloaded to
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ $url = 'https://raw.githubusercontent.com/NuGet/NuGetGallery/3e25ad135146676bcab0050a516939d9958bfa5d/src/VerifyMicrosoftPackage/verify.ps1'
+
+ New-Item -ItemType 'directory' -Path ${ToolDestinationPath} -Force | Out-Null
+
+ Invoke-WebRequest $url -OutFile ${ToolDestinationPath}\verify.ps1
+
+ & ${ToolDestinationPath}\verify.ps1 ${PackagesPath}\*.nupkg
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'NuGetValidation' -Message $_
+ ExitWithExitCode 1
+}
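
A usage sketch with hypothetical paths; the script downloads verify.ps1 into the tool directory and runs it against every .nupkg under the packages directory:

    .\eng\common\post-build\nuget-validation.ps1 `
        -PackagesPath 'C:\artifacts\packages' `
        -ToolDestinationPath 'C:\artifacts\tools'
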
diff --git a/eng/common/post-build/post-build-utils.ps1 b/eng/common/post-build/post-build-utils.ps1
new file mode 100644
index 0000000..534f698
--- /dev/null
+++ b/eng/common/post-build/post-build-utils.ps1
@@ -0,0 +1,91 @@
+# Most of the functions in this file require the variables `MaestroApiEndPoint`,
+# `MaestroApiVersion` and `MaestroApiAccessToken` to be globally available.
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+# `tools.ps1` checks $ci to perform some actions. Since the post-build
+# scripts don't necessarily execute in the same agent that run the
+# build.ps1/sh script this variable isn't automatically set.
+$ci = $true
+$disableConfigureToolsetImport = $true
+. $PSScriptRoot\..\tools.ps1
+
+function Create-MaestroApiRequestHeaders([string]$ContentType = 'application/json') {
+ Validate-MaestroVars
+
+ $headers = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $headers.Add('Accept', $ContentType)
+ $headers.Add('Authorization',"Bearer $MaestroApiAccessToken")
+ return $headers
+}
+
+function Get-MaestroChannel([int]$ChannelId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}?api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Get-MaestroBuild([int]$BuildId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/builds/${BuildId}?api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Get-MaestroSubscriptions([string]$SourceRepository, [int]$ChannelId) {
+ Validate-MaestroVars
+
+ $SourceRepository = [System.Web.HttpUtility]::UrlEncode($SourceRepository)
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions?sourceRepository=$SourceRepository&channelId=$ChannelId&api-version=$MaestroApiVersion"
+
+ $result = try { Invoke-WebRequest -Method Get -Uri $apiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+ return $result
+}
+
+function Assign-BuildToChannel([int]$BuildId, [int]$ChannelId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/channels/${ChannelId}/builds/${BuildId}?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Method Post -Uri $apiEndpoint -Headers $apiHeaders | Out-Null
+}
+
+function Trigger-Subscription([string]$SubscriptionId) {
+ Validate-MaestroVars
+
+ $apiHeaders = Create-MaestroApiRequestHeaders
+ $apiEndpoint = "$MaestroApiEndPoint/api/subscriptions/$SubscriptionId/trigger?api-version=$MaestroApiVersion"
+ Invoke-WebRequest -Uri $apiEndpoint -Headers $apiHeaders -Method Post | Out-Null
+}
+
+function Validate-MaestroVars {
+ try {
+ Get-Variable MaestroApiEndPoint | Out-Null
+ Get-Variable MaestroApiVersion | Out-Null
+ Get-Variable MaestroApiAccessToken | Out-Null
+
+ if (!($MaestroApiEndPoint -Match '^http[s]?://maestro-(int|prod).westus2.cloudapp.azure.com$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiEndPoint is not a valid Maestro URL. '$MaestroApiEndPoint'"
+ ExitWithExitCode 1
+ }
+
+ if (!($MaestroApiVersion -Match '^[0-9]{4}-[0-9]{2}-[0-9]{2}$')) {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message "MaestroApiVersion does not match a version string in the format yyyy-MM-DD. '$MaestroApiVersion'"
+ ExitWithExitCode 1
+ }
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'MaestroVars' -Message 'Error: Variables `MaestroApiEndPoint`, `MaestroApiVersion` and `MaestroApiAccessToken` are required while using this script.'
+ Write-Host $_
+ ExitWithExitCode 1
+ }
+}
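
A minimal sketch of using these helpers interactively, assuming the three Maestro variables are defined in the session first (values hypothetical):

    $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com'
    $MaestroApiVersion = '2019-01-16'
    $MaestroApiAccessToken = '<token>'
    . .\eng\common\post-build\post-build-utils.ps1
    $channel = Get-MaestroChannel -ChannelId 678
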
diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
new file mode 100644
index 0000000..8508397
--- /dev/null
+++ b/eng/common/post-build/publish-using-darc.ps1
@@ -0,0 +1,54 @@
+param(
+ [Parameter(Mandatory=$true)][int] $BuildId,
+ [Parameter(Mandatory=$true)][int] $PublishingInfraVersion,
+ [Parameter(Mandatory=$true)][string] $AzdoToken,
+ [Parameter(Mandatory=$true)][string] $MaestroToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
+ [Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
+ [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ $darc = Get-Darc
+
+ $optionalParams = [System.Collections.ArrayList]::new()
+
+ if ("" -ne $ArtifactsPublishingAdditionalParameters) {
+ $optionalParams.Add("--artifact-publishing-parameters") | Out-Null
+ $optionalParams.Add($ArtifactsPublishingAdditionalParameters) | Out-Null
+ }
+
+ if ("" -ne $SymbolPublishingAdditionalParameters) {
+ $optionalParams.Add("--symbol-publishing-parameters") | Out-Null
+ $optionalParams.Add($SymbolPublishingAdditionalParameters) | Out-Null
+ }
+
+ if ("false" -eq $WaitPublishingFinish) {
+ $optionalParams.Add("--no-wait") | Out-Null
+ }
+
+ & $darc add-build-to-channel `
+ --id $buildId `
+ --publishing-infra-version $PublishingInfraVersion `
+ --default-channels `
+ --source-branch main `
+ --azdev-pat $AzdoToken `
+ --bar-uri $MaestroApiEndPoint `
+ --password $MaestroToken `
+ @optionalParams
+
+ if ($LastExitCode -ne 0) {
+ Write-Host "Problems using Darc to promote build ${buildId} to default channels. Stopping execution..."
+ exit 1
+ }
+
+ Write-Host 'done.'
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Category 'PromoteBuild' -Message "There was an error while trying to publish build '$BuildId' to default channels."
+ ExitWithExitCode 1
+}
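
A usage sketch with hypothetical IDs and token variables; passing -WaitPublishingFinish 'false' maps to darc's --no-wait flag as shown above:

    .\eng\common\post-build\publish-using-darc.ps1 `
        -BuildId 12345 `
        -PublishingInfraVersion 3 `
        -AzdoToken $env:AZDO_PAT `
        -MaestroToken $env:MAESTRO_TOKEN `
        -WaitPublishingFinish 'false'
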
diff --git a/eng/common/post-build/sourcelink-validation.ps1 b/eng/common/post-build/sourcelink-validation.ps1
new file mode 100644
index 0000000..4011d32
--- /dev/null
+++ b/eng/common/post-build/sourcelink-validation.ps1
@@ -0,0 +1,319 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where Symbols.NuGet packages to be checked are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory=$false)][string] $GHRepoName, # GitHub name of the repo including the Org. E.g., dotnet/arcade
+ [Parameter(Mandatory=$false)][string] $GHCommit, # GitHub commit SHA used to build the packages
+ [Parameter(Mandatory=$true)][string] $SourcelinkCliVersion # Version of SourceLink CLI to use
+)
+
+. $PSScriptRoot\post-build-utils.ps1
+
+# Cache/hash map (file -> exists flag) used to check whether a file exists
+# in the repository at a specific commit. It is populated with all files
+# present in the repo at that commit.
+$global:RepoFiles = @{}
+
+# Maximum number of jobs to run in parallel
+$MaxParallelJobs = 16
+
+$MaxRetries = 5
+$RetryWaitTimeInSeconds = 30
+
+# Wait time between check for system load
+$SecondsBetweenLoadChecks = 10
+
+if (!$InputPath -or !(Test-Path $InputPath)){
+ Write-Host "No files to validate."
+ ExitWithExitCode 0
+}
+
+$ValidatePackage = {
+ param(
+ [string] $PackagePath # Full path to a Symbols.NuGet package
+ )
+
+ . $using:PSScriptRoot\..\tools.ps1
+
+ # Ensure the input file exists
+ if (!(Test-Path $PackagePath)) {
+ Write-Host "Input file does not exist: $PackagePath"
+ return [pscustomobject]@{
+ result = 1
+ packagePath = $PackagePath
+ }
+ }
+
+ # Extensions for which we'll look for SourceLink information
+ # For now we'll only care about Portable & Embedded PDBs
+ $RelevantExtensions = @('.dll', '.exe', '.pdb')
+
+ Write-Host -NoNewLine 'Validating ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+ $FailedFiles = 0
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
+
+ try {
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $FileName = $_.FullName
+ $Extension = [System.IO.Path]::GetExtension($_.Name)
+ $FakeName = -Join((New-Guid), $Extension)
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $FakeName
+
+ # Skip resource DLLs; emitting a result object from inside the pipeline
+ # would pollute the job output, so we simply move on to the next entry.
+ if ($FileName.EndsWith('.resources.dll')) {
+ return
+ }
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+
+ $ValidateFile = {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $RealPath,
+ [ref] $FailedFiles
+ )
+
+ $sourcelinkExe = "$env:USERPROFILE\.dotnet\tools"
+ $sourcelinkExe = Resolve-Path "$sourcelinkExe\sourcelink.exe"
+ $SourceLinkInfos = & $sourcelinkExe print-urls $FullPath | Out-String
+
+ if ($LASTEXITCODE -eq 0 -and -not ([string]::IsNullOrEmpty($SourceLinkInfos))) {
+ $NumFailedLinks = 0
+
+ # We only care about Http addresses
+ $Matches = (Select-String '(http[s]?)(:\/\/)([^\s,]+)' -Input $SourceLinkInfos -AllMatches).Matches
+
+ if ($Matches.Count -ne 0) {
+ $Matches.Value |
+ ForEach-Object {
+ $Link = $_
+ $CommitUrl = "https://raw.githubusercontent.com/${using:GHRepoName}/${using:GHCommit}/"
+
+ $FilePath = $Link.Replace($CommitUrl, "")
+ $Status = 200
+ $Cache = $using:RepoFiles
+
+ $attempts = 0
+
+ while ($attempts -lt $using:MaxRetries) {
+ if ( !($Cache.ContainsKey($FilePath)) ) {
+ try {
+ $Uri = $Link -as [System.URI]
+
+ if ($Link -match "submodules") {
+ # Skip submodule links until sourcelink properly handles submodules
+ $Status = 200
+ }
+ elseif ($Uri.AbsoluteURI -ne $null -and ($Uri.Host -match 'github' -or $Uri.Host -match 'githubusercontent')) {
+ # Only GitHub links are valid
+ $Status = (Invoke-WebRequest -Uri $Link -UseBasicParsing -Method HEAD -TimeoutSec 5).StatusCode
+ }
+ else {
+ # If it's not a github link, we want to break out of the loop and not retry.
+ $Status = 0
+ $attempts = $using:MaxRetries
+ }
+ }
+ catch {
+ Write-Host $_
+ $Status = 0
+ }
+ }
+
+ if ($Status -ne 200) {
+ $attempts++
+
+ if ($attempts -lt $using:MaxRetries)
+ {
+ $attemptsLeft = $using:MaxRetries - $attempts
+ Write-Warning "Download failed, $attemptsLeft attempts remaining, will retry in $using:RetryWaitTimeInSeconds seconds"
+ Start-Sleep -Seconds $using:RetryWaitTimeInSeconds
+ }
+ else {
+ if ($NumFailedLinks -eq 0) {
+ if ($FailedFiles.Value -eq 0) {
+ Write-Host
+ }
+
+ Write-Host "`tFile $RealPath has broken links:"
+ }
+
+ Write-Host "`t`tFailed to retrieve $Link"
+
+ $NumFailedLinks++
+ }
+ }
+ else {
+ break
+ }
+ }
+ }
+ }
+
+ if ($NumFailedLinks -ne 0) {
+ $FailedFiles.value++
+ $global:LASTEXITCODE = 1
+ }
+ }
+ }
+
+ &$ValidateFile $TargetFile $FileName ([ref]$FailedFiles)
+ }
+ }
+ catch {
+ Write-Host $_
+ }
+ finally {
+ $zip.Dispose()
+ }
+
+ if ($FailedFiles -eq 0) {
+ Write-Host 'Passed.'
+ return [pscustomobject]@{
+ result = 0
+ packagePath = $PackagePath
+ }
+ }
+ else {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$PackagePath has broken SourceLink links."
+ return [pscustomobject]@{
+ result = 1
+ packagePath = $PackagePath
+ }
+ }
+}
+
+function CheckJobResult(
+ $result,
+ $packagePath,
+ [ref]$ValidationFailures,
+ [switch]$logErrors) {
+ if ($result -ne 0) {
+ if ($logErrors) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$packagePath has broken SourceLink links."
+ }
+ $ValidationFailures.Value++
+ }
+}
+
+function ValidateSourceLinkLinks {
+ if ($GHRepoName -ne '' -and !($GHRepoName -Match '^[^\s\/]+/[^\s\/]+$')) {
+ if (!($GHRepoName -Match '^[^\s-]+-[^\s]+$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHRepoName should be in the format <org>/<repo> or <org>-<repo>. '$GHRepoName'"
+ ExitWithExitCode 1
+ }
+ else {
+ $GHRepoName = $GHRepoName -replace '^([^\s-]+)-([^\s]+)$', '$1/$2';
+ }
+ }
+
+ if ($GHCommit -ne '' -and !($GHCommit -Match '^[0-9a-fA-F]{40}$')) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "GHCommit should be a 40-character hexadecimal string. '$GHCommit'"
+ ExitWithExitCode 1
+ }
+
+ if ($GHRepoName -ne '' -and $GHCommit -ne '') {
+ $RepoTreeURL = -Join('https://api.github.com/repos/', $GHRepoName, '/git/trees/', $GHCommit, '?recursive=1')
+ $CodeExtensions = @('.cs', '.vb', '.fs', '.fsi', '.fsx', '.fsscript')
+
+ try {
+ # Retrieve the list of files in the repo at that particular commit point and store them in the RepoFiles hash
+ $Data = Invoke-WebRequest $RepoTreeURL -UseBasicParsing | ConvertFrom-Json | Select-Object -ExpandProperty tree
+
+ foreach ($file in $Data) {
+ $Extension = [System.IO.Path]::GetExtension($file.path)
+
+ if ($CodeExtensions.Contains($Extension)) {
+ $RepoFiles[$file.path] = 1
+ }
+ }
+ }
+ catch {
+ Write-Host "Problems downloading the list of files from the repo. Url used: $RepoTreeURL . Execution will proceed without caching."
+ }
+ }
+ elseif ($GHRepoName -ne '' -or $GHCommit -ne '') {
+ Write-Host 'To use the HTTP caching mechanism, both GHRepoName and GHCommit must be provided.'
+ }
+
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ $ValidationFailures = 0
+
+ # Process each NuGet package in parallel
+ Get-ChildItem "$InputPath\*.symbols.nupkg" |
+ ForEach-Object {
+ Write-Host "Starting $($_.FullName)"
+ Start-Job -ScriptBlock $ValidatePackage -ArgumentList $_.FullName | Out-Null
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+ while ($NumJobs -ge $MaxParallelJobs) {
+ Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
+ Start-Sleep -Seconds $SecondsBetweenLoadChecks
+ $NumJobs = @(Get-Job -State 'Running').Count
+ }
+
+ foreach ($Job in @(Get-Job -State 'Completed')) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures) -LogErrors
+ Remove-Job -Id $Job.Id
+ }
+ }
+
+ foreach ($Job in @(Get-Job)) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$ValidationFailures)
+ Remove-Job -Id $Job.Id
+ }
+ if ($ValidationFailures -gt 0) {
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message "$ValidationFailures package(s) failed validation."
+ ExitWithExitCode 1
+ }
+}
+
+function InstallSourcelinkCli {
+ $sourcelinkCliPackageName = 'sourcelink'
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list --global
+
+ if (($toolList -like "*$sourcelinkCliPackageName*") -and ($toolList -like "*$sourcelinkCliVersion*")) {
+ Write-Host "SourceLink CLI version $sourcelinkCliVersion is already installed."
+ }
+ else {
+ Write-Host "Installing SourceLink CLI version $sourcelinkCliVersion..."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
+ & "$dotnet" tool install $sourcelinkCliPackageName --version $sourcelinkCliVersion --verbosity "minimal" --global
+ }
+}
+
+try {
+ InstallSourcelinkCli
+
+ foreach ($Job in @(Get-Job)) {
+ Remove-Job -Id $Job.Id
+ }
+
+ ValidateSourceLinkLinks
+}
+catch {
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'SourceLink' -Message $_
+ ExitWithExitCode 1
+}
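
A usage sketch with hypothetical paths, commit, and tool version; GHRepoName and GHCommit enable the repo-file cache described above:

    .\eng\common\post-build\sourcelink-validation.ps1 `
        -InputPath 'C:\artifacts\SymbolPackages' `
        -ExtractPath 'C:\temp\sl-extract' `
        -GHRepoName 'dotnet/arcade' `
        -GHCommit '0123456789abcdef0123456789abcdef01234567' `
        -SourcelinkCliVersion '3.0.0'
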
diff --git a/eng/common/post-build/symbols-validation.ps1 b/eng/common/post-build/symbols-validation.ps1
new file mode 100644
index 0000000..cd2181b
--- /dev/null
+++ b/eng/common/post-build/symbols-validation.ps1
@@ -0,0 +1,339 @@
+param(
+ [Parameter(Mandatory = $true)][string] $InputPath, # Full path to directory where NuGet packages to be checked are stored
+ [Parameter(Mandatory = $true)][string] $ExtractPath, # Full path to directory where the packages will be extracted during validation
+ [Parameter(Mandatory = $true)][string] $DotnetSymbolVersion, # Version of dotnet symbol to use
+ [Parameter(Mandatory = $false)][switch] $CheckForWindowsPdbs, # If we should check for the existence of windows pdbs in addition to portable PDBs
+ [Parameter(Mandatory = $false)][switch] $ContinueOnError, # If we should keep checking symbols after an error
+ [Parameter(Mandatory = $false)][switch] $Clean, # Clean extracted symbols directory after checking symbols
+ [Parameter(Mandatory = $false)][string] $SymbolExclusionFile # Exclude the symbols in the file from publishing to symbol server
+)
+
+. $PSScriptRoot\..\tools.ps1
+# Maximum number of jobs to run in parallel
+$MaxParallelJobs = 16
+
+# Max number of retries
+$MaxRetry = 5
+
+# Wait time between check for system load
+$SecondsBetweenLoadChecks = 10
+
+# Set error codes
+Set-Variable -Name "ERROR_BADEXTRACT" -Option Constant -Value -1
+Set-Variable -Name "ERROR_FILEDOESNOTEXIST" -Option Constant -Value -2
+
+$WindowsPdbVerificationParam = ""
+if ($CheckForWindowsPdbs) {
+ $WindowsPdbVerificationParam = "--windows-pdbs"
+}
+
+$ExclusionSet = New-Object System.Collections.Generic.HashSet[string];
+
+if (!$InputPath -or !(Test-Path $InputPath)){
+ Write-Host "No symbols to validate."
+ ExitWithExitCode 0
+}
+
+# Check that the exclusion file exists before reading it
+if ($SymbolExclusionFile -and (Test-Path $SymbolExclusionFile)){
+ [string[]]$Exclusions = Get-Content "$SymbolExclusionFile"
+ $Exclusions | ForEach-Object { if($_ -and $_.Trim()){ [void]$ExclusionSet.Add($_) } }
+}
+else{
+ Write-Host "Symbol exclusion file does not exist. No symbols to exclude."
+}
+
+$CountMissingSymbols = {
+ param(
+ [string] $PackagePath, # Path to a NuGet package
+ [string] $WindowsPdbVerificationParam # If we should check for the existence of windows pdbs in addition to portable PDBs
+ )
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ Write-Host "Validating $PackagePath "
+
+ # Ensure input file exist
+ if (!(Test-Path $PackagePath)) {
+ Write-PipelineTaskError "Input file does not exist: $PackagePath"
+ return [pscustomobject]@{
+ result = $using:ERROR_FILEDOESNOTEXIST
+ packagePath = $PackagePath
+ }
+ }
+
+ # Extensions for which we'll look for symbols
+ $RelevantExtensions = @('.dll', '.exe', '.so', '.dylib')
+
+ # How many files are missing symbol information
+ $MissingSymbols = 0
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $PackageGuid = New-Guid
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageGuid
+ $SymbolsPath = Join-Path -Path $ExtractPath -ChildPath 'Symbols'
+
+ try {
+ [System.IO.Compression.ZipFile]::ExtractToDirectory($PackagePath, $ExtractPath)
+ }
+ catch {
+ Write-Host "Something went wrong extracting $PackagePath"
+ Write-Host $_
+ return [pscustomobject]@{
+ result = $using:ERROR_BADEXTRACT
+ packagePath = $PackagePath
+ }
+ }
+
+ Get-ChildItem -Recurse $ExtractPath |
+ Where-Object { $RelevantExtensions -contains $_.Extension } |
+ ForEach-Object {
+ $FileName = $_.FullName
+ if ($FileName -Match '\\ref\\') {
+ Write-Host "`t Ignoring reference assembly file " $FileName
+ return
+ }
+
+ $FirstMatchingSymbolDescriptionOrDefault = {
+ param(
+ [string] $FullPath, # Full path to the module that has to be checked
+ [string] $TargetServerParam, # Parameter to pass to `Symbol Tool` indicating the server to lookup for symbols
+ [string] $WindowsPdbVerificationParam, # Parameter to pass to potential check for windows-pdbs.
+ [string] $SymbolsPath
+ )
+
+ $FileName = [System.IO.Path]::GetFileName($FullPath)
+ $Extension = [System.IO.Path]::GetExtension($FullPath)
+
+ # The paths below are the potential symbol files that `dotnet symbol` might
+ # return. Which one is returned depends on the type of file being checked
+ # and which type of file was uploaded.
+
+ # The file itself is returned
+ $SymbolPath = $SymbolsPath + '\' + $FileName
+
+ # PDB file for the module
+ $PdbPath = $SymbolPath.Replace($Extension, '.pdb')
+
+ # PDB file for R2R module (created by crossgen)
+ $NGenPdb = $SymbolPath.Replace($Extension, '.ni.pdb')
+
+ # DBG file for a .so library
+ $SODbg = $SymbolPath.Replace($Extension, '.so.dbg')
+
+ # DWARF file for a .dylib
+ $DylibDwarf = $SymbolPath.Replace($Extension, '.dylib.dwarf')
+
+ $dotnetSymbolExe = "$env:USERPROFILE\.dotnet\tools"
+ $dotnetSymbolExe = Resolve-Path "$dotnetSymbolExe\dotnet-symbol.exe"
+
+ $totalRetries = 0
+
+ while ($totalRetries -lt $using:MaxRetry) {
+
+ # Save the output and get diagnostic output
+ $output = & $dotnetSymbolExe --symbols --modules $WindowsPdbVerificationParam $TargetServerParam $FullPath -o $SymbolsPath --diagnostics | Out-String
+
+ if ((Test-Path $PdbPath) -and (Test-Path $SymbolPath)) {
+ return 'Module and PDB for Module'
+ }
+ elseif ((Test-Path $NGenPdb) -and (Test-Path $PdbPath) -and (Test-Path $SymbolPath)) {
+ return 'Dll, PDB and NGen PDB'
+ }
+ elseif ((Test-Path $SODbg) -and (Test-Path $SymbolPath)) {
+ return 'So and DBG for SO'
+ }
+ elseif ((Test-Path $DylibDwarf) -and (Test-Path $SymbolPath)) {
+ return 'Dylib and Dwarf for Dylib'
+ }
+ elseif (Test-Path $SymbolPath) {
+ return 'Module'
+ }
+ else
+ {
+ $totalRetries++
+ }
+ }
+
+ return $null
+ }
+
+ $FileRelativePath = $FileName.Replace("$ExtractPath\", "")
+ if (($($using:ExclusionSet) -ne $null) -and ($($using:ExclusionSet).Contains($FileRelativePath) -or ($($using:ExclusionSet).Contains($FileRelativePath.Replace("\", "/"))))){
+ Write-Host "Skipping $FileName from symbol validation"
+ }
+
+ else {
+ $FileGuid = New-Guid
+ $ExpandedSymbolsPath = Join-Path -Path $SymbolsPath -ChildPath $FileGuid
+
+ $SymbolsOnMSDL = & $FirstMatchingSymbolDescriptionOrDefault `
+ -FullPath $FileName `
+ -TargetServerParam '--microsoft-symbol-server' `
+ -SymbolsPath "$ExpandedSymbolsPath-msdl" `
+ -WindowsPdbVerificationParam $WindowsPdbVerificationParam
+ $SymbolsOnSymWeb = & $FirstMatchingSymbolDescriptionOrDefault `
+ -FullPath $FileName `
+ -TargetServerParam '--internal-server' `
+ -SymbolsPath "$ExpandedSymbolsPath-symweb" `
+ -WindowsPdbVerificationParam $WindowsPdbVerificationParam
+
+ Write-Host -NoNewLine "`t Checking file " $FileName "... "
+
+ if ($SymbolsOnMSDL -ne $null -and $SymbolsOnSymWeb -ne $null) {
+ Write-Host "Symbols found on MSDL ($SymbolsOnMSDL) and SymWeb ($SymbolsOnSymWeb)"
+ }
+ else {
+ $MissingSymbols++
+
+ if ($SymbolsOnMSDL -eq $null -and $SymbolsOnSymWeb -eq $null) {
+ Write-Host 'No symbols found on MSDL or SymWeb!'
+ }
+ else {
+ if ($SymbolsOnMSDL -eq $null) {
+ Write-Host 'No symbols found on MSDL!'
+ }
+ else {
+ Write-Host 'No symbols found on SymWeb!'
+ }
+ }
+ }
+ }
+ }
+
+ if ($using:Clean) {
+ Remove-Item $ExtractPath -Recurse -Force
+ }
+
+
+ return [pscustomobject]@{
+ result = $MissingSymbols
+ packagePath = $PackagePath
+ }
+}
+
+function CheckJobResult(
+ $result,
+ $packagePath,
+ [ref]$DupedSymbols,
+ [ref]$TotalFailures) {
+ if ($result -eq $ERROR_BADEXTRACT) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath has duplicated symbol files"
+ $DupedSymbols.Value++
+ }
+ elseif ($result -eq $ERROR_FILEDOESNOTEXIST) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$packagePath does not exist"
+ $TotalFailures.Value++
+ }
+ elseif ($result -gt 0) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Missing symbols for $result modules in the package $packagePath"
+ $TotalFailures.Value++
+ }
+ else {
+ Write-Host "All symbols verified for package $packagePath"
+ }
+}
+
+function CheckSymbolsAvailable {
+ if (Test-Path $ExtractPath) {
+ Remove-Item $ExtractPath -Force -Recurse -ErrorAction SilentlyContinue
+ }
+
+ $TotalPackages = 0
+ $TotalFailures = 0
+ $DupedSymbols = 0
+
+ Get-ChildItem "$InputPath\*.nupkg" |
+ ForEach-Object {
+ $FileName = $_.Name
+ $FullName = $_.FullName
+
+ # These packages from Arcade-Services include some native libraries that
+ # our current symbol uploader can't handle. Below is a workaround until
+ # we get issue: https://github.com/dotnet/arcade/issues/2457 sorted.
+ if ($FileName -Match 'Microsoft\.DotNet\.Darc\.') {
+ Write-Host "Ignoring Arcade-services file: $FileName"
+ Write-Host
+ return
+ }
+ elseif ($FileName -Match 'Microsoft\.DotNet\.Maestro\.Tasks\.') {
+ Write-Host "Ignoring Arcade-services file: $FileName"
+ Write-Host
+ return
+ }
+
+ $TotalPackages++
+
+ Start-Job -ScriptBlock $CountMissingSymbols -ArgumentList @($FullName,$WindowsPdbVerificationParam) | Out-Null
+
+ $NumJobs = @(Get-Job -State 'Running').Count
+
+ while ($NumJobs -ge $MaxParallelJobs) {
+ Write-Host "There are $NumJobs validation jobs running right now. Waiting $SecondsBetweenLoadChecks seconds to check again."
+ Start-Sleep -Seconds $SecondsBetweenLoadChecks
+ $NumJobs = @(Get-Job -State 'Running').Count
+ }
+
+ foreach ($Job in @(Get-Job -State 'Completed')) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
+ Remove-Job -Id $Job.Id
+ }
+ Write-Host
+ }
+
+ foreach ($Job in @(Get-Job)) {
+ $jobResult = Wait-Job -Id $Job.Id | Receive-Job
+ CheckJobResult $jobResult.result $jobResult.packagePath ([ref]$DupedSymbols) ([ref]$TotalFailures)
+ }
+
+ if ($TotalFailures -gt 0 -or $DupedSymbols -gt 0) {
+ if ($TotalFailures -gt 0) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "Symbols missing for $TotalFailures/$TotalPackages packages"
+ }
+
+ if ($DupedSymbols -gt 0) {
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message "$DupedSymbols/$TotalPackages packages had duplicated symbol files and could not be extracted"
+ }
+
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host "All symbols validated!"
+ }
+}
+
+function InstallDotnetSymbol {
+ $dotnetSymbolPackageName = 'dotnet-symbol'
+
+ $dotnetRoot = InitializeDotNetCli -install:$true
+ $dotnet = "$dotnetRoot\dotnet.exe"
+ $toolList = & "$dotnet" tool list --global
+
+ if (($toolList -like "*$dotnetSymbolPackageName*") -and ($toolList -like "*$dotnetSymbolVersion*")) {
+ Write-Host "dotnet-symbol version $dotnetSymbolVersion is already installed."
+ }
+ else {
+ Write-Host "Installing dotnet-symbol version $dotnetSymbolVersion..."
+ Write-Host 'You may need to restart your command window if this is the first dotnet tool you have installed.'
+ & "$dotnet" tool install $dotnetSymbolPackageName --version $dotnetSymbolVersion --verbosity "minimal" --global
+ }
+}
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ InstallDotnetSymbol
+
+ foreach ($Job in @(Get-Job)) {
+ Remove-Job -Id $Job.Id
+ }
+
+ CheckSymbolsAvailable
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'CheckSymbols' -Message $_
+ ExitWithExitCode 1
+}
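
A usage sketch with hypothetical paths and tool version; -Clean removes the extraction directory after each package is checked:

    .\eng\common\post-build\symbols-validation.ps1 `
        -InputPath 'C:\artifacts\packages' `
        -ExtractPath 'C:\temp\sym-extract' `
        -DotnetSymbolVersion '1.0.0' `
        -Clean
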
diff --git a/eng/common/post-build/trigger-subscriptions.ps1 b/eng/common/post-build/trigger-subscriptions.ps1
new file mode 100644
index 0000000..55dea51
--- /dev/null
+++ b/eng/common/post-build/trigger-subscriptions.ps1
@@ -0,0 +1,64 @@
+param(
+ [Parameter(Mandatory=$true)][string] $SourceRepo,
+ [Parameter(Mandatory=$true)][int] $ChannelId,
+ [Parameter(Mandatory=$true)][string] $MaestroApiAccessToken,
+ [Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro-prod.westus2.cloudapp.azure.com',
+ [Parameter(Mandatory=$false)][string] $MaestroApiVersion = '2019-01-16'
+)
+
+try {
+ . $PSScriptRoot\post-build-utils.ps1
+
+ # Get all the $SourceRepo subscriptions
+ $normalizedSourceRepo = $SourceRepo.Replace('dnceng@', '')
+ $subscriptions = Get-MaestroSubscriptions -SourceRepository $normalizedSourceRepo -ChannelId $ChannelId
+
+ if (!$subscriptions) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message "No subscriptions found for source repo '$normalizedSourceRepo' in channel '$ChannelId'"
+ ExitWithExitCode 0
+ }
+
+ $subscriptionsToTrigger = New-Object System.Collections.Generic.List[string]
+ $failedTriggeredSubscription = $false
+
+ # Get all enabled subscriptions that need dependency flow on 'everyBuild'
+ foreach ($subscription in $subscriptions) {
+ if ($subscription.enabled -and $subscription.policy.updateFrequency -like 'everyBuild' -and $subscription.channel.id -eq $ChannelId) {
+ Write-Host "Should trigger this subscription: ${$subscription.id}"
+ [void]$subscriptionsToTrigger.Add($subscription.id)
+ }
+ }
+
+ foreach ($subscriptionToTrigger in $subscriptionsToTrigger) {
+ try {
+ Write-Host "Triggering subscription '$subscriptionToTrigger'."
+
+ Trigger-Subscription -SubscriptionId $subscriptionToTrigger
+
+ Write-Host 'done.'
+ }
+ catch
+ {
+ Write-Host "There was an error while triggering subscription '$subscriptionToTrigger'"
+ Write-Host $_
+ Write-Host $_.ScriptStackTrace
+ $failedTriggeredSubscription = $true
+ }
+ }
+
+ if ($subscriptionsToTrigger.Count -eq 0) {
+ Write-Host "No subscription matched source repo '$normalizedSourceRepo' and channel ID '$ChannelId'."
+ }
+ elseif ($failedTriggeredSubscription) {
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message 'At least one subscription failed to be triggered...'
+ ExitWithExitCode 1
+ }
+ else {
+ Write-Host 'All subscriptions were triggered successfully!'
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'TriggerSubscriptions' -Message $_
+ ExitWithExitCode 1
+}
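
A usage sketch with hypothetical values; only enabled subscriptions on the channel with an 'everyBuild' update frequency are triggered:

    .\eng\common\post-build\trigger-subscriptions.ps1 `
        -SourceRepo 'https://github.com/dotnet/arcade' `
        -ChannelId 678 `
        -MaestroApiAccessToken $env:MAESTRO_TOKEN
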
diff --git a/eng/common/retain-build.ps1 b/eng/common/retain-build.ps1
new file mode 100644
index 0000000..e7ba975
--- /dev/null
+++ b/eng/common/retain-build.ps1
@@ -0,0 +1,45 @@
+
+Param(
+[Parameter(Mandatory=$true)][int] $buildId,
+[Parameter(Mandatory=$true)][string] $azdoOrgUri,
+[Parameter(Mandatory=$true)][string] $azdoProject,
+[Parameter(Mandatory=$true)][string] $token
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+function Get-AzDOHeaders(
+ [string] $token)
+{
+ $base64AuthInfo = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":${token}"))
+ $headers = @{"Authorization"="Basic $base64AuthInfo"}
+ return $headers
+}
+
+function Update-BuildRetention(
+ [string] $azdoOrgUri,
+ [string] $azdoProject,
+ [int] $buildId,
+ [string] $token)
+{
+ $headers = Get-AzDOHeaders -token $token
+ $requestBody = "{
+ `"keepForever`": `"true`"
+ }"
+
+ $requestUri = "${azdoOrgUri}/${azdoProject}/_apis/build/builds/${buildId}?api-version=6.0"
+ Write-Host "Attempting to retain build using the following URI: ${requestUri} ..."
+
+ try {
+ Invoke-RestMethod -Uri $requestUri -Method Patch -Body $requestBody -Header $headers -contentType "application/json"
+ Write-Host "Updated retention settings for build ${buildId}."
+ }
+ catch {
+ Write-Error "Failed to update retention settings for build: $_.Exception.Response.StatusDescription"
+ exit 1
+ }
+}
+
+Update-BuildRetention -azdoOrgUri $azdoOrgUri -azdoProject $azdoProject -buildId $buildId -token $token
+exit 0
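
The script boils down to a single PATCH that sets keepForever on the build. A hypothetical invocation:

    .\eng\common\retain-build.ps1 `
        -buildId 12345 `
        -azdoOrgUri 'https://dev.azure.com/dnceng' `
        -azdoProject 'internal' `
        -token $env:AZDO_PAT
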
diff --git a/eng/common/sdk-task.ps1 b/eng/common/sdk-task.ps1
new file mode 100644
index 0000000..c35087a
--- /dev/null
+++ b/eng/common/sdk-task.ps1
@@ -0,0 +1,97 @@
+[CmdletBinding(PositionalBinding=$false)]
+Param(
+ [string] $configuration = 'Debug',
+ [string] $task,
+ [string] $verbosity = 'minimal',
+ [string] $msbuildEngine = $null,
+ [switch] $restore,
+ [switch] $prepareMachine,
+ [switch] $help,
+ [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties
+)
+
+$ci = $true
+$binaryLog = $true
+$warnAsError = $true
+
+. $PSScriptRoot\tools.ps1
+
+function Print-Usage() {
+ Write-Host "Common settings:"
+ Write-Host " -task <value> Name of Arcade task (name of a project in SdkTasks directory of the Arcade SDK package)"
+ Write-Host " -restore Restore dependencies"
+ Write-Host " -verbosity <value> Msbuild verbosity: q[uiet], m[inimal], n[ormal], d[etailed], and diag[nostic]"
+ Write-Host " -help Print help and exit"
+ Write-Host ""
+
+ Write-Host "Advanced settings:"
+ Write-Host " -prepareMachine Prepare machine for CI run"
+ Write-Host " -msbuildEngine <value> Msbuild engine to use to run build ('dotnet', 'vs', or unspecified)."
+ Write-Host ""
+ Write-Host "Command line arguments not listed above are passed thru to msbuild."
+}
+
+function Build([string]$target) {
+ $logSuffix = if ($target -eq 'Execute') { '' } else { ".$target" }
+ $log = Join-Path $LogDir "$task$logSuffix.binlog"
+ $outputPath = Join-Path $ToolsetDir "$task\"
+
+ MSBuild $taskProject `
+ /bl:$log `
+ /t:$target `
+ /p:Configuration=$configuration `
+ /p:RepoRoot=$RepoRoot `
+ /p:BaseIntermediateOutputPath=$outputPath `
+ /v:$verbosity `
+ @properties
+}
+
+try {
+ if ($help -or (($null -ne $properties) -and ($properties.Contains('/help') -or $properties.Contains('/?')))) {
+ Print-Usage
+ exit 0
+ }
+
+ if ($task -eq "") {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Missing required parameter '-task <value>'"
+ Print-Usage
+ ExitWithExitCode 1
+ }
+
+ if( $msbuildEngine -eq "vs") {
+ # Ensure desktop MSBuild is available for sdk tasks.
+ if( -not ($GlobalJson.tools.PSObject.Properties.Name -contains "vs" )) {
+ $GlobalJson.tools | Add-Member -Name "vs" -Value (ConvertFrom-Json "{ `"version`": `"16.5`" }") -MemberType NoteProperty
+ }
+ if( -not ($GlobalJson.tools.PSObject.Properties.Name -match "xcopy-msbuild" )) {
+ $GlobalJson.tools | Add-Member -Name "xcopy-msbuild" -Value "17.2.1" -MemberType NoteProperty
+ }
+ $xcopyMSBuildToolsFolder = $null
+ if ($GlobalJson.tools."xcopy-msbuild".Trim() -ine "none") {
+ $xcopyMSBuildToolsFolder = InitializeXCopyMSBuild $GlobalJson.tools."xcopy-msbuild" -install $true
+ }
+ if ($null -eq $xcopyMSBuildToolsFolder) {
+ throw 'Unable to get xcopy downloadable version of msbuild'
+ }
+
+ $global:_MSBuildExe = "$($xcopyMSBuildToolsFolder)\MSBuild\Current\Bin\MSBuild.exe"
+ }
+
+ $taskProject = GetSdkTaskProject $task
+ if (!(Test-Path $taskProject)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Unknown task: $task"
+ ExitWithExitCode 1
+ }
+
+ if ($restore) {
+ Build 'Restore'
+ }
+
+ Build 'Execute'
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Category 'Build' -Message $_
+ ExitWithExitCode 1
+}
+
+ExitWithExitCode 0
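
A usage sketch; the task name is hypothetical and must match a project in the SdkTasks directory of the Arcade SDK package, per the help text above:

    .\eng\common\sdk-task.ps1 -restore -task SigningValidation /p:Configuration=Release
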
diff --git a/eng/common/sdl/NuGet.config b/eng/common/sdl/NuGet.config
new file mode 100644
index 0000000..3849bdb
--- /dev/null
+++ b/eng/common/sdl/NuGet.config
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<configuration>
+ <solution>
+ <add key="disableSourceControlIntegration" value="true" />
+ </solution>
+ <packageSources>
+ <clear />
+ <add key="guardian" value="https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json" />
+ </packageSources>
+ <packageSourceMapping>
+ <packageSource key="guardian">
+ <package pattern="microsoft.guardian.cli" />
+ </packageSource>
+ </packageSourceMapping>
+ <disabledPackageSources>
+ <clear />
+ </disabledPackageSources>
+</configuration>
diff --git a/eng/common/sdl/configure-sdl-tool.ps1 b/eng/common/sdl/configure-sdl-tool.ps1
new file mode 100644
index 0000000..bdbf49e
--- /dev/null
+++ b/eng/common/sdl/configure-sdl-tool.ps1
@@ -0,0 +1,116 @@
+Param(
+ [string] $GuardianCliLocation,
+ [string] $WorkingDirectory,
+ [string] $TargetDirectory,
+ [string] $GdnFolder,
+ # The list of Guardian tools to configure. For each object in the array:
+ # - If the item is a [hashtable], it must contain these entries:
+ # - Name = The tool name as Guardian knows it.
+ # - Scenario = (Optional) Scenario-specific name for this configuration entry. It must be unique
+ # among all tool entries with the same Name.
+ # - Args = (Optional) Array of Guardian tool configuration args, like '@("Target > C:\temp")'
+ # - If the item is a [string] $v, it is treated as '@{ Name="$v" }'
+ [object[]] $ToolsList,
+ [string] $GuardianLoggerLevel='Standard',
+ # Optional: Additional params to add to any tool using CredScan.
+ [string[]] $CrScanAdditionalRunConfigParams,
+ # Optional: Additional params to add to any tool using PoliCheck.
+ [string[]] $PoliCheckAdditionalRunConfigParams,
+ # Optional: Additional params to add to any tool using CodeQL/Semmle.
+ [string[]] $CodeQLAdditionalRunConfigParams
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
+$global:LASTEXITCODE = 0
+
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ # Normalize tools list: all in [hashtable] form with defined values for each key.
+ $ToolsList = $ToolsList |
+ ForEach-Object {
+ if ($_ -is [string]) {
+ $_ = @{ Name = $_ }
+ }
+
+ if (-not ($_['Scenario'])) { $_.Scenario = "" }
+ if (-not ($_['Args'])) { $_.Args = @() }
+ $_
+ }
+
+ Write-Host "List of tools to configure:"
+ $ToolsList | ForEach-Object { $_ | Out-String | Write-Host }
+
+ # We store config files in the r directory of .gdn
+ $gdnConfigPath = Join-Path $GdnFolder 'r'
+ $ValidPath = Test-Path $GuardianCliLocation
+
+ if ($ValidPath -eq $False)
+ {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
+ ExitWithExitCode 1
+ }
+
+ foreach ($tool in $ToolsList) {
+ # Put together the name and scenario to make a unique key.
+ $toolConfigName = $tool.Name
+ if ($tool.Scenario) {
+ $toolConfigName += "_" + $tool.Scenario
+ }
+
+ Write-Host "=== Configuring $toolConfigName..."
+
+ $gdnConfigFile = Join-Path $gdnConfigPath "$toolConfigName-configure.gdnconfig"
+
+ # For some tools, add default and automatic args.
+ if ($tool.Name -eq 'credscan') {
+ if ($targetDirectory) {
+ $tool.Args += "`"TargetDirectory < $TargetDirectory`""
+ }
+ $tool.Args += "`"OutputType < pre`""
+ $tool.Args += $CrScanAdditionalRunConfigParams
+ } elseif ($tool.Name -eq 'policheck') {
+ if ($targetDirectory) {
+ $tool.Args += "`"Target < $TargetDirectory`""
+ }
+ $tool.Args += $PoliCheckAdditionalRunConfigParams
+ } elseif ($tool.Name -eq 'semmle' -or $tool.Name -eq 'codeql') {
+ if ($targetDirectory) {
+ $tool.Args += "`"SourceCodeDirectory < $TargetDirectory`""
+ }
+ $tool.Args += $CodeQLAdditionalRunConfigParams
+ }
+
+ # Create variable pointing to the args array directly so we can use splat syntax later.
+ $toolArgs = $tool.Args
+
+ # Configure the tool. If args array is provided or the current tool has some default arguments
+ # defined, add "--args" and splat each element on the end. Arg format is "{Arg id} < {Value}",
+ # one per parameter. Doc page for "guardian configure":
+ # https://dev.azure.com/securitytools/SecurityIntegration/_wiki/wikis/Guardian/1395/configure
+ Exec-BlockVerbosely {
+ & $GuardianCliLocation configure `
+ --working-directory $WorkingDirectory `
+ --tool $tool.Name `
+ --output-path $gdnConfigFile `
+ --logger-level $GuardianLoggerLevel `
+ --noninteractive `
+ --force `
+ $(if ($toolArgs) { "--args" }) @toolArgs
+ Exit-IfNZEC "Sdl"
+ }
+
+ Write-Host "Created '$toolConfigName' configuration file: $gdnConfigFile"
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
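
To make the documented $ToolsList shapes concrete, a hypothetical mixed list with one bare string entry and one full hashtable entry, using the "{Arg id} < {Value}" arg format described above:

    $ToolsList = @(
        'policheck',
        @{ Name = 'credscan'; Scenario = 'binaries'; Args = @('OutputType < pre') }
    )
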
diff --git a/eng/common/sdl/execute-all-sdl-tools.ps1 b/eng/common/sdl/execute-all-sdl-tools.ps1
new file mode 100644
index 0000000..4797e01
--- /dev/null
+++ b/eng/common/sdl/execute-all-sdl-tools.ps1
@@ -0,0 +1,165 @@
+Param(
+ [string] $GuardianPackageName, # Required: the name of guardian CLI package (not needed if GuardianCliLocation is specified)
+ [string] $NugetPackageDirectory, # Required: directory where NuGet packages are installed (not needed if GuardianCliLocation is specified)
+ [string] $GuardianCliLocation, # Optional: Direct location of Guardian CLI executable if GuardianPackageName & NugetPackageDirectory are not specified
+ [string] $Repository=$env:BUILD_REPOSITORY_NAME, # Required: the name of the repository (e.g. dotnet/arcade)
+ [string] $BranchName=$env:BUILD_SOURCEBRANCH, # Optional: name of branch or version of gdn settings; defaults to master
+ [string] $SourceDirectory=$env:BUILD_SOURCESDIRECTORY, # Required: the directory where source files are located
+ [string] $ArtifactsDirectory = (Join-Path $env:BUILD_ARTIFACTSTAGINGDIRECTORY ('artifacts')), # Required: the directory where build artifacts are located
+ [string] $AzureDevOpsAccessToken, # Required: access token for dnceng; should be provided via KeyVault
+
+ # Optional: list of SDL tools to run on source code. See 'configure-sdl-tool.ps1' for tools list
+ # format.
+ [object[]] $SourceToolsList,
+ # Optional: list of SDL tools to run on built artifacts. See 'configure-sdl-tool.ps1' for tools
+ # list format.
+ [object[]] $ArtifactToolsList,
+ # Optional: list of SDL tools to run without automatically specifying a target directory. See
+ # 'configure-sdl-tool.ps1' for tools list format.
+ [object[]] $CustomToolsList,
+
+ [bool] $TsaPublish=$False, # Optional: true will publish results to TSA; only set to true after onboarding to TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBranchName=$env:BUILD_SOURCEBRANCH, # Optional: required for TSA publish; defaults to $(Build.SourceBranchName); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaRepositoryName=$env:BUILD_REPOSITORY_NAME, # Optional: TSA repository name; will be generated automatically if not submitted; TSA is the automated framework used to upload test results as bugs.
+ [string] $BuildNumber=$env:BUILD_BUILDNUMBER, # Optional: required for TSA publish; defaults to $(Build.BuildNumber)
+ [bool] $UpdateBaseline=$False, # Optional: if true, will update the baseline in the repository; should only be run after fixing any issues which need to be fixed
+ [bool] $TsaOnboard=$False, # Optional: if true, will onboard the repository to TSA; should only be run once; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaInstanceUrl, # Optional: only needed if TsaOnboard or TsaPublish is true; the instance-url registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the codebase registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaProjectName, # Optional: only needed if TsaOnboard or TsaPublish is true; the name of the project registered with TSA; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaNotificationEmail, # Optional: only needed if TsaOnboard is true; the email(s) which will receive notifications of TSA bug filings (e.g. alias@microsoft.com); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaCodebaseAdmin, # Optional: only needed if TsaOnboard is true; the aliases which are admins of the TSA codebase (e.g. DOMAIN\alias); TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaBugAreaPath, # Optional: only needed if TsaOnboard is true; the area path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $TsaIterationPath, # Optional: only needed if TsaOnboard is true; the iteration path where TSA will file bugs in AzDO; TSA is the automated framework used to upload test results as bugs.
+ [string] $GuardianLoggerLevel='Standard', # Optional: the logger level for the Guardian CLI; options are Trace, Verbose, Standard, Warning, and Error
+ [string[]] $CrScanAdditionalRunConfigParams, # Optional: Additional Params to custom build a CredScan run config in the format @("xyz:abc","sdf:1")
+ [string[]] $PoliCheckAdditionalRunConfigParams, # Optional: Additional Params to custom build a Policheck run config in the format @("xyz:abc","sdf:1")
+ [string[]] $CodeQLAdditionalRunConfigParams, # Optional: Additional Params to custom build a Semmle/CodeQL run config in the format @("xyz < abc","sdf < 1")
+ [bool] $BreakOnFailure=$False # Optional: Fail the build if there were errors during the run
+)
+
+try {
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+ $disableConfigureToolsetImport = $true
+ $global:LASTEXITCODE = 0
+
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ # Normalize the repository name to the org/repo format
+ if (!($Repository.contains('/'))) {
+ $RepoName = $Repository -replace '(.*?)-(.*)', '$1/$2';
+ }
+ else{
+ $RepoName = $Repository;
+ }
+
+ if ($GuardianPackageName) {
+ $guardianCliLocation = Join-Path $NugetPackageDirectory (Join-Path $GuardianPackageName (Join-Path 'tools' 'guardian.cmd'))
+ } else {
+ $guardianCliLocation = $GuardianCliLocation
+ }
+
+ $workingDirectory = (Split-Path $SourceDirectory -Parent)
+ $ValidPath = Test-Path $guardianCliLocation
+
+ if ($ValidPath -eq $False)
+ {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Invalid Guardian CLI Location.'
+ ExitWithExitCode 1
+ }
+
+ Exec-BlockVerbosely {
+ & $(Join-Path $PSScriptRoot 'init-sdl.ps1') -GuardianCliLocation $guardianCliLocation -Repository $RepoName -BranchName $BranchName -WorkingDirectory $workingDirectory -AzureDevOpsAccessToken $AzureDevOpsAccessToken -GuardianLoggerLevel $GuardianLoggerLevel
+ }
+ $gdnFolder = Join-Path $workingDirectory '.gdn'
+
+ if ($TsaOnboard) {
+ if ($TsaCodebaseName -and $TsaNotificationEmail -and $TsaCodebaseAdmin -and $TsaBugAreaPath) {
+ Exec-BlockVerbosely {
+ & $guardianCliLocation tsa-onboard --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ }
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-onboard failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ } else {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not onboard to TSA -- not all required values ($TsaCodebaseName, $TsaNotificationEmail, $TsaCodebaseAdmin, $TsaBugAreaPath) were specified.'
+ ExitWithExitCode 1
+ }
+ }
+
+ # Configure a list of tools with a default target directory. Populates the ".gdn/r" directory.
+ function Configure-ToolsList([object[]] $tools, [string] $targetDirectory) {
+ if ($tools -and $tools.Count -gt 0) {
+ Exec-BlockVerbosely {
+ & $(Join-Path $PSScriptRoot 'configure-sdl-tool.ps1') `
+ -GuardianCliLocation $guardianCliLocation `
+ -WorkingDirectory $workingDirectory `
+ -TargetDirectory $targetDirectory `
+ -GdnFolder $gdnFolder `
+ -ToolsList $tools `
+ -AzureDevOpsAccessToken $AzureDevOpsAccessToken `
+ -GuardianLoggerLevel $GuardianLoggerLevel `
+ -CrScanAdditionalRunConfigParams $CrScanAdditionalRunConfigParams `
+ -PoliCheckAdditionalRunConfigParams $PoliCheckAdditionalRunConfigParams `
+ -CodeQLAdditionalRunConfigParams $CodeQLAdditionalRunConfigParams
+ if ($BreakOnFailure) {
+ Exit-IfNZEC "Sdl"
+ }
+ }
+ }
+ }
+
+ # Configure Artifact and Source tools with default Target directories.
+ Configure-ToolsList $ArtifactToolsList $ArtifactsDirectory
+ Configure-ToolsList $SourceToolsList $SourceDirectory
+ # Configure custom tools with no default Target directory.
+ Configure-ToolsList $CustomToolsList $null
+
+ # At this point, all tools are configured in the ".gdn" directory. Run them all in a single call.
+ # (If we used "run" multiple times, each run would overwrite data from earlier runs.)
+ Exec-BlockVerbosely {
+ & $(Join-Path $PSScriptRoot 'run-sdl.ps1') `
+ -GuardianCliLocation $guardianCliLocation `
+ -WorkingDirectory $SourceDirectory `
+ -UpdateBaseline $UpdateBaseline `
+ -GdnFolder $gdnFolder
+ }
+
+ if ($TsaPublish) {
+ if ($TsaBranchName -and $BuildNumber) {
+ if (-not $TsaRepositoryName) {
+ $TsaRepositoryName = "$($Repository)-$($BranchName)"
+ }
+ Exec-BlockVerbosely {
+ & $guardianCliLocation tsa-publish --all-tools --repository-name "$TsaRepositoryName" --branch-name "$TsaBranchName" --build-number "$BuildNumber" --onboard $True --codebase-name "$TsaCodebaseName" --notification-alias "$TsaNotificationEmail" --codebase-admin "$TsaCodebaseAdmin" --instance-url "$TsaInstanceUrl" --project-name "$TsaProjectName" --area-path "$TsaBugAreaPath" --iteration-path "$TsaIterationPath" --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ }
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Guardian tsa-publish failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ } else {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message 'Could not publish to TSA -- not all required values ($TsaBranchName, $BuildNumber) were specified.'
+ ExitWithExitCode 1
+ }
+ }
+
+ if ($BreakOnFailure) {
+ Write-Host "Failing the build in case of breaking results..."
+ Exec-BlockVerbosely {
+ & $guardianCliLocation break --working-directory $workingDirectory --logger-level $GuardianLoggerLevel
+ }
+ } else {
+ Write-Host "Letting the build pass even if there were breaking results..."
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ exit 1
+}
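
A usage sketch of the orchestrator with hypothetical values; the package name follows the Microsoft.Guardian.Cli version pinned in eng/common/sdl/packages.config:

    .\eng\common\sdl\execute-all-sdl-tools.ps1 `
        -GuardianPackageName 'Microsoft.Guardian.Cli.0.109.0' `
        -NugetPackageDirectory 'C:\packages' `
        -AzureDevOpsAccessToken $env:DNCENG_PAT `
        -ArtifactToolsList @('credscan') `
        -SourceToolsList @('policheck')
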
diff --git a/eng/common/sdl/extract-artifact-archives.ps1 b/eng/common/sdl/extract-artifact-archives.ps1
new file mode 100644
index 0000000..68da4fb
--- /dev/null
+++ b/eng/common/sdl/extract-artifact-archives.ps1
@@ -0,0 +1,63 @@
+# This script looks for each archive file in a directory and extracts it into the target directory.
+# For example, the file "$InputPath/bin.tar.gz" extracts to "$ExtractPath/bin.tar.gz.extracted/**".
+# Uses the "tar" utility added to Windows 10 / Windows 2019 that supports tar.gz and zip.
+param(
+ # Full path to directory where archives are stored.
+ [Parameter(Mandatory=$true)][string] $InputPath,
+ # Full path to directory to extract archives into. May be the same as $InputPath.
+ [Parameter(Mandatory=$true)][string] $ExtractPath
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+$disableConfigureToolsetImport = $true
+
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ Measure-Command {
+ $jobs = @()
+
+ # Find archive files for non-Windows and Windows builds.
+ $archiveFiles = @(
+ Get-ChildItem (Join-Path $InputPath "*.tar.gz")
+ Get-ChildItem (Join-Path $InputPath "*.zip")
+ )
+
+ foreach ($targzFile in $archiveFiles) {
+ $jobs += Start-Job -ScriptBlock {
+ $file = $using:targzFile
+ $fileName = [System.IO.Path]::GetFileName($file)
+ $extractDir = Join-Path $using:ExtractPath "$fileName.extracted"
+
+ New-Item $extractDir -ItemType Directory -Force | Out-Null
+
+ Write-Host "Extracting '$file' to '$extractDir'..."
+
+ # Pipe errors to stdout to prevent PowerShell detecting them and quitting the job early.
+ # This type of quit skips the catch, so we wouldn't be able to tell which file triggered the
+ # error. Save output so it can be stored in the exception string along with context.
+ $output = tar -xf $file -C $extractDir 2>&1
+ # Handle NZEC manually rather than using Exit-IfNZEC: we are in a background job, so we
+ # don't have access to the outer scope.
+ if ($LASTEXITCODE -ne 0) {
+ throw "Error extracting '$file': non-zero exit code ($LASTEXITCODE). Output: '$output'"
+ }
+
+ Write-Host "Extracted to $extractDir"
+ }
+ }
+
+ Receive-Job $jobs -Wait
+ }
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
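
A usage sketch with hypothetical paths; each .tar.gz or .zip under the input path is extracted into its own '<name>.extracted' folder, and the input and extract paths may be the same directory:

    .\eng\common\sdl\extract-artifact-archives.ps1 `
        -InputPath 'C:\artifacts\uploads' `
        -ExtractPath 'C:\artifacts\uploads'
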
diff --git a/eng/common/sdl/extract-artifact-packages.ps1 b/eng/common/sdl/extract-artifact-packages.ps1
new file mode 100644
index 0000000..7f28d9c
--- /dev/null
+++ b/eng/common/sdl/extract-artifact-packages.ps1
@@ -0,0 +1,80 @@
+param(
+ [Parameter(Mandatory=$true)][string] $InputPath, # Full path to directory where artifact packages are stored
+ [Parameter(Mandatory=$true)][string] $ExtractPath # Full path to directory where the packages will be extracted
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+
+$disableConfigureToolsetImport = $true
+
+function ExtractArtifacts {
+ if (!(Test-Path $InputPath)) {
+ Write-Host "Input Path does not exist: $InputPath"
+ ExitWithExitCode 0
+ }
+ $Jobs = @()
+ Get-ChildItem "$InputPath\*.nupkg" |
+ ForEach-Object {
+ $Jobs += Start-Job -ScriptBlock $ExtractPackage -ArgumentList $_.FullName
+ }
+
+ foreach ($Job in $Jobs) {
+ Wait-Job -Id $Job.Id | Receive-Job
+ }
+}
+
+try {
+ # `tools.ps1` checks $ci to perform some actions. Since the SDL
+ # scripts don't necessarily execute in the same agent that run the
+ # build.ps1/sh script this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ $ExtractPackage = {
+ param(
+ [string] $PackagePath # Full path to a NuGet package
+ )
+
+ if (!(Test-Path $PackagePath)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message "Input file does not exist: $PackagePath"
+ ExitWithExitCode 1
+ }
+
+ $RelevantExtensions = @('.dll', '.exe', '.pdb')
+ Write-Host -NoNewLine 'Extracting ' ([System.IO.Path]::GetFileName($PackagePath)) '...'
+
+ $PackageId = [System.IO.Path]::GetFileNameWithoutExtension($PackagePath)
+ $ExtractPath = Join-Path -Path $using:ExtractPath -ChildPath $PackageId
+
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+
+ [System.IO.Directory]::CreateDirectory($ExtractPath) | Out-Null
+
+ try {
+ $zip = [System.IO.Compression.ZipFile]::OpenRead($PackagePath)
+
+ $zip.Entries |
+ Where-Object {$RelevantExtensions -contains [System.IO.Path]::GetExtension($_.Name)} |
+ ForEach-Object {
+ $TargetFile = Join-Path -Path $ExtractPath -ChildPath $_.Name
+
+ [System.IO.Compression.ZipFileExtensions]::ExtractToFile($_, $TargetFile, $true)
+ }
+ }
+ catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+ }
+ finally {
+ $zip.Dispose()
+ }
+ }
+ Measure-Command { ExtractArtifacts }
+}
+catch {
+ Write-Host $_
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
diff --git a/eng/common/sdl/init-sdl.ps1 b/eng/common/sdl/init-sdl.ps1
new file mode 100644
index 0000000..3ac1d92
--- /dev/null
+++ b/eng/common/sdl/init-sdl.ps1
@@ -0,0 +1,55 @@
+Param(
+ [string] $GuardianCliLocation,
+ [string] $Repository,
+ [string] $BranchName='master',
+ [string] $WorkingDirectory,
+ [string] $AzureDevOpsAccessToken,
+ [string] $GuardianLoggerLevel='Standard'
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
+$global:LASTEXITCODE = 0
+
+# `tools.ps1` checks $ci to perform some actions. Since the SDL
+# scripts don't necessarily execute on the same agent that ran the
+# build.ps1/sh script, this variable isn't automatically set.
+$ci = $true
+. $PSScriptRoot\..\tools.ps1
+
+# Don't display the console progress UI - it's a huge perf hit
+$ProgressPreference = 'SilentlyContinue'
+
+# Construct basic auth from AzDO access token; construct URI to the repository's gdn folder stored in that repository; construct location of zip file
+$encodedPat = [Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes(":$AzureDevOpsAccessToken"))
+$escapedRepository = [Uri]::EscapeDataString("/$Repository/$BranchName/.gdn")
+$uri = "https://dev.azure.com/dnceng/internal/_apis/git/repositories/sdl-tool-cfg/Items?path=$escapedRepository&versionDescriptor[versionOptions]=0&`$format=zip&api-version=5.0"
+$zipFile = "$WorkingDirectory/gdn.zip"
+
+Add-Type -AssemblyName System.IO.Compression.FileSystem
+$gdnFolder = (Join-Path $WorkingDirectory '.gdn')
+
+try {
+  # Do a guardian init to set up the .gdn folder in the working directory
+ Write-Host 'Initializing Guardian...'
+ Write-Host "$GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel"
+ & $GuardianCliLocation init --working-directory $WorkingDirectory --logger-level $GuardianLoggerLevel
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian init failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ # We create the mainbaseline so it can be edited later
+ Write-Host "$GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline"
+ & $GuardianCliLocation baseline --working-directory $WorkingDirectory --name mainbaseline
+ if ($LASTEXITCODE -ne 0) {
+ Write-PipelineTelemetryError -Force -Category 'Build' -Message "Guardian baseline failed with exit code $LASTEXITCODE."
+ ExitWithExitCode $LASTEXITCODE
+ }
+ ExitWithExitCode 0
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
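
A step that runs this script could look like the following sketch; every argument value is a placeholder (the Guardian CLI path, working directory, and token variable are assumptions, not part of this patch):

  - task: PowerShell@2
    displayName: Initialize Guardian   # illustrative step
    inputs:
      filePath: eng/common/sdl/init-sdl.ps1
      # All values below are hypothetical placeholders.
      arguments: -GuardianCliLocation $(GuardianCliLocation)
        -Repository dotnet/cecil
        -BranchName main
        -WorkingDirectory $(Build.ArtifactStagingDirectory)
        -AzureDevOpsAccessToken $(AzDOAccessToken)
        -GuardianLoggerLevel Standard
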
diff --git a/eng/common/sdl/packages.config b/eng/common/sdl/packages.config
new file mode 100644
index 0000000..4585cfd
--- /dev/null
+++ b/eng/common/sdl/packages.config
@@ -0,0 +1,4 @@
+<?xml version="1.0" encoding="utf-8"?>
+<packages>
+ <package id="Microsoft.Guardian.Cli" version="0.109.0"/>
+</packages>
diff --git a/eng/common/sdl/run-sdl.ps1 b/eng/common/sdl/run-sdl.ps1
new file mode 100644
index 0000000..2eac8c7
--- /dev/null
+++ b/eng/common/sdl/run-sdl.ps1
@@ -0,0 +1,49 @@
+Param(
+ [string] $GuardianCliLocation,
+ [string] $WorkingDirectory,
+ [string] $GdnFolder,
+ [string] $UpdateBaseline,
+ [string] $GuardianLoggerLevel='Standard'
+)
+
+$ErrorActionPreference = 'Stop'
+Set-StrictMode -Version 2.0
+$disableConfigureToolsetImport = $true
+$global:LASTEXITCODE = 0
+
+try {
+  # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute on the same agent that ran the
+  # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ # We store config files in the r directory of .gdn
+ $gdnConfigPath = Join-Path $GdnFolder 'r'
+ $ValidPath = Test-Path $GuardianCliLocation
+
+ if ($ValidPath -eq $False)
+ {
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message "Invalid Guardian CLI Location."
+ ExitWithExitCode 1
+ }
+
+ $gdnConfigFiles = Get-ChildItem $gdnConfigPath -Recurse -Include '*.gdnconfig'
+ Write-Host "Discovered Guardian config files:"
+ $gdnConfigFiles | Out-String | Write-Host
+
+ Exec-BlockVerbosely {
+ & $GuardianCliLocation run `
+ --working-directory $WorkingDirectory `
+ --baseline mainbaseline `
+ --update-baseline $UpdateBaseline `
+ --logger-level $GuardianLoggerLevel `
+ --config @gdnConfigFiles
+ Exit-IfNZEC "Sdl"
+ }
+}
+catch {
+ Write-Host $_.ScriptStackTrace
+ Write-PipelineTelemetryError -Force -Category 'Sdl' -Message $_
+ ExitWithExitCode 1
+}
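
In practice this script is driven by the execute-all-sdl-tools.ps1 script referenced by execute-sdl.yml; called directly it would look roughly like this sketch, where the paths are placeholders and the .gdn folder is the one produced by init-sdl.ps1:

  - task: PowerShell@2
    displayName: Run Guardian tools   # illustrative step
    inputs:
      filePath: eng/common/sdl/run-sdl.ps1
      # Placeholder values for illustration only.
      arguments: -GuardianCliLocation $(GuardianCliLocation)
        -WorkingDirectory $(Build.ArtifactStagingDirectory)
        -GdnFolder $(Build.ArtifactStagingDirectory)\.gdn
        -UpdateBaseline false
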
diff --git a/eng/common/sdl/sdl.ps1 b/eng/common/sdl/sdl.ps1
new file mode 100644
index 0000000..648c506
--- /dev/null
+++ b/eng/common/sdl/sdl.ps1
@@ -0,0 +1,38 @@
+
+function Install-Gdn {
+ param(
+ [Parameter(Mandatory=$true)]
+ [string]$Path,
+
+ # If omitted, install the latest version of Guardian, otherwise install that specific version.
+ [string]$Version
+ )
+
+ $ErrorActionPreference = 'Stop'
+ Set-StrictMode -Version 2.0
+ $disableConfigureToolsetImport = $true
+ $global:LASTEXITCODE = 0
+
+  # `tools.ps1` checks $ci to perform some actions. Since the SDL
+  # scripts don't necessarily execute on the same agent that ran the
+  # build.ps1/sh script, this variable isn't automatically set.
+ $ci = $true
+ . $PSScriptRoot\..\tools.ps1
+
+ $argumentList = @("install", "Microsoft.Guardian.Cli", "-Source https://securitytools.pkgs.visualstudio.com/_packaging/Guardian/nuget/v3/index.json", "-OutputDirectory $Path", "-NonInteractive", "-NoCache")
+
+ if ($Version) {
+ $argumentList += "-Version $Version"
+ }
+
+ Start-Process nuget -Verbose -ArgumentList $argumentList -NoNewWindow -Wait
+
+ $gdnCliPath = Get-ChildItem -Filter guardian.cmd -Recurse -Path $Path
+
+ if (!$gdnCliPath)
+ {
+ Write-PipelineTelemetryError -Category 'Sdl' -Message 'Failure installing Guardian'
+ }
+
+ return $gdnCliPath.FullName
+} \ No newline at end of file
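
Install-Gdn appears intended to be dot-sourced and called from a pipeline step; a minimal sketch, where the install path is a placeholder and the version shown matches eng/common/sdl/packages.config above:

  - powershell: |
      # Dot-source the helper, then install the Guardian CLI into a temp folder.
      . eng/common/sdl/sdl.ps1
      $guardianCliLocation = Install-Gdn -Path $(Agent.TempDirectory)\guardian -Version 0.109.0
      Write-Host "Guardian CLI installed at $guardianCliLocation"
    displayName: Install Guardian   # illustrative step
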
diff --git a/eng/common/templates/job/execute-sdl.yml b/eng/common/templates/job/execute-sdl.yml
new file mode 100644
index 0000000..781a41c
--- /dev/null
+++ b/eng/common/templates/job/execute-sdl.yml
@@ -0,0 +1,133 @@
+parameters:
+ enable: 'false' # Whether the SDL validation job should execute or not
+ overrideParameters: '' # Optional: to override values for parameters.
+ additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+ # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
+ # diagnosis of problems with specific tool configurations.
+ publishGuardianDirectoryToPipeline: false
+ # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
+ # parameters rather than relying on YAML. It may be better to use a local script, because you can
+ # reproduce results locally without piecing together a command based on the YAML.
+ executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
+  # There is a reported bug in Azure DevOps where, if this parameter is named
+  # 'continueOnError', the parameter value is not correctly picked up.
+  # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter.
+  sdlContinueOnError: false # optional: determines whether to continue the build if the step errors.
+ # optional: determines if build artifacts should be downloaded.
+ downloadArtifacts: true
+ # optional: determines if this job should search the directory of downloaded artifacts for
+ # 'tar.gz' and 'zip' archive files and extract them before running SDL validation tasks.
+ extractArchiveArtifacts: false
+ dependsOn: '' # Optional: dependencies of the job
+ artifactNames: '' # Optional: patterns supplied to DownloadBuildArtifacts
+ # Usage:
+ # artifactNames:
+ # - 'BlobArtifacts'
+ # - 'Artifacts_Windows_NT_Release'
+ # Optional: download a list of pipeline artifacts. 'downloadArtifacts' controls build artifacts,
+ # not pipeline artifacts, so doesn't affect the use of this parameter.
+ pipelineArtifactNames: []
+
+jobs:
+- job: Run_SDL
+ dependsOn: ${{ parameters.dependsOn }}
+ displayName: Run SDL tool
+ condition: eq( ${{ parameters.enable }}, 'true')
+ variables:
+ - group: DotNet-VSTS-Bot
+ - name: AzDOProjectName
+ value: ${{ parameters.AzDOProjectName }}
+ - name: AzDOPipelineId
+ value: ${{ parameters.AzDOPipelineId }}
+ - name: AzDOBuildId
+ value: ${{ parameters.AzDOBuildId }}
+ - template: /eng/common/templates/variables/sdl-variables.yml
+ - name: GuardianVersion
+ value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: VSEngSS-MicroBuild2022-1ES
+ demands: Cmd
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+ steps:
+ - checkout: self
+ clean: true
+
+ # If the template caller didn't provide an AzDO parameter, set them all up as Maestro vars.
+ - ${{ if not(and(parameters.AzDOProjectName, parameters.AzDOPipelineId, parameters.AzDOBuildId)) }}:
+ - template: /eng/common/templates/post-build/setup-maestro-vars.yml
+
+ - ${{ if ne(parameters.downloadArtifacts, 'false')}}:
+ - ${{ if ne(parameters.artifactNames, '') }}:
+ - ${{ each artifactName in parameters.artifactNames }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: ${{ artifactName }}
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ checkDownloadedFiles: true
+ - ${{ if eq(parameters.artifactNames, '') }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Build Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ downloadType: specific files
+ itemPattern: "**"
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ checkDownloadedFiles: true
+
+ - ${{ each artifactName in parameters.pipelineArtifactNames }}:
+ - task: DownloadPipelineArtifact@2
+ displayName: Download Pipeline Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: ${{ artifactName }}
+ downloadPath: $(Build.ArtifactStagingDirectory)\artifacts
+ checkDownloadedFiles: true
+
+ - powershell: eng/common/sdl/extract-artifact-packages.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\BlobArtifacts
+ displayName: Extract Blob Artifacts
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+
+ - powershell: eng/common/sdl/extract-artifact-packages.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts\PackageArtifacts
+ displayName: Extract Package Artifacts
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+
+ - ${{ if ne(parameters.extractArchiveArtifacts, 'false') }}:
+ - powershell: eng/common/sdl/extract-artifact-archives.ps1
+ -InputPath $(Build.ArtifactStagingDirectory)\artifacts
+ -ExtractPath $(Build.ArtifactStagingDirectory)\artifacts
+ displayName: Extract Archive Artifacts
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+
+ - template: /eng/common/templates/steps/execute-sdl.yml
+ parameters:
+ overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
+ executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
+ overrideParameters: ${{ parameters.overrideParameters }}
+ additionalParameters: ${{ parameters.additionalParameters }}
+ publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
+ sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
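
A consuming pipeline enables the job by passing enable: 'true' plus whatever tool parameters it needs; a sketch with illustrative values (the tool lists follow the @("abc","def") pattern documented above and are hypothetical):

  jobs:
  - template: /eng/common/templates/job/execute-sdl.yml
    parameters:
      enable: 'true'
      additionalParameters: '-SourceToolsList @("credscan") -ArtifactToolsList @("binskim")'
      extractArchiveArtifacts: true
      artifactNames:
      - 'BlobArtifacts'
      - 'PackageArtifacts'
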
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
new file mode 100644
index 0000000..459f3c4
--- /dev/null
+++ b/eng/common/templates/job/job.yml
@@ -0,0 +1,226 @@
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ artifacts: ''
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enablePublishUsingPipelines: false
+ disableComponentGovernance: false
+ mergeTestResults: false
+ testRunTitle: ''
+ testResultsFormat: ''
+ name: ''
+ preSteps: []
+ runAsPublic: false
+# Sbom related params
+ enableSbom: true
+ PackageVersion: 7.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ variables:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
+ - name: EnableRichCodeNavigation
+ value: 'true'
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildSigningPlugin@3
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ env:
+ TeamName: $(_TeamName)
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
+ - task: NuGetAuthenticate@0
+
+ - ${{ if or(eq(parameters.artifacts.download, 'true'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+ languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
+ continueOnError: true
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), ne(parameters.disableComponentGovernance, 'true')) }}:
+ - task: ComponentGovernanceComponentDetection@0
+ continueOnError: true
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
+
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if or(eq(parameters.artifacts.publish.artifacts, 'true'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - task: PublishBuildArtifacts@1
+ displayName: Publish pipeline artifacts
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(eq(parameters.artifacts.publish.logs, 'true'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - publish: artifacts/log
+ artifact: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: Publish logs
+ continueOnError: true
+ condition: always()
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish XUnit Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish TRX Test Results
+ inputs:
+ testResultsFormat: 'VSTest'
+ testResultsFiles: '*.trx'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - template: /eng/common/templates/steps/generate-sbom.yml
+ parameters:
+      PackageVersion: ${{ parameters.PackageVersion }}
+      BuildDropPath: ${{ parameters.BuildDropPath }}
+
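
Repos normally consume this template (directly or through jobs.yml) by supplying a job name, a pool, and steps; a minimal sketch with illustrative values:

  jobs:
  - template: /eng/common/templates/job/job.yml
    parameters:
      name: Windows_NT
      displayName: Build and Test Windows
      pool:
        vmImage: windows-2019
      enablePublishTestResults: true
      testResultsFormat: vstest
      steps:
      - script: eng\common\CIBuild.cmd -configuration Release
        displayName: Build and test
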
diff --git a/eng/common/templates/job/onelocbuild.yml b/eng/common/templates/job/onelocbuild.yml
new file mode 100644
index 0000000..6c523b7
--- /dev/null
+++ b/eng/common/templates/job/onelocbuild.yml
@@ -0,0 +1,106 @@
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: ''
+
+ CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
+ GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
+
+ SourcesDirectory: $(Build.SourcesDirectory)
+ CreatePr: true
+ AutoCompletePr: false
+ ReusePr: true
+ UseLfLineEndings: true
+ UseCheckedInLocProjectJson: false
+ LanguageSet: VS_Main_Languages
+ LclSource: lclFilesInRepo
+ LclPackageId: ''
+ RepoType: gitHub
+ GitHubOrg: dotnet
+ MirrorRepo: ''
+ MirrorBranch: main
+ condition: ''
+
+jobs:
+- job: OneLocBuild
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: OneLocBuild
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: VSEngSS-MicroBuild2022-1ES
+ demands: Cmd
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+
+ variables:
+ - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
+ - name: _GenerateLocProjectArguments
+ value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
+ -LanguageSet "${{ parameters.LanguageSet }}"
+ -CreateNeutralXlfs
+ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
+ - name: _GenerateLocProjectArguments
+ value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
+
+
+ steps:
+ - task: Powershell@2
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
+ arguments: $(_GenerateLocProjectArguments)
+ displayName: Generate LocProject.json
+ condition: ${{ parameters.condition }}
+
+ - task: OneLocBuild@2
+ displayName: OneLocBuild
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ inputs:
+ locProj: eng/Localize/LocProject.json
+ outDir: $(Build.ArtifactStagingDirectory)
+ lclSource: ${{ parameters.LclSource }}
+ lclPackageId: ${{ parameters.LclPackageId }}
+ isCreatePrSelected: ${{ parameters.CreatePr }}
+ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
+ ${{ if eq(parameters.CreatePr, true) }}:
+ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ packageSourceAuth: patAuth
+ patVariable: ${{ parameters.CeapexPat }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ repoType: ${{ parameters.RepoType }}
+ gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if ne(parameters.MirrorRepo, '') }}:
+ isMirrorRepoSelected: true
+ gitHubOrganization: ${{ parameters.GitHubOrg }}
+ mirrorRepo: ${{ parameters.MirrorRepo }}
+ mirrorBranch: ${{ parameters.MirrorBranch }}
+ condition: ${{ parameters.condition }}
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish Localization Files
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
+ PublishLocation: Container
+ ArtifactName: Loc
+ condition: ${{ parameters.condition }}
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish LocProject.json
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
+ PublishLocation: Container
+ ArtifactName: Loc
+ condition: ${{ parameters.condition }} \ No newline at end of file
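
A repo opts into localization by adding this job with its mirror settings; a sketch with placeholder values (the mirror repo name is hypothetical):

  jobs:
  - template: /eng/common/templates/job/onelocbuild.yml
    parameters:
      MirrorRepo: cecil            # hypothetical mirror repository name
      MirrorBranch: main
      condition: eq(variables['Build.SourceBranch'], 'refs/heads/main')
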
diff --git a/eng/common/templates/job/publish-build-assets.yml b/eng/common/templates/job/publish-build-assets.yml
new file mode 100644
index 0000000..1cbb6a0
--- /dev/null
+++ b/eng/common/templates/job/publish-build-assets.yml
@@ -0,0 +1,147 @@
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishUsingPipelines: false
+
+  # Optional: whether to publish the build's assets as soon as the publish to BAR stage completes, rather than doing so in a separate stage
+ publishAssetsImmediately: false
+
+ artifactsPublishingAdditionalParameters: ''
+
+ signingValidationAdditionalParameters: ''
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ displayName: Publish Assets
+ ${{ else }}:
+ displayName: Publish to Build Asset Registry
+
+ pool: ${{ parameters.pool }}
+
+ variables:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: Publish-Build-Assets
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: runCodesignValidationInjection
+ value: false
+ - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - template: /eng/common/templates/post-build/common-variables.yml
+
+ steps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ checkDownloadedFiles: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: NuGetAuthenticate@0
+
+ - task: PowerShell@2
+ displayName: Enable cross-org NuGet feed authentication
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-all-orgs-artifact-feeds-rw)
+
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:OfficialBuildId=$(Build.BuildNumber)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(BARBuildId)
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value "$(DefaultChannels)"
+ Add-Content -Path "$(Build.StagingDirectory)/ReleaseConfigs.txt" -Value $(IsStableBuild)
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish ReleaseConfigs Artifact
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
+ - task: powershell@2
+ displayName: Check if SymbolPublishingExclusionsFile.txt exists
+ inputs:
+ targetType: inline
+ script: |
+ $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ if(Test-Path -Path $symbolExclusionfile)
+ {
+ Write-Host "SymbolExclusionFile exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
+ }
+ else{
+ Write-Host "Symbols Exclusion file does not exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
+ }
+
+ - task: PublishBuildArtifacts@1
+ displayName: Publish SymbolPublishingExclusionsFile Artifact
+ condition: eq(variables['SymbolExclusionFile'], 'true')
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
+ - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - template: /eng/common/templates/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion 3
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - template: /eng/common/templates/steps/publish-logs.yml
+ parameters:
+ JobLabel: 'Publish_Artifacts_Logs'
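
This job is normally injected by jobs.yml below; wired up by hand it would look roughly like this sketch (the dependsOn value is a placeholder, and the pool matches the internal pool used elsewhere in this patch):

  jobs:
  - template: /eng/common/templates/job/publish-build-assets.yml
    parameters:
      configuration: Release
      dependsOn: Windows_NT          # hypothetical upstream build job
      publishUsingPipelines: true
      pool:
        name: NetCore1ESPool-Internal
        demands: ImageOverride -equals windows.vs2019.amd64
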
diff --git a/eng/common/templates/job/source-build.yml b/eng/common/templates/job/source-build.yml
new file mode 100644
index 0000000..88f6f75
--- /dev/null
+++ b/eng/common/templates/job/source-build.yml
@@ -0,0 +1,63 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. The template produces a server job with a
+ # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
+
+  # Specifies the prefix for source-build jobs added to the pipeline. Use this if disambiguation is needed.
+ jobNamePrefix: 'Source_Build'
+
+ # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
+ # managed-only repositories. This is an object with these properties:
+ #
+ # name: ''
+ # The name of the job. This is included in the job ID.
+ # targetRID: ''
+ # The name of the target RID to use, instead of the one auto-detected by Arcade.
+ # nonPortable: false
+ # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
+ # linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # skipPublishValidation: false
+ # Disables publishing validation. By default, a check is performed to ensure no packages are
+ # published by source-build.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+ # A pool to use. Runs directly on an agent.
+ # buildScript: ''
+ # Specifies the build script to invoke to perform the build in the repo. The default
+ # './build.sh' should work for typical Arcade repositories, but this is customizable for
+ # difficult situations.
+ # jobProperties: {}
+ # A list of job properties to inject at the top level, for potential extensibility beyond
+ # container and pool.
+ platform: {}
+
+jobs:
+- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+
+ ${{ each property in parameters.platform.jobProperties }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
+
+ ${{ if eq(parameters.platform.pool, '') }}:
+ # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
+ # source-build builds run in Docker, including the default managed platform.
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: NetCore-Public
+ demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+ workspace:
+ clean: all
+
+ steps:
+ - template: /eng/common/templates/steps/source-build.yml
+ parameters:
+ platform: ${{ parameters.platform }}
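
Passing a platform object produces one source-build job; the sketch below reuses the default managed platform defined in jobs/source-build.yml later in this patch:

  jobs:
  - template: /eng/common/templates/job/source-build.yml
    parameters:
      platform:
        name: 'Managed'
        container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-20220809204800-17a4aab'
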
diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
new file mode 100644
index 0000000..21fd122
--- /dev/null
+++ b/eng/common/templates/job/source-index-stage1.yml
@@ -0,0 +1,66 @@
+parameters:
+ runAsPublic: false
+ sourceIndexPackageVersion: 1.0.1-20220804.1
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
+ preSteps: []
+ binlogPath: artifacts/log/Debug/Build.binlog
+ condition: ''
+ dependsOn: ''
+ pool: ''
+
+jobs:
+- job: SourceIndexStage1
+ dependsOn: ${{ parameters.dependsOn }}
+ condition: ${{ parameters.condition }}
+ variables:
+ - name: SourceIndexPackageVersion
+ value: ${{ parameters.sourceIndexPackageVersion }}
+ - name: SourceIndexPackageSource
+ value: ${{ parameters.sourceIndexPackageSource }}
+ - name: BinlogPath
+ value: ${{ parameters.binlogPath }}
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: source-dot-net stage1 variables
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: NetCore-Public
+ demands: ImageOverride -equals windows.vs2019.amd64.open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+
+ steps:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - task: UseDotNet@2
+ displayName: Use .NET Core sdk 3.1
+ inputs:
+ packageType: sdk
+ version: 3.1.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: Download Tools
+      # Set the working directory to the temp directory so 'dotnet' doesn't pick up global.json and try to use the repo's SDK.
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: ${{ parameters.sourceIndexBuildCommand }}
+ displayName: Build Repository
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: Process Binlog into indexable sln
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
+ displayName: Upload stage1 artifacts to source index
+ env:
+ BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
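
Repos typically add this job to main-branch builds only; a minimal sketch relying on the template's defaults:

  jobs:
  - template: /eng/common/templates/job/source-index-stage1.yml
    parameters:
      runAsPublic: false
      condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/main'))
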
diff --git a/eng/common/templates/jobs/codeql-build.yml b/eng/common/templates/jobs/codeql-build.yml
new file mode 100644
index 0000000..f7dc5ea
--- /dev/null
+++ b/eng/common/templates/jobs/codeql-build.yml
@@ -0,0 +1,31 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+
+jobs:
+- template: /eng/common/templates/jobs/jobs.yml
+ parameters:
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishTestResults: false
+ enablePublishBuildAssets: false
+ enablePublishUsingPipelines: false
+ enableTelemetry: true
+
+ variables:
+ - group: Publish-Build-Assets
+ # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+ # sync with the packages.config file.
+ - name: DefaultGuardianVersion
+ value: 0.109.0
+ - name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ - name: GuardianVersion
+ value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+
+ jobs: ${{ parameters.jobs }}
+
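
This template wraps jobs.yml with SDL-oriented defaults, so callers only pass the jobs to build; a sketch with an illustrative job definition:

  jobs:
  - template: /eng/common/templates/jobs/codeql-build.yml
    parameters:
      jobs:
      - job: CodeQL_Build            # hypothetical job
        pool:
          vmImage: windows-2019
        steps:
        - script: eng\common\CIBuild.cmd
          displayName: Build
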
diff --git a/eng/common/templates/jobs/jobs.yml b/eng/common/templates/jobs/jobs.yml
new file mode 100644
index 0000000..64e5929
--- /dev/null
+++ b/eng/common/templates/jobs/jobs.yml
@@ -0,0 +1,106 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+
+ # Optional: Enable running the source-build jobs to build repo from source
+ enableSourceBuild: false
+
+ # Optional: Parameters for source-build template.
+ # See /eng/common/templates/jobs/source-build.yml for options
+ sourceBuildParameters: []
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
+
+  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
+ publishAssetsImmediately: false
+
+ # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ enableSourceIndex: false
+ sourceIndexParams: {}
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - template: ../job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - template: /eng/common/templates/jobs/source-build.yml
+ parameters:
+ allCompletedJobId: Source_Build_Complete
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
+ - template: ../job/source-index-stage1.yml
+ parameters:
+ runAsPublic: ${{ parameters.runAsPublic }}
+ ${{ each parameter in parameters.sourceIndexParams }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - Source_Build_Complete
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: VSEngSS-MicroBuild2022-1ES
+ demands: Cmd
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
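
A typical top-level pipeline consumes jobs.yml rather than job.yml directly, letting it inject the publish-build-assets and source-build jobs automatically; a sketch with illustrative values:

  jobs:
  - template: /eng/common/templates/jobs/jobs.yml
    parameters:
      enablePublishBuildArtifacts: true
      enablePublishBuildAssets: true
      enablePublishUsingPipelines: true
      enableSourceBuild: true
      jobs:
      - job: Windows_NT
        pool:
          vmImage: windows-2019
        steps:
        - script: eng\common\CIBuild.cmd
          displayName: Build
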
diff --git a/eng/common/templates/jobs/source-build.yml b/eng/common/templates/jobs/source-build.yml
new file mode 100644
index 0000000..8dd2d35
--- /dev/null
+++ b/eng/common/templates/jobs/source-build.yml
@@ -0,0 +1,46 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. A job is created for each platform, as
+ # well as an optional server job that completes when all platform jobs complete.
+
+ # The name of the "join" job for all source-build platforms. If set to empty string, the job is
+  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
+ # completing without maintaining a separate list of every single job ID: just depend on this one
+ # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
+ allCompletedJobId: ''
+
+ # See /eng/common/templates/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+ # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-20220809204800-17a4aab'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+ # object in this array is sent to the job template as 'platform'. If no platforms are specified,
+ # one job runs on 'defaultManagedPlatform'.
+ platforms: []
+
+jobs:
+
+- ${{ if ne(parameters.allCompletedJobId, '') }}:
+ - job: ${{ parameters.allCompletedJobId }}
+ displayName: Source-Build Complete
+ pool: server
+ dependsOn:
+ - ${{ each platform in parameters.platforms }}:
+ - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+ - ${{ if eq(length(parameters.platforms), 0) }}:
+ - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+- ${{ each platform in parameters.platforms }}:
+ - template: /eng/common/templates/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+
+- ${{ if eq(length(parameters.platforms), 0) }}:
+ - template: /eng/common/templates/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ parameters.defaultManagedPlatform }}
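
With no platforms specified, one job runs on the default managed platform; passing explicit platforms plus allCompletedJobId yields per-platform jobs and a join job, as in this sketch (the platform entry simply repeats the default defined above):

  jobs:
  - template: /eng/common/templates/jobs/source-build.yml
    parameters:
      allCompletedJobId: Source_Build_Complete
      platforms:
      - name: 'Managed'
        container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-20220809204800-17a4aab'
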
diff --git a/eng/common/templates/post-build/common-variables.yml b/eng/common/templates/post-build/common-variables.yml
new file mode 100644
index 0000000..1ac7f49
--- /dev/null
+++ b/eng/common/templates/post-build/common-variables.yml
@@ -0,0 +1,26 @@
+variables:
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - group: DotNet-Blob-Feed
+ - group: DotNet-DotNetCli-Storage
+ - group: DotNet-MSRC-Storage
+ - group: Publish-Build-Assets
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro-prod.westus2.cloudapp.azure.com"
+ - name: MaestroApiAccessToken
+ value: $(MaestroAccessToken)
+ - name: MaestroApiVersion
+ value: "2020-02-20"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+
+ - name: runCodesignValidationInjection
+ value: false
diff --git a/eng/common/templates/post-build/post-build.yml b/eng/common/templates/post-build/post-build.yml
new file mode 100644
index 0000000..87fcae9
--- /dev/null
+++ b/eng/common/templates/post-build/post-build.yml
@@ -0,0 +1,285 @@
+parameters:
+ # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+ # Publishing V1 is no longer supported
+ # Publishing V2 is no longer supported
+ # Publishing V3 is the default
+ - name: publishingInfraVersion
+ displayName: Which version of publishing should be used to promote the build definition?
+ type: number
+ default: 3
+ values:
+ - 3
+
+ - name: BARBuildId
+ displayName: BAR Build Id
+ type: number
+ default: 0
+
+ - name: PromoteToChannelIds
+ displayName: Channel to promote BARBuildId to
+ type: string
+ default: ''
+
+ - name: enableSourceLinkValidation
+ displayName: Enable SourceLink validation
+ type: boolean
+ default: false
+
+ - name: enableSigningValidation
+ displayName: Enable signing validation
+ type: boolean
+ default: true
+
+ - name: enableSymbolValidation
+ displayName: Enable symbol validation
+ type: boolean
+ default: false
+
+ - name: enableNugetValidation
+ displayName: Enable NuGet validation
+ type: boolean
+ default: true
+
+ - name: publishInstallersAndChecksums
+ displayName: Publish installers and checksums
+ type: boolean
+ default: true
+
+ - name: SDLValidationParameters
+ type: object
+ default:
+ enable: false
+ publishGdn: false
+ continueOnError: false
+ params: ''
+ artifactNames: ''
+ downloadArtifacts: true
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ - name: symbolPublishingAdditionalParameters
+ displayName: Symbol publishing additional parameters
+ type: string
+ default: ''
+
+ - name: artifactsPublishingAdditionalParameters
+ displayName: Artifact publishing additional parameters
+ type: string
+ default: ''
+
+ - name: signingValidationAdditionalParameters
+ displayName: Signing validation additional parameters
+ type: string
+ default: ''
+
+ # Which stages should finish execution before post-build stages start
+ - name: validateDependsOn
+ type: object
+ default:
+ - build
+
+ - name: publishDependsOn
+ type: object
+ default:
+ - Validate
+
+ # Optional: Call asset publishing rather than running in a separate stage
+ - name: publishAssetsImmediately
+ type: boolean
+ default: false
+
+stages:
+- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ - stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: common-variables.yml
+ jobs:
+ - job:
+ displayName: NuGet Validation
+ condition: eq( ${{ parameters.enableNugetValidation }}, 'true')
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: VSEngSS-MicroBuild2022-1ES
+ demands: Cmd
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - job:
+ displayName: Signing Validation
+ condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: VSEngSS-MicroBuild2022-1ES
+ demands: Cmd
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ itemPattern: |
+ **
+ !**/Microsoft.SourceBuild.Intermediate.*.nupkg
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@0
+ displayName: 'Authenticate to AzDO Feeds'
+
+ - task: PowerShell@2
+ displayName: Enable cross-org publishing
+ inputs:
+ filePath: eng\common\enable-cross-org-publishing.ps1
+ arguments: -token $(dn-bot-dnceng-artifact-feeds-rw)
+
+ # Signing validation will optionally work with the buildmanifest file which is downloaded from
+ # Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine vs
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: ../steps/publish-logs.yml
+ parameters:
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+
+ - job:
+ displayName: SourceLink Validation
+ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: VSEngSS-MicroBuild2022-1ES
+ demands: Cmd
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BlobArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+ - template: /eng/common/templates/job/execute-sdl.yml
+ parameters:
+ enable: ${{ parameters.SDLValidationParameters.enable }}
+ publishGuardianDirectoryToPipeline: ${{ parameters.SDLValidationParameters.publishGdn }}
+ additionalParameters: ${{ parameters.SDLValidationParameters.params }}
+ continueOnError: ${{ parameters.SDLValidationParameters.continueOnError }}
+ artifactNames: ${{ parameters.SDLValidationParameters.artifactNames }}
+ downloadArtifacts: ${{ parameters.SDLValidationParameters.downloadArtifacts }}
+
+- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
+ - stage: publish_using_darc
+ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ dependsOn: ${{ parameters.publishDependsOn }}
+ ${{ else }}:
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Publish using Darc
+ variables:
+ - template: common-variables.yml
+ jobs:
+ - job:
+ displayName: Publish Using Darc
+ timeoutInMinutes: 120
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: VSEngSS-MicroBuild2022-1ES
+ demands: Cmd
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: NetCore1ESPool-Internal
+ demands: ImageOverride -equals windows.vs2019.amd64
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: NuGetAuthenticate@0
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}' \ No newline at end of file
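
A pipeline appends these stages after its build stages; a sketch with illustrative toggles (the SDL tool list is hypothetical, following the parameter docs above):

  stages:
  - template: /eng/common/templates/post-build/post-build.yml
    parameters:
      publishingInfraVersion: 3
      enableSigningValidation: false
      enableSourceLinkValidation: true
      validateDependsOn:
      - build
      SDLValidationParameters:
        enable: true
        params: '-SourceToolsList @("credscan")'
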
diff --git a/eng/common/templates/post-build/setup-maestro-vars.yml b/eng/common/templates/post-build/setup-maestro-vars.yml
new file mode 100644
index 0000000..0c87f14
--- /dev/null
+++ b/eng/common/templates/post-build/setup-maestro-vars.yml
@@ -0,0 +1,70 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+
+steps:
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ targetType: inline
+ pwsh: true
+ script: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
+
+ $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $apiHeaders.Add('Accept', 'application/json')
+ $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
+
+ $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+
+ $BarId = $Env:BARBuildId
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/templates/post-build/trigger-subscription.yml b/eng/common/templates/post-build/trigger-subscription.yml
new file mode 100644
index 0000000..da66903
--- /dev/null
+++ b/eng/common/templates/post-build/trigger-subscription.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Triggering subscriptions
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
+ arguments: -SourceRepo $(Build.Repository.Uri)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/add-build-to-channel.yml b/eng/common/templates/steps/add-build-to-channel.yml
new file mode 100644
index 0000000..f67a210
--- /dev/null
+++ b/eng/common/templates/steps/add-build-to-channel.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates/steps/build-reason.yml b/eng/common/templates/steps/build-reason.yml
new file mode 100644
index 0000000..eba5810
--- /dev/null
+++ b/eng/common/templates/steps/build-reason.yml
@@ -0,0 +1,12 @@
+# build-reason.yml
+# Description: runs the given steps only when the current build.reason matches. 'conditions' is a
+# comma-separated string of build reasons for which the steps should run; prefixing the string
+# with 'not' inverts the match (the steps run only when build.reason is not listed).
+parameters:
+ conditions: ''
+ steps: []
+
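+# Usage sketch (hypothetical caller; values are illustrative):
+#   steps:
+#   - template: /eng/common/templates/steps/build-reason.yml
+#     parameters:
+#       conditions: IndividualCI,BatchedCI
+#       steps:
+#       - script: echo runs only for CI-triggered builds
+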
+steps:
+ - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
+ - ${{ parameters.steps }}
+ - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
+ - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/execute-codeql.yml b/eng/common/templates/steps/execute-codeql.yml
new file mode 100644
index 0000000..3930b16
--- /dev/null
+++ b/eng/common/templates/steps/execute-codeql.yml
@@ -0,0 +1,32 @@
+parameters:
+ # Language that should be analyzed. Defaults to csharp
+ language: csharp
+ # Build Commands
+ buildCommands: ''
+ overrideParameters: '' # Optional: to override values for parameters.
+ additionalParameters: '' # Optional: parameters that need user-specific values, e.g. '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+ # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
+ # diagnosis of problems with specific tool configurations.
+ publishGuardianDirectoryToPipeline: false
+ # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
+ # parameters rather than relying on YAML. It may be better to use a local script, because you can
+ # reproduce results locally without piecing together a command based on the YAML.
+ executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
+ # There is a bug (already reported) in Azure DevOps where, if this parameter is named
+ # 'continueOnError', its value is not picked up correctly.
+ # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter.
+ # Optional: determines whether to continue the build if the step errors.
+ sdlContinueOnError: false
+
+steps:
+- template: /eng/common/templates/steps/execute-sdl.yml
+ parameters:
+ overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
+ executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
+ overrideParameters: ${{ parameters.overrideParameters }}
+ additionalParameters: '${{ parameters.additionalParameters }}
+ -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
+ publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
+ sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/execute-sdl.yml b/eng/common/templates/steps/execute-sdl.yml
new file mode 100644
index 0000000..9dd5709
--- /dev/null
+++ b/eng/common/templates/steps/execute-sdl.yml
@@ -0,0 +1,88 @@
+parameters:
+ overrideGuardianVersion: ''
+ executeAllSdlToolsScript: ''
+ overrideParameters: ''
+ additionalParameters: ''
+ publishGuardianDirectoryToPipeline: false
+ sdlContinueOnError: false
+ condition: ''
+
+steps:
+- task: NuGetAuthenticate@1
+ inputs:
+ nuGetServiceConnections: GuardianConnect
+
+- task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
+ - pwsh: |
+ Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
+ . .\sdl.ps1
+ $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
+ Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
+ displayName: Install Guardian (Overridden)
+
+- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
+ - pwsh: |
+ Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
+ . .\sdl.ps1
+ $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
+ Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
+ displayName: Install Guardian
+
+- ${{ if ne(parameters.overrideParameters, '') }}:
+ - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
+ displayName: Execute SDL
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ condition: ${{ parameters.condition }}
+
+- ${{ if eq(parameters.overrideParameters, '') }}:
+ - powershell: ${{ parameters.executeAllSdlToolsScript }}
+ -GuardianCliLocation $(GuardianCliLocation)
+ -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
+ -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
+ ${{ parameters.additionalParameters }}
+ displayName: Execute SDL
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ condition: ${{ parameters.condition }}
+
+- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
+ # We want to publish the Guardian results and configuration for easy diagnosis. However, the
+ # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
+ # tooling files. Some of these files are large and aren't useful during an investigation, so
+ # exclude them by simply deleting them before publishing. (As of writing, there is no documented
+ # way to selectively exclude a dir from the pipeline artifact publish task.)
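+ # (The 'c' and 'i' entries deleted below are the large Guardian-managed folders holding
+ # those extracted dependencies and tooling files.)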
+ - task: DeleteFiles@1
+ displayName: Delete Guardian dependencies to avoid uploading
+ inputs:
+ SourceFolder: $(Agent.BuildDirectory)/.gdn
+ Contents: |
+ c
+ i
+ condition: succeededOrFailed()
+
+ - publish: $(Agent.BuildDirectory)/.gdn
+ artifact: GuardianConfiguration
+ displayName: Publish GuardianConfiguration
+ condition: succeededOrFailed()
+
+ # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
+ # with the "SARIF SAST Scans Tab" Azure DevOps extension
+ - task: CopyFiles@2
+ displayName: Copy SARIF files
+ inputs:
+ flattenFolders: true
+ sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
+ contents: '**/*.sarif'
+ targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
+ condition: succeededOrFailed()
+
+ # Use PublishBuildArtifacts because the SARIF extension only checks this case
+ # see microsoft/sarif-azuredevops-extension#4
+ - task: PublishBuildArtifacts@1
+ displayName: Publish SARIF files to CodeAnalysisLogs container
+ inputs:
+ pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
+ artifactName: CodeAnalysisLogs
+ condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml
new file mode 100644
index 0000000..4cea8c3
--- /dev/null
+++ b/eng/common/templates/steps/generate-sbom.yml
@@ -0,0 +1,44 @@
+# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
+# PackageName - The name of the package this SBOM represents.
+# PackageVersion - The version of the package this SBOM represents.
+# ManifestDirPath - The path of the directory where the generated manifest files will be placed
+
+parameters:
+ PackageVersion: 7.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ PackageName: '.NET'
+ ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
+ sbomContinueOnError: true
+
+steps:
+- task: PowerShell@2
+ displayName: Prep for SBOM generation (non-Linux)
+ condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
+ inputs:
+ filePath: ./eng/common/generate-sbom-prep.ps1
+ arguments: ${{parameters.manifestDirPath}}
+
+# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
+- script: |
+ chmod +x ./eng/common/generate-sbom-prep.sh
+ ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
+ displayName: Prep for SBOM generation (Linux)
+ condition: eq(variables['Agent.Os'], 'Linux')
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+
+- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
+ displayName: 'Generate SBOM manifest'
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+ inputs:
+ PackageName: ${{ parameters.packageName }}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ PackageVersion: ${{ parameters.packageVersion }}
+ ManifestDirPath: ${{ parameters.manifestDirPath }}
+
+- task: PublishPipelineArtifact@1
+ displayName: Publish SBOM manifest
+ continueOnError: ${{parameters.sbomContinueOnError}}
+ inputs:
+ targetPath: '${{parameters.manifestDirPath}}'
+ artifactName: $(ARTIFACT_NAME)
+
diff --git a/eng/common/templates/steps/publish-logs.yml b/eng/common/templates/steps/publish-logs.yml
new file mode 100644
index 0000000..88f238f
--- /dev/null
+++ b/eng/common/templates/steps/publish-logs.yml
@@ -0,0 +1,23 @@
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+
+steps:
+- task: Powershell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
+ PublishLocation: Container
+ ArtifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates/steps/retain-build.yml b/eng/common/templates/steps/retain-build.yml
new file mode 100644
index 0000000..83d97a2
--- /dev/null
+++ b/eng/common/templates/steps/retain-build.yml
@@ -0,0 +1,28 @@
+parameters:
+ # Optional azure devops PAT with build execute permissions for the build's organization,
+ # only needed if the build that should be retained ran on a different organization than
+ # the pipeline where this template is executing from
+ Token: ''
+ # Optional BuildId to retain, defaults to the current running build
+ BuildId: ''
+ # Azure devops Organization URI for the build in the https://dev.azure.com/<organization> format.
+ # Defaults to the organization the current pipeline is running on
+ AzdoOrgUri: '$(System.CollectionUri)'
+ # Azure devops project for the build. Defaults to the project the current pipeline is running on
+ AzdoProject: '$(System.TeamProject)'
+
+steps:
+ - task: powershell@2
+ inputs:
+ targetType: 'filePath'
+ filePath: eng/common/retain-build.ps1
+ pwsh: true
+ arguments: >
+ -AzdoOrgUri ${{parameters.AzdoOrgUri}}
+ -AzdoProject ${{parameters.AzdoProject}}
+ -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+ -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+ displayName: Enable permanent build retention
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ BUILD_ID: $(Build.BuildId)
\ No newline at end of file
diff --git a/eng/common/templates/steps/run-on-unix.yml b/eng/common/templates/steps/run-on-unix.yml
new file mode 100644
index 0000000..e173381
--- /dev/null
+++ b/eng/common/templates/steps/run-on-unix.yml
@@ -0,0 +1,7 @@
+parameters:
+ agentOs: ''
+ steps: []
+
+steps:
+- ${{ if ne(parameters.agentOs, 'Windows_NT') }}:
+ - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-on-windows.yml b/eng/common/templates/steps/run-on-windows.yml
new file mode 100644
index 0000000..73e7e9c
--- /dev/null
+++ b/eng/common/templates/steps/run-on-windows.yml
@@ -0,0 +1,7 @@
+parameters:
+ agentOs: ''
+ steps: []
+
+steps:
+- ${{ if eq(parameters.agentOs, 'Windows_NT') }}:
+ - ${{ parameters.steps }}
diff --git a/eng/common/templates/steps/run-script-ifequalelse.yml b/eng/common/templates/steps/run-script-ifequalelse.yml
new file mode 100644
index 0000000..3d1242f
--- /dev/null
+++ b/eng/common/templates/steps/run-script-ifequalelse.yml
@@ -0,0 +1,33 @@
+parameters:
+ # if parameter1 equals parameter2, run the 'ifScript' command, else run the 'elseScript' command
+ parameter1: ''
+ parameter2: ''
+ ifScript: ''
+ elseScript: ''
+
+ # name of script step
+ name: Script
+
+ # display name of script step
+ displayName: If-Equal-Else Script
+
+ # environment
+ env: {}
+
+ # conditional expression for step execution
+ condition: ''
+
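+# Usage sketch (hypothetical values): run 'echo equal' when the branch matches, else 'echo different':
+#   - template: /eng/common/templates/steps/run-script-ifequalelse.yml
+#     parameters:
+#       parameter1: $(Build.SourceBranch)
+#       parameter2: refs/heads/main
+#       ifScript: echo equal
+#       elseScript: echo different
+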
+steps:
+- ${{ if and(ne(parameters.ifScript, ''), eq(parameters.parameter1, parameters.parameter2)) }}:
+ - script: ${{ parameters.ifScript }}
+ name: ${{ parameters.name }}
+ displayName: ${{ parameters.displayName }}
+ env: ${{ parameters.env }}
+ condition: ${{ parameters.condition }}
+
+- ${{ if and(ne(parameters.elseScript, ''), ne(parameters.parameter1, parameters.parameter2)) }}:
+ - script: ${{ parameters.elseScript }}
+ name: ${{ parameters.name }}
+ displayName: ${{ parameters.displayName }}
+ env: ${{ parameters.env }}
+ condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/templates/steps/send-to-helix.yml b/eng/common/templates/steps/send-to-helix.yml
new file mode 100644
index 0000000..3eb7e2d
--- /dev/null
+++ b/eng/common/templates/steps/send-to-helix.yml
@@ -0,0 +1,91 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
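+# Minimal usage sketch (hypothetical values; see the parameter docs above):
+#   - template: /eng/common/templates/steps/send-to-helix.yml
+#     parameters:
+#       HelixSource: pr/dotnet/cecil/refs/heads/main
+#       HelixType: tests/default/
+#       HelixTargetQueues: Windows.10.Amd64.Open
+#       HelixAccessToken: $(HelixApiAccessToken)
+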
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates/steps/source-build.yml b/eng/common/templates/steps/source-build.yml
new file mode 100644
index 0000000..12a8ff9
--- /dev/null
+++ b/eng/common/templates/steps/source-build.yml
@@ -0,0 +1,102 @@
+parameters:
+ # This template adds arcade-powered source-build to CI.
+
+ # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+ # infra has a careful build methodology that must be followed. For example, a repo
+ # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+ # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+ # GitHub. Using this steps template leaves room for that infra to be included.
+
+ # Defines the platform on which to run the steps. See 'eng/common/templates/job/source-build.yml'
+ # for details. The entire object is described in the 'job' template for simplicity, even though
+ # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+ platform: {}
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+ set -x
+ df -h
+
+ # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
+ # In that case, call the feed setup script to add internal feeds corresponding to public ones.
+ # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
+ # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
+ # changes.
+ internalRestoreArgs=
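+ # (The test below relies on AzDO macro expansion: when the secret variable exists, the agent
+ # replaces the left-hand '$(...)' with its value, while the $'' on the right prevents
+ # expansion, so the two strings differ only if the variable is defined.)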
+ if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
+ # Temporarily work around https://github.com/dotnet/arcade/issues/7709
+ chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+ $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
+ internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
+
+ # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
+ # This only works if there is a username/email configured, which won't be the case in most CI runs.
+ git config --get user.email
+ if [ $? -ne 0 ]; then
+ git config user.email dn-bot@microsoft.com
+ git config user.name dn-bot
+ fi
+ fi
+
+ # If building on the internal project, the internal storage variable may be available (usually only if needed)
+ # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
+ # in the default public locations.
+ internalRuntimeDownloadArgs=
+ if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+ fi
+
+ buildConfig=Release
+ # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
+ buildConfig='$(_BuildConfig)'
+ fi
+
+ officialBuildArgs=
+ if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
+ officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
+ fi
+
+ targetRidArgs=
+ if [ '${{ parameters.platform.targetRID }}' != '' ]; then
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
+ publishArgs=
+ if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
+ publishArgs='--publish'
+ fi
+
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+ --restore --build --pack $publishArgs -bl \
+ $officialBuildArgs \
+ $internalRuntimeDownloadArgs \
+ $internalRestoreArgs \
+ $targetRidArgs \
+ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
+ /p:ArcadeBuildFromSource=true
+ displayName: Build
+
+# Upload build logs for diagnosis.
+- task: CopyFiles@2
+ displayName: Prepare BuildLogs staging directory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ artifacts/source-build/self/prebuilt-report/**
+ TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+ CleanTargetFolder: true
+ continueOnError: true
+ condition: succeededOrFailed()
+
+- task: PublishPipelineArtifact@1
+ displayName: Publish BuildLogs
+ inputs:
+ targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: succeededOrFailed()
diff --git a/eng/common/templates/steps/telemetry-end.yml b/eng/common/templates/steps/telemetry-end.yml
new file mode 100644
index 0000000..fadc04c
--- /dev/null
+++ b/eng/common/templates/steps/telemetry-end.yml
@@ -0,0 +1,102 @@
+parameters:
+ maxRetries: 5
+ retryDelay: 10 # in seconds
+
+steps:
+- bash: |
+ if [ "$AGENT_JOBSTATUS" = "Succeeded" ] || [ "$AGENT_JOBSTATUS" = "PartiallySucceeded" ]; then
+ errorCount=0
+ else
+ errorCount=1
+ fi
+ warningCount=0
+
+ curlStatus=1
+ retryCount=0
+ # retry loop to harden against spotty telemetry connections
+ # we don't retry successes and 4xx client errors
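+ # (curlStatus starts as curl's process exit code; when curl succeeds but the server returns
+ # a non-2xx response, it is overwritten below with the HTTP status code.)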
+ until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
+ do
+ if [ $retryCount -gt 0 ]; then
+ echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
+ sleep $RetryDelay
+ fi
+
+ # create a temporary file for curl output
+ res=`mktemp`
+
+ curlResult=`
+ curl --verbose --output $res --write-out "%{http_code}"\
+ -H 'Content-Type: application/json' \
+ -H "X-Helix-Job-Token: $Helix_JobToken" \
+ -H 'Content-Length: 0' \
+ -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$Helix_WorkItemId/finish" \
+ --data-urlencode "errorCount=$errorCount" \
+ --data-urlencode "warningCount=$warningCount"`
+ curlStatus=$?
+
+ if [ $curlStatus -eq 0 ]; then
+ if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
+ curlStatus=$curlResult
+ fi
+ fi
+
+ let retryCount++
+ done
+
+ if [ $curlStatus -ne 0 ]; then
+ echo "Failed to Send Build Finish information after $retryCount retries"
+ vstsLogOutput="vso[task.logissue type=error;sourcepath=templates/steps/telemetry-end.yml;code=1;]Failed to Send Build Finish information: $curlStatus"
+ echo "##$vstsLogOutput"
+ exit 1
+ fi
+ displayName: Send Unix Build End Telemetry
+ env:
+ # defined via VSTS variables in start-job.sh
+ Helix_JobToken: $(Helix_JobToken)
+ Helix_WorkItemId: $(Helix_WorkItemId)
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
+- powershell: |
+ if (($env:Agent_JobStatus -eq 'Succeeded') -or ($env:Agent_JobStatus -eq 'PartiallySucceeded')) {
+ $ErrorCount = 0
+ } else {
+ $ErrorCount = 1
+ }
+ $WarningCount = 0
+
+ # Basic retry loop to harden against server flakiness
+ $retryCount = 0
+ while ($retryCount -lt $env:MaxRetries) {
+ try {
+ Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build/$env:Helix_WorkItemId/finish?errorCount=$ErrorCount&warningCount=$WarningCount" -Method Post -ContentType "application/json" -Body "" `
+ -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
+ break
+ }
+ catch {
+ $statusCode = $_.Exception.Response.StatusCode.value__
+ if ($statusCode -ge 400 -and $statusCode -le 499) {
+ Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
+ Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
+ exit 1
+ }
+ Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
+ $retryCount++
+ sleep $env:RetryDelay
+ continue
+ }
+ }
+
+ if ($retryCount -ge $env:MaxRetries) {
+ Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
+ exit 1
+ }
+ displayName: Send Windows Build End Telemetry
+ env:
+ # defined via VSTS variables in start-job.ps1
+ Helix_JobToken: $(Helix_JobToken)
+ Helix_WorkItemId: $(Helix_WorkItemId)
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(),eq(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/templates/steps/telemetry-start.yml b/eng/common/templates/steps/telemetry-start.yml
new file mode 100644
index 0000000..32c01ef
--- /dev/null
+++ b/eng/common/templates/steps/telemetry-start.yml
@@ -0,0 +1,241 @@
+parameters:
+ helixSource: 'undefined_defaulted_in_telemetry.yml'
+ helixType: 'undefined_defaulted_in_telemetry.yml'
+ buildConfig: ''
+ runAsPublic: false
+ maxRetries: 5
+ retryDelay: 10 # in seconds
+
+steps:
+- ${{ if and(eq(parameters.runAsPublic, 'false'), not(eq(variables['System.TeamProject'], 'public'))) }}:
+ - task: AzureKeyVault@1
+ inputs:
+ azureSubscription: 'HelixProd_KeyVault'
+ KeyVaultName: HelixProdKV
+ SecretsFilter: 'HelixApiAccessToken'
+ condition: always()
+- bash: |
+ # create a temporary file
+ jobInfo=`mktemp`
+
+ # write job info content to temporary file
+ cat > $jobInfo <<JobListStuff
+ {
+ "QueueId": "$QueueId",
+ "Source": "$Source",
+ "Type": "$Type",
+ "Build": "$Build",
+ "Attempt": "$Attempt",
+ "Properties": {
+ "operatingSystem": "$OperatingSystem",
+ "configuration": "$Configuration"
+ }
+ }
+ JobListStuff
+
+ cat $jobInfo
+
+ # create a temporary file for curl output
+ res=`mktemp`
+
+ accessTokenParameter="?access_token=$HelixApiAccessToken"
+
+ curlStatus=1
+ retryCount=0
+ # retry loop to harden against spotty telemetry connections
+ # we don't retry successes and 4xx client errors
+ until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
+ do
+ if [ $retryCount -gt 0 ]; then
+ echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
+ sleep $RetryDelay
+ fi
+
+ curlResult=`
+ cat $jobInfo |\
+ curl --trace - --verbose --output $res --write-out "%{http_code}" \
+ -H 'Content-Type: application/json' \
+ -X POST "https://helix.dot.net/api/2018-03-14/telemetry/job$accessTokenParameter" -d @-`
+ curlStatus=$?
+
+ if [ $curlStatus -eq 0 ]; then
+ if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
+ curlStatus=$curlResult
+ fi
+ fi
+
+ let retryCount++
+ done
+
+ curlResult=`cat $res`
+
+ # validate status of curl command
+ if [ $curlStatus -ne 0 ]; then
+ echo "Failed To Send Job Start information after $retryCount retries"
+ # We have to append the ## vso prefix or vso will pick up the command when it dumps the inline script into the shell
+ vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/start-job.sh;code=1;]Failed to Send Job Start information: $curlStatus"
+ echo "##$vstsLogOutput"
+ exit 1
+ fi
+
+ # Set the Helix_JobToken variable
+ export Helix_JobToken=`echo $curlResult | xargs echo` # Strip Quotes
+ echo "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$Helix_JobToken"
+ displayName: Send Unix Job Start Telemetry
+ env:
+ HelixApiAccessToken: $(HelixApiAccessToken)
+ Source: ${{ parameters.helixSource }}
+ Type: ${{ parameters.helixType }}
+ Build: $(Build.BuildNumber)
+ QueueId: $(Agent.Os)
+ Attempt: 1
+ OperatingSystem: $(Agent.Os)
+ Configuration: ${{ parameters.buildConfig }}
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
+- bash: |
+ curlStatus=1
+ retryCount=0
+ # retry loop to harden against spotty telemetry connections
+ # we don't retry successes and 4xx client errors
+ until [[ $curlStatus -eq 0 || ( $curlStatus -ge 400 && $curlStatus -le 499 ) || $retryCount -ge $MaxRetries ]]
+ do
+ if [ $retryCount -gt 0 ]; then
+ echo "Failed to send telemetry to Helix; waiting $RetryDelay seconds before retrying..."
+ sleep $RetryDelay
+ fi
+
+ res=`mktemp`
+ curlResult=`
+ curl --verbose --output $res --write-out "%{http_code}"\
+ -H 'Content-Type: application/json' \
+ -H "X-Helix-Job-Token: $Helix_JobToken" \
+ -H 'Content-Length: 0' \
+ -X POST -G "https://helix.dot.net/api/2018-03-14/telemetry/job/build" \
+ --data-urlencode "buildUri=$BuildUri"`
+ curlStatus=$?
+
+ if [ $curlStatus -eq 0 ]; then
+ if [ $curlResult -gt 299 ] || [ $curlResult -lt 200 ]; then
+ curlStatus=$curlResult
+ fi
+ fi
+
+ curlResult=`cat $res`
+ let retryCount++
+ done
+
+ # validate status of curl command
+ if [ $curlStatus -ne 0 ]; then
+ echo "Failed to Send Build Start information after $retryCount retries"
+ vstsLogOutput="vso[task.logissue type=error;sourcepath=telemetry/build/start.sh;code=1;]Failed to Send Build Start information: $curlStatus"
+ echo "##$vstsLogOutput"
+ exit 1
+ fi
+
+ export Helix_WorkItemId=`echo $curlResult | xargs echo` # Strip Quotes
+ echo "##vso[task.setvariable variable=Helix_WorkItemId]$Helix_WorkItemId"
+ displayName: Send Unix Build Start Telemetry
+ env:
+ BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
+ Helix_JobToken: $(Helix_JobToken)
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(), ne(variables['Agent.Os'], 'Windows_NT'))
+
+- powershell: |
+ $jobInfo = [pscustomobject]@{
+ QueueId=$env:QueueId;
+ Source=$env:Source;
+ Type=$env:Type;
+ Build=$env:Build;
+ Attempt=$env:Attempt;
+ Properties=[pscustomobject]@{ operatingSystem=$env:OperatingSystem; configuration=$env:Configuration };
+ }
+
+ $jobInfoJson = $jobInfo | ConvertTo-Json
+
+ if ($env:HelixApiAccessToken) {
+ $accessTokenParameter="?access_token=$($env:HelixApiAccessToken)"
+ }
+ Write-Host "Job Info: $jobInfoJson"
+
+ # Basic retry loop to harden against server flakiness
+ $retryCount = 0
+ while ($retryCount -lt $env:MaxRetries) {
+ try {
+ $jobToken = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job$($accessTokenParameter)" -Method Post -ContentType "application/json" -Body $jobInfoJson
+ break
+ }
+ catch {
+ $statusCode = $_.Exception.Response.StatusCode.value__
+ if ($statusCode -ge 400 -and $statusCode -le 499) {
+ Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
+ Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
+ exit 1
+ }
+ Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
+ $retryCount++
+ sleep $env:RetryDelay
+ continue
+ }
+ }
+
+ if ($retryCount -ge $env:MaxRetries) {
+ Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
+ exit 1
+ }
+
+ $env:Helix_JobToken = $jobToken
+ Write-Host "##vso[task.setvariable variable=Helix_JobToken;issecret=true;]$env:Helix_JobToken"
+ env:
+ HelixApiAccessToken: $(HelixApiAccessToken)
+ Source: ${{ parameters.helixSource }}
+ Type: ${{ parameters.helixType }}
+ Build: $(Build.BuildNumber)
+ QueueId: $(Agent.Os)
+ Attempt: 1
+ OperatingSystem: $(Agent.Os)
+ Configuration: ${{ parameters.buildConfig }}
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))
+ displayName: Send Windows Job Start Telemetry
+- powershell: |
+ # Basic retry loop to harden against server flakiness
+ $retryCount = 0
+ while ($retryCount -lt $env:MaxRetries) {
+ try {
+ $workItemId = Invoke-RestMethod -Uri "https://helix.dot.net/api/2018-03-14/telemetry/job/build?buildUri=$([Net.WebUtility]::UrlEncode($env:BuildUri))" -Method Post -ContentType "application/json" -Body "" `
+ -Headers @{ 'X-Helix-Job-Token'=$env:Helix_JobToken }
+ break
+ }
+ catch {
+ $statusCode = $_.Exception.Response.StatusCode.value__
+ if ($statusCode -ge 400 -and $statusCode -le 499) {
+ Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix (status code $statusCode); not retrying (4xx client error)"
+ Write-Host "##vso[task.logissue]error ", $_.Exception.GetType().FullName, $_.Exception.Message
+ exit 1
+ }
+ Write-Host "Failed to send telemetry to Helix (status code $statusCode); waiting $env:RetryDelay seconds before retrying..."
+ $retryCount++
+ sleep $env:RetryDelay
+ continue
+ }
+ }
+
+ if ($retryCount -ge $env:MaxRetries) {
+ Write-Host "##vso[task.logissue]error Failed to send telemetry to Helix after $retryCount retries."
+ exit 1
+ }
+
+ $env:Helix_WorkItemId = $workItemId
+ Write-Host "##vso[task.setvariable variable=Helix_WorkItemId]$env:Helix_WorkItemId"
+ displayName: Send Windows Build Start Telemetry
+ env:
+ BuildUri: $(System.TaskDefinitionsUri)$(System.TeamProject)/_build/index?buildId=$(Build.BuildId)&_a=summary
+ Helix_JobToken: $(Helix_JobToken)
+ MaxRetries: ${{ parameters.maxRetries }}
+ RetryDelay: ${{ parameters.retryDelay }}
+ condition: and(always(), eq(variables['Agent.Os'], 'Windows_NT'))
diff --git a/eng/common/templates/variables/sdl-variables.yml b/eng/common/templates/variables/sdl-variables.yml
new file mode 100644
index 0000000..dbdd66d
--- /dev/null
+++ b/eng/common/templates/variables/sdl-variables.yml
@@ -0,0 +1,7 @@
+variables:
+# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+# sync with the packages.config file.
+- name: DefaultGuardianVersion
+ value: 0.109.0
+- name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
new file mode 100644
index 0000000..aba6308
--- /dev/null
+++ b/eng/common/tools.ps1
@@ -0,0 +1,919 @@
+# Initialize variables if they aren't already defined.
+# These may be defined as parameters of the importing script, or set after importing this script.
+
+# CI mode - set to true on CI server for PR validation build or official build.
+[bool]$ci = if (Test-Path variable:ci) { $ci } else { $false }
+
+# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
+[string]$configuration = if (Test-Path variable:configuration) { $configuration } else { 'Debug' }
+
+# Set to true to opt out of outputting binary log while running in CI
+[bool]$excludeCIBinarylog = if (Test-Path variable:excludeCIBinarylog) { $excludeCIBinarylog } else { $false }
+
+# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
+[bool]$binaryLog = if (Test-Path variable:binaryLog) { $binaryLog } else { $ci -and !$excludeCIBinarylog }
+
+# Set to true to use the pipelines logger which will enable Azure logging output.
+# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
+# This flag is meant as a temporary opt-out for the feature while we validate it across
+# our consumers. It will be deleted in the future.
+[bool]$pipelinesLog = if (Test-Path variable:pipelinesLog) { $pipelinesLog } else { $ci }
+
+# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
+[bool]$prepareMachine = if (Test-Path variable:prepareMachine) { $prepareMachine } else { $false }
+
+# True to restore toolsets and dependencies.
+[bool]$restore = if (Test-Path variable:restore) { $restore } else { $true }
+
+# Adjusts msbuild verbosity level.
+[string]$verbosity = if (Test-Path variable:verbosity) { $verbosity } else { 'minimal' }
+
+# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
+[bool]$nodeReuse = if (Test-Path variable:nodeReuse) { $nodeReuse } else { !$ci }
+
+# Configures warning treatment in msbuild.
+[bool]$warnAsError = if (Test-Path variable:warnAsError) { $warnAsError } else { $true }
+
+# Specifies which msbuild engine to use for build: 'vs', 'dotnet' or unspecified (determined based on presence of tools.vs in global.json).
+[string]$msbuildEngine = if (Test-Path variable:msbuildEngine) { $msbuildEngine } else { $null }
+
+# True to attempt to use a .NET Core SDK that is already installed on the machine and meets
+# the requirements specified in global.json, instead of downloading one.
+[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
+
+# Enable repos to use a particular version of the online dotnet-install scripts.
+# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1
+[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
+
+# True to use global NuGet cache instead of restoring packages to repository-local directory.
+[bool]$useGlobalNuGetCache = if (Test-Path variable:useGlobalNuGetCache) { $useGlobalNuGetCache } else { !$ci }
+
+# True to exclude prerelease versions of Visual Studio during the build
+[bool]$excludePrereleaseVS = if (Test-Path variable:excludePrereleaseVS) { $excludePrereleaseVS } else { $false }
+
+# An array of names of processes to stop on script exit if prepareMachine is true.
+$processesToStopOnExit = if (Test-Path variable:processesToStopOnExit) { $processesToStopOnExit } else { @('msbuild', 'dotnet', 'vbcscompiler') }
+
+$disableConfigureToolsetImport = if (Test-Path variable:disableConfigureToolsetImport) { $disableConfigureToolsetImport } else { $null }
+
+set-strictmode -version 2.0
+$ErrorActionPreference = 'Stop'
+[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
+
+# If specified, provides an alternate path for getting .NET Core SDKs and Runtimes. This script will still try public sources first.
+[string]$runtimeSourceFeed = if (Test-Path variable:runtimeSourceFeed) { $runtimeSourceFeed } else { $null }
+# Base-64 encoded SAS token that has permission to storage container described by $runtimeSourceFeed
+[string]$runtimeSourceFeedKey = if (Test-Path variable:runtimeSourceFeedKey) { $runtimeSourceFeedKey } else { $null }
+
+function Create-Directory ([string[]] $path) {
+ New-Item -Path $path -Force -ItemType 'Directory' | Out-Null
+}
+
+function Unzip([string]$zipfile, [string]$outpath) {
+ Add-Type -AssemblyName System.IO.Compression.FileSystem
+ [System.IO.Compression.ZipFile]::ExtractToDirectory($zipfile, $outpath)
+}
+
+# This will exec a process using the console and return its exit code.
+# This will not throw when the process fails.
+# Returns the process exit code.
+function Exec-Process([string]$command, [string]$commandArgs) {
+ $startInfo = New-Object System.Diagnostics.ProcessStartInfo
+ $startInfo.FileName = $command
+ $startInfo.Arguments = $commandArgs
+ $startInfo.UseShellExecute = $false
+ $startInfo.WorkingDirectory = Get-Location
+
+ $process = New-Object System.Diagnostics.Process
+ $process.StartInfo = $startInfo
+ $process.Start() | Out-Null
+
+ $finished = $false
+ try {
+ while (-not $process.WaitForExit(100)) {
+ # Non-blocking loop done to allow ctrl-c interrupts
+ }
+
+ $finished = $true
+ return $global:LASTEXITCODE = $process.ExitCode
+ }
+ finally {
+ # If we didn't finish, then an error occurred or the user hit ctrl-c. Either
+ # way, kill the process.
+ if (-not $finished) {
+ $process.Kill()
+ }
+ }
+}
+
+# Take the given block, print it, print what the block probably references from the current set of
+# variables using low-effort string matching, then run the block.
+#
+# This is intended to replace the pattern of manually copy-pasting a command, wrapping it in quotes,
+# and printing it using "Write-Host". The copy-paste method is more readable in build logs, but less
+# maintainable and less reliable. It is easy to make a mistake and modify the command without
+# properly updating the "Write-Host" line, resulting in misleading build logs. The probability of
+# this mistake makes the pattern hard to trust when it shows up in build logs. Finding the bug in
+# existing source code can also be difficult, because the strings are not aligned to each other and
+# the line may be 300+ columns long.
+#
+# By removing the need to maintain two copies of the command, Exec-BlockVerbosely avoids the issues.
+#
+# In Bash (or any posix-like shell), "set -x" prints usable verbose output automatically.
+# "Set-PSDebug" appears to be similar at first glance, but unfortunately, it isn't very useful: it
+# doesn't print any info about the variables being used by the command, which is normally the
+# interesting part to diagnose.
+function Exec-BlockVerbosely([scriptblock] $block) {
+ Write-Host "--- Running script block:"
+ $blockString = $block.ToString().Trim()
+ Write-Host $blockString
+
+ Write-Host "--- List of variables that might be used:"
+ # For each variable x in the environment, check the block for a reference to x via simple "$x" or
+ # "@x" syntax. This doesn't detect other ways to reference variables ("${x}" nor "$variable:x",
+ # among others). It only catches what this function was originally written for: simple
+ # command-line commands.
+ $variableTable = Get-Variable |
+ Where-Object {
+ $blockString.Contains("`$$($_.Name)") -or $blockString.Contains("@$($_.Name)")
+ } |
+ Format-Table -AutoSize -HideTableHeaders -Wrap |
+ Out-String
+ Write-Host $variableTable.Trim()
+
+ Write-Host "--- Executing:"
+ & $block
+ Write-Host "--- Done running script block!"
+}
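+
+# Example usage (hypothetical): Exec-BlockVerbosely { & msbuild $msbuildArgs } prints the block
+# text and the current value of $msbuildArgs before executing the block.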
+
+# The createSdkLocationFile parameter enables generating a file under the toolset directory
+# into which the SDK's location is written. This is only necessary for cmd --> powershell
+# invocations, as dot sourcing isn't possible.
+function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) {
+ if (Test-Path variable:global:_DotNetInstallDir) {
+ return $global:_DotNetInstallDir
+ }
+
+ # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
+ $env:DOTNET_MULTILEVEL_LOOKUP=0
+
+ # Disable first run since we do not need all ASP.NET packages restored.
+ $env:DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+
+ # Disable telemetry on CI.
+ if ($ci) {
+ $env:DOTNET_CLI_TELEMETRY_OPTOUT=1
+ }
+
+ # Source Build uses DotNetCoreSdkDir variable
+ if ($env:DotNetCoreSdkDir -ne $null) {
+ $env:DOTNET_INSTALL_DIR = $env:DotNetCoreSdkDir
+ }
+
+ # Find the first path on %PATH% that contains the dotnet.exe
+ if ($useInstalledDotNetCli -and (-not $globalJsonHasRuntimes) -and ($env:DOTNET_INSTALL_DIR -eq $null)) {
+ $dotnetExecutable = GetExecutableFileName 'dotnet'
+ $dotnetCmd = Get-Command $dotnetExecutable -ErrorAction SilentlyContinue
+
+ if ($dotnetCmd -ne $null) {
+ $env:DOTNET_INSTALL_DIR = Split-Path $dotnetCmd.Path -Parent
+ }
+ }
+
+ $dotnetSdkVersion = $GlobalJson.tools.dotnet
+
+ # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
+ # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
+ if ((-not $globalJsonHasRuntimes) -and (-not [string]::IsNullOrEmpty($env:DOTNET_INSTALL_DIR)) -and (Test-Path(Join-Path $env:DOTNET_INSTALL_DIR "sdk\$dotnetSdkVersion"))) {
+ $dotnetRoot = $env:DOTNET_INSTALL_DIR
+ } else {
+ $dotnetRoot = Join-Path $RepoRoot '.dotnet'
+
+ if (-not (Test-Path(Join-Path $dotnetRoot "sdk\$dotnetSdkVersion"))) {
+ if ($install) {
+ InstallDotNetSdk $dotnetRoot $dotnetSdkVersion
+ } else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to find dotnet with SDK version '$dotnetSdkVersion'"
+ ExitWithExitCode 1
+ }
+ }
+
+ $env:DOTNET_INSTALL_DIR = $dotnetRoot
+ }
+
+ # Creates a temporary file under the toolset dir.
+ # The following code block is protecting against concurrent access so that this function can
+ # be called in parallel.
+ if ($createSdkLocationFile) {
+ do {
+ $sdkCacheFileTemp = Join-Path $ToolsetDir $([System.IO.Path]::GetRandomFileName())
+ }
+ until (!(Test-Path $sdkCacheFileTemp))
+ Set-Content -Path $sdkCacheFileTemp -Value $dotnetRoot
+
+ try {
+ Move-Item -Force $sdkCacheFileTemp (Join-Path $ToolsetDir 'sdk.txt')
+ } catch {
+ # Another concurrent invocation won the race; discard our temp copy
+ Remove-Item -Path $sdkCacheFileTemp
+ }
+ }
+
+ # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
+ # build steps from using anything other than what we've downloaded.
+ # It also ensures that VS msbuild will use the downloaded sdk targets.
+ $env:PATH = "$dotnetRoot;$env:PATH"
+
+ # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build
+ Write-PipelinePrependPath -Path $dotnetRoot
+
+ Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0'
+ Write-PipelineSetVariable -Name 'DOTNET_SKIP_FIRST_TIME_EXPERIENCE' -Value '1'
+
+ return $global:_DotNetInstallDir = $dotnetRoot
+}
+
+function Retry($downloadBlock, $maxRetries = 5) {
+ $retries = 1
+
+ while($true) {
+ try {
+ & $downloadBlock
+ break
+ }
+ catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ }
+
+ if (++$retries -le $maxRetries) {
+ $delayInSeconds = [math]::Pow(2, $retries) - 1 # Exponential backoff
+ Write-Host "Retrying. Waiting for $delayInSeconds seconds before next attempt ($retries of $maxRetries)."
+ Start-Sleep -Seconds $delayInSeconds
+ }
+ else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download file in $maxRetries attempts."
+ break
+ }
+
+ }
+}
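+
+# (With the default maxRetries of 5, the exponential backoff above waits 3, 7, 15, and 31
+# seconds between attempts 2 through 5.)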
+
+function GetDotNetInstallScript([string] $dotnetRoot) {
+ $installScript = Join-Path $dotnetRoot 'dotnet-install.ps1'
+ if (!(Test-Path $installScript)) {
+ Create-Directory $dotnetRoot
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
+ $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
+
+ Retry({
+ Write-Host "GET $uri"
+ Invoke-WebRequest $uri -OutFile $installScript
+ })
+ }
+
+ return $installScript
+}
+
+function InstallDotNetSdk([string] $dotnetRoot, [string] $version, [string] $architecture = '', [switch] $noPath) {
+ InstallDotNet $dotnetRoot $version $architecture '' $false $runtimeSourceFeed $runtimeSourceFeedKey -noPath:$noPath
+}
+
+function InstallDotNet([string] $dotnetRoot,
+ [string] $version,
+ [string] $architecture = '',
+ [string] $runtime = '',
+ [bool] $skipNonVersionedFiles = $false,
+ [string] $runtimeSourceFeed = '',
+ [string] $runtimeSourceFeedKey = '',
+ [switch] $noPath) {
+
+ $installScript = GetDotNetInstallScript $dotnetRoot
+ $installParameters = @{
+ Version = $version
+ InstallDir = $dotnetRoot
+ }
+
+ if ($architecture) { $installParameters.Architecture = $architecture }
+ if ($runtime) { $installParameters.Runtime = $runtime }
+ if ($skipNonVersionedFiles) { $installParameters.SkipNonVersionedFiles = $skipNonVersionedFiles }
+ if ($noPath) { $installParameters.NoPath = $True }
+
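+ # Candidate install locations, tried in order below: the default public feed, the
+ # dotnetbuilds.azureedge.net mirror, and (if configured) the private runtimeSourceFeed.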
+ $variations = @()
+ $variations += @($installParameters)
+
+ $dotnetBuilds = $installParameters.Clone()
+ $dotnetBuilds.AzureFeed = "https://dotnetbuilds.azureedge.net/public"
+ $variations += @($dotnetBuilds)
+
+ if ($runtimeSourceFeed) {
+ $runtimeSource = $installParameters.Clone()
+ $runtimeSource.AzureFeed = $runtimeSourceFeed
+ if ($runtimeSourceFeedKey) {
+ $decodedBytes = [System.Convert]::FromBase64String($runtimeSourceFeedKey)
+ $decodedString = [System.Text.Encoding]::UTF8.GetString($decodedBytes)
+ $runtimeSource.FeedCredential = $decodedString
+ }
+ $variations += @($runtimeSource)
+ }
+
+ $installSuccess = $false
+ foreach ($variation in $variations) {
+ if ($variation | Get-Member AzureFeed) {
+ $location = $variation.AzureFeed
+ } else {
+ $location = "public location";
+ }
+ Write-Host "Attempting to install dotnet from $location."
+ try {
+ & $installScript @variation
+ $installSuccess = $true
+ break
+ }
+ catch {
+ Write-Host "Failed to install dotnet from $location."
+ }
+ }
+ if (-not $installSuccess) {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Failed to install dotnet from any of the specified locations."
+ ExitWithExitCode 1
+ }
+}
+
+#
+# Locates Visual Studio MSBuild installation.
+# The preference order for MSBuild to use is as follows:
+#
+# 1. MSBuild from an active VS command prompt
+# 2. MSBuild from a compatible VS installation
+# 3. MSBuild from the xcopy tool package
+#
+# Returns full path to msbuild.exe.
+# Throws on failure.
+#
+function InitializeVisualStudioMSBuild([bool]$install, [object]$vsRequirements = $null) {
+ if (-not (IsWindowsPlatform)) {
+ throw "Cannot initialize Visual Studio on non-Windows"
+ }
+
+ if (Test-Path variable:global:_MSBuildExe) {
+ return $global:_MSBuildExe
+ }
+
+ # Minimum VS version to require.
+ $vsMinVersionReqdStr = '16.8'
+ $vsMinVersionReqd = [Version]::new($vsMinVersionReqdStr)
+
+ # If the version of msbuild is going to be xcopied,
+ # use this version. Version matches a package here:
+ # https://dev.azure.com/dnceng/public/_packaging?_a=package&feed=dotnet-eng&package=RoslynTools.MSBuild&protocolType=NuGet&version=17.2.1&view=overview
+ $defaultXCopyMSBuildVersion = '17.2.1'
+
+ if (!$vsRequirements) {
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
+ $vsRequirements = $GlobalJson.tools.vs
+ }
+ else {
+ $vsRequirements = New-Object PSObject -Property @{ version = $vsMinVersionReqdStr }
+ }
+ }
+ $vsMinVersionStr = if ($vsRequirements.version) { $vsRequirements.version } else { $vsMinVersionReqdStr }
+ $vsMinVersion = [Version]::new($vsMinVersionStr)
+
+ # Try msbuild command available in the environment.
+ if ($env:VSINSTALLDIR -ne $null) {
+ $msbuildCmd = Get-Command 'msbuild.exe' -ErrorAction SilentlyContinue
+ if ($msbuildCmd -ne $null) {
+ # Workaround for https://github.com/dotnet/roslyn/issues/35793
+ # Due to this issue $msbuildCmd.Version returns 0.0.0.0 for msbuild.exe 16.2+
+ $msbuildVersion = [Version]::new((Get-Item $msbuildCmd.Path).VersionInfo.ProductVersion.Split([char[]]@('-', '+'))[0])
+
+ if ($msbuildVersion -ge $vsMinVersion) {
+ return $global:_MSBuildExe = $msbuildCmd.Path
+ }
+
+ # Report error - the developer environment is initialized with incompatible VS version.
+ throw "Developer Command Prompt for VS $($env:VisualStudioVersion) is not recent enough. Please upgrade to $vsMinVersionStr or build from a plain CMD window"
+ }
+ }
+
+ # Locate Visual Studio installation or download x-copy msbuild.
+ $vsInfo = LocateVisualStudio $vsRequirements
+ if ($vsInfo -ne $null) {
+ $vsInstallDir = $vsInfo.installationPath
+ $vsMajorVersion = $vsInfo.installationVersion.Split('.')[0]
+
+ InitializeVisualStudioEnvironmentVariables $vsInstallDir $vsMajorVersion
+ } else {
+
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'xcopy-msbuild') {
+ $xcopyMSBuildVersion = $GlobalJson.tools.'xcopy-msbuild'
+ $vsMajorVersion = $xcopyMSBuildVersion.Split('.')[0]
+ } else {
+      # If the VS version provided in global.json is incompatible (too low), use the
+      # default version for the xcopy msbuild download.
+      if ($vsMinVersion -lt $vsMinVersionReqd) {
+        Write-Host "Using xcopy-msbuild version of $defaultXCopyMSBuildVersion since VS version $vsMinVersionStr provided in global.json is not compatible"
+        $xcopyMSBuildVersion = $defaultXCopyMSBuildVersion
+      }
+      else {
+ # If the VS version IS compatible, look for an xcopy msbuild package
+ # with a version matching VS.
+ # Note: If this version does not exist, then an explicit version of xcopy msbuild
+ # can be specified in global.json. This will be required for pre-release versions of msbuild.
+ $vsMajorVersion = $vsMinVersion.Major
+ $vsMinorVersion = $vsMinVersion.Minor
+ $xcopyMSBuildVersion = "$vsMajorVersion.$vsMinorVersion.0"
+ }
+ }
+
+ $vsInstallDir = $null
+ if ($xcopyMSBuildVersion.Trim() -ine "none") {
+ $vsInstallDir = InitializeXCopyMSBuild $xcopyMSBuildVersion $install
+ if ($vsInstallDir -eq $null) {
+ throw "Could not xcopy msbuild. Please check that package 'RoslynTools.MSBuild @ $xcopyMSBuildVersion' exists on feed 'dotnet-eng'."
+ }
+ }
+ if ($vsInstallDir -eq $null) {
+ throw 'Unable to find Visual Studio that has required version and components installed'
+ }
+ }
+
+ $msbuildVersionDir = if ([int]$vsMajorVersion -lt 16) { "$vsMajorVersion.0" } else { "Current" }
+
+ $local:BinFolder = Join-Path $vsInstallDir "MSBuild\$msbuildVersionDir\Bin"
+ $local:Prefer64bit = if (Get-Member -InputObject $vsRequirements -Name 'Prefer64bit') { $vsRequirements.Prefer64bit } else { $false }
+ if ($local:Prefer64bit -and (Test-Path(Join-Path $local:BinFolder "amd64"))) {
+ $global:_MSBuildExe = Join-Path $local:BinFolder "amd64\msbuild.exe"
+ } else {
+ $global:_MSBuildExe = Join-Path $local:BinFolder "msbuild.exe"
+ }
+
+ return $global:_MSBuildExe
+}
+
+function InitializeVisualStudioEnvironmentVariables([string] $vsInstallDir, [string] $vsMajorVersion) {
+ $env:VSINSTALLDIR = $vsInstallDir
+ Set-Item "env:VS$($vsMajorVersion)0COMNTOOLS" (Join-Path $vsInstallDir "Common7\Tools\")
+
+ $vsSdkInstallDir = Join-Path $vsInstallDir "VSSDK\"
+ if (Test-Path $vsSdkInstallDir) {
+ Set-Item "env:VSSDK$($vsMajorVersion)0Install" $vsSdkInstallDir
+ $env:VSSDKInstall = $vsSdkInstallDir
+ }
+}
+
+function InstallXCopyMSBuild([string]$packageVersion) {
+ return InitializeXCopyMSBuild $packageVersion -install $true
+}
+
+function InitializeXCopyMSBuild([string]$packageVersion, [bool]$install) {
+ $packageName = 'RoslynTools.MSBuild'
+ $packageDir = Join-Path $ToolsDir "msbuild\$packageVersion"
+ $packagePath = Join-Path $packageDir "$packageName.$packageVersion.nupkg"
+
+ if (!(Test-Path $packageDir)) {
+ if (!$install) {
+ return $null
+ }
+
+ Create-Directory $packageDir
+
+ Write-Host "Downloading $packageName $packageVersion"
+ $ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
+ Retry({
+ Invoke-WebRequest "https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-eng/nuget/v3/flat2/$packageName/$packageVersion/$packageName.$packageVersion.nupkg" -OutFile $packagePath
+ })
+
+ Unzip $packagePath $packageDir
+ }
+
+ return Join-Path $packageDir 'tools'
+}
+
+#
+# Locates Visual Studio instance that meets the minimal requirements specified by tools.vs object in global.json.
+#
+# The following properties of tools.vs are recognized:
+# "version": "{major}.{minor}"
+# Two part minimal VS version, e.g. "15.9", "16.0", etc.
+# "components": ["componentId1", "componentId2", ...]
+# Array of ids of workload components that must be available in the VS instance.
+# See e.g. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-enterprise?view=vs-2017
+#
+# Returns JSON describing the located VS instance (same format as returned by vswhere),
+# or $null if no instance meeting the requirements is found on the machine.
+#
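+# For illustration, a matching global.json fragment (version and component id hypothetical):
+#   "tools": { "vs": { "version": "16.8", "components": ["Microsoft.VisualStudio.Component.VC.Tools.x86.x64"] } }
+#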
+function LocateVisualStudio([object]$vsRequirements = $null){
+ if (-not (IsWindowsPlatform)) {
+ throw "Cannot run vswhere on non-Windows platforms."
+ }
+
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vswhere') {
+ $vswhereVersion = $GlobalJson.tools.vswhere
+ } else {
+ $vswhereVersion = '2.5.2'
+ }
+
+ $vsWhereDir = Join-Path $ToolsDir "vswhere\$vswhereVersion"
+ $vsWhereExe = Join-Path $vsWhereDir 'vswhere.exe'
+
+ if (!(Test-Path $vsWhereExe)) {
+ Create-Directory $vsWhereDir
+ Write-Host 'Downloading vswhere'
+ Retry({
+ Invoke-WebRequest "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/vswhere/$vswhereVersion/vswhere.exe" -OutFile $vswhereExe
+ })
+ }
+
+ if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs }
+ $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*')
+
+ if (!$excludePrereleaseVS) {
+ $args += '-prerelease'
+ }
+
+ if (Get-Member -InputObject $vsRequirements -Name 'version') {
+ $args += '-version'
+ $args += $vsRequirements.version
+ }
+
+ if (Get-Member -InputObject $vsRequirements -Name 'components') {
+ foreach ($component in $vsRequirements.components) {
+ $args += '-requires'
+ $args += $component
+ }
+ }
+
+  $vsInfo = & $vsWhereExe $args | ConvertFrom-Json
+
+ if ($lastExitCode -ne 0) {
+ return $null
+ }
+
+ # use first matching instance
+ return $vsInfo[0]
+}
+
+function InitializeBuildTool() {
+ if (Test-Path variable:global:_BuildTool) {
+ # If the requested msbuild parameters do not match, clear the cached variables.
+ if($global:_BuildTool.Contains('ExcludePrereleaseVS') -and $global:_BuildTool.ExcludePrereleaseVS -ne $excludePrereleaseVS) {
+ Remove-Item variable:global:_BuildTool
+ Remove-Item variable:global:_MSBuildExe
+ } else {
+ return $global:_BuildTool
+ }
+ }
+
+ if (-not $msbuildEngine) {
+ $msbuildEngine = GetDefaultMSBuildEngine
+ }
+
+ # Initialize dotnet cli if listed in 'tools'
+ $dotnetRoot = $null
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
+ $dotnetRoot = InitializeDotNetCli -install:$restore
+ }
+
+ if ($msbuildEngine -eq 'dotnet') {
+ if (!$dotnetRoot) {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "/global.json must specify 'tools.dotnet'."
+ ExitWithExitCode 1
+ }
+ $dotnetPath = Join-Path $dotnetRoot (GetExecutableFileName 'dotnet')
+ $buildTool = @{ Path = $dotnetPath; Command = 'msbuild'; Tool = 'dotnet'; Framework = 'net7.0' }
+ } elseif ($msbuildEngine -eq "vs") {
+ try {
+ $msbuildPath = InitializeVisualStudioMSBuild -install:$restore
+ } catch {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message $_
+ ExitWithExitCode 1
+ }
+
+ $buildTool = @{ Path = $msbuildPath; Command = ""; Tool = "vs"; Framework = "net472"; ExcludePrereleaseVS = $excludePrereleaseVS }
+ } else {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unexpected value of -msbuildEngine: '$msbuildEngine'."
+ ExitWithExitCode 1
+ }
+
+ return $global:_BuildTool = $buildTool
+}
+
+function GetDefaultMSBuildEngine() {
+ # Presence of tools.vs indicates the repo needs to build using VS msbuild on Windows.
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'vs') {
+ return 'vs'
+ }
+
+ if (Get-Member -InputObject $GlobalJson.tools -Name 'dotnet') {
+ return 'dotnet'
+ }
+
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "-msbuildEngine must be specified, or /global.json must specify 'tools.dotnet' or 'tools.vs'."
+ ExitWithExitCode 1
+}
+
+function GetNuGetPackageCachePath() {
+ if ($env:NUGET_PACKAGES -eq $null) {
+ # Use local cache on CI to ensure deterministic build.
+    # Avoid using the http cache as a workaround for https://github.com/NuGet/Home/issues/3116
+    # Use the global cache in dev builds to avoid the cost of downloading packages.
+ # For directory normalization, see also: https://github.com/NuGet/Home/issues/7968
+ if ($useGlobalNuGetCache) {
+ $env:NUGET_PACKAGES = Join-Path $env:UserProfile '.nuget\packages\'
+ } else {
+ $env:NUGET_PACKAGES = Join-Path $RepoRoot '.packages\'
+ $env:RESTORENOCACHE = $true
+ }
+ }
+
+ return $env:NUGET_PACKAGES
+}
+
+# Returns a full path to an Arcade SDK task project file.
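+# Usage (illustrative; task name hypothetical): GetSdkTaskProject 'SignToolTask'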
+function GetSdkTaskProject([string]$taskName) {
+ return Join-Path (Split-Path (InitializeToolset) -Parent) "SdkTasks\$taskName.proj"
+}
+
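+# Installs native tools declared under "native-tools" in global.json, unless the
+# DisableNativeToolsetInstalls variable is set.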
+function InitializeNativeTools() {
+ if (-Not (Test-Path variable:DisableNativeToolsetInstalls) -And (Get-Member -InputObject $GlobalJson -Name "native-tools")) {
+ $nativeArgs= @{}
+ if ($ci) {
+ $nativeArgs = @{
+ InstallDirectory = "$ToolsDir"
+ }
+ }
+ if ($env:NativeToolsOnMachine) {
+ Write-Host "Variable NativeToolsOnMachine detected, enabling native tool path promotion..."
+ $nativeArgs += @{ PathPromotion = $true }
+ }
+ & "$PSScriptRoot/init-tools-native.ps1" @nativeArgs
+ }
+}
+
+function InitializeToolset() {
+ if (Test-Path variable:global:_ToolsetBuildProj) {
+ return $global:_ToolsetBuildProj
+ }
+
+ $nugetCache = GetNuGetPackageCachePath
+
+ $toolsetVersion = $GlobalJson.'msbuild-sdks'.'Microsoft.DotNet.Arcade.Sdk'
+ $toolsetLocationFile = Join-Path $ToolsetDir "$toolsetVersion.txt"
+
+ if (Test-Path $toolsetLocationFile) {
+ $path = Get-Content $toolsetLocationFile -TotalCount 1
+ if (Test-Path $path) {
+ return $global:_ToolsetBuildProj = $path
+ }
+ }
+
+ if (-not $restore) {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Toolset version $toolsetVersion has not been restored."
+ ExitWithExitCode 1
+ }
+
+ $buildTool = InitializeBuildTool
+
+ $proj = Join-Path $ToolsetDir 'restore.proj'
+ $bl = if ($binaryLog) { '/bl:' + (Join-Path $LogDir 'ToolsetRestore.binlog') } else { '' }
+
+ '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' | Set-Content $proj
+
+ MSBuild-Core $proj $bl /t:__WriteToolsetLocation /clp:ErrorsOnly`;NoSummary /p:__ToolsetLocationOutputFile=$toolsetLocationFile
+
+ $path = Get-Content $toolsetLocationFile -Encoding UTF8 -TotalCount 1
+ if (!(Test-Path $path)) {
+ throw "Invalid toolset path: $path"
+ }
+
+ return $global:_ToolsetBuildProj = $path
+}
+
+function ExitWithExitCode([int] $exitCode) {
+ if ($ci -and $prepareMachine) {
+ Stop-Processes
+ }
+ exit $exitCode
+}
+
+# Check if $LASTEXITCODE is a nonzero exit code (NZEC). If so, print an Azure Pipelines error for
+# diagnostics, then exit the script with $LASTEXITCODE.
+function Exit-IfNZEC([string] $category = "General") {
+ Write-Host "Exit code $LASTEXITCODE"
+ if ($LASTEXITCODE -ne 0) {
+ $message = "Last command failed with exit code $LASTEXITCODE."
+ Write-PipelineTelemetryError -Force -Category $category -Message $message
+ ExitWithExitCode $LASTEXITCODE
+ }
+}
+
+function Stop-Processes() {
+ Write-Host 'Killing running build processes...'
+ foreach ($processName in $processesToStopOnExit) {
+ Get-Process -Name $processName -ErrorAction SilentlyContinue | Stop-Process
+ }
+}
+
+#
+# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
+# The arguments are automatically quoted.
+# Terminates the script if the build fails.
+#
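+# Usage (illustrative): MSBuild $projectPath /t:Build /p:Configuration=Release
+#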
+function MSBuild() {
+ if ($pipelinesLog) {
+ $buildTool = InitializeBuildTool
+
+ if ($ci -and $buildTool.Tool -eq 'dotnet') {
+ $env:NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS = 20
+ $env:NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS = 20
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS' -Value '20'
+ Write-PipelineSetVariable -Name 'NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS' -Value '20'
+ }
+
+ Enable-Nuget-EnhancedRetry
+
+ $toolsetBuildProject = InitializeToolset
+ $basePath = Split-Path -parent $toolsetBuildProject
+ $possiblePaths = @(
+ # new scripts need to work with old packages, so we need to look for the old names/versions
+ (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.ArcadeLogging.dll')),
+ (Join-Path $basePath (Join-Path $buildTool.Framework 'Microsoft.DotNet.Arcade.Sdk.dll')),
+      (Join-Path $basePath (Join-Path 'netcoreapp2.1' 'Microsoft.DotNet.ArcadeLogging.dll')),
+      (Join-Path $basePath (Join-Path 'netcoreapp2.1' 'Microsoft.DotNet.Arcade.Sdk.dll')),
+      (Join-Path $basePath (Join-Path 'netcoreapp3.1' 'Microsoft.DotNet.ArcadeLogging.dll')),
+      (Join-Path $basePath (Join-Path 'netcoreapp3.1' 'Microsoft.DotNet.Arcade.Sdk.dll'))
+ )
+ $selectedPath = $null
+ foreach ($path in $possiblePaths) {
+ if (Test-Path $path -PathType Leaf) {
+ $selectedPath = $path
+ break
+ }
+ }
+ if (-not $selectedPath) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Unable to find arcade sdk logger assembly.'
+ ExitWithExitCode 1
+ }
+ $args += "/logger:$selectedPath"
+ }
+
+ MSBuild-Core @args
+}
+
+#
+# Executes msbuild (or 'dotnet msbuild') with arguments passed to the function.
+# The arguments are automatically quoted.
+# Terminates the script if the build fails.
+#
+function MSBuild-Core() {
+ if ($ci) {
+ if (!$binaryLog -and !$excludeCIBinarylog) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Binary log must be enabled in CI build, or explicitly opted-out from with the -excludeCIBinarylog switch.'
+ ExitWithExitCode 1
+ }
+
+ if ($nodeReuse) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'Node reuse must be disabled in CI build.'
+ ExitWithExitCode 1
+ }
+ }
+
+ Enable-Nuget-EnhancedRetry
+
+ $buildTool = InitializeBuildTool
+
+ $cmdArgs = "$($buildTool.Command) /m /nologo /clp:Summary /v:$verbosity /nr:$nodeReuse /p:ContinuousIntegrationBuild=$ci"
+
+ if ($warnAsError) {
+ $cmdArgs += ' /warnaserror /p:TreatWarningsAsErrors=true'
+ }
+ else {
+ $cmdArgs += ' /p:TreatWarningsAsErrors=false'
+ }
+
+ foreach ($arg in $args) {
+ if ($null -ne $arg -and $arg.Trim() -ne "") {
+ if ($arg.EndsWith('\')) {
+ $arg = $arg + "\"
+ }
+ $cmdArgs += " `"$arg`""
+ }
+ }
+
+ $env:ARCADE_BUILD_TOOL_COMMAND = "$($buildTool.Path) $cmdArgs"
+
+ $exitCode = Exec-Process $buildTool.Path $cmdArgs
+
+ if ($exitCode -ne 0) {
+ # We should not Write-PipelineTaskError here because that message shows up in the build summary
+ # The build already logged an error, that's the reason it failed. Producing an error here only adds noise.
+ Write-Host "Build failed with exit code $exitCode. Check errors above." -ForegroundColor Red
+
+ $buildLog = GetMSBuildBinaryLogCommandLineArgument $args
+ if ($null -ne $buildLog) {
+ Write-Host "See log: $buildLog" -ForegroundColor DarkGray
+ }
+
+ if ($ci) {
+ Write-PipelineSetResult -Result "Failed" -Message "msbuild execution failed."
+ # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
+ # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
+ ExitWithExitCode 0
+ } else {
+ ExitWithExitCode $exitCode
+ }
+ }
+}
+
+function GetMSBuildBinaryLogCommandLineArgument($arguments) {
+ foreach ($argument in $arguments) {
+ if ($argument -ne $null) {
+ $arg = $argument.Trim()
+      if ($arg.StartsWith('/bl:', 'OrdinalIgnoreCase')) {
+ return $arg.Substring('/bl:'.Length)
+ }
+
+ if ($arg.StartsWith('/binaryLogger:', 'OrdinalIgnoreCase')) {
+ return $arg.Substring('/binaryLogger:'.Length)
+ }
+ }
+ }
+
+ return $null
+}
+
+function GetExecutableFileName($baseName) {
+ if (IsWindowsPlatform) {
+ return "$baseName.exe"
+ }
+ else {
+ return $baseName
+ }
+}
+
+function IsWindowsPlatform() {
+ return [environment]::OSVersion.Platform -eq [PlatformID]::Win32NT
+}
+
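+# Installs the darc CLI (optionally pinned to $version) via darc-init.ps1 into a unique
+# temp folder and returns the full path to darc.exe.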
+function Get-Darc($version) {
+ $darcPath = "$TempDir\darc\$(New-Guid)"
+ if ($version -ne $null) {
+ & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath -darcVersion $version | Out-Host
+ } else {
+ & $PSScriptRoot\darc-init.ps1 -toolpath $darcPath | Out-Host
+ }
+ return "$darcPath\darc.exe"
+}
+
+. $PSScriptRoot\pipeline-logging-functions.ps1
+
+$RepoRoot = Resolve-Path (Join-Path $PSScriptRoot '..\..\')
+$EngRoot = Resolve-Path (Join-Path $PSScriptRoot '..')
+$ArtifactsDir = Join-Path $RepoRoot 'artifacts'
+$ToolsetDir = Join-Path $ArtifactsDir 'toolset'
+$ToolsDir = Join-Path $RepoRoot '.tools'
+$LogDir = Join-Path (Join-Path $ArtifactsDir 'log') $configuration
+$TempDir = Join-Path (Join-Path $ArtifactsDir 'tmp') $configuration
+$GlobalJson = Get-Content -Raw -Path (Join-Path $RepoRoot 'global.json') | ConvertFrom-Json
+# true if global.json contains a "runtimes" section
+$globalJsonHasRuntimes = if ($GlobalJson.tools.PSObject.Properties.Name -Match 'runtimes') { $true } else { $false }
+
+Create-Directory $ToolsetDir
+Create-Directory $TempDir
+Create-Directory $LogDir
+
+Write-PipelineSetVariable -Name 'Artifacts' -Value $ArtifactsDir
+Write-PipelineSetVariable -Name 'Artifacts.Toolset' -Value $ToolsetDir
+Write-PipelineSetVariable -Name 'Artifacts.Log' -Value $LogDir
+Write-PipelineSetVariable -Name 'TEMP' -Value $TempDir
+Write-PipelineSetVariable -Name 'TMP' -Value $TempDir
+
+# Import custom tools configuration, if present in the repo.
+# Note: Import in global scope so that the script can set top-level variables without qualification.
+if (!$disableConfigureToolsetImport) {
+ $configureToolsetScript = Join-Path $EngRoot 'configure-toolset.ps1'
+ if (Test-Path $configureToolsetScript) {
+ . $configureToolsetScript
+ if ((Test-Path variable:failOnConfigureToolsetError) -And $failOnConfigureToolsetError) {
+ if ((Test-Path variable:LastExitCode) -And ($LastExitCode -ne 0)) {
+ Write-PipelineTelemetryError -Category 'Build' -Message 'configure-toolset.ps1 returned a non-zero exit code'
+ ExitWithExitCode $LastExitCode
+ }
+ }
+ }
+}
+
+#
+# If the $ci flag is set, turn on special environment variables for improved NuGet client retry logic (and log that we did).
+#
+function Enable-Nuget-EnhancedRetry() {
+ if ($ci) {
+ Write-Host "Setting NUGET enhanced retry environment variables"
+ $env:NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY = 'true'
+ $env:NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT = 6
+ $env:NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS = 1000
+ Write-PipelineSetVariable -Name 'NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY' -Value 'true'
+ Write-PipelineSetVariable -Name 'NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT' -Value '6'
+ Write-PipelineSetVariable -Name 'NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS' -Value '1000'
+ }
+}
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
new file mode 100644
index 0000000..c110d0e
--- /dev/null
+++ b/eng/common/tools.sh
@@ -0,0 +1,550 @@
+#!/usr/bin/env bash
+
+# Initialize variables if they aren't already defined.
+
+# CI mode - set to true on CI server for PR validation build or official build.
+ci=${ci:-false}
+
+# Set to true to use the pipelines logger which will enable Azure logging output.
+# https://github.com/Microsoft/azure-pipelines-tasks/blob/master/docs/authoring/commands.md
+# This flag is meant as a temporary opt-out for the feature while we validate it across
+# our consumers. It will be deleted in the future.
+if [[ "$ci" == true ]]; then
+ pipelines_log=${pipelines_log:-true}
+else
+ pipelines_log=${pipelines_log:-false}
+fi
+
+# Build configuration. Common values include 'Debug' and 'Release', but the repository may use other names.
+configuration=${configuration:-'Debug'}
+
+# Set to true to opt out of outputting binary log while running in CI
+exclude_ci_binary_log=${exclude_ci_binary_log:-false}
+
+if [[ "$ci" == true && "$exclude_ci_binary_log" == false ]]; then
+ binary_log_default=true
+else
+ binary_log_default=false
+fi
+
+# Set to true to output binary log from msbuild. Note that emitting binary log slows down the build.
+binary_log=${binary_log:-$binary_log_default}
+
+# Turns on machine preparation/clean up code that changes the machine state (e.g. kills build processes).
+prepare_machine=${prepare_machine:-false}
+
+# True to restore toolsets and dependencies.
+restore=${restore:-true}
+
+# Adjusts msbuild verbosity level.
+verbosity=${verbosity:-'minimal'}
+
+# Set to true to reuse msbuild nodes. Recommended to not reuse on CI.
+if [[ "$ci" == true ]]; then
+ node_reuse=${node_reuse:-false}
+else
+ node_reuse=${node_reuse:-true}
+fi
+
+# Configures warning treatment in msbuild.
+warn_as_error=${warn_as_error:-true}
+
+# True to attempt using a .NET Core SDK that is already installed on the machine and meets
+# the requirements specified in global.json, instead of downloading one.
+use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
+
+# Enable repos to use a particular version of the on-line dotnet-install scripts.
+# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh
+dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
+
+# True to use global NuGet cache instead of restoring packages to repository-local directory.
+if [[ "$ci" == true ]]; then
+ use_global_nuget_cache=${use_global_nuget_cache:-false}
+else
+ use_global_nuget_cache=${use_global_nuget_cache:-true}
+fi
+
+# Used when restoring .NET SDK from alternative feeds
+runtime_source_feed=${runtime_source_feed:-''}
+runtime_source_feed_key=${runtime_source_feed_key:-''}
+
+# Resolve any symlinks in the given path.
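+# Usage (illustrative): ResolvePath "${BASH_SOURCE[0]}"; script_dir=$(dirname "$_ResolvePath")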
+function ResolvePath {
+ local path=$1
+
+ while [[ -h $path ]]; do
+ local dir="$( cd -P "$( dirname "$path" )" && pwd )"
+ path="$(readlink "$path")"
+
+ # if $path was a relative symlink, we need to resolve it relative to the path where the
+ # symlink file was located
+ [[ $path != /* ]] && path="$dir/$path"
+ done
+
+ # return value
+ _ResolvePath="$path"
+}
+
+# ReadGlobalVersion [json key]
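+# Usage (illustrative): ReadGlobalVersion "dotnet"; sdk_version="$_ReadGlobalVersion"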
+function ReadGlobalVersion {
+ local key=$1
+
+ if command -v jq &> /dev/null; then
+ _ReadGlobalVersion="$(jq -r ".[] | select(has(\"$key\")) | .\"$key\"" "$global_json_file")"
+ elif [[ "$(cat "$global_json_file")" =~ \"$key\"[[:space:]\:]*\"([^\"]+) ]]; then
+ _ReadGlobalVersion=${BASH_REMATCH[1]}
+ fi
+
+ if [[ -z "$_ReadGlobalVersion" ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Error: Cannot find \"$key\" in $global_json_file"
+ ExitWithExitCode 1
+ fi
+}
+
+function InitializeDotNetCli {
+ if [[ -n "${_InitializeDotNetCli:-}" ]]; then
+ return
+ fi
+
+ local install=$1
+
+ # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism
+ export DOTNET_MULTILEVEL_LOOKUP=0
+
+ # Disable first run since we want to control all package sources
+ export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+
+ # Disable telemetry on CI
+ if [[ $ci == true ]]; then
+ export DOTNET_CLI_TELEMETRY_OPTOUT=1
+ fi
+
+ # LTTNG is the logging infrastructure used by Core CLR. Need this variable set
+ # so it doesn't output warnings to the console.
+ export LTTNG_HOME="$HOME"
+
+ # Source Build uses DotNetCoreSdkDir variable
+ if [[ -n "${DotNetCoreSdkDir:-}" ]]; then
+ export DOTNET_INSTALL_DIR="$DotNetCoreSdkDir"
+ fi
+
+  # Find the first path on $PATH that contains the dotnet executable
+ if [[ "$use_installed_dotnet_cli" == true && $global_json_has_runtimes == false && -z "${DOTNET_INSTALL_DIR:-}" ]]; then
+ local dotnet_path=`command -v dotnet`
+ if [[ -n "$dotnet_path" ]]; then
+ ResolvePath "$dotnet_path"
+ export DOTNET_INSTALL_DIR=`dirname "$_ResolvePath"`
+ fi
+ fi
+
+ ReadGlobalVersion "dotnet"
+ local dotnet_sdk_version=$_ReadGlobalVersion
+ local dotnet_root=""
+
+ # Use dotnet installation specified in DOTNET_INSTALL_DIR if it contains the required SDK version,
+ # otherwise install the dotnet CLI and SDK to repo local .dotnet directory to avoid potential permission issues.
+ if [[ $global_json_has_runtimes == false && -n "${DOTNET_INSTALL_DIR:-}" && -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
+ dotnet_root="$DOTNET_INSTALL_DIR"
+ else
+ dotnet_root="$repo_root/.dotnet"
+
+ export DOTNET_INSTALL_DIR="$dotnet_root"
+
+ if [[ ! -d "$DOTNET_INSTALL_DIR/sdk/$dotnet_sdk_version" ]]; then
+ if [[ "$install" == true ]]; then
+ InstallDotNetSdk "$dotnet_root" "$dotnet_sdk_version"
+ else
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Unable to find dotnet with SDK version '$dotnet_sdk_version'"
+ ExitWithExitCode 1
+ fi
+ fi
+ fi
+
+ # Add dotnet to PATH. This prevents any bare invocation of dotnet in custom
+ # build steps from using anything other than what we've downloaded.
+ Write-PipelinePrependPath -path "$dotnet_root"
+
+ Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0"
+ Write-PipelineSetVariable -name "DOTNET_SKIP_FIRST_TIME_EXPERIENCE" -value "1"
+
+ # return value
+ _InitializeDotNetCli="$dotnet_root"
+}
+
+function InstallDotNetSdk {
+ local root=$1
+ local version=$2
+ local architecture="unset"
+ if [[ $# -ge 3 ]]; then
+ architecture=$3
+ fi
+ InstallDotNet "$root" "$version" $architecture 'sdk' 'true' $runtime_source_feed $runtime_source_feed_key
+}
+
+function InstallDotNet {
+ local root=$1
+ local version=$2
+
+ GetDotNetInstallScript "$root"
+ local install_script=$_GetDotNetInstallScript
+
+ local installParameters=(--version $version --install-dir "$root")
+
+ if [[ -n "${3:-}" ]] && [ "$3" != 'unset' ]; then
+ installParameters+=(--architecture $3)
+ fi
+ if [[ -n "${4:-}" ]] && [ "$4" != 'sdk' ]; then
+ installParameters+=(--runtime $4)
+ fi
+ if [[ "$#" -ge "5" ]] && [[ "$5" != 'false' ]]; then
+ installParameters+=(--skip-non-versioned-files)
+ fi
+
+ local variations=() # list of variable names with parameter arrays in them
+
+ local public_location=("${installParameters[@]}")
+ variations+=(public_location)
+
+ local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://dotnetbuilds.azureedge.net/public")
+ variations+=(dotnetbuilds)
+
+ if [[ -n "${6:-}" ]]; then
+ variations+=(private_feed)
+ local private_feed=("${installParameters[@]}" --azure-feed $6)
+ if [[ -n "${7:-}" ]]; then
+      # The 'base64' binary on alpine uses '-d' and doesn't support '--decode'.
+      # To work around this, do a simple detection and switch the parameter
+      # accordingly.
+ decodeArg="--decode"
+ if base64 --help 2>&1 | grep -q "BusyBox"; then
+ decodeArg="-d"
+ fi
+ decodedFeedKey=`echo $7 | base64 $decodeArg`
+ private_feed+=(--feed-credential $decodedFeedKey)
+ fi
+ fi
+
+ local installSuccess=0
+ for variationName in "${variations[@]}"; do
+ local name="$variationName[@]"
+ local variation=("${!name}")
+ echo "Attempting to install dotnet from $variationName."
+ bash "$install_script" "${variation[@]}" && installSuccess=1
+ if [[ "$installSuccess" -eq 1 ]]; then
+ break
+ fi
+
+ echo "Failed to install dotnet from $variationName."
+ done
+
+ if [[ "$installSuccess" -eq 0 ]]; then
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to install dotnet SDK from any of the specified locations."
+ ExitWithExitCode 1
+ fi
+}
+
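+# Runs the given command, retrying up to $maxRetries times with an exponential
+# backoff of 3^n - 1 seconds between attempts.
+# Usage (illustrative): with_retries wget -v -O "$file" "$url"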
+function with_retries {
+ local maxRetries=5
+ local retries=1
+ echo "Trying to run '$@' for maximum of $maxRetries attempts."
+ while [[ $((retries++)) -le $maxRetries ]]; do
+ "$@"
+
+ if [[ $? == 0 ]]; then
+ echo "Ran '$@' successfully."
+ return 0
+ fi
+
+ timeout=$((3**$retries-1))
+ echo "Failed to execute '$@'. Waiting $timeout seconds before next attempt ($retries out of $maxRetries)." 1>&2
+ sleep $timeout
+ done
+
+ echo "Failed to execute '$@' for $maxRetries times." 1>&2
+
+ return 1
+}
+
+function GetDotNetInstallScript {
+ local root=$1
+ local install_script="$root/dotnet-install.sh"
+ local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
+
+ if [[ ! -a "$install_script" ]]; then
+ mkdir -p "$root"
+
+ echo "Downloading '$install_script_url'"
+
+ # Use curl if available, otherwise use wget
+ if command -v curl > /dev/null; then
+      # first, try directly; if this fails we will retry with verbose logging
+ curl "$install_script_url" -sSL --retry 10 --create-dirs -o "$install_script" || {
+ if command -v openssl &> /dev/null; then
+ echo "Curl failed; dumping some information about dotnet.microsoft.com for later investigation"
+ echo | openssl s_client -showcerts -servername dotnet.microsoft.com -connect dotnet.microsoft.com:443
+ fi
+ echo "Will now retry the same URL with verbose logging."
+ with_retries curl "$install_script_url" -sSL --verbose --retry 10 --create-dirs -o "$install_script" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ }
+ else
+ with_retries wget -v -O "$install_script" "$install_script_url" || {
+ local exit_code=$?
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Failed to acquire dotnet install script (exit code '$exit_code')."
+ ExitWithExitCode $exit_code
+ }
+ fi
+ fi
+ # return value
+ _GetDotNetInstallScript="$install_script"
+}
+
+function InitializeBuildTool {
+ if [[ -n "${_InitializeBuildTool:-}" ]]; then
+ return
+ fi
+
+ InitializeDotNetCli $restore
+
+ # return values
+ _InitializeBuildTool="$_InitializeDotNetCli/dotnet"
+ _InitializeBuildToolCommand="msbuild"
+ _InitializeBuildToolFramework="net7.0"
+}
+
+# Set RestoreNoCache as a workaround for https://github.com/NuGet/Home/issues/3116
+function GetNuGetPackageCachePath {
+ if [[ -z ${NUGET_PACKAGES:-} ]]; then
+ if [[ "$use_global_nuget_cache" == true ]]; then
+ export NUGET_PACKAGES="$HOME/.nuget/packages"
+ else
+ export NUGET_PACKAGES="$repo_root/.packages"
+ export RESTORENOCACHE=true
+ fi
+ fi
+
+ # return value
+ _GetNuGetPackageCachePath=$NUGET_PACKAGES
+}
+
+function InitializeNativeTools() {
+ if [[ -n "${DisableNativeToolsetInstalls:-}" ]]; then
+ return
+ fi
+ if grep -Fq "native-tools" $global_json_file
+ then
+ local nativeArgs=""
+ if [[ "$ci" == true ]]; then
+ nativeArgs="--installDirectory $tools_dir"
+ fi
+ "$_script_dir/init-tools-native.sh" $nativeArgs
+ fi
+}
+
+function InitializeToolset {
+ if [[ -n "${_InitializeToolset:-}" ]]; then
+ return
+ fi
+
+ GetNuGetPackageCachePath
+
+ ReadGlobalVersion "Microsoft.DotNet.Arcade.Sdk"
+
+ local toolset_version=$_ReadGlobalVersion
+ local toolset_location_file="$toolset_dir/$toolset_version.txt"
+
+ if [[ -a "$toolset_location_file" ]]; then
+ local path=`cat "$toolset_location_file"`
+ if [[ -a "$path" ]]; then
+ # return value
+ _InitializeToolset="$path"
+ return
+ fi
+ fi
+
+ if [[ "$restore" != true ]]; then
+ Write-PipelineTelemetryError -category 'InitializeToolset' "Toolset version $toolset_version has not been restored."
+ ExitWithExitCode 2
+ fi
+
+ local proj="$toolset_dir/restore.proj"
+
+ local bl=""
+ if [[ "$binary_log" == true ]]; then
+ bl="/bl:$log_dir/ToolsetRestore.binlog"
+ fi
+
+ echo '<Project Sdk="Microsoft.DotNet.Arcade.Sdk"/>' > "$proj"
+ MSBuild-Core "$proj" $bl /t:__WriteToolsetLocation /clp:ErrorsOnly\;NoSummary /p:__ToolsetLocationOutputFile="$toolset_location_file"
+
+ local toolset_build_proj=`cat "$toolset_location_file"`
+
+ if [[ ! -a "$toolset_build_proj" ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Invalid toolset path: $toolset_build_proj"
+ ExitWithExitCode 3
+ fi
+
+ # return value
+ _InitializeToolset="$toolset_build_proj"
+}
+
+function ExitWithExitCode {
+ if [[ "$ci" == true && "$prepare_machine" == true ]]; then
+ StopProcesses
+ fi
+ exit $1
+}
+
+function StopProcesses {
+ echo "Killing running build processes..."
+ pkill -9 "dotnet" || true
+ pkill -9 "vbcscompiler" || true
+ return 0
+}
+
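+# Invokes MSBuild-Core, first configuring NuGet retry settings (on CI) and wiring up
+# the Arcade logger assembly when pipelines_log is enabled.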
+function MSBuild {
+ local args=$@
+ if [[ "$pipelines_log" == true ]]; then
+ InitializeBuildTool
+ InitializeToolset
+
+ if [[ "$ci" == true ]]; then
+ export NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS=20
+ export NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS=20
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS" -value "20"
+ Write-PipelineSetVariable -name "NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS" -value "20"
+
+ export NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY=true
+ export NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT=6
+ export NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS=1000
+ Write-PipelineSetVariable -name "NUGET_ENABLE_EXPERIMENTAL_HTTP_RETRY" -value "true"
+ Write-PipelineSetVariable -name "NUGET_EXPERIMENTAL_MAX_NETWORK_TRY_COUNT" -value "6"
+ Write-PipelineSetVariable -name "NUGET_EXPERIMENTAL_NETWORK_RETRY_DELAY_MILLISECONDS" -value "1000"
+ fi
+
+ local toolset_dir="${_InitializeToolset%/*}"
+ # new scripts need to work with old packages, so we need to look for the old names/versions
+ local selectedPath=
+ local possiblePaths=()
+ possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.ArcadeLogging.dll" )
+ possiblePaths+=( "$toolset_dir/$_InitializeBuildToolFramework/Microsoft.DotNet.Arcade.Sdk.dll" )
+ possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.ArcadeLogging.dll" )
+ possiblePaths+=( "$toolset_dir/netcoreapp2.1/Microsoft.DotNet.Arcade.Sdk.dll" )
+ possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.ArcadeLogging.dll" )
+ possiblePaths+=( "$toolset_dir/netcoreapp3.1/Microsoft.DotNet.Arcade.Sdk.dll" )
+ for path in "${possiblePaths[@]}"; do
+ if [[ -f $path ]]; then
+ selectedPath=$path
+ break
+ fi
+ done
+ if [[ -z "$selectedPath" ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Unable to find arcade sdk logger assembly."
+ ExitWithExitCode 1
+ fi
+ args+=( "-logger:$selectedPath" )
+ fi
+
+ MSBuild-Core ${args[@]}
+}
+
+function MSBuild-Core {
+ if [[ "$ci" == true ]]; then
+ if [[ "$binary_log" != true && "$exclude_ci_binary_log" != true ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Binary log must be enabled in CI build, or explicitly opted-out from with the -noBinaryLog switch."
+ ExitWithExitCode 1
+ fi
+
+ if [[ "$node_reuse" == true ]]; then
+ Write-PipelineTelemetryError -category 'Build' "Node reuse must be disabled in CI build."
+ ExitWithExitCode 1
+ fi
+ fi
+
+ InitializeBuildTool
+
+ local warnaserror_switch=""
+ if [[ $warn_as_error == true ]]; then
+ warnaserror_switch="/warnaserror"
+ fi
+
+ function RunBuildTool {
+ export ARCADE_BUILD_TOOL_COMMAND="$_InitializeBuildTool $@"
+
+ "$_InitializeBuildTool" "$@" || {
+ local exit_code=$?
+ # We should not Write-PipelineTaskError here because that message shows up in the build summary
+ # The build already logged an error, that's the reason it failed. Producing an error here only adds noise.
+ echo "Build failed with exit code $exit_code. Check errors above."
+ if [[ "$ci" == "true" ]]; then
+ Write-PipelineSetResult -result "Failed" -message "msbuild execution failed."
+ # Exiting with an exit code causes the azure pipelines task to log yet another "noise" error
+ # The above Write-PipelineSetResult will cause the task to be marked as failure without adding yet another error
+ ExitWithExitCode 0
+ else
+ ExitWithExitCode $exit_code
+ fi
+ }
+ }
+
+ RunBuildTool "$_InitializeBuildToolCommand" /m /nologo /clp:Summary /v:$verbosity /nr:$node_reuse $warnaserror_switch /p:TreatWarningsAsErrors=$warn_as_error /p:ContinuousIntegrationBuild=$ci "$@"
+}
+
+ResolvePath "${BASH_SOURCE[0]}"
+_script_dir=`dirname "$_ResolvePath"`
+
+. "$_script_dir/pipeline-logging-functions.sh"
+
+eng_root=`cd -P "$_script_dir/.." && pwd`
+repo_root=`cd -P "$_script_dir/../.." && pwd`
+repo_root="${repo_root}/"
+artifacts_dir="${repo_root}artifacts"
+toolset_dir="$artifacts_dir/toolset"
+tools_dir="${repo_root}.tools"
+log_dir="$artifacts_dir/log/$configuration"
+temp_dir="$artifacts_dir/tmp/$configuration"
+
+global_json_file="${repo_root}global.json"
+# determine if global.json contains a "runtimes" entry
+global_json_has_runtimes=false
+if command -v jq &> /dev/null; then
+ if jq -er '. | select(has("runtimes"))' "$global_json_file" &> /dev/null; then
+ global_json_has_runtimes=true
+ fi
+elif [[ "$(cat "$global_json_file")" =~ \"runtimes\"[[:space:]\:]*\{ ]]; then
+ global_json_has_runtimes=true
+fi
+
+# HOME may not be defined in some scenarios, but it is required by NuGet
+if [[ -z $HOME ]]; then
+ export HOME="${repo_root}artifacts/.home/"
+ mkdir -p "$HOME"
+fi
+
+mkdir -p "$toolset_dir"
+mkdir -p "$temp_dir"
+mkdir -p "$log_dir"
+
+Write-PipelineSetVariable -name "Artifacts" -value "$artifacts_dir"
+Write-PipelineSetVariable -name "Artifacts.Toolset" -value "$toolset_dir"
+Write-PipelineSetVariable -name "Artifacts.Log" -value "$log_dir"
+Write-PipelineSetVariable -name "Temp" -value "$temp_dir"
+Write-PipelineSetVariable -name "TMP" -value "$temp_dir"
+
+# Import custom tools configuration, if present in the repo.
+if [ -z "${disable_configure_toolset_import:-}" ]; then
+ configure_toolset_script="$eng_root/configure-toolset.sh"
+ if [[ -a "$configure_toolset_script" ]]; then
+ . "$configure_toolset_script"
+ fi
+fi
+
+# TODO: https://github.com/dotnet/arcade/issues/1468
+# Temporary workaround to avoid breaking change.
+# Remove once repos are updated.
+if [[ -n "${useInstalledDotNetCli:-}" ]]; then
+ use_installed_dotnet_cli="$useInstalledDotNetCli"
+fi
diff --git a/global.json b/global.json
new file mode 100644
index 0000000..dc9f4b8
--- /dev/null
+++ b/global.json
@@ -0,0 +1,8 @@
+{
+ "tools": {
+ "dotnet": "7.0.100-preview.7.22377.5"
+ },
+ "msbuild-sdks": {
+ "Microsoft.DotNet.Arcade.Sdk": "8.0.0-beta.22462.4"
+ }
+}
diff --git a/rocks/Mono.Cecil.Rocks.csproj b/rocks/Mono.Cecil.Rocks.csproj
index 3d66047..c6d9e87 100644
--- a/rocks/Mono.Cecil.Rocks.csproj
+++ b/rocks/Mono.Cecil.Rocks.csproj
@@ -1,6 +1,6 @@
-<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="Current">
+<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
- <TargetFrameworks>netstandard2.0;net40</TargetFrameworks>
+ <TargetFramework>$(ToolsFramework)</TargetFramework>
<DefineConstants>$(DefineConstants);INSIDE_ROCKS</DefineConstants>
</PropertyGroup>
<ItemGroup>
diff --git a/rocks/Test/Mono.Cecil.Rocks.Tests.csproj b/rocks/Test/Mono.Cecil.Rocks.Tests.csproj
index a15fce1..020d499 100644
--- a/rocks/Test/Mono.Cecil.Rocks.Tests.csproj
+++ b/rocks/Test/Mono.Cecil.Rocks.Tests.csproj
@@ -1,7 +1,7 @@
-<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="Current">
+<Project Sdk="Microsoft.NET.Sdk">
<Import Project="..\..\Mono.Cecil.Tests.props" />
<PropertyGroup>
- <TargetFrameworks>netcoreapp3.1;net40</TargetFrameworks>
+ <TargetFramework>$(NetCoreAppTestsCurrent)</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\Mono.Cecil.csproj">
@@ -20,4 +20,9 @@
<ItemGroup>
<None Include="Resources\**\*" />
</ItemGroup>
+ <ItemGroup>
+ <RuntimeHostConfigurationOption Include="Mono.Cecil.ResourcesDirectory">
+ <Value>$(MSBuildThisFileDirectory)Resources</Value>
+ </RuntimeHostConfigurationOption>
+ </ItemGroup>
</Project>
diff --git a/symbols/mdb/Mono.Cecil.Mdb.csproj b/symbols/mdb/Mono.Cecil.Mdb.csproj
index 08256ba..5977a44 100644
--- a/symbols/mdb/Mono.Cecil.Mdb.csproj
+++ b/symbols/mdb/Mono.Cecil.Mdb.csproj
@@ -1,6 +1,6 @@
-<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="Current">
+<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
- <TargetFrameworks>netstandard2.0;net40</TargetFrameworks>
+ <TargetFramework>$(ToolsFramework)</TargetFramework>
<DefineConstants>$(DefineConstants);CECIL</DefineConstants>
</PropertyGroup>
<ItemGroup>
diff --git a/symbols/mdb/Test/Mono.Cecil.Mdb.Tests.csproj b/symbols/mdb/Test/Mono.Cecil.Mdb.Tests.csproj
index 92da7f5..fdbc68a 100644
--- a/symbols/mdb/Test/Mono.Cecil.Mdb.Tests.csproj
+++ b/symbols/mdb/Test/Mono.Cecil.Mdb.Tests.csproj
@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="Current">
<Import Project="..\..\..\Mono.Cecil.Tests.props" />
<PropertyGroup>
- <TargetFrameworks>netcoreapp3.1;net40</TargetFrameworks>
+ <TargetFramework>$(NetCoreAppTestsCurrent)</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\Mono.Cecil.csproj">
@@ -20,4 +20,9 @@
<ItemGroup>
<None Include="Resources\**\*" />
</ItemGroup>
+ <ItemGroup>
+ <RuntimeHostConfigurationOption Include="Mono.Cecil.ResourcesDirectory">
+ <Value>$(MSBuildThisFileDirectory)Resources</Value>
+ </RuntimeHostConfigurationOption>
+ </ItemGroup>
</Project>
diff --git a/symbols/pdb/Microsoft.Cci.Pdb/IntHashTable.cs b/symbols/pdb/Microsoft.Cci.Pdb/IntHashTable.cs
index 221c07f..749f8f7 100644
--- a/symbols/pdb/Microsoft.Cci.Pdb/IntHashTable.cs
+++ b/symbols/pdb/Microsoft.Cci.Pdb/IntHashTable.cs
@@ -140,13 +140,13 @@ namespace Microsoft.Cci.Pdb {
// The hash table data.
// This cannot be serialised
- private struct bucket {
+ private struct Bucket {
internal int key;
internal int hash_coll; // Store hash code; sign bit means there was a collision.
internal Object val;
}
- private bucket[] buckets;
+ private Bucket[] buckets;
// The total number of entries in the hash table.
private int count;
@@ -200,7 +200,7 @@ namespace Microsoft.Cci.Pdb {
this.loadFactorPerc = (loadFactorPerc * 72) / 100;
int hashsize = GetPrime((int)(capacity / this.loadFactorPerc));
- buckets = new bucket[hashsize];
+ buckets = new Bucket[hashsize];
loadsize = (int)(this.loadFactorPerc * hashsize) / 100;
if (loadsize >= hashsize)
@@ -293,11 +293,11 @@ namespace Microsoft.Cci.Pdb {
uint seed;
uint incr;
// Take a snapshot of buckets, in case another thread does a resize
- bucket[] lbuckets = buckets;
+ Bucket[] lbuckets = buckets;
uint hashcode = InitHash(key, lbuckets.Length, out seed, out incr);
int ntry = 0;
- bucket b;
+ Bucket b;
do {
int bucketNumber = (int)(seed % (uint)lbuckets.Length);
b = lbuckets[bucketNumber];
@@ -343,12 +343,12 @@ namespace Microsoft.Cci.Pdb {
// at all times
// 2) Protect against an OutOfMemoryException while allocating this
// new bucket[].
- bucket[] newBuckets = new bucket[newsize];
+ Bucket[] newBuckets = new Bucket[newsize];
// rehash table into new buckets
int nb;
for (nb = 0; nb < buckets.Length; nb++) {
- bucket oldb = buckets[nb];
+ Bucket oldb = buckets[nb];
if (oldb.val != null) {
putEntry(newBuckets, oldb.key, oldb.val, oldb.hash_coll & 0x7FFFFFFF);
}
@@ -475,7 +475,7 @@ namespace Microsoft.Cci.Pdb {
throw new InvalidOperationException("InvalidOperation_HashInsertFailed");
}
- private void putEntry(bucket[] newBuckets, int key, Object nvalue, int hashcode) {
+ private void putEntry(Bucket[] newBuckets, int key, Object nvalue, int hashcode) {
uint seed = (uint)hashcode;
uint incr = (uint)(1 + (((seed >> 5) + 1) % ((uint)newBuckets.Length - 1)));
diff --git a/symbols/pdb/Mono.Cecil.Pdb.csproj b/symbols/pdb/Mono.Cecil.Pdb.csproj
index 8e7d08c..7cebe91 100644
--- a/symbols/pdb/Mono.Cecil.Pdb.csproj
+++ b/symbols/pdb/Mono.Cecil.Pdb.csproj
@@ -1,6 +1,6 @@
-<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="Current">
+<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
- <TargetFrameworks>netstandard2.0;net40</TargetFrameworks>
+ <TargetFramework>$(ToolsFramework)</TargetFramework>
<NoWarn>0649</NoWarn>
<AllowUnsafeBlocks>True</AllowUnsafeBlocks>
</PropertyGroup>
diff --git a/symbols/pdb/Mono.Cecil.Pdb/SymWriter.cs b/symbols/pdb/Mono.Cecil.Pdb/SymWriter.cs
index f309f2f..78e8a1e 100644
--- a/symbols/pdb/Mono.Cecil.Pdb/SymWriter.cs
+++ b/symbols/pdb/Mono.Cecil.Pdb/SymWriter.cs
@@ -79,10 +79,12 @@ namespace Mono.Cecil.Pdb
public void Close ()
{
writer.Close ();
+#pragma warning disable CA1416 // Validate platform compatibility
Marshal.ReleaseComObject (writer);
foreach (var document in documents)
Marshal.ReleaseComObject (document);
+#pragma warning restore CA1416 // Validate platform compatibility
}
public void CloseMethod ()
diff --git a/symbols/pdb/Test/Mono.Cecil.Pdb.Tests.csproj b/symbols/pdb/Test/Mono.Cecil.Pdb.Tests.csproj
index 8baa446..be65b59 100644
--- a/symbols/pdb/Test/Mono.Cecil.Pdb.Tests.csproj
+++ b/symbols/pdb/Test/Mono.Cecil.Pdb.Tests.csproj
@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="Current">
<Import Project="..\..\..\Mono.Cecil.Tests.props" />
<PropertyGroup>
- <TargetFrameworks>netcoreapp3.1;net40</TargetFrameworks>
+ <TargetFramework>$(NetCoreAppTestsCurrent)</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\Mono.Cecil.csproj">
@@ -20,4 +20,9 @@
<ItemGroup>
<None Include="Resources\**\*" />
</ItemGroup>
+ <ItemGroup>
+ <RuntimeHostConfigurationOption Include="Mono.Cecil.ResourcesDirectory">
+ <Value>$(MSBuildThisFileDirectory)Resources</Value>
+ </RuntimeHostConfigurationOption>
+ </ItemGroup>
</Project>