diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
index 72af997c0c25..a42011c11afe 100644
--- a/.config/dotnet-tools.json
+++ b/.config/dotnet-tools.json
@@ -15,7 +15,7 @@
]
},
"microsoft.dotnet.xharness.cli": {
- "version": "8.0.0-prerelease.24060.1",
+ "version": "8.0.0-prerelease.24229.2",
"commands": [
"xharness"
]
diff --git a/NuGet.config b/NuGet.config
index a681bc250bc9..b2329569aa4b 100644
--- a/NuGet.config
+++ b/NuGet.config
@@ -9,7 +9,7 @@
-
+
diff --git a/THIRD-PARTY-NOTICES.TXT b/THIRD-PARTY-NOTICES.TXT
index 4b40333f72ce..9b4e777ca47a 100644
--- a/THIRD-PARTY-NOTICES.TXT
+++ b/THIRD-PARTY-NOTICES.TXT
@@ -73,7 +73,7 @@ https://github.com/madler/zlib
https://zlib.net/zlib_license.html
/* zlib.h -- interface of the 'zlib' general purpose compression library
- version 1.2.13, October 13th, 2022
+ version 1.3.1, January 22nd, 2024
Copyright (C) 1995-2024 Jean-loup Gailly and Mark Adler
diff --git a/eng/Publishing.props b/eng/Publishing.props
index 920e79cbbd2f..8b796225f827 100644
--- a/eng/Publishing.props
+++ b/eng/Publishing.props
@@ -1,6 +1,7 @@
-
+
- 3
+ true
-
\ No newline at end of file
+
+
diff --git a/eng/SourceBuildPrebuiltBaseline.xml b/eng/SourceBuildPrebuiltBaseline.xml
index 458b2d756cba..81041ed6ed63 100644
--- a/eng/SourceBuildPrebuiltBaseline.xml
+++ b/eng/SourceBuildPrebuiltBaseline.xml
@@ -28,5 +28,11 @@
+
+
+
+
+
+
-
\ No newline at end of file
+
diff --git a/eng/Subsets.props b/eng/Subsets.props
index eb4151f3a185..c27a28412773 100644
--- a/eng/Subsets.props
+++ b/eng/Subsets.props
@@ -342,7 +342,7 @@
-
+
@@ -504,9 +504,10 @@
-
+
+
diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index b54cff26d4cf..b3b3b4e6f82e 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -90,30 +90,30 @@
45dd3a73dd5b64b010c4251303b3664bb30df029
-
+
https://github.com/dotnet/emsdk
- 9a29abdd764a4de0f253ed368871877a47725247
+ a64772f521c578bc9925578b1384d3a08a02d31d
-
+
https://github.com/dotnet/emsdk
- 9a29abdd764a4de0f253ed368871877a47725247
+ a64772f521c578bc9925578b1384d3a08a02d31d
-
+
https://github.com/dotnet/source-build-reference-packages
- 453a37ef7ae6c335cd49b3b9ab7713c87faeb265
+ 6ed73280a6d70f7e7ac39c86f2abe8c10983f0bb
-
+
https://github.com/dotnet/source-build-externals
- 83274d94c7e2ff21081b0d75ecbec2da2241f831
+ 4f2151df120194f0268944f1b723c14820738fc8
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
@@ -121,121 +121,121 @@
73f0850939d96131c28cf6ea6ee5aacb4da0083a
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
https://github.com/dotnet/llvm-project
@@ -322,21 +322,21 @@
https://github.com/dotnet/runtime
edbd5c769a19798b6955050baccf99e6797d3208
-
+
https://github.com/dotnet/xharness
- a417169d3ba558fd6daa522f04e686574bbce520
+ aacfb6328fdef17e572617bbb551431bb9cb1ff2
-
+
https://github.com/dotnet/xharness
- a417169d3ba558fd6daa522f04e686574bbce520
+ aacfb6328fdef17e572617bbb551431bb9cb1ff2
-
+
https://github.com/dotnet/xharness
- a417169d3ba558fd6daa522f04e686574bbce520
+ aacfb6328fdef17e572617bbb551431bb9cb1ff2
-
+
https://github.com/dotnet/arcade
- da98edc4c3ea539f109ea320672136ceb32591a7
+ e6f70c7dd528f05cd28cec2a179d58c22e91d9ac
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
@@ -354,13 +354,13 @@
https://dev.azure.com/dnceng/internal/_git/dotnet-optimization
67613417f5e1af250e6ddfba79f8f2885d8e90fb
-
+
https://github.com/dotnet/hotreload-utils
- bc857c64c5c5f1fc73048261e8f471c3310224d2
+ c804541158619aae93105f54698ca7f149d28232
-
+
https://github.com/dotnet/runtime-assets
- ca6c46012f68934198ce0d303196c3ae179230f5
+ 20ef600733c107d19f57de4955dfb025d39b99e3
https://github.com/dotnet/roslyn
@@ -410,5 +410,14 @@
https://github.com/dotnet/installer
46a7370763921ded24dcb70c585ee97883c615d4
+
+ https://github.com/dotnet/msbuild
+ 195e7f5a3a8e51c37d83cd9e54cb99dc3fc69c22
+
+
+ https://github.com/dotnet/msbuild
+ 195e7f5a3a8e51c37d83cd9e54cb99dc3fc69c22
+
+
diff --git a/eng/Versions.props b/eng/Versions.props
index 00d310742cb1..7528b6bfbf30 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -1,14 +1,14 @@
- 8.0.3
+ 8.0.7
8
0
- 3
+ 7
8.0.100
- 7.0.$([MSBuild]::Add($(PatchVersion),14))
- 6.0.$([MSBuild]::Add($([System.Version]::Parse('$(PackageVersionNet7)').Build),11))
+ 7.0.20
+ 6.0.$([MSBuild]::Add($(PatchVersion),25))
servicing
@@ -87,21 +87,21 @@
8.0.100
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 2.5.1-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
- 8.0.0-beta.24113.2
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 2.5.1-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
+ 8.0.0-beta.24266.3
6.0.0-preview.1.102
@@ -143,20 +143,20 @@
4.5.0
8.0.0-rc.1.23406.6
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
- 8.0.0-beta.24108.4
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
+ 8.0.0-beta.24270.1
1.0.0-prerelease.23566.3
1.0.0-prerelease.23566.3
@@ -165,14 +165,14 @@
1.0.0-prerelease.23566.3
1.0.0-prerelease.23566.3
- 16.11.29-beta1.23404.4
+ 17.10.0-beta1.24272.1
2.0.0-beta4.23307.1
3.0.3
2.1.0
2.0.3
1.0.4-preview6.19326.1
2.0.5
- 17.3.2
+ 17.8.3
$(MicrosoftBuildVersion)
$(MicrosoftBuildVersion)
$(MicrosoftBuildVersion)
@@ -183,10 +183,10 @@
1.1.0
17.4.0-preview-20220707-01
- 8.0.0-prerelease.24060.1
- 8.0.0-prerelease.24060.1
- 8.0.0-prerelease.24060.1
- 8.0.0-alpha.0.24072.2
+ 8.0.0-prerelease.24229.2
+ 8.0.0-prerelease.24229.2
+ 8.0.0-prerelease.24229.2
+ 8.0.0-alpha.0.24271.1
2.4.2
1.0.0
2.4.5
@@ -219,7 +219,7 @@
8.0.0-rtm.23523.2
- 2.2.3
+ 2.3.5
8.0.0-alpha.1.23527.1
16.0.5-alpha.1.23566.1
@@ -240,7 +240,7 @@
Note: when the name is updated, make sure to update dependency name in eng/pipelines/common/xplat-setup.yml
like - DarcDependenciesChanged.Microsoft_NET_Workload_Emscripten_Current_Manifest-8_0_100_Transport
-->
- 8.0.3
+ 8.0.7
$(MicrosoftNETWorkloadEmscriptenCurrentManifest80100Version)
1.1.87-gba258badda
@@ -260,4 +260,12 @@
8.0.101
$(MicrosoftDotnetSdkInternalVersion)
+
+
+ true
+
diff --git a/eng/build.sh b/eng/build.sh
index 772d7ac8be82..4d0d17a22044 100755
--- a/eng/build.sh
+++ b/eng/build.sh
@@ -32,7 +32,7 @@ usage()
echo " [Default: Debug]"
echo " --os Target operating system: windows, linux, freebsd, osx, maccatalyst, tvos,"
echo " tvossimulator, ios, iossimulator, android, browser, wasi, netbsd, illumos, solaris"
- echo " linux-musl, linux-bionic or haiku."
+ echo " linux-musl, linux-bionic, tizen, or haiku."
echo " [Default: Your machine's OS.]"
echo " --outputrid Optional argument that overrides the target rid name."
echo " --projects Project or solution file(s) to build."
diff --git a/eng/collect_vsinfo.ps1 b/eng/collect_vsinfo.ps1
new file mode 100644
index 000000000000..e2178fe4b24f
--- /dev/null
+++ b/eng/collect_vsinfo.ps1
@@ -0,0 +1,65 @@
+<#
+.PARAMETER ArchiveRunName
+Name of the run for vs logs
+
+.NOTES
+Returns 0 if succeeds, 1 otherwise
+#>
+[CmdletBinding(PositionalBinding=$false)]
+Param (
+ [Parameter(Mandatory=$True)]
+ [string] $ArchiveRunName
+)
+
+. $PSScriptRoot/common/tools.ps1
+
+$ProgressPreference = "SilentlyContinue"
+$LogDir = Join-Path $LogDir $ArchiveRunName
+mkdir $LogDir
+
+$vscollect_uri="http://aka.ms/vscollect.exe"
+$vscollect="$env:TEMP\vscollect.exe"
+
+if (-not (Test-Path $vscollect)) {
+ Retry({
+ Write-Host "GET $vscollect_uri"
+ Invoke-WebRequest $vscollect_uri -OutFile $vscollect -UseBasicParsing
+ })
+
+ if (-not (Test-Path $vscollect)) {
+ Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Unable to download vscollect."
+ exit 1
+ }
+}
+
+&"$vscollect"
+Move-Item $env:TEMP\vslogs.zip "$LogDir"
+
+$vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
+if (-not (Test-Path -Path "$vswhere" -PathType Leaf))
+{
+ Write-Error "Couldn't locate vswhere at $vswhere"
+ exit 1
+}
+
+&"$vswhere" -all -prerelease -products * | Tee-Object -FilePath "$LogDir\vs_where.log"
+
+$vsdir = &"$vswhere" -latest -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath
+
+if (-not (Test-Path $vsdir))
+{
+ $procDumpDir = Join-Path $ToolsDir "procdump"
+ $procDumpToolPath = Join-Path $procDumpDir "procdump.exe"
+ $procdump_uri = "https://download.sysinternals.com/files/Procdump.zip"
+
+ if (-not (Test-Path $procDumpToolPath)) {
+ Retry({
+ Write-Host "GET $procdump_uri"
+ Invoke-WebRequest $procdump_uri -OutFile "$TempDir\Procdump.zip" -UseBasicParsing
+ })
+
+ Expand-Archive -Path "$TempDir\Procdump.zip" $procDumpDir
+ }
+
+ &"$procDumpToolPath" -ma -accepteula VSIXAutoUpdate.exe "$LogDir"
+}
diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1
index 6c65e81925f2..efa2fd72bfaa 100644
--- a/eng/common/SetupNugetSources.ps1
+++ b/eng/common/SetupNugetSources.ps1
@@ -35,7 +35,7 @@ Set-StrictMode -Version 2.0
. $PSScriptRoot\tools.ps1
# Add source entry to PackageSources
-function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $Password) {
+function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Username, $pwd) {
$packageSource = $sources.SelectSingleNode("add[@key='$SourceName']")
if ($packageSource -eq $null)
@@ -48,12 +48,11 @@ function AddPackageSource($sources, $SourceName, $SourceEndPoint, $creds, $Usern
else {
Write-Host "Package source $SourceName already present."
}
-
- AddCredential -Creds $creds -Source $SourceName -Username $Username -Password $Password
+ AddCredential -Creds $creds -Source $SourceName -Username $Username -pwd $pwd
}
# Add a credential node for the specified source
-function AddCredential($creds, $source, $username, $password) {
+function AddCredential($creds, $source, $username, $pwd) {
# Looks for credential configuration for the given SourceName. Create it if none is found.
$sourceElement = $creds.SelectSingleNode($Source)
if ($sourceElement -eq $null)
@@ -82,17 +81,18 @@ function AddCredential($creds, $source, $username, $password) {
$passwordElement.SetAttribute("key", "ClearTextPassword")
$sourceElement.AppendChild($passwordElement) | Out-Null
}
- $passwordElement.SetAttribute("value", $Password)
+
+ $passwordElement.SetAttribute("value", $pwd)
}
-function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $Password) {
+function InsertMaestroPrivateFeedCredentials($Sources, $Creds, $Username, $pwd) {
$maestroPrivateSources = $Sources.SelectNodes("add[contains(@key,'darc-int')]")
Write-Host "Inserting credentials for $($maestroPrivateSources.Count) Maestro's private feeds."
ForEach ($PackageSource in $maestroPrivateSources) {
Write-Host "`tInserting credential for Maestro's feed:" $PackageSource.Key
- AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -Password $Password
+ AddCredential -Creds $creds -Source $PackageSource.Key -Username $Username -pwd $pwd
}
}
@@ -144,13 +144,13 @@ if ($disabledSources -ne $null) {
$userName = "dn-bot"
# Insert credential nodes for Maestro's private feeds
-InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -Password $Password
+InsertMaestroPrivateFeedCredentials -Sources $sources -Creds $creds -Username $userName -pwd $Password
# 3.1 uses a different feed url format so it's handled differently here
$dotnet31Source = $sources.SelectSingleNode("add[@key='dotnet3.1']")
if ($dotnet31Source -ne $null) {
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
- AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+ AddPackageSource -Sources $sources -SourceName "dotnet3.1-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/_packaging/dotnet3.1-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
}
$dotnetVersions = @('5','6','7','8')
@@ -159,9 +159,9 @@ foreach ($dotnetVersion in $dotnetVersions) {
$feedPrefix = "dotnet" + $dotnetVersion;
$dotnetSource = $sources.SelectSingleNode("add[@key='$feedPrefix']")
if ($dotnetSource -ne $null) {
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -Password $Password
- AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -Password $Password
+ AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal/nuget/v2" -Creds $creds -Username $userName -pwd $Password
+ AddPackageSource -Sources $sources -SourceName "$feedPrefix-internal-transport" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/$feedPrefix-internal-transport/nuget/v2" -Creds $creds -Username $userName -pwd $Password
}
}
-$doc.Save($filename)
+$doc.Save($filename)
\ No newline at end of file
diff --git a/eng/common/native/init-compiler.sh b/eng/common/native/init-compiler.sh
index f5c1ec7eafeb..2d5660642b8d 100644
--- a/eng/common/native/init-compiler.sh
+++ b/eng/common/native/init-compiler.sh
@@ -63,7 +63,7 @@ if [ -z "$CLR_CC" ]; then
# Set default versions
if [ -z "$majorVersion" ]; then
# note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
- if [ "$compiler" = "clang" ]; then versions="17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
+ if [ "$compiler" = "clang" ]; then versions="18 17 16 15 14 13 12 11 10 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5"
elif [ "$compiler" = "gcc" ]; then versions="13 12 11 10 9 8 7 6 5 4.9"; fi
for version in $versions; do
diff --git a/eng/common/templates-official/job/job.yml b/eng/common/templates-official/job/job.yml
new file mode 100644
index 000000000000..1f035fee73f4
--- /dev/null
+++ b/eng/common/templates-official/job/job.yml
@@ -0,0 +1,264 @@
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+parameters:
+# Job schema parameters - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ cancelTimeoutInMinutes: ''
+ condition: ''
+ container: ''
+ continueOnError: false
+ dependsOn: ''
+ displayName: ''
+ pool: ''
+ steps: []
+ strategy: ''
+ timeoutInMinutes: ''
+ variables: []
+ workspace: ''
+ templateContext: ''
+
+# Job base template specific parameters
+ # See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
+ artifacts: ''
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishBuildAssets: false
+ enablePublishTestResults: false
+ enablePublishUsingPipelines: false
+ enableBuildRetry: false
+ disableComponentGovernance: ''
+ componentGovernanceIgnoreDirectories: ''
+ mergeTestResults: false
+ testRunTitle: ''
+ testResultsFormat: ''
+ name: ''
+ preSteps: []
+ runAsPublic: false
+# Sbom related params
+ enableSbom: true
+ PackageVersion: 7.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+
+jobs:
+- job: ${{ parameters.name }}
+
+ ${{ if ne(parameters.cancelTimeoutInMinutes, '') }}:
+ cancelTimeoutInMinutes: ${{ parameters.cancelTimeoutInMinutes }}
+
+ ${{ if ne(parameters.condition, '') }}:
+ condition: ${{ parameters.condition }}
+
+ ${{ if ne(parameters.container, '') }}:
+ container: ${{ parameters.container }}
+
+ ${{ if ne(parameters.continueOnError, '') }}:
+ continueOnError: ${{ parameters.continueOnError }}
+
+ ${{ if ne(parameters.dependsOn, '') }}:
+ dependsOn: ${{ parameters.dependsOn }}
+
+ ${{ if ne(parameters.displayName, '') }}:
+ displayName: ${{ parameters.displayName }}
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+
+ ${{ if ne(parameters.strategy, '') }}:
+ strategy: ${{ parameters.strategy }}
+
+ ${{ if ne(parameters.timeoutInMinutes, '') }}:
+ timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+
+ ${{ if ne(parameters.templateContext, '') }}:
+ templateContext: ${{ parameters.templateContext }}
+
+ variables:
+ - ${{ if ne(parameters.enableTelemetry, 'false') }}:
+ - name: DOTNET_CLI_TELEMETRY_PROFILE
+ value: '$(Build.Repository.Uri)'
+ - ${{ if eq(parameters.enableRichCodeNavigation, 'true') }}:
+ - name: EnableRichCodeNavigation
+ value: 'true'
+ # Retry signature validation up to three times, waiting 2 seconds between attempts.
+ # See https://learn.microsoft.com/en-us/nuget/reference/errors-and-warnings/nu3028#retry-untrusted-root-failures
+ - name: NUGET_EXPERIMENTAL_CHAIN_BUILD_RETRY_POLICY
+ value: 3,2000
+ - ${{ each variable in parameters.variables }}:
+ # handle name-value variable syntax
+ # example:
+ # - name: [key]
+ # value: [value]
+ - ${{ if ne(variable.name, '') }}:
+ - name: ${{ variable.name }}
+ value: ${{ variable.value }}
+
+ # handle variable groups
+ - ${{ if ne(variable.group, '') }}:
+ - group: ${{ variable.group }}
+
+ # handle template variable syntax
+ # example:
+ # - template: path/to/template.yml
+ # parameters:
+ # [key]: [value]
+ - ${{ if ne(variable.template, '') }}:
+ - template: ${{ variable.template }}
+ ${{ if ne(variable.parameters, '') }}:
+ parameters: ${{ variable.parameters }}
+
+ # handle key-value variable syntax.
+ # example:
+ # - [key]: [value]
+ - ${{ if and(eq(variable.name, ''), eq(variable.group, ''), eq(variable.template, '')) }}:
+ - ${{ each pair in variable }}:
+ - name: ${{ pair.key }}
+ value: ${{ pair.value }}
+
+ # DotNet-HelixApi-Access provides 'HelixApiAccessToken' for internal builds
+ - ${{ if and(eq(parameters.enableTelemetry, 'true'), eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: DotNet-HelixApi-Access
+
+ ${{ if ne(parameters.workspace, '') }}:
+ workspace: ${{ parameters.workspace }}
+
+ steps:
+ - ${{ if ne(parameters.preSteps, '') }}:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - task: MicroBuildSigningPlugin@4
+ displayName: Install MicroBuild plugin
+ inputs:
+ signType: $(_SignType)
+ zipSources: false
+ feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json
+ env:
+ TeamName: $(_TeamName)
+ MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
+ continueOnError: ${{ parameters.continueOnError }}
+ condition: and(succeeded(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), eq(variables['System.TeamProject'], 'internal')) }}:
+ - task: NuGetAuthenticate@1
+
+ - ${{ if and(ne(parameters.artifacts.download, 'false'), ne(parameters.artifacts.download, '')) }}:
+ - task: DownloadPipelineArtifact@2
+ inputs:
+ buildType: current
+ artifactName: ${{ coalesce(parameters.artifacts.download.name, 'Artifacts_$(Agent.OS)_$(_BuildConfig)') }}
+ targetPath: ${{ coalesce(parameters.artifacts.download.path, 'artifacts') }}
+ itemPattern: ${{ coalesce(parameters.artifacts.download.pattern, '**') }}
+
+ - ${{ each step in parameters.steps }}:
+ - ${{ step }}
+
+ - ${{ if eq(parameters.enableRichCodeNavigation, true) }}:
+ - task: RichCodeNavIndexer@0
+ displayName: RichCodeNav Upload
+ inputs:
+ languages: ${{ coalesce(parameters.richCodeNavigationLanguage, 'csharp') }}
+ environment: ${{ coalesce(parameters.richCodeNavigationEnvironment, 'production') }}
+ richNavLogOutputDirectory: $(Build.SourcesDirectory)/artifacts/bin
+ uploadRichNavArtifacts: ${{ coalesce(parameters.richCodeNavigationUploadArtifacts, false) }}
+ continueOnError: true
+
+ - template: /eng/common/templates-official/steps/component-governance.yml
+ parameters:
+ ${{ if eq(parameters.disableComponentGovernance, '') }}:
+ ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.runAsPublic, 'false'), or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/dotnet/'), startsWith(variables['Build.SourceBranch'], 'refs/heads/microsoft/'), eq(variables['Build.SourceBranch'], 'refs/heads/main'))) }}:
+ disableComponentGovernance: false
+ ${{ else }}:
+ disableComponentGovernance: true
+ ${{ else }}:
+ disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
+ componentGovernanceIgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+
+ - ${{ if eq(parameters.enableMicrobuild, 'true') }}:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: MicroBuildCleanup@1
+ displayName: Execute Microbuild cleanup tasks
+ condition: and(always(), in(variables['_SignType'], 'real', 'test'), eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ env:
+ TeamName: $(_TeamName)
+
+ - ${{ if ne(parameters.artifacts.publish, '') }}:
+ - ${{ if and(ne(parameters.artifacts.publish.artifacts, 'false'), ne(parameters.artifacts.publish.artifacts, '')) }}:
+ - task: CopyFiles@2
+ displayName: Gather binaries for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/bin'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/bin'
+ - task: CopyFiles@2
+ displayName: Gather packages for publish to artifacts
+ inputs:
+ SourceFolder: 'artifacts/packages'
+ Contents: '**'
+ TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/packages'
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish pipeline artifacts
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.artifacts.publish.artifacts.name , 'Artifacts_$(Agent.Os)_$(_BuildConfig)') }}
+ continueOnError: true
+ condition: always()
+ - ${{ if and(ne(parameters.artifacts.publish.logs, 'false'), ne(parameters.artifacts.publish.logs, '')) }}:
+ - task: 1ES.PublishPipelineArtifact@1
+ inputs:
+ targetPath: 'artifacts/log'
+ artifactName: ${{ coalesce(parameters.artifacts.publish.logs.name, 'Logs_Build_$(Agent.Os)_$(_BuildConfig)') }}
+ displayName: 'Publish logs'
+ continueOnError: true
+ condition: always()
+
+ - ${{ if ne(parameters.enablePublishBuildArtifacts, 'false') }}:
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/$(_BuildConfig)'
+ PublishLocation: Container
+ ArtifactName: ${{ coalesce(parameters.enablePublishBuildArtifacts.artifactName, '$(Agent.Os)_$(Agent.JobName)' ) }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'xunit')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish XUnit Test Results
+ inputs:
+ testResultsFormat: 'xUnit'
+ testResultsFiles: '*.xml'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-xunit
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+ - ${{ if or(and(eq(parameters.enablePublishTestResults, 'true'), eq(parameters.testResultsFormat, '')), eq(parameters.testResultsFormat, 'vstest')) }}:
+ - task: PublishTestResults@2
+ displayName: Publish TRX Test Results
+ inputs:
+ testResultsFormat: 'VSTest'
+ testResultsFiles: '*.trx'
+ searchFolder: '$(Build.SourcesDirectory)/artifacts/TestResults/$(_BuildConfig)'
+ testRunTitle: ${{ coalesce(parameters.testRunTitle, parameters.name, '$(System.JobName)') }}-trx
+ mergeTestResults: ${{ parameters.mergeTestResults }}
+ continueOnError: true
+ condition: always()
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest'), eq(parameters.enableSbom, 'true')) }}:
+ - template: /eng/common/templates-official/steps/generate-sbom.yml
+ parameters:
+ PackageVersion: ${{ parameters.packageVersion}}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ IgnoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
+
+ - ${{ if eq(parameters.enableBuildRetry, 'true') }}:
+ - task: 1ES.PublishPipelineArtifact@1
+ inputs:
+ targetPath: '$(Build.SourcesDirectory)\eng\common\BuildConfiguration'
+ artifactName: 'BuildConfiguration'
+ displayName: 'Publish build retry configuration'
+ continueOnError: true
\ No newline at end of file
diff --git a/eng/common/templates-official/job/onelocbuild.yml b/eng/common/templates-official/job/onelocbuild.yml
new file mode 100644
index 000000000000..52b4d05d3f8d
--- /dev/null
+++ b/eng/common/templates-official/job/onelocbuild.yml
@@ -0,0 +1,112 @@
+parameters:
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: ''
+
+ CeapexPat: $(dn-bot-ceapex-package-r) # PAT for the loc AzDO instance https://dev.azure.com/ceapex
+ GithubPat: $(BotAccount-dotnet-bot-repo-PAT)
+
+ SourcesDirectory: $(Build.SourcesDirectory)
+ CreatePr: true
+ AutoCompletePr: false
+ ReusePr: true
+ UseLfLineEndings: true
+ UseCheckedInLocProjectJson: false
+ SkipLocProjectJsonGeneration: false
+ LanguageSet: VS_Main_Languages
+ LclSource: lclFilesInRepo
+ LclPackageId: ''
+ RepoType: gitHub
+ GitHubOrg: dotnet
+ MirrorRepo: ''
+ MirrorBranch: main
+ condition: ''
+ JobNameSuffix: ''
+
+jobs:
+- job: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ dependsOn: ${{ parameters.dependsOn }}
+
+ displayName: OneLocBuild${{ parameters.JobNameSuffix }}
+
+ variables:
+ - group: OneLocBuildVariables # Contains the CeapexPat and GithubPat
+ - name: _GenerateLocProjectArguments
+ value: -SourcesDirectory ${{ parameters.SourcesDirectory }}
+ -LanguageSet "${{ parameters.LanguageSet }}"
+ -CreateNeutralXlfs
+ - ${{ if eq(parameters.UseCheckedInLocProjectJson, 'true') }}:
+ - name: _GenerateLocProjectArguments
+ value: ${{ variables._GenerateLocProjectArguments }} -UseCheckedInLocProjectJson
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+
+ steps:
+ - ${{ if ne(parameters.SkipLocProjectJsonGeneration, 'true') }}:
+ - task: Powershell@2
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/generate-locproject.ps1
+ arguments: $(_GenerateLocProjectArguments)
+ displayName: Generate LocProject.json
+ condition: ${{ parameters.condition }}
+
+ - task: OneLocBuild@2
+ displayName: OneLocBuild
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ inputs:
+ locProj: eng/Localize/LocProject.json
+ outDir: $(Build.ArtifactStagingDirectory)
+ lclSource: ${{ parameters.LclSource }}
+ lclPackageId: ${{ parameters.LclPackageId }}
+ isCreatePrSelected: ${{ parameters.CreatePr }}
+ isAutoCompletePrSelected: ${{ parameters.AutoCompletePr }}
+ ${{ if eq(parameters.CreatePr, true) }}:
+ isUseLfLineEndingsSelected: ${{ parameters.UseLfLineEndings }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ isShouldReusePrSelected: ${{ parameters.ReusePr }}
+ packageSourceAuth: patAuth
+ patVariable: ${{ parameters.CeapexPat }}
+ ${{ if eq(parameters.RepoType, 'gitHub') }}:
+ repoType: ${{ parameters.RepoType }}
+ gitHubPatVariable: "${{ parameters.GithubPat }}"
+ ${{ if ne(parameters.MirrorRepo, '') }}:
+ isMirrorRepoSelected: true
+ gitHubOrganization: ${{ parameters.GitHubOrg }}
+ mirrorRepo: ${{ parameters.MirrorRepo }}
+ mirrorBranch: ${{ parameters.MirrorBranch }}
+ condition: ${{ parameters.condition }}
+
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish Localization Files
+ inputs:
+ PathtoPublish: '$(Build.ArtifactStagingDirectory)/loc'
+ PublishLocation: Container
+ ArtifactName: Loc
+ condition: ${{ parameters.condition }}
+
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish LocProject.json
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/eng/Localize/'
+ PublishLocation: Container
+ ArtifactName: Loc
+ condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/common/templates-official/job/publish-build-assets.yml b/eng/common/templates-official/job/publish-build-assets.yml
new file mode 100644
index 000000000000..589ac80a18b7
--- /dev/null
+++ b/eng/common/templates-official/job/publish-build-assets.yml
@@ -0,0 +1,155 @@
+parameters:
+ configuration: 'Debug'
+
+ # Optional: condition for the job to run
+ condition: ''
+
+ # Optional: 'true' if future jobs should run even if this job fails
+ continueOnError: false
+
+ # Optional: dependencies of the job
+ dependsOn: ''
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: A defined YAML pool - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#pool
+ pool: {}
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ # Optional: whether the build's artifacts will be published using release pipelines or direct feed publishing
+ publishUsingPipelines: false
+
+  # Optional: whether the build's assets should be published as soon as the publish-to-BAR step completes, rather than in a separate stage
+ publishAssetsImmediately: false
+
+ artifactsPublishingAdditionalParameters: ''
+
+ signingValidationAdditionalParameters: ''
+
+jobs:
+- job: Asset_Registry_Publish
+
+ dependsOn: ${{ parameters.dependsOn }}
+ timeoutInMinutes: 150
+
+ ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ displayName: Publish Assets
+ ${{ else }}:
+ displayName: Publish to Build Asset Registry
+
+ variables:
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - group: Publish-Build-Assets
+ - group: AzureDevOps-Artifact-Feeds-Pats
+ - name: runCodesignValidationInjection
+ value: false
+ - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - template: /eng/common/templates-official/post-build/common-variables.yml
+
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2019.amd64
+ os: windows
+ steps:
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download artifact
+ inputs:
+ artifactName: AssetManifests
+ downloadPath: '$(Build.StagingDirectory)/Download'
+ checkDownloadedFiles: true
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: PowerShell@2
+ displayName: Publish Build Assets
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task PublishBuildAssets -restore -msbuildEngine dotnet
+ /p:ManifestsPath='$(Build.StagingDirectory)/Download/AssetManifests'
+ /p:BuildAssetRegistryToken=$(MaestroAccessToken)
+ /p:MaestroApiEndpoint=https://maestro-prod.westus2.cloudapp.azure.com
+ /p:PublishUsingPipelines=${{ parameters.publishUsingPipelines }}
+ /p:OfficialBuildId=$(Build.BuildNumber)
+ condition: ${{ parameters.condition }}
+ continueOnError: ${{ parameters.continueOnError }}
+
+ - task: powershell@2
+ displayName: Create ReleaseConfigs Artifact
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -Path "$(Build.StagingDirectory)/ReleaseConfigs" -ItemType Directory -Force
+ $filePath = "$(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt"
+ Add-Content -Path $filePath -Value $(BARBuildId)
+ Add-Content -Path $filePath -Value "$(DefaultChannels)"
+ Add-Content -Path $filePath -Value $(IsStableBuild)
+
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish ReleaseConfigs Artifact
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/ReleaseConfigs'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
+ - task: powershell@2
+ displayName: Check if SymbolPublishingExclusionsFile.txt exists
+ inputs:
+ targetType: inline
+ script: |
+ $symbolExclusionfile = "$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt"
+ if(Test-Path -Path $symbolExclusionfile)
+ {
+ Write-Host "SymbolExclusionFile exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]true"
+ }
+ else{
+ Write-Host "Symbols Exclusion file does not exists"
+ Write-Host "##vso[task.setvariable variable=SymbolExclusionFile]false"
+ }
+
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish SymbolPublishingExclusionsFile Artifact
+ condition: eq(variables['SymbolExclusionFile'], 'true')
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/eng/SymbolPublishingExclusionsFile.txt'
+ PublishLocation: Container
+ ArtifactName: ReleaseConfigs
+
+ - ${{ if eq(parameters.publishAssetsImmediately, 'true') }}:
+ - template: /eng/common/templates-official/post-build/setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion 3
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
+
+ - ${{ if eq(parameters.enablePublishBuildArtifacts, 'true') }}:
+ - template: /eng/common/templates-official/steps/publish-logs.yml
+ parameters:
+ JobLabel: 'Publish_Artifacts_Logs'
diff --git a/eng/common/templates-official/job/source-build.yml b/eng/common/templates-official/job/source-build.yml
new file mode 100644
index 000000000000..f193dfbe2366
--- /dev/null
+++ b/eng/common/templates-official/job/source-build.yml
@@ -0,0 +1,67 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. The template produces a server job with a
+ # default ID 'Source_Build_Complete' to put in a dependency list if necessary.
+
+ # Specifies the prefix for source-build jobs added to pipeline. Use this if disambiguation needed.
+ jobNamePrefix: 'Source_Build'
+
+ # Defines the platform on which to run the job. By default, a linux-x64 machine, suitable for
+ # managed-only repositories. This is an object with these properties:
+ #
+ # name: ''
+ # The name of the job. This is included in the job ID.
+ # targetRID: ''
+ # The name of the target RID to use, instead of the one auto-detected by Arcade.
+ # nonPortable: false
+ # Enables non-portable mode. This means a more specific RID (e.g. fedora.32-x64 rather than
+ # linux-x64), and compiling against distro-provided packages rather than portable ones.
+ # skipPublishValidation: false
+ # Disables publishing validation. By default, a check is performed to ensure no packages are
+ # published by source-build.
+ # container: ''
+ # A container to use. Runs in docker.
+ # pool: {}
+ # A pool to use. Runs directly on an agent.
+ # buildScript: ''
+ # Specifies the build script to invoke to perform the build in the repo. The default
+ # './build.sh' should work for typical Arcade repositories, but this is customizable for
+ # difficult situations.
+ # jobProperties: {}
+ # A list of job properties to inject at the top level, for potential extensibility beyond
+ # container and pool.
+ platform: {}
+
+jobs:
+- job: ${{ parameters.jobNamePrefix }}_${{ parameters.platform.name }}
+ displayName: Source-Build (${{ parameters.platform.name }})
+
+ ${{ each property in parameters.platform.jobProperties }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ ${{ if ne(parameters.platform.container, '') }}:
+ container: ${{ parameters.platform.container }}
+
+ ${{ if eq(parameters.platform.pool, '') }}:
+ # The default VM host AzDO pool. This should be capable of running Docker containers: almost all
+ # source-build builds run in Docker, including the default managed platform.
+ # /eng/common/templates-official/variables/pool-providers.yml can't be used here (some customers declare variables already), so duplicate its logic
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')]
+ demands: ImageOverride -equals Build.Ubuntu.1804.Amd64.Open
+
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')]
+ image: 1es-mariner-2
+ os: linux
+
+ ${{ if ne(parameters.platform.pool, '') }}:
+ pool: ${{ parameters.platform.pool }}
+
+ workspace:
+ clean: all
+
+ steps:
+ - template: /eng/common/templates-official/steps/source-build.yml
+ parameters:
+ platform: ${{ parameters.platform }}
diff --git a/eng/common/templates-official/job/source-index-stage1.yml b/eng/common/templates-official/job/source-index-stage1.yml
new file mode 100644
index 000000000000..43ee0c202fc7
--- /dev/null
+++ b/eng/common/templates-official/job/source-index-stage1.yml
@@ -0,0 +1,85 @@
+parameters:
+ runAsPublic: false
+ sourceIndexUploadPackageVersion: 2.0.0-20240502.12
+ sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2
+ sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
+ sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
+ preSteps: []
+ binlogPath: artifacts/log/Debug/Build.binlog
+ condition: ''
+ dependsOn: ''
+ pool: ''
+
+jobs:
+- job: SourceIndexStage1
+ dependsOn: ${{ parameters.dependsOn }}
+ condition: ${{ parameters.condition }}
+ variables:
+ - name: SourceIndexUploadPackageVersion
+ value: ${{ parameters.sourceIndexUploadPackageVersion }}
+ - name: SourceIndexProcessBinlogPackageVersion
+ value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
+ - name: SourceIndexPackageSource
+ value: ${{ parameters.sourceIndexPackageSource }}
+ - name: BinlogPath
+ value: ${{ parameters.binlogPath }}
+ - template: /eng/common/templates/variables/pool-providers.yml
+
+ ${{ if ne(parameters.pool, '') }}:
+ pool: ${{ parameters.pool }}
+ ${{ if eq(parameters.pool, '') }}:
+ pool:
+ ${{ if eq(variables['System.TeamProject'], 'public') }}:
+ name: $(DncEngPublicBuildPool)
+ demands: ImageOverride -equals windows.vs2019.amd64.open
+ ${{ if eq(variables['System.TeamProject'], 'internal') }}:
+ name: $(DncEngInternalBuildPool)
+ demands: ImageOverride -equals windows.vs2019.amd64
+
+ steps:
+ - ${{ each preStep in parameters.preSteps }}:
+ - ${{ preStep }}
+
+ - task: UseDotNet@2
+ displayName: Use .NET 8 SDK
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: $(Agent.TempDirectory)/dotnet
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: |
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ displayName: Download Tools
+ # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
+ workingDirectory: $(Agent.TempDirectory)
+
+ - script: ${{ parameters.sourceIndexBuildCommand }}
+ displayName: Build Repository
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/BinLogToSln -i $(BinlogPath) -r $(Build.SourcesDirectory) -n $(Build.Repository.Name) -o .source-index/stage1output
+ displayName: Process Binlog into indexable sln
+
+ - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - task: AzureCLI@2
+ displayName: Get stage 1 auth token
+ inputs:
+ azureSubscription: 'SourceDotNet Stage1 Publish'
+ addSpnToEnvironment: true
+ scriptType: 'ps'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ echo "##vso[task.setvariable variable=ARM_CLIENT_ID]$env:servicePrincipalId"
+ echo "##vso[task.setvariable variable=ARM_ID_TOKEN]$env:idToken"
+ echo "##vso[task.setvariable variable=ARM_TENANT_ID]$env:tenantId"
+
+ - script: |
+ echo "Client ID: $(ARM_CLIENT_ID)"
+ echo "ID Token: $(ARM_ID_TOKEN)"
+ echo "Tenant ID: $(ARM_TENANT_ID)"
+ az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
+ displayName: "Login to Azure"
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
+ displayName: Upload stage1 artifacts to source index
\ No newline at end of file
diff --git a/eng/common/templates-official/jobs/codeql-build.yml b/eng/common/templates-official/jobs/codeql-build.yml
new file mode 100644
index 000000000000..b68d3c2f3199
--- /dev/null
+++ b/eng/common/templates-official/jobs/codeql-build.yml
@@ -0,0 +1,31 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+
+jobs:
+- template: /eng/common/templates-official/jobs/jobs.yml
+ parameters:
+ enableMicrobuild: false
+ enablePublishBuildArtifacts: false
+ enablePublishTestResults: false
+ enablePublishBuildAssets: false
+ enablePublishUsingPipelines: false
+ enableTelemetry: true
+
+ variables:
+ - group: Publish-Build-Assets
+ # The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+ # sync with the packages.config file.
+ - name: DefaultGuardianVersion
+ value: 0.109.0
+ - name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
+ - name: GuardianVersion
+ value: ${{ coalesce(parameters.overrideGuardianVersion, '$(DefaultGuardianVersion)') }}
+
+ jobs: ${{ parameters.jobs }}
+
diff --git a/eng/common/templates-official/jobs/jobs.yml b/eng/common/templates-official/jobs/jobs.yml
new file mode 100644
index 000000000000..857a0f8ba43e
--- /dev/null
+++ b/eng/common/templates-official/jobs/jobs.yml
@@ -0,0 +1,97 @@
+parameters:
+ # See schema documentation in /Documentation/AzureDevOps/TemplateSchema.md
+ continueOnError: false
+
+ # Optional: Include PublishBuildArtifacts task
+ enablePublishBuildArtifacts: false
+
+ # Optional: Enable publishing using release pipelines
+ enablePublishUsingPipelines: false
+
+ # Optional: Enable running the source-build jobs to build repo from source
+ enableSourceBuild: false
+
+ # Optional: Parameters for source-build template.
+ # See /eng/common/templates-official/jobs/source-build.yml for options
+ sourceBuildParameters: []
+
+ graphFileGeneration:
+ # Optional: Enable generating the graph files at the end of the build
+ enabled: false
+ # Optional: Include toolset dependencies in the generated graph files
+ includeToolset: false
+
+ # Required: A collection of jobs to run - https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=vsts&tabs=schema#job
+ jobs: []
+
+ # Optional: Override automatically derived dependsOn value for "publish build assets" job
+ publishBuildAssetsDependsOn: ''
+
+  # Optional: Publish the assets as soon as the publish to BAR stage is complete, rather than doing so in a separate stage.
+ publishAssetsImmediately: false
+
+ # Optional: If using publishAssetsImmediately and additional parameters are needed, can be used to send along additional parameters (normally sent to post-build.yml)
+ artifactsPublishingAdditionalParameters: ''
+ signingValidationAdditionalParameters: ''
+
+ # Optional: should run as a public build even in the internal project
+ # if 'true', the build won't run any of the internal only steps, even if it is running in non-public projects.
+ runAsPublic: false
+
+ enableSourceIndex: false
+ sourceIndexParams: {}
+
+# Internal resources (telemetry, microbuild) can only be accessed from non-public projects,
+# and some (Microbuild) should only be applied to non-PR cases for internal builds.
+
+jobs:
+- ${{ each job in parameters.jobs }}:
+ - template: ../job/job.yml
+ parameters:
+ # pass along parameters
+ ${{ each parameter in parameters }}:
+ ${{ if ne(parameter.key, 'jobs') }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+ # pass along job properties
+ ${{ each property in job }}:
+ ${{ if ne(property.key, 'job') }}:
+ ${{ property.key }}: ${{ property.value }}
+
+ name: ${{ job.job }}
+
+- ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - template: /eng/common/templates-official/jobs/source-build.yml
+ parameters:
+ allCompletedJobId: Source_Build_Complete
+ ${{ each parameter in parameters.sourceBuildParameters }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if eq(parameters.enableSourceIndex, 'true') }}:
+ - template: ../job/source-index-stage1.yml
+ parameters:
+ runAsPublic: ${{ parameters.runAsPublic }}
+ ${{ each parameter in parameters.sourceIndexParams }}:
+ ${{ parameter.key }}: ${{ parameter.value }}
+
+- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - ${{ if or(eq(parameters.enablePublishBuildAssets, true), eq(parameters.artifacts.publish.manifests, 'true'), ne(parameters.artifacts.publish.manifests, '')) }}:
+ - template: ../job/publish-build-assets.yml
+ parameters:
+ continueOnError: ${{ parameters.continueOnError }}
+ dependsOn:
+ - ${{ if ne(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.publishBuildAssetsDependsOn }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.publishBuildAssetsDependsOn, '') }}:
+ - ${{ each job in parameters.jobs }}:
+ - ${{ job.job }}
+ - ${{ if eq(parameters.enableSourceBuild, true) }}:
+ - Source_Build_Complete
+
+ runAsPublic: ${{ parameters.runAsPublic }}
+ publishUsingPipelines: ${{ parameters.enablePublishUsingPipelines }}
+ publishAssetsImmediately: ${{ parameters.publishAssetsImmediately }}
+ enablePublishBuildArtifacts: ${{ parameters.enablePublishBuildArtifacts }}
+ artifactsPublishingAdditionalParameters: ${{ parameters.artifactsPublishingAdditionalParameters }}
+ signingValidationAdditionalParameters: ${{ parameters.signingValidationAdditionalParameters }}
diff --git a/eng/common/templates-official/jobs/source-build.yml b/eng/common/templates-official/jobs/source-build.yml
new file mode 100644
index 000000000000..08e5db9bb116
--- /dev/null
+++ b/eng/common/templates-official/jobs/source-build.yml
@@ -0,0 +1,46 @@
+parameters:
+ # This template adds arcade-powered source-build to CI. A job is created for each platform, as
+ # well as an optional server job that completes when all platform jobs complete.
+
+ # The name of the "join" job for all source-build platforms. If set to empty string, the job is
+  # not included. Existing repo pipelines can use this job to depend on all source-build jobs
+ # completing without maintaining a separate list of every single job ID: just depend on this one
+ # server job. By default, not included. Recommended name if used: 'Source_Build_Complete'.
+ allCompletedJobId: ''
+
+ # See /eng/common/templates-official/job/source-build.yml
+ jobNamePrefix: 'Source_Build'
+
+ # This is the default platform provided by Arcade, intended for use by a managed-only repo.
+ defaultManagedPlatform:
+ name: 'Managed'
+ container: 'mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8'
+
+ # Defines the platforms on which to run build jobs. One job is created for each platform, and the
+ # object in this array is sent to the job template as 'platform'. If no platforms are specified,
+ # one job runs on 'defaultManagedPlatform'.
+ platforms: []
+
+jobs:
+
+- ${{ if ne(parameters.allCompletedJobId, '') }}:
+ - job: ${{ parameters.allCompletedJobId }}
+ displayName: Source-Build Complete
+ pool: server
+ dependsOn:
+ - ${{ each platform in parameters.platforms }}:
+ - ${{ parameters.jobNamePrefix }}_${{ platform.name }}
+ - ${{ if eq(length(parameters.platforms), 0) }}:
+ - ${{ parameters.jobNamePrefix }}_${{ parameters.defaultManagedPlatform.name }}
+
+- ${{ each platform in parameters.platforms }}:
+ - template: /eng/common/templates-official/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ platform }}
+
+- ${{ if eq(length(parameters.platforms), 0) }}:
+ - template: /eng/common/templates-official/job/source-build.yml
+ parameters:
+ jobNamePrefix: ${{ parameters.jobNamePrefix }}
+ platform: ${{ parameters.defaultManagedPlatform }}
diff --git a/eng/common/templates-official/post-build/common-variables.yml b/eng/common/templates-official/post-build/common-variables.yml
new file mode 100644
index 000000000000..c24193acfc98
--- /dev/null
+++ b/eng/common/templates-official/post-build/common-variables.yml
@@ -0,0 +1,22 @@
+variables:
+ - group: Publish-Build-Assets
+
+ # Whether the build is internal or not
+ - name: IsInternalBuild
+ value: ${{ and(ne(variables['System.TeamProject'], 'public'), contains(variables['Build.SourceBranch'], 'internal')) }}
+
+ # Default Maestro++ API Endpoint and API Version
+ - name: MaestroApiEndPoint
+ value: "https://maestro-prod.westus2.cloudapp.azure.com"
+ - name: MaestroApiAccessToken
+ value: $(MaestroAccessToken)
+ - name: MaestroApiVersion
+ value: "2020-02-20"
+
+ - name: SourceLinkCLIVersion
+ value: 3.0.0
+ - name: SymbolToolVersion
+ value: 1.0.1
+
+ - name: runCodesignValidationInjection
+ value: false
diff --git a/eng/common/templates-official/post-build/post-build.yml b/eng/common/templates-official/post-build/post-build.yml
new file mode 100644
index 000000000000..da1f40958b45
--- /dev/null
+++ b/eng/common/templates-official/post-build/post-build.yml
@@ -0,0 +1,285 @@
+parameters:
+ # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST.
+ # Publishing V1 is no longer supported
+ # Publishing V2 is no longer supported
+ # Publishing V3 is the default
+ - name: publishingInfraVersion
+ displayName: Which version of publishing should be used to promote the build definition?
+ type: number
+ default: 3
+ values:
+ - 3
+
+ - name: BARBuildId
+ displayName: BAR Build Id
+ type: number
+ default: 0
+
+ - name: PromoteToChannelIds
+ displayName: Channel to promote BARBuildId to
+ type: string
+ default: ''
+
+ - name: enableSourceLinkValidation
+ displayName: Enable SourceLink validation
+ type: boolean
+ default: false
+
+ - name: enableSigningValidation
+ displayName: Enable signing validation
+ type: boolean
+ default: true
+
+ - name: enableSymbolValidation
+ displayName: Enable symbol validation
+ type: boolean
+ default: false
+
+ - name: enableNugetValidation
+ displayName: Enable NuGet validation
+ type: boolean
+ default: true
+
+ - name: publishInstallersAndChecksums
+ displayName: Publish installers and checksums
+ type: boolean
+ default: true
+
+ - name: SDLValidationParameters
+ type: object
+ default:
+ enable: false
+ publishGdn: false
+ continueOnError: false
+ params: ''
+ artifactNames: ''
+ downloadArtifacts: true
+
+ # These parameters let the user customize the call to sdk-task.ps1 for publishing
+ # symbols & general artifacts as well as for signing validation
+ - name: symbolPublishingAdditionalParameters
+ displayName: Symbol publishing additional parameters
+ type: string
+ default: ''
+
+ - name: artifactsPublishingAdditionalParameters
+ displayName: Artifact publishing additional parameters
+ type: string
+ default: ''
+
+ - name: signingValidationAdditionalParameters
+ displayName: Signing validation additional parameters
+ type: string
+ default: ''
+
+ # Which stages should finish execution before post-build stages start
+ - name: validateDependsOn
+ type: object
+ default:
+ - build
+
+ - name: publishDependsOn
+ type: object
+ default:
+ - Validate
+
+ # Optional: Call asset publishing rather than running in a separate stage
+ - name: publishAssetsImmediately
+ type: boolean
+ default: false
+
+stages:
+- ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ - stage: Validate
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Validate Build Assets
+ variables:
+ - template: common-variables.yml
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ jobs:
+ - job:
+ displayName: NuGet Validation
+ condition: and(succeededOrFailed(), eq( ${{ parameters.enableNugetValidation }}, 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/nuget-validation.ps1
+ arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/
+ -ToolDestinationPath $(Agent.BuildDirectory)/Extract/
+
+ - job:
+ displayName: Signing Validation
+ condition: and( eq( ${{ parameters.enableSigningValidation }}, 'true'), ne( variables['PostBuildSign'], 'true'))
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Package Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: PackageArtifacts
+ checkDownloadedFiles: true
+ itemPattern: |
+ **
+ !**/Microsoft.SourceBuild.Intermediate.*.nupkg
+
+ # This is necessary whenever we want to publish/restore to an AzDO private feed
+ # Since sdk-task.ps1 tries to restore packages we need to do this authentication here
+ # otherwise it'll complain about accessing a private feed.
+ - task: NuGetAuthenticate@1
+ displayName: 'Authenticate to AzDO Feeds'
+
+ # Signing validation will optionally work with the buildmanifest file which is downloaded from
+ # Azure DevOps above.
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: eng\common\sdk-task.ps1
+ arguments: -task SigningValidation -restore -msbuildEngine vs
+ /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts'
+ /p:SignCheckExclusionsFile='$(Build.SourcesDirectory)/eng/SignCheckExclusionsFile.txt'
+ ${{ parameters.signingValidationAdditionalParameters }}
+
+ - template: ../steps/publish-logs.yml
+ parameters:
+ StageLabel: 'Validation'
+ JobLabel: 'Signing'
+ BinlogToolVersion: $(BinlogToolVersion)
+
+ - job:
+ displayName: SourceLink Validation
+ condition: eq( ${{ parameters.enableSourceLinkValidation }}, 'true')
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Blob Artifacts
+ inputs:
+ buildType: specific
+ buildVersionToDownload: specific
+ project: $(AzDOProjectName)
+ pipeline: $(AzDOPipelineId)
+ buildId: $(AzDOBuildId)
+ artifactName: BlobArtifacts
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ displayName: Validate
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/sourcelink-validation.ps1
+ arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/
+ -ExtractPath $(Agent.BuildDirectory)/Extract/
+ -GHRepoName $(Build.Repository.Name)
+ -GHCommit $(Build.SourceVersion)
+ -SourcelinkCliVersion $(SourceLinkCLIVersion)
+ continueOnError: true
+
+- ${{ if ne(parameters.publishAssetsImmediately, 'true') }}:
+ - stage: publish_using_darc
+ ${{ if or(eq(parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}:
+ dependsOn: ${{ parameters.publishDependsOn }}
+ ${{ else }}:
+ dependsOn: ${{ parameters.validateDependsOn }}
+ displayName: Publish using Darc
+ variables:
+ - template: common-variables.yml
+ - template: /eng/common/templates-official/variables/pool-providers.yml
+ jobs:
+ - job:
+ displayName: Publish Using Darc
+ timeoutInMinutes: 120
+ pool:
+ # We don't use the collection uri here because it might vary (.visualstudio.com vs. dev.azure.com)
+ ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}:
+ name: AzurePipelines-EO
+ image: 1ESPT-Windows2022
+ demands: Cmd
+ os: windows
+ # If it's not devdiv, it's dnceng
+ ${{ else }}:
+ name: NetCore1ESPool-Publishing-Internal
+ image: windows.vs2019.amd64
+ os: windows
+ steps:
+ - template: setup-maestro-vars.yml
+ parameters:
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }}
+
+ - task: NuGetAuthenticate@1
+
+ - task: PowerShell@2
+ displayName: Publish Using Darc
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/publish-using-darc.ps1
+ arguments: -BuildId $(BARBuildId)
+ -PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
+ -AzdoToken '$(publishing-dnceng-devdiv-code-r-build-re)'
+ -MaestroToken '$(MaestroApiAccessToken)'
+ -WaitPublishingFinish true
+ -ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
+ -SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
diff --git a/eng/common/templates-official/post-build/setup-maestro-vars.yml b/eng/common/templates-official/post-build/setup-maestro-vars.yml
new file mode 100644
index 000000000000..0c87f149a4ad
--- /dev/null
+++ b/eng/common/templates-official/post-build/setup-maestro-vars.yml
@@ -0,0 +1,70 @@
+parameters:
+ BARBuildId: ''
+ PromoteToChannelIds: ''
+
+steps:
+ - ${{ if eq(coalesce(parameters.PromoteToChannelIds, 0), 0) }}:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Release Configs
+ inputs:
+ buildType: current
+ artifactName: ReleaseConfigs
+ checkDownloadedFiles: true
+
+ - task: PowerShell@2
+ name: setReleaseVars
+ displayName: Set Release Configs Vars
+ inputs:
+ targetType: inline
+ pwsh: true
+ script: |
+ try {
+ if (!$Env:PromoteToMaestroChannels -or $Env:PromoteToMaestroChannels.Trim() -eq '') {
+ $Content = Get-Content $(Build.StagingDirectory)/ReleaseConfigs/ReleaseConfigs.txt
+
+ $BarId = $Content | Select -Index 0
+ $Channels = $Content | Select -Index 1
+ $IsStableBuild = $Content | Select -Index 2
+
+ $AzureDevOpsProject = $Env:System_TeamProject
+ $AzureDevOpsBuildDefinitionId = $Env:System_DefinitionId
+ $AzureDevOpsBuildId = $Env:Build_BuildId
+ }
+ else {
+ $buildApiEndpoint = "${Env:MaestroApiEndPoint}/api/builds/${Env:BARBuildId}?api-version=${Env:MaestroApiVersion}"
+
+ $apiHeaders = New-Object 'System.Collections.Generic.Dictionary[[String],[String]]'
+ $apiHeaders.Add('Accept', 'application/json')
+ $apiHeaders.Add('Authorization',"Bearer ${Env:MAESTRO_API_TOKEN}")
+
+ $buildInfo = try { Invoke-WebRequest -Method Get -Uri $buildApiEndpoint -Headers $apiHeaders | ConvertFrom-Json } catch { Write-Host "Error: $_" }
+
+ $BarId = $Env:BARBuildId
+ $Channels = $Env:PromoteToMaestroChannels -split ","
+ $Channels = $Channels -join "]["
+ $Channels = "[$Channels]"
+
+ $IsStableBuild = $buildInfo.stable
+ $AzureDevOpsProject = $buildInfo.azureDevOpsProject
+ $AzureDevOpsBuildDefinitionId = $buildInfo.azureDevOpsBuildDefinitionId
+ $AzureDevOpsBuildId = $buildInfo.azureDevOpsBuildId
+ }
+
+ Write-Host "##vso[task.setvariable variable=BARBuildId]$BarId"
+ Write-Host "##vso[task.setvariable variable=TargetChannels]$Channels"
+ Write-Host "##vso[task.setvariable variable=IsStableBuild]$IsStableBuild"
+
+ Write-Host "##vso[task.setvariable variable=AzDOProjectName]$AzureDevOpsProject"
+ Write-Host "##vso[task.setvariable variable=AzDOPipelineId]$AzureDevOpsBuildDefinitionId"
+ Write-Host "##vso[task.setvariable variable=AzDOBuildId]$AzureDevOpsBuildId"
+ }
+ catch {
+ Write-Host $_
+ Write-Host $_.Exception
+ Write-Host $_.ScriptStackTrace
+ exit 1
+ }
+ env:
+ MAESTRO_API_TOKEN: $(MaestroApiAccessToken)
+ BARBuildId: ${{ parameters.BARBuildId }}
+ PromoteToMaestroChannels: ${{ parameters.PromoteToChannelIds }}
diff --git a/eng/common/templates-official/post-build/trigger-subscription.yml b/eng/common/templates-official/post-build/trigger-subscription.yml
new file mode 100644
index 000000000000..da669030daf6
--- /dev/null
+++ b/eng/common/templates-official/post-build/trigger-subscription.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Triggering subscriptions
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/trigger-subscriptions.ps1
+ arguments: -SourceRepo $(Build.Repository.Uri)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/add-build-to-channel.yml b/eng/common/templates-official/steps/add-build-to-channel.yml
new file mode 100644
index 000000000000..f67a210d62f3
--- /dev/null
+++ b/eng/common/templates-official/steps/add-build-to-channel.yml
@@ -0,0 +1,13 @@
+parameters:
+ ChannelId: 0
+
+steps:
+- task: PowerShell@2
+ displayName: Add Build to Channel
+ inputs:
+ filePath: $(Build.SourcesDirectory)/eng/common/post-build/add-build-to-channel.ps1
+ arguments: -BuildId $(BARBuildId)
+ -ChannelId ${{ parameters.ChannelId }}
+ -MaestroApiAccessToken $(MaestroApiAccessToken)
+ -MaestroApiEndPoint $(MaestroApiEndPoint)
+ -MaestroApiVersion $(MaestroApiVersion)
diff --git a/eng/common/templates-official/steps/build-reason.yml b/eng/common/templates-official/steps/build-reason.yml
new file mode 100644
index 000000000000..eba58109b52c
--- /dev/null
+++ b/eng/common/templates-official/steps/build-reason.yml
@@ -0,0 +1,12 @@
+# build-reason.yml
+# Description: runs the given steps only when the Build.Reason condition is satisfied. `conditions` is a
+# comma-separated string of build reasons for which the steps should be included.
+parameters:
+ conditions: ''
+ steps: []
+
+steps:
+ - ${{ if and( not(startsWith(parameters.conditions, 'not')), contains(parameters.conditions, variables['build.reason'])) }}:
+ - ${{ parameters.steps }}
+ - ${{ if and( startsWith(parameters.conditions, 'not'), not(contains(parameters.conditions, variables['build.reason']))) }}:
+ - ${{ parameters.steps }}
diff --git a/eng/common/templates-official/steps/component-governance.yml b/eng/common/templates-official/steps/component-governance.yml
new file mode 100644
index 000000000000..cbba0596709d
--- /dev/null
+++ b/eng/common/templates-official/steps/component-governance.yml
@@ -0,0 +1,13 @@
+parameters:
+ disableComponentGovernance: false
+ componentGovernanceIgnoreDirectories: ''
+
+steps:
+- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
+ - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
+ displayName: Set skipComponentGovernanceDetection variable
+- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
+ - task: ComponentGovernanceComponentDetection@0
+ continueOnError: true
+ inputs:
+ ignoreDirectories: ${{ parameters.componentGovernanceIgnoreDirectories }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/execute-codeql.yml b/eng/common/templates-official/steps/execute-codeql.yml
new file mode 100644
index 000000000000..9b4a5ffa30a7
--- /dev/null
+++ b/eng/common/templates-official/steps/execute-codeql.yml
@@ -0,0 +1,32 @@
+parameters:
+ # Language that should be analyzed. Defaults to csharp
+ language: csharp
+ # Build Commands
+ buildCommands: ''
+ overrideParameters: '' # Optional: to override values for parameters.
+ additionalParameters: '' # Optional: parameters that need user specific values eg: '-SourceToolsList @("abc","def") -ArtifactToolsList @("ghi","jkl")'
+ # Optional: if specified, restore and use this version of Guardian instead of the default.
+ overrideGuardianVersion: ''
+ # Optional: if true, publish the '.gdn' folder as a pipeline artifact. This can help with in-depth
+ # diagnosis of problems with specific tool configurations.
+ publishGuardianDirectoryToPipeline: false
+ # The script to run to execute all SDL tools. Use this if you want to use a script to define SDL
+ # parameters rather than relying on YAML. It may be better to use a local script, because you can
+ # reproduce results locally without piecing together a command based on the YAML.
+ executeAllSdlToolsScript: 'eng/common/sdl/execute-all-sdl-tools.ps1'
+ # There is some sort of bug (has been reported) in Azure DevOps where if this parameter is named
+ # 'continueOnError', the parameter value is not correctly picked up.
+ # This can also be remedied by the caller (post-build.yml) if it does not use a nested parameter
+ # optional: determines whether to continue the build if the step errors;
+ sdlContinueOnError: false
+
+steps:
+- template: /eng/common/templates-official/steps/execute-sdl.yml
+ parameters:
+ overrideGuardianVersion: ${{ parameters.overrideGuardianVersion }}
+ executeAllSdlToolsScript: ${{ parameters.executeAllSdlToolsScript }}
+ overrideParameters: ${{ parameters.overrideParameters }}
+ additionalParameters: '${{ parameters.additionalParameters }}
+ -CodeQLAdditionalRunConfigParams @("BuildCommands < ${{ parameters.buildCommands }}", "Language < ${{ parameters.language }}")'
+ publishGuardianDirectoryToPipeline: ${{ parameters.publishGuardianDirectoryToPipeline }}
+ sdlContinueOnError: ${{ parameters.sdlContinueOnError }}
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/execute-sdl.yml b/eng/common/templates-official/steps/execute-sdl.yml
new file mode 100644
index 000000000000..07426fde05d8
--- /dev/null
+++ b/eng/common/templates-official/steps/execute-sdl.yml
@@ -0,0 +1,88 @@
+parameters:
+ overrideGuardianVersion: ''
+ executeAllSdlToolsScript: ''
+ overrideParameters: ''
+ additionalParameters: ''
+ publishGuardianDirectoryToPipeline: false
+ sdlContinueOnError: false
+ condition: ''
+
+steps:
+- task: NuGetAuthenticate@1
+ inputs:
+ nuGetServiceConnections: GuardianConnect
+
+- task: NuGetToolInstaller@1
+ displayName: 'Install NuGet.exe'
+
+- ${{ if ne(parameters.overrideGuardianVersion, '') }}:
+ - pwsh: |
+ Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
+ . .\sdl.ps1
+ $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts -Version ${{ parameters.overrideGuardianVersion }}
+ Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
+ displayName: Install Guardian (Overridden)
+
+- ${{ if eq(parameters.overrideGuardianVersion, '') }}:
+ - pwsh: |
+ Set-Location -Path $(Build.SourcesDirectory)\eng\common\sdl
+ . .\sdl.ps1
+ $guardianCliLocation = Install-Gdn -Path $(Build.SourcesDirectory)\.artifacts
+ Write-Host "##vso[task.setvariable variable=GuardianCliLocation]$guardianCliLocation"
+ displayName: Install Guardian
+
+- ${{ if ne(parameters.overrideParameters, '') }}:
+ - powershell: ${{ parameters.executeAllSdlToolsScript }} ${{ parameters.overrideParameters }}
+ displayName: Execute SDL (Overridden)
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ condition: ${{ parameters.condition }}
+
+- ${{ if eq(parameters.overrideParameters, '') }}:
+ - powershell: ${{ parameters.executeAllSdlToolsScript }}
+ -GuardianCliLocation $(GuardianCliLocation)
+ -NugetPackageDirectory $(Build.SourcesDirectory)\.packages
+ -AzureDevOpsAccessToken $(dn-bot-dotnet-build-rw-code-rw)
+ ${{ parameters.additionalParameters }}
+ displayName: Execute SDL
+ continueOnError: ${{ parameters.sdlContinueOnError }}
+ condition: ${{ parameters.condition }}
+
+- ${{ if ne(parameters.publishGuardianDirectoryToPipeline, 'false') }}:
+ # We want to publish the Guardian results and configuration for easy diagnosis. However, the
+ # '.gdn' dir is a mix of configuration, results, extracted dependencies, and Guardian default
+ # tooling files. Some of these files are large and aren't useful during an investigation, so
+ # exclude them by simply deleting them before publishing. (As of writing, there is no documented
+ # way to selectively exclude a dir from the pipeline artifact publish task.)
+ - task: DeleteFiles@1
+ displayName: Delete Guardian dependencies to avoid uploading
+ inputs:
+ SourceFolder: $(Agent.BuildDirectory)/.gdn
+ Contents: |
+ c
+ i
+ condition: succeededOrFailed()
+
+ - publish: $(Agent.BuildDirectory)/.gdn
+ artifact: GuardianConfiguration
+ displayName: Publish GuardianConfiguration
+ condition: succeededOrFailed()
+
+ # Publish the SARIF files in a container named CodeAnalysisLogs to enable integration
+ # with the "SARIF SAST Scans Tab" Azure DevOps extension
+ - task: CopyFiles@2
+ displayName: Copy SARIF files
+ inputs:
+ flattenFolders: true
+ sourceFolder: $(Agent.BuildDirectory)/.gdn/rc/
+ contents: '**/*.sarif'
+ targetFolder: $(Build.SourcesDirectory)/CodeAnalysisLogs
+ condition: succeededOrFailed()
+
+ # Use PublishBuildArtifacts because the SARIF extension only checks this case
+ # see microsoft/sarif-azuredevops-extension#4
+ - task: PublishBuildArtifacts@1
+ displayName: Publish SARIF files to CodeAnalysisLogs container
+ inputs:
+ pathToPublish: $(Build.SourcesDirectory)/CodeAnalysisLogs
+ artifactName: CodeAnalysisLogs
+ condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/generate-sbom.yml b/eng/common/templates-official/steps/generate-sbom.yml
new file mode 100644
index 000000000000..1bf43bf807af
--- /dev/null
+++ b/eng/common/templates-official/steps/generate-sbom.yml
@@ -0,0 +1,48 @@
+# BuildDropPath - The root folder of the drop directory for which the manifest file will be generated.
+# PackageName - The name of the package this SBOM represents.
+# PackageVersion - The version of the package this SBOM represents.
+# ManifestDirPath - The path of the directory where the generated manifest files will be placed
+# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
+
+parameters:
+ PackageVersion: 8.0.0
+ BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
+ PackageName: '.NET'
+ ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
+ IgnoreDirectories: ''
+ sbomContinueOnError: true
+
+steps:
+- task: PowerShell@2
+ displayName: Prep for SBOM generation in (Non-linux)
+ condition: or(eq(variables['Agent.Os'], 'Windows_NT'), eq(variables['Agent.Os'], 'Darwin'))
+ inputs:
+ filePath: ./eng/common/generate-sbom-prep.ps1
+ arguments: ${{parameters.manifestDirPath}}
+
+# Chmodding is a workaround for https://github.com/dotnet/arcade/issues/8461
+- script: |
+ chmod +x ./eng/common/generate-sbom-prep.sh
+ ./eng/common/generate-sbom-prep.sh ${{parameters.manifestDirPath}}
+ displayName: Prep for SBOM generation in (Linux)
+ condition: eq(variables['Agent.Os'], 'Linux')
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+
+- task: AzureArtifacts.manifest-generator-task.manifest-generator-task.ManifestGeneratorTask@0
+ displayName: 'Generate SBOM manifest'
+ continueOnError: ${{ parameters.sbomContinueOnError }}
+ inputs:
+ PackageName: ${{ parameters.packageName }}
+ BuildDropPath: ${{ parameters.buildDropPath }}
+ PackageVersion: ${{ parameters.packageVersion }}
+ ManifestDirPath: ${{ parameters.manifestDirPath }}
+ ${{ if ne(parameters.IgnoreDirectories, '') }}:
+ AdditionalComponentDetectorArgs: '--IgnoreDirectories ${{ parameters.IgnoreDirectories }}'
+
+- task: 1ES.PublishPipelineArtifact@1
+ displayName: Publish SBOM manifest
+ continueOnError: ${{parameters.sbomContinueOnError}}
+ inputs:
+ targetPath: '${{parameters.manifestDirPath}}'
+ artifactName: $(ARTIFACT_NAME)
+
diff --git a/eng/common/templates-official/steps/publish-logs.yml b/eng/common/templates-official/steps/publish-logs.yml
new file mode 100644
index 000000000000..04012fed182a
--- /dev/null
+++ b/eng/common/templates-official/steps/publish-logs.yml
@@ -0,0 +1,23 @@
+parameters:
+ StageLabel: ''
+ JobLabel: ''
+
+steps:
+- task: Powershell@2
+ displayName: Prepare Binlogs to Upload
+ inputs:
+ targetType: inline
+ script: |
+ New-Item -ItemType Directory $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ Move-Item -Path $(Build.SourcesDirectory)/artifacts/log/Debug/* $(Build.SourcesDirectory)/PostBuildLogs/${{parameters.StageLabel}}/${{parameters.JobLabel}}/
+ continueOnError: true
+ condition: always()
+
+- task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/PostBuildLogs'
+ PublishLocation: Container
+ ArtifactName: PostBuildLogs
+ continueOnError: true
+ condition: always()
diff --git a/eng/common/templates-official/steps/retain-build.yml b/eng/common/templates-official/steps/retain-build.yml
new file mode 100644
index 000000000000..83d97a26a01f
--- /dev/null
+++ b/eng/common/templates-official/steps/retain-build.yml
@@ -0,0 +1,28 @@
+parameters:
+  # Optional Azure DevOps PAT with build-execute permissions for the build's organization;
+  # only needed if the build that should be retained ran in a different organization than
+  # the one the pipeline executing this template runs in.
+ Token: ''
+ # Optional BuildId to retain, defaults to the current running build
+ BuildId: ''
+  # Azure DevOps organization URI for the build, in the https://dev.azure.com/ format.
+  # Defaults to the organization the current pipeline is running in.
+ AzdoOrgUri: '$(System.CollectionUri)'
+  # Azure DevOps project for the build. Defaults to the project the current pipeline is running in.
+ AzdoProject: '$(System.TeamProject)'
+
+steps:
+ - task: powershell@2
+ inputs:
+ targetType: 'filePath'
+ filePath: eng/common/retain-build.ps1
+ pwsh: true
+ arguments: >
+ -AzdoOrgUri: ${{parameters.AzdoOrgUri}}
+ -AzdoProject ${{parameters.AzdoProject}}
+ -Token ${{coalesce(parameters.Token, '$env:SYSTEM_ACCESSTOKEN') }}
+ -BuildId ${{coalesce(parameters.BuildId, '$env:BUILD_ID')}}
+ displayName: Enable permanent build retention
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ BUILD_ID: $(Build.BuildId)
\ No newline at end of file
diff --git a/eng/common/templates-official/steps/send-to-helix.yml b/eng/common/templates-official/steps/send-to-helix.yml
new file mode 100644
index 000000000000..3eb7e2d5f840
--- /dev/null
+++ b/eng/common/templates-official/steps/send-to-helix.yml
@@ -0,0 +1,91 @@
+# Please remember to update the documentation if you make changes to these parameters!
+parameters:
+ HelixSource: 'pr/default' # required -- sources must start with pr/, official/, prodcon/, or agent/
+ HelixType: 'tests/default/' # required -- Helix telemetry which identifies what type of data this is; should include "test" for clarity and must end in '/'
+ HelixBuild: $(Build.BuildNumber) # required -- the build number Helix will use to identify this -- automatically set to the AzDO build number
+ HelixTargetQueues: '' # required -- semicolon-delimited list of Helix queues to test on; see https://helix.dot.net/ for a list of queues
+ HelixAccessToken: '' # required -- access token to make Helix API requests; should be provided by the appropriate variable group
+ HelixConfiguration: '' # optional -- additional property attached to a job
+ HelixPreCommands: '' # optional -- commands to run before Helix work item execution
+ HelixPostCommands: '' # optional -- commands to run after Helix work item execution
+ WorkItemDirectory: '' # optional -- a payload directory to zip up and send to Helix; requires WorkItemCommand; incompatible with XUnitProjects
+ WorkItemCommand: '' # optional -- a command to execute on the payload; requires WorkItemDirectory; incompatible with XUnitProjects
+ WorkItemTimeout: '' # optional -- a timeout in TimeSpan.Parse-ready value (e.g. 00:02:00) for the work item command; requires WorkItemDirectory; incompatible with XUnitProjects
+ CorrelationPayloadDirectory: '' # optional -- a directory to zip up and send to Helix as a correlation payload
+ XUnitProjects: '' # optional -- semicolon-delimited list of XUnitProjects to parse and send to Helix; requires XUnitRuntimeTargetFramework, XUnitPublishTargetFramework, XUnitRunnerVersion, and IncludeDotNetCli=true
+ XUnitWorkItemTimeout: '' # optional -- the workitem timeout in seconds for all workitems created from the xUnit projects specified by XUnitProjects
+ XUnitPublishTargetFramework: '' # optional -- framework to use to publish your xUnit projects
+ XUnitRuntimeTargetFramework: '' # optional -- framework to use for the xUnit console runner
+ XUnitRunnerVersion: '' # optional -- version of the xUnit nuget package you wish to use on Helix; required for XUnitProjects
+ IncludeDotNetCli: false # optional -- true will download a version of the .NET CLI onto the Helix machine as a correlation payload; requires DotNetCliPackageType and DotNetCliVersion
+ DotNetCliPackageType: '' # optional -- either 'sdk', 'runtime' or 'aspnetcore-runtime'; determines whether the sdk or runtime will be sent to Helix; see https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ DotNetCliVersion: '' # optional -- version of the CLI to send to Helix; based on this: https://raw.githubusercontent.com/dotnet/core/main/release-notes/releases-index.json
+ WaitForWorkItemCompletion: true # optional -- true will make the task wait until work items have been completed and fail the build if work items fail. False is "fire and forget."
+ IsExternal: false # [DEPRECATED] -- doesn't do anything, jobs are external if HelixAccessToken is empty and Creator is set
+ HelixBaseUri: 'https://helix.dot.net/' # optional -- sets the Helix API base URI (allows targeting https://helix.int-dot.net )
+ Creator: '' # optional -- if the build is external, use this to specify who is sending the job
+ DisplayNamePrefix: 'Run Tests' # optional -- rename the beginning of the displayName of the steps in AzDO
+ condition: succeeded() # optional -- condition for step to execute; defaults to succeeded()
+ continueOnError: false # optional -- determines whether to continue the build if the step errors; defaults to false
+
+steps:
+ - powershell: 'powershell "$env:BUILD_SOURCESDIRECTORY\eng\common\msbuild.ps1 $env:BUILD_SOURCESDIRECTORY\eng\common\helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$env:BUILD_SOURCESDIRECTORY\artifacts\log\$env:BuildConfig\SendToHelix.binlog"'
+ displayName: ${{ parameters.DisplayNamePrefix }} (Windows)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, eq(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
+ - script: $BUILD_SOURCESDIRECTORY/eng/common/msbuild.sh $BUILD_SOURCESDIRECTORY/eng/common/helixpublish.proj /restore /p:TreatWarningsAsErrors=false /t:Test /bl:$BUILD_SOURCESDIRECTORY/artifacts/log/$BuildConfig/SendToHelix.binlog
+ displayName: ${{ parameters.DisplayNamePrefix }} (Unix)
+ env:
+ BuildConfig: $(_BuildConfig)
+ HelixSource: ${{ parameters.HelixSource }}
+ HelixType: ${{ parameters.HelixType }}
+ HelixBuild: ${{ parameters.HelixBuild }}
+ HelixConfiguration: ${{ parameters.HelixConfiguration }}
+ HelixTargetQueues: ${{ parameters.HelixTargetQueues }}
+ HelixAccessToken: ${{ parameters.HelixAccessToken }}
+ HelixPreCommands: ${{ parameters.HelixPreCommands }}
+ HelixPostCommands: ${{ parameters.HelixPostCommands }}
+ WorkItemDirectory: ${{ parameters.WorkItemDirectory }}
+ WorkItemCommand: ${{ parameters.WorkItemCommand }}
+ WorkItemTimeout: ${{ parameters.WorkItemTimeout }}
+ CorrelationPayloadDirectory: ${{ parameters.CorrelationPayloadDirectory }}
+ XUnitProjects: ${{ parameters.XUnitProjects }}
+ XUnitWorkItemTimeout: ${{ parameters.XUnitWorkItemTimeout }}
+ XUnitPublishTargetFramework: ${{ parameters.XUnitPublishTargetFramework }}
+ XUnitRuntimeTargetFramework: ${{ parameters.XUnitRuntimeTargetFramework }}
+ XUnitRunnerVersion: ${{ parameters.XUnitRunnerVersion }}
+ IncludeDotNetCli: ${{ parameters.IncludeDotNetCli }}
+ DotNetCliPackageType: ${{ parameters.DotNetCliPackageType }}
+ DotNetCliVersion: ${{ parameters.DotNetCliVersion }}
+ WaitForWorkItemCompletion: ${{ parameters.WaitForWorkItemCompletion }}
+ HelixBaseUri: ${{ parameters.HelixBaseUri }}
+ Creator: ${{ parameters.Creator }}
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ condition: and(${{ parameters.condition }}, ne(variables['Agent.Os'], 'Windows_NT'))
+ continueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/common/templates-official/steps/source-build.yml b/eng/common/templates-official/steps/source-build.yml
new file mode 100644
index 000000000000..829f17c34d11
--- /dev/null
+++ b/eng/common/templates-official/steps/source-build.yml
@@ -0,0 +1,129 @@
+parameters:
+ # This template adds arcade-powered source-build to CI.
+
+ # This is a 'steps' template, and is intended for advanced scenarios where the existing build
+ # infra has a careful build methodology that must be followed. For example, a repo
+ # (dotnet/runtime) might choose to clone the GitHub repo only once and store it as a pipeline
+ # artifact for all subsequent jobs to use, to reduce dependence on a strong network connection to
+ # GitHub. Using this steps template leaves room for that infra to be included.
+
+ # Defines the platform on which to run the steps. See 'eng/common/templates-official/job/source-build.yml'
+ # for details. The entire object is described in the 'job' template for simplicity, even though
+ # the usage of the properties on this object is split between the 'job' and 'steps' templates.
+ platform: {}
+
+steps:
+# Build. Keep it self-contained for simple reusability. (No source-build-specific job variables.)
+- script: |
+ set -x
+ df -h
+
+ # If building on the internal project, the artifact feeds variable may be available (usually only if needed)
+ # In that case, call the feed setup script to add internal feeds corresponding to public ones.
+ # In addition, add an msbuild argument to copy the WIP from the repo to the target build location.
+ # This is because SetupNuGetSources.sh will alter the current NuGet.config file, and we need to preserve those
+ # changes.
+ internalRestoreArgs=
+ if [ '$(dn-bot-dnceng-artifact-feeds-rw)' != '$''(dn-bot-dnceng-artifact-feeds-rw)' ]; then
+ # Temporarily work around https://github.com/dotnet/arcade/issues/7709
+ chmod +x $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh
+ $(Build.SourcesDirectory)/eng/common/SetupNugetSources.sh $(Build.SourcesDirectory)/NuGet.config $(dn-bot-dnceng-artifact-feeds-rw)
+ internalRestoreArgs='/p:CopyWipIntoInnerSourceBuildRepo=true'
+
+ # The 'Copy WIP' feature of source build uses git stash to apply changes from the original repo.
+ # This only works if there is a username/email configured, which won't be the case in most CI runs.
+ git config --get user.email
+ if [ $? -ne 0 ]; then
+ git config user.email dn-bot@microsoft.com
+ git config user.name dn-bot
+ fi
+ fi
+
+ # If building on the internal project, the internal storage variable may be available (usually only if needed)
+ # In that case, add variables to allow the download of internal runtimes if the specified versions are not found
+ # in the default public locations.
+ internalRuntimeDownloadArgs=
+ if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then
+ internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://dotnetbuilds.blob.core.windows.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://dotnetbuilds.blob.core.windows.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)'
+ fi
+
+ buildConfig=Release
+ # Check if AzDO substitutes in a build config from a variable, and use it if so.
+ if [ '$(_BuildConfig)' != '$''(_BuildConfig)' ]; then
+ buildConfig='$(_BuildConfig)'
+ fi
+
+ officialBuildArgs=
+ if [ '${{ and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}' = 'True' ]; then
+ officialBuildArgs='/p:DotNetPublishUsingPipelines=true /p:OfficialBuildId=$(BUILD.BUILDNUMBER)'
+ fi
+
+ targetRidArgs=
+ if [ '${{ parameters.platform.targetRID }}' != '' ]; then
+ targetRidArgs='/p:TargetRid=${{ parameters.platform.targetRID }}'
+ fi
+
+ runtimeOsArgs=
+ if [ '${{ parameters.platform.runtimeOS }}' != '' ]; then
+ runtimeOsArgs='/p:RuntimeOS=${{ parameters.platform.runtimeOS }}'
+ fi
+
+ baseOsArgs=
+ if [ '${{ parameters.platform.baseOS }}' != '' ]; then
+ baseOsArgs='/p:BaseOS=${{ parameters.platform.baseOS }}'
+ fi
+
+ publishArgs=
+ if [ '${{ parameters.platform.skipPublishValidation }}' != 'true' ]; then
+ publishArgs='--publish'
+ fi
+
+ assetManifestFileName=SourceBuild_RidSpecific.xml
+ if [ '${{ parameters.platform.name }}' != '' ]; then
+ assetManifestFileName=SourceBuild_${{ parameters.platform.name }}.xml
+ fi
+
+ ${{ coalesce(parameters.platform.buildScript, './build.sh') }} --ci \
+ --configuration $buildConfig \
+ --restore --build --pack $publishArgs -bl \
+ $officialBuildArgs \
+ $internalRuntimeDownloadArgs \
+ $internalRestoreArgs \
+ $targetRidArgs \
+ $runtimeOsArgs \
+ $baseOsArgs \
+ /p:SourceBuildNonPortable=${{ parameters.platform.nonPortable }} \
+ /p:ArcadeBuildFromSource=true \
+ /p:AssetManifestFileName=$assetManifestFileName
+ displayName: Build
+
+# Upload build logs for diagnosis.
+- task: CopyFiles@2
+ displayName: Prepare BuildLogs staging directory
+ inputs:
+ SourceFolder: '$(Build.SourcesDirectory)'
+ Contents: |
+ **/*.log
+ **/*.binlog
+ artifacts/source-build/self/prebuilt-report/**
+ TargetFolder: '$(Build.StagingDirectory)/BuildLogs'
+ CleanTargetFolder: true
+ continueOnError: true
+ condition: succeededOrFailed()
+
+- task: 1ES.PublishPipelineArtifact@1
+ displayName: Publish BuildLogs
+ inputs:
+ targetPath: '$(Build.StagingDirectory)/BuildLogs'
+ artifactName: BuildLogs_SourceBuild_${{ parameters.platform.name }}_Attempt$(System.JobAttempt)
+ continueOnError: true
+ condition: succeededOrFailed()
+
+# Manually inject component detection so that we can ignore the source build upstream cache, which contains
+# a nupkg cache of input packages (a local feed).
+# This path must match the upstream cache path in property 'CurrentRepoSourceBuiltNupkgCacheDir'
+# in src\Microsoft.DotNet.Arcade.Sdk\tools\SourceBuild\SourceBuildArcade.targets
+- task: ComponentGovernanceComponentDetection@0
+ displayName: Component Detection (Exclude upstream cache)
+ inputs:
+ ignoreDirectories: '$(Build.SourcesDirectory)/artifacts/source-build/self/src/artifacts/obj/source-built-upstream-cache'
diff --git a/eng/common/templates-official/variables/pool-providers.yml b/eng/common/templates-official/variables/pool-providers.yml
new file mode 100644
index 000000000000..1f308b24efc4
--- /dev/null
+++ b/eng/common/templates-official/variables/pool-providers.yml
@@ -0,0 +1,45 @@
+# Select a pool provider based off branch name. Anything with branch name containing 'release' must go into an -Svc pool,
+# otherwise it should go into the "normal" pools. This separates out the queueing and billing of released branches.
+
+# Motivation:
+# Once a given branch of a repository's output has been officially "shipped" once, it is then considered to be COGS
+# (Cost of goods sold) and should be moved to a servicing pool provider. This allows both separation of queueing
+# (allowing release builds and main PR builds to not interfere with each other) and billing (required for COGS).
+# Additionally, the pool provider name itself may be subject to change when the .NET Core Engineering Services
+# team needs to move resources around and create new and potentially differently-named pools. Using this template
+# file from an Arcade-ified repo helps guard against both having to update one's release/* branches and renaming.
+
+# How to use:
+# This yaml assumes your shipped product branches use the naming convention "release/..." (which many do).
+# If we find alternate naming conventions in broad usage it can be added to the condition below.
+#
+# First, import the template in an arcade-ified repo to pick up the variables, e.g.:
+#
+# variables:
+# - template: /eng/common/templates-official/variables/pool-providers.yml
+#
+# ... then anywhere specifying the pool provider use the runtime variables,
+# $(DncEngInternalBuildPool)
+#
+# pool:
+# name: $(DncEngInternalBuildPool)
+# image: 1es-windows-2022
+
+variables:
+ # Coalesce the target and source branches so we know when a PR targets a release branch
+ # If these variables are somehow missing, fall back to main (tends to have more capacity)
+
+ # Any new -Svc alternative pools should have variables added here to allow for splitting work
+
+ - name: DncEngInternalBuildPool
+ value: $[
+ replace(
+ replace(
+ eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'),
+ True,
+ 'NetCore1ESPool-Svc-Internal'
+ ),
+ False,
+ 'NetCore1ESPool-Internal'
+ )
+ ]
\ No newline at end of file
diff --git a/eng/common/templates-official/variables/sdl-variables.yml b/eng/common/templates-official/variables/sdl-variables.yml
new file mode 100644
index 000000000000..dbdd66d4a4b3
--- /dev/null
+++ b/eng/common/templates-official/variables/sdl-variables.yml
@@ -0,0 +1,7 @@
+variables:
+# The Guardian version specified in 'eng/common/sdl/packages.config'. This value must be kept in
+# sync with the packages.config file.
+- name: DefaultGuardianVersion
+ value: 0.109.0
+- name: GuardianPackagesConfigFile
+ value: $(Build.SourcesDirectory)\eng\common\sdl\packages.config
\ No newline at end of file
diff --git a/eng/common/templates/job/job.yml b/eng/common/templates/job/job.yml
index e24ca2f46f98..8ec5c4f2d9f9 100644
--- a/eng/common/templates/job/job.yml
+++ b/eng/common/templates/job/job.yml
@@ -15,6 +15,7 @@ parameters:
timeoutInMinutes: ''
variables: []
workspace: ''
+ templateContext: ''
# Job base template specific parameters
# See schema documentation - https://github.com/dotnet/arcade/blob/master/Documentation/AzureDevOps/TemplateSchema.md
@@ -68,6 +69,9 @@ jobs:
${{ if ne(parameters.timeoutInMinutes, '') }}:
timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
+ ${{ if ne(parameters.templateContext, '') }}:
+ templateContext: ${{ parameters.templateContext }}
+
variables:
- ${{ if ne(parameters.enableTelemetry, 'false') }}:
- name: DOTNET_CLI_TELEMETRY_PROFILE
diff --git a/eng/common/templates/job/source-index-stage1.yml b/eng/common/templates/job/source-index-stage1.yml
index b98202aa02d8..43ee0c202fc7 100644
--- a/eng/common/templates/job/source-index-stage1.yml
+++ b/eng/common/templates/job/source-index-stage1.yml
@@ -1,6 +1,7 @@
parameters:
runAsPublic: false
- sourceIndexPackageVersion: 1.0.1-20230228.2
+ sourceIndexUploadPackageVersion: 2.0.0-20240502.12
+ sourceIndexProcessBinlogPackageVersion: 1.0.1-20240129.2
sourceIndexPackageSource: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json
sourceIndexBuildCommand: powershell -NoLogo -NoProfile -ExecutionPolicy Bypass -Command "eng/common/build.ps1 -restore -build -binarylog -ci"
preSteps: []
@@ -14,14 +15,14 @@ jobs:
dependsOn: ${{ parameters.dependsOn }}
condition: ${{ parameters.condition }}
variables:
- - name: SourceIndexPackageVersion
- value: ${{ parameters.sourceIndexPackageVersion }}
+ - name: SourceIndexUploadPackageVersion
+ value: ${{ parameters.sourceIndexUploadPackageVersion }}
+ - name: SourceIndexProcessBinlogPackageVersion
+ value: ${{ parameters.sourceIndexProcessBinlogPackageVersion }}
- name: SourceIndexPackageSource
value: ${{ parameters.sourceIndexPackageSource }}
- name: BinlogPath
value: ${{ parameters.binlogPath }}
- - ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - group: source-dot-net stage1 variables
- template: /eng/common/templates/variables/pool-providers.yml
${{ if ne(parameters.pool, '') }}:
@@ -40,16 +41,16 @@ jobs:
- ${{ preStep }}
- task: UseDotNet@2
- displayName: Use .NET Core SDK 6
+ displayName: Use .NET 8 SDK
inputs:
packageType: sdk
- version: 6.0.x
+ version: 8.0.x
installationPath: $(Agent.TempDirectory)/dotnet
workingDirectory: $(Agent.TempDirectory)
- script: |
- $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
- $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(SourceIndexPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version $(sourceIndexProcessBinlogPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
+ $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version $(sourceIndexUploadPackageVersion) --add-source $(SourceIndexPackageSource) --tool-path $(Agent.TempDirectory)/.source-index/tools
displayName: Download Tools
# Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk.
workingDirectory: $(Agent.TempDirectory)
@@ -61,7 +62,24 @@ jobs:
displayName: Process Binlog into indexable sln
- ${{ if and(eq(parameters.runAsPublic, 'false'), ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name)
- displayName: Upload stage1 artifacts to source index
- env:
- BLOB_CONTAINER_URL: $(source-dot-net-stage1-blob-container-url)
+ - task: AzureCLI@2
+ displayName: Get stage 1 auth token
+ inputs:
+ azureSubscription: 'SourceDotNet Stage1 Publish'
+ addSpnToEnvironment: true
+ scriptType: 'ps'
+ scriptLocation: 'inlineScript'
+ inlineScript: |
+ echo "##vso[task.setvariable variable=ARM_CLIENT_ID]$env:servicePrincipalId"
+ echo "##vso[task.setvariable variable=ARM_ID_TOKEN]$env:idToken"
+ echo "##vso[task.setvariable variable=ARM_TENANT_ID]$env:tenantId"
+
+ - script: |
+ echo "Client ID: $(ARM_CLIENT_ID)"
+ echo "ID Token: $(ARM_ID_TOKEN)"
+ echo "Tenant ID: $(ARM_TENANT_ID)"
+ az login --service-principal -u $(ARM_CLIENT_ID) --tenant $(ARM_TENANT_ID) --allow-no-subscriptions --federated-token $(ARM_ID_TOKEN)
+ displayName: "Login to Azure"
+
+ - script: $(Agent.TempDirectory)/.source-index/tools/UploadIndexStage1 -i .source-index/stage1output -n $(Build.Repository.Name) -s netsourceindexstage1 -b stage1
+ displayName: Upload stage1 artifacts to source index
\ No newline at end of file
diff --git a/eng/common/templates/steps/component-governance.yml b/eng/common/templates/steps/component-governance.yml
index 0ecec47b0c91..cbba0596709d 100644
--- a/eng/common/templates/steps/component-governance.yml
+++ b/eng/common/templates/steps/component-governance.yml
@@ -4,7 +4,7 @@ parameters:
steps:
- ${{ if eq(parameters.disableComponentGovernance, 'true') }}:
- - script: "echo ##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
+ - script: echo "##vso[task.setvariable variable=skipComponentGovernanceDetection]true"
displayName: Set skipComponentGovernanceDetection variable
- ${{ if ne(parameters.disableComponentGovernance, 'true') }}:
- task: ComponentGovernanceComponentDetection@0
diff --git a/eng/common/templates/steps/generate-sbom.yml b/eng/common/templates/steps/generate-sbom.yml
index a06373f38fa5..2b21eae42732 100644
--- a/eng/common/templates/steps/generate-sbom.yml
+++ b/eng/common/templates/steps/generate-sbom.yml
@@ -5,7 +5,7 @@
# IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector.
parameters:
- PackageVersion: 7.0.0
+ PackageVersion: 8.0.0
BuildDropPath: '$(Build.SourcesDirectory)/artifacts'
PackageName: '.NET'
ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom
diff --git a/eng/disable_vsupdate.ps1 b/eng/disable_vsupdate.ps1
new file mode 100644
index 000000000000..d8d365f05771
--- /dev/null
+++ b/eng/disable_vsupdate.ps1
@@ -0,0 +1,23 @@
+schtasks /change /tn "\Microsoft\VisualStudio\VSIX Auto Update" /disable
+
+$vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
+if (-not (Test-Path -Path "$vswhere" -PathType Leaf))
+{
+ Write-Error "Couldn't locate vswhere at $vswhere"
+ exit 1
+}
+
+$vsdir = &"$vswhere" -latest -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath
+$vsregedit = "$vsdir\Common7\IDE\VsRegEdit.exe"
+
+if (-not (Test-Path -Path "$vsregedit" ))
+{
+ Write-Error "VSWhere returned path: $vsdir, but regedit $vsregedit doesn't exist."
+ exit 1
+}
+
+Write-Output "VSWhere returned path: $vsdir, using regedit $vsregedit"
+Write-Output "Disabling updates through VS Registry:"
+
+&"$vsdir\Common7\IDE\VsRegEdit.exe" set local HKCU ExtensionManager AutomaticallyCheckForUpdates2Override dword 0
+&"$vsdir\Common7\IDE\VsRegEdit.exe" read local HKCU ExtensionManager AutomaticallyCheckForUpdates2Override dword
diff --git a/eng/empty.csproj b/eng/empty.csproj
deleted file mode 100644
index 55eefdef1ada..000000000000
--- a/eng/empty.csproj
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
-
-
- $(NetCoreAppToolCurrent)
-
-
-
-
-
diff --git a/eng/native/configurecompiler.cmake b/eng/native/configurecompiler.cmake
index 183811018533..0e6ee88b245e 100644
--- a/eng/native/configurecompiler.cmake
+++ b/eng/native/configurecompiler.cmake
@@ -590,6 +590,9 @@ if (CLR_CMAKE_HOST_UNIX)
# other clang 16.0 suppressions
add_compile_options(-Wno-single-bit-bitfield-constant-conversion)
add_compile_options(-Wno-cast-function-type-strict)
+
+  # clang 18.1 suppressions
+ add_compile_options(-Wno-switch-default)
else()
add_compile_options(-Wno-uninitialized)
add_compile_options(-Wno-strict-aliasing)
diff --git a/eng/native/init-vs-env.cmd b/eng/native/init-vs-env.cmd
index 273f49b3392c..befe98ab407f 100644
--- a/eng/native/init-vs-env.cmd
+++ b/eng/native/init-vs-env.cmd
@@ -26,6 +26,8 @@ if defined VisualStudioVersion goto :VSDetected
set "__VSWhere=%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe"
set "__VSCOMNTOOLS="
+if not exist "%__VSWhere%" goto :VSWhereMissing
+
if exist "%__VSWhere%" (
for /f "tokens=*" %%p in (
'"%__VSWhere%" -latest -prerelease -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath'
@@ -56,6 +58,10 @@ echo %__MsgPrefix%Error: Visual Studio 2022 with C++ tools required. ^
Please see https://github.com/dotnet/runtime/blob/main/docs/workflow/requirements/windows-requirements.md for build requirements.
exit /b 1
+:VSWhereMissing
+echo %__MsgPrefix%Error: vswhere could not be found in Visual Studio Installer directory at "%__VSWhere%"
+exit /b 1
+
:SetVCEnvironment
if "%__VCBuildArch%"=="" exit /b 0
diff --git a/eng/pipelines/common/global-build-job.yml b/eng/pipelines/common/global-build-job.yml
index 39e7d9b5c53c..86cea9fbd98a 100644
--- a/eng/pipelines/common/global-build-job.yml
+++ b/eng/pipelines/common/global-build-job.yml
@@ -14,7 +14,6 @@ parameters:
dependsOn: []
pool: ''
platform: ''
- pgoType: ''
condition: true
useContinueOnErrorDuringBuild: false
shouldContinueOnError: false
@@ -27,14 +26,14 @@ parameters:
helixQueues: ''
enablePublishTestResults: false
testResultsFormat: ''
- extraStepsTemplate: ''
- extraStepsParameters: {}
+ postBuildSteps: []
extraVariablesTemplates: []
isManualCodeQLBuild: false
preBuildSteps: []
+ templatePath: 'templates'
jobs:
-- template: /eng/common/templates/job/job.yml
+- template: /eng/common/${{ parameters.templatePath }}/job/job.yml
parameters:
${{ if eq(parameters.hostedOs, '') }}:
name: ${{ format('build_{0}{1}_{2}_{3}_{4}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig, parameters.nameSuffix) }}
@@ -134,7 +133,6 @@ jobs:
targetRid: ${{ parameters.targetRid }}
nameSuffix: ${{ parameters.nameSuffix }}
platform: ${{ parameters.platform }}
- pgoType: ${{ parameters.pgoType }}
shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
${{ if ne(variableTemplate.forwardedParameters, '') }}:
${{ each parameter in variableTemplate.forwardedParameters }}:
@@ -144,6 +142,7 @@ jobs:
- ${{ each variable in parameters.variables }}:
- ${{ variable }}
+
steps:
- ${{ if eq(parameters.osGroup, 'windows') }}:
- template: /eng/pipelines/common/templates/disable-vsupdate-or-failfast.yml
@@ -184,7 +183,7 @@ jobs:
path: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles'
- ${{ if eq(parameters.isSourceBuild, true) }}:
- - template: /eng/common/templates/steps/source-build.yml
+ - template: /eng/common/${{ parameters.templatePath }}/steps/source-build.yml
parameters:
platform:
baseOS: ${{ parameters.baseOS }}
@@ -211,7 +210,28 @@ jobs:
- ${{ if ne(parameters.preBuildSteps,'') }}:
- ${{ each preBuildStep in parameters.preBuildSteps }}:
- - ${{ preBuildStep }}
+ - ${{ if ne(preBuildStep.template, '') }}:
+ - template: ${{ preBuildStep.template }}
+ parameters:
+ osGroup: ${{ parameters.osGroup }}
+ osSubgroup: ${{ parameters.osSubgroup }}
+ archType: ${{ parameters.archType }}
+ buildConfig: ${{ parameters.buildConfig }}
+ runtimeFlavor: ${{ parameters.runtimeFlavor }}
+ runtimeVariant: ${{ parameters.runtimeVariant }}
+ helixQueues: ${{ parameters.helixQueues }}
+ targetRid: ${{ parameters.targetRid }}
+ nameSuffix: ${{ parameters.nameSuffix }}
+ platform: ${{ parameters.platform }}
+ pgoType: ${{ parameters.pgoType }}
+ shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
+ ${{ if ne(preBuildStep.forwardedParameters, '') }}:
+ ${{ each parameter in preBuildStep.forwardedParameters }}:
+ ${{ parameter }}: ${{ parameters[parameter] }}
+ ${{ if ne(preBuildStep.parameters, '') }}:
+ ${{ insert }}: ${{ preBuildStep.parameters }}
+ - ${{ else }}:
+ - ${{ preBuildStep }}
# Build
- ${{ if eq(parameters.isSourceBuild, false) }}:
@@ -219,10 +239,11 @@ jobs:
- task: CodeQL3000Init@0
displayName: Initialize CodeQL (manually-injected)
- - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -ci $(_archParameter) $(_osParameter) $(crossArg) ${{ parameters.buildArgs }} $(_officialBuildParameter) $(_buildDarwinFrameworksParameter) $(_overrideTestScriptWindowsCmdParameter)
- displayName: Build product
- ${{ if eq(parameters.useContinueOnErrorDuringBuild, true) }}:
- continueOnError: ${{ parameters.shouldContinueOnError }}
+ - template: /eng/pipelines/common/templates/global-build-step.yml
+ parameters:
+ buildArgs: ${{ parameters.buildArgs }}
+ useContinueOnErrorDuringBuild: ${{ parameters.useContinueOnErrorDuringBuild }}
+ shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
- ${{ if eq(parameters.isManualCodeQLBuild, true) }}:
- task: CodeQL3000Finalize@0
@@ -237,31 +258,45 @@ jobs:
condition: always()
# If intended to send extra steps after regular build add them here.
- - ${{ if ne(parameters.extraStepsTemplate, '') }}:
- - template: ${{ parameters.extraStepsTemplate }}
- parameters:
- osGroup: ${{ parameters.osGroup }}
- osSubgroup: ${{ parameters.osSubgroup }}
- archType: ${{ parameters.archType }}
- buildConfig: ${{ parameters.buildConfig }}
- runtimeFlavor: ${{ parameters.runtimeFlavor }}
- runtimeVariant: ${{ parameters.runtimeVariant }}
- helixQueues: ${{ parameters.helixQueues }}
- targetRid: ${{ parameters.targetRid }}
- nameSuffix: ${{ parameters.nameSuffix }}
- platform: ${{ parameters.platform }}
- pgoType: ${{ parameters.pgoType }}
- shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
- ${{ insert }}: ${{ parameters.extraStepsParameters }}
-
- - task: PublishBuildArtifacts@1
- displayName: Publish Logs
- inputs:
- PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/'
- PublishLocation: Container
- ${{ if notin(parameters.osGroup, 'browser', 'wasi') }}:
- ArtifactName: Logs_Build_${{ parameters.osGroup }}_${{ parameters.osSubGroup }}_${{ parameters.archType }}_${{ parameters.buildConfig }}_${{ parameters.nameSuffix }}
- ${{ if in(parameters.osGroup, 'browser', 'wasi') }}:
- ArtifactName: Logs_Build_${{ parameters.osGroup }}_${{ parameters.archType }}_${{ parameters.hostedOs }}_${{ parameters.buildConfig }}_${{ parameters.nameSuffix }}
- continueOnError: true
- condition: always()
+ - ${{ if ne(parameters.postBuildSteps,'') }}:
+ - ${{ each postBuildStep in parameters.postBuildSteps }}:
+ - ${{ if ne(postBuildStep.template, '') }}:
+ - template: ${{ postBuildStep.template }}
+ parameters:
+ osGroup: ${{ parameters.osGroup }}
+ osSubgroup: ${{ parameters.osSubgroup }}
+ archType: ${{ parameters.archType }}
+ buildConfig: ${{ parameters.buildConfig }}
+ runtimeFlavor: ${{ parameters.runtimeFlavor }}
+ runtimeVariant: ${{ parameters.runtimeVariant }}
+ helixQueues: ${{ parameters.helixQueues }}
+ targetRid: ${{ parameters.targetRid }}
+ nameSuffix: ${{ parameters.nameSuffix }}
+ platform: ${{ parameters.platform }}
+ shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
+ ${{ if ne(postBuildStep.forwardedParameters, '') }}:
+ ${{ each parameter in postBuildStep.forwardedParameters }}:
+ ${{ parameter }}: ${{ parameters[parameter] }}
+ ${{ if ne(postBuildStep.parameters, '') }}:
+ ${{ insert }}: ${{ postBuildStep.parameters }}
+ - ${{ else }}:
+ - ${{ postBuildStep }}
+
+ - ${{ if and(eq(parameters.isOfficialBuild, true), eq(parameters.osGroup, 'windows')) }}:
+ - powershell: ./eng/collect_vsinfo.ps1 -ArchiveRunName postbuild_log
+ displayName: Collect vslogs on exit
+ condition: always()
+
+ - template: /eng/pipelines/common/templates/publish-build-artifacts.yml
+ parameters:
+ isOfficialBuild: ${{ parameters.isOfficialBuild }}
+ displayName: Publish Logs
+ inputs:
+ PathtoPublish: '$(Build.SourcesDirectory)/artifacts/log/'
+ PublishLocation: Container
+ ${{ if notin(parameters.osGroup, 'browser', 'wasi') }}:
+ ArtifactName: Logs_Build_Attempt$(System.JobAttempt)_${{ parameters.osGroup }}_${{ parameters.osSubGroup }}_${{ parameters.archType }}_${{ parameters.buildConfig }}_${{ parameters.nameSuffix }}
+ ${{ if in(parameters.osGroup, 'browser', 'wasi') }}:
+ ArtifactName: Logs_Build_Attempt$(System.JobAttempt)_${{ parameters.osGroup }}_${{ parameters.archType }}_${{ parameters.hostedOs }}_${{ parameters.buildConfig }}_${{ parameters.nameSuffix }}
+ continueOnError: true
+ condition: always()
diff --git a/eng/pipelines/common/platform-matrix.yml b/eng/pipelines/common/platform-matrix.yml
index 8399c82ecd76..c2f754d6ee23 100644
--- a/eng/pipelines/common/platform-matrix.yml
+++ b/eng/pipelines/common/platform-matrix.yml
@@ -883,7 +883,7 @@ jobs:
jobTemplate: ${{ parameters.jobTemplate }}
helixQueuesTemplate: ${{ parameters.helixQueuesTemplate }}
variables: ${{ parameters.variables }}
- osGroup: tizen
+ osGroup: linux # Our build scripts don't support Tizen and have always used Linux as the OS parameter.
archType: armel
targetRid: tizen-armel
platform: tizen_armel
diff --git a/eng/pipelines/common/templates/browser-wasm-build-tests.yml b/eng/pipelines/common/templates/browser-wasm-build-tests.yml
index 742ad88de1c8..29a3f8547247 100644
--- a/eng/pipelines/common/templates/browser-wasm-build-tests.yml
+++ b/eng/pipelines/common/templates/browser-wasm-build-tests.yml
@@ -117,10 +117,11 @@ jobs:
eq(variables['isDefaultPipeline'], variables['shouldRunWasmBuildTestsOnDefaultPipeline']))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)_$(_hostedOs)
- extraHelixArguments: /p:BrowserHost=$(_hostedOs)
- scenarios:
- - buildwasmapps
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)_$(_hostedOs)
+ extraHelixArguments: /p:BrowserHost=$(_hostedOs)
+ scenarios:
+ - buildwasmapps
diff --git a/eng/pipelines/common/templates/global-build-step.yml b/eng/pipelines/common/templates/global-build-step.yml
new file mode 100644
index 000000000000..7f38a9fd1184
--- /dev/null
+++ b/eng/pipelines/common/templates/global-build-step.yml
@@ -0,0 +1,12 @@
+parameters:
+ buildArgs: ''
+ useContinueOnErrorDuringBuild: false
+ shouldContinueOnError: false
+ archParameter: $(_archParameter)
+ displayName: Build product
+
+steps:
+ - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -ci ${{ parameters.archParameter }} $(_osParameter) $(crossArg) ${{ parameters.buildArgs }} $(_officialBuildParameter) $(_buildDarwinFrameworksParameter) $(_overrideTestScriptWindowsCmdParameter)
+ displayName: ${{ parameters.displayName }}
+ ${{ if eq(parameters.useContinueOnErrorDuringBuild, true) }}:
+ continueOnError: ${{ parameters.shouldContinueOnError }}
diff --git a/eng/pipelines/common/templates/pipeline-with-resources.yml b/eng/pipelines/common/templates/pipeline-with-resources.yml
index 02242394fba6..f5fd4abbd25b 100644
--- a/eng/pipelines/common/templates/pipeline-with-resources.yml
+++ b/eng/pipelines/common/templates/pipeline-with-resources.yml
@@ -1,113 +1,124 @@
parameters:
- name: stages
type: stageList
-
-resources:
- containers:
- - container: linux_arm
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm
- env:
- ROOTFS_DIR: /crossrootfs/arm
-
- - container: linux_armv6
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-20.04-cross-armv6-raspbian-10
- env:
- ROOTFS_DIR: /crossrootfs/armv6
-
- - container: linux_arm64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm64
- env:
- ROOTFS_DIR: /crossrootfs/arm64
-
- - container: linux_musl_x64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64-alpine
- env:
- ROOTFS_DIR: /crossrootfs/x64
-
- - container: linux_musl_arm
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm-alpine
- env:
- ROOTFS_DIR: /crossrootfs/arm
-
- - container: linux_musl_arm64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm64-alpine
- env:
- ROOTFS_DIR: /crossrootfs/arm64
-
- # This container contains all required toolsets to build for Android and for Linux with bionic libc.
- - container: linux_bionic
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-android-amd64
-
- # This container contains all required toolsets to build for Android as well as tooling to build docker images.
- - container: android_docker
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-android-docker
-
- - container: linux_x64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64
- env:
- ROOTFS_DIR: /crossrootfs/x64
-
- - container: linux_x86
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-x86
- env:
- ROOTFS_DIR: /crossrootfs/x86
-
- - container: linux_x64_dev_innerloop
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04
-
- # We use a CentOS Stream 8 image here to test building from source on CentOS Stream 8.
- - container: SourceBuild_centos_x64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8
-
- # AlmaLinux 8 is a RHEL 8 rebuild, so we use it to test building from source on RHEL 8.
- - container: SourceBuild_linux_x64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:almalinux-8-source-build
-
- - container: linux_s390x
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-s390x
- env:
- ROOTFS_DIR: /crossrootfs/s390x
-
- - container: linux_ppc64le
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-ppc64le
- env:
- ROOTFS_DIR: /crossrootfs/ppc64le
-
- - container: linux_riscv64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-cross-riscv64
- env:
- ROOTFS_DIR: /crossrootfs/riscv64
-
- - container: debian-12-gcc13-amd64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:debian-12-gcc13-amd64
-
- - container: linux_x64_llvmaot
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8
-
- - container: browser_wasm
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-webassembly-20230913040940-1edc1c6
- env:
- ROOTFS_DIR: /crossrootfs/x64
-
- - container: wasi_wasm
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-webassembly-20230913040940-1edc1c6
- env:
- ROOTFS_DIR: /crossrootfs/x64
-
- - container: freebsd_x64
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-freebsd-12
- env:
- ROOTFS_DIR: /crossrootfs/x64
-
- - container: tizen_armel
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-armel-tizen
- env:
- ROOTFS_DIR: /crossrootfs/armel
-
- - container: debpkg
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-debpkg
-
- - container: rpmpkg
- image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-fpm
-
-stages: ${{ parameters.stages }}
+ - name: isOfficialBuild
+ type: boolean
+ default: false
+
+extends:
+ template: templateDispatch.yml
+ parameters:
+ ${{ if parameters.isOfficialBuild }}:
+ templatePath: template1es.yml
+ ${{ else }}:
+ templatePath: templatePublic.yml
+
+ stages: ${{ parameters.stages }}
+
+ containers:
+ linux_arm:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm
+ env:
+ ROOTFS_DIR: /crossrootfs/arm
+
+ linux_armv6:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-20.04-cross-armv6-raspbian-10
+ env:
+ ROOTFS_DIR: /crossrootfs/armv6
+
+ linux_arm64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-biarch-amd64-arm64
+ env:
+ ROOTFS_HOST_DIR: /crossrootfs/x64
+ ROOTFS_DIR: /crossrootfs/arm64
+
+ linux_musl_x64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64-alpine
+ env:
+ ROOTFS_DIR: /crossrootfs/x64
+
+ linux_musl_arm:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm-alpine
+ env:
+ ROOTFS_DIR: /crossrootfs/arm
+
+ linux_musl_arm64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-arm64-alpine
+ env:
+ ROOTFS_DIR: /crossrootfs/arm64
+
+ # This container contains all required toolsets to build for Android and for Linux with bionic libc.
+ linux_bionic:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-android-amd64
+
+ # This container contains all required toolsets to build for Android as well as tooling to build docker images.
+ android_docker:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-android-docker
+
+ linux_x64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64
+ env:
+ ROOTFS_DIR: /crossrootfs/x64
+
+ linux_x86:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-x86
+ env:
+ ROOTFS_DIR: /crossrootfs/x86
+
+ linux_x64_dev_innerloop:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04
+
+ # We use a CentOS Stream 9 image here to test building from source on CentOS Stream 9.
+ SourceBuild_centos_x64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9
+
+ # AlmaLinux 8 is a RHEL 8 rebuild, so we use it to test building from source on RHEL 8.
+ SourceBuild_linux_x64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:almalinux-8-source-build
+
+ linux_s390x:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-s390x
+ env:
+ ROOTFS_DIR: /crossrootfs/s390x
+
+ linux_ppc64le:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-cross-ppc64le
+ env:
+ ROOTFS_DIR: /crossrootfs/ppc64le
+
+ linux_riscv64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-cross-riscv64
+ env:
+ ROOTFS_DIR: /crossrootfs/riscv64
+
+ debian-12-gcc13-amd64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:debian-12-gcc13-amd64
+
+ linux_x64_llvmaot:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9
+
+ browser_wasm:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-webassembly-20230913040940-1edc1c6
+ env:
+ ROOTFS_DIR: /crossrootfs/x64
+
+ wasi_wasm:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-webassembly-20230913040940-1edc1c6
+ env:
+ ROOTFS_DIR: /crossrootfs/x64
+
+ freebsd_x64:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-cross-amd64-freebsd-13
+ env:
+ ROOTFS_DIR: /crossrootfs/x64
+
+ tizen_armel:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-cross-armel-tizen
+ env:
+ ROOTFS_DIR: /crossrootfs/armel
+
+ debpkg:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-debpkg
+
+ rpmpkg:
+ image: mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-fpm
\ No newline at end of file
diff --git a/eng/pipelines/common/templates/publish-build-artifacts.yml b/eng/pipelines/common/templates/publish-build-artifacts.yml
new file mode 100644
index 000000000000..b9b263c361f8
--- /dev/null
+++ b/eng/pipelines/common/templates/publish-build-artifacts.yml
@@ -0,0 +1,22 @@
+parameters:
+ - name: isOfficialBuild
+ type: boolean
+ - name: displayName
+ type: string
+ - name: inputs
+ type: object
+ - name: condition
+ type: string
+ default: ''
+
+steps:
+ - ${{ if parameters.isOfficialBuild }}:
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ inputs: ${{ parameters.inputs }}
+ condition: ${{ parameters.condition }}
+ - ${{ else }}:
+ - task: PublishBuildArtifacts@1
+ displayName: ${{ parameters.displayName }}
+ inputs: ${{ parameters.inputs }}
+ condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/pipelines/common/templates/publish-pipeline-artifacts.yml b/eng/pipelines/common/templates/publish-pipeline-artifacts.yml
new file mode 100644
index 000000000000..81f292ec5528
--- /dev/null
+++ b/eng/pipelines/common/templates/publish-pipeline-artifacts.yml
@@ -0,0 +1,17 @@
+parameters:
+- name: displayName
+ type: string
+- name: inputs
+ type: object
+- name: isOfficialBuild
+ type: boolean
+
+steps:
+ - ${{ if parameters.isOfficialBuild }}:
+ - task: 1ES.PublishPipelineArtifact@1
+ displayName: ${{ parameters.displayName }}
+ inputs: ${{ parameters.inputs }}
+ - ${{ else }}:
+ - task: PublishPipelineArtifact@1
+ displayName: ${{ parameters.displayName }}
+ inputs: ${{ parameters.inputs }}
\ No newline at end of file
diff --git a/eng/pipelines/common/templates/runtimes/build-test-job.yml b/eng/pipelines/common/templates/runtimes/build-test-job.yml
index 2b9871f183f7..2809d1487700 100644
--- a/eng/pipelines/common/templates/runtimes/build-test-job.yml
+++ b/eng/pipelines/common/templates/runtimes/build-test-job.yml
@@ -14,7 +14,7 @@ parameters:
dependsOn: []
dependOnEvaluatePaths: false
crossBuild: false
- pgoType: ''
+ isOfficialBuild: false
### Build managed test components (native components are getting built as part
### of the product build job).
@@ -143,12 +143,13 @@ jobs:
artifactName: $(microsoftNetSdkIlArtifactName)
displayName: 'Microsoft.NET.Sdk.IL package'
-
# Publish Logs
- - task: PublishPipelineArtifact@1
- displayName: Publish Logs
- inputs:
- targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: '${{ parameters.runtimeFlavor }}_Common_Runtime_TestBuildLogs_AnyOS_AnyCPU_$(buildConfig)_${{ parameters.testGroup }}'
- continueOnError: true
- condition: always()
+ - template: /eng/pipelines/common/templates/publish-pipeline-artifacts.yml
+ parameters:
+ displayName: Publish Logs
+ isOfficialBuild: ${{ parameters.isOfficialBuild }}
+ inputs:
+ targetPath: $(Build.SourcesDirectory)/artifacts/log
+ ArtifactName: '${{ parameters.runtimeFlavor }}_Common_Runtime_TestBuildLogs_Attempt$(System.JobAttempt)_AnyOS_AnyCPU_$(buildConfig)_${{ parameters.testGroup }}'
+ continueOnError: true
+ condition: always()
diff --git a/eng/pipelines/common/templates/runtimes/run-test-job.yml b/eng/pipelines/common/templates/runtimes/run-test-job.yml
index a137b7c796e6..759011b02e3d 100644
--- a/eng/pipelines/common/templates/runtimes/run-test-job.yml
+++ b/eng/pipelines/common/templates/runtimes/run-test-job.yml
@@ -341,11 +341,7 @@ jobs:
timeoutPerTestInMinutes: $(timeoutPerTestInMinutes)
timeoutPerTestCollectionInMinutes: $(timeoutPerTestCollectionInMinutes)
-
runCrossGen2: ${{ eq(parameters.readyToRun, true) }}
- ${{ if and(ne(parameters.testGroup, 'innerloop'), eq(parameters.runtimeFlavor, 'coreclr')) }}:
- runPALTestsDir: '$(coreClrProductRootFolderPath)/paltests'
-
compositeBuildMode: ${{ parameters.compositeBuildMode }}
runInUnloadableContext: ${{ parameters.runInUnloadableContext }}
tieringTest: ${{ parameters.tieringTest }}
@@ -603,7 +599,7 @@ jobs:
displayName: Publish Logs
inputs:
targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: '${{ parameters.runtimeFlavor }}_${{ parameters.runtimeVariant }}_$(LogNamePrefix)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)_${{ parameters.testGroup }}'
+ artifactName: '${{ parameters.runtimeFlavor }}_${{ parameters.runtimeVariant }}_$(LogNamePrefix)_Attempt$(System.JobAttempt)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)_${{ parameters.testGroup }}'
continueOnError: true
condition: always()
diff --git a/eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml b/eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml
index bd147de7e772..12e3f68baafb 100644
--- a/eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml
+++ b/eng/pipelines/common/templates/runtimes/send-to-helix-inner-step.yml
@@ -1,6 +1,5 @@
parameters:
osGroup: ''
- restoreParams: ''
sendParams: ''
condition: ''
displayName: ''
@@ -9,28 +8,14 @@ parameters:
steps:
- ${{ if eq(parameters.osGroup, 'windows') }}:
- # TODO: Remove and consolidate this when we move to arcade via init-tools.cmd.
- - powershell: $(Build.SourcesDirectory)\eng\common\build.ps1 -ci -warnaserror 0 ${{ parameters.restoreParams }}
- displayName: Restore blob feed tasks (Windows)
- condition: and(succeeded(), ${{ and(ne(parameters.condition, false), ne(parameters.restoreParams, '')) }})
-
- - powershell: $(Build.SourcesDirectory)\eng\common\msbuild.ps1 -ci -warnaserror 0 ${{ parameters.sendParams }}
+ - powershell: $(Build.SourcesDirectory)\eng\common\msbuild.ps1 --restore -ci -warnaserror 0 ${{ parameters.sendParams }}
displayName: ${{ parameters.displayName }} (Windows)
condition: and(succeeded(), ${{ and(ne(parameters.condition, false), ne(parameters.sendParams, '')) }})
env: ${{ parameters.environment }}
continueOnError: ${{ eq(parameters.shouldContinueOnError, true) }}
- ${{ if ne(parameters.osGroup, 'windows') }}:
- # TODO: Remove and consolidate this when we move to arcade via init-tools.sh.
- - script: $(Build.SourcesDirectory)/eng/common/build.sh --ci --warnaserror false ${{ parameters.restoreParams }}
- displayName: Restore blob feed tasks (Unix)
- condition: and(succeeded(), ${{ and(ne(parameters.condition, false), ne(parameters.restoreParams, '')) }})
- ${{ if eq(parameters.osGroup, 'freebsd') }}:
- env:
- # Arcade uses this SDK instead of trying to restore one.
- DotNetCoreSdkDir: /usr/local/dotnet
-
- - script: $(Build.SourcesDirectory)/eng/common/msbuild.sh --ci --warnaserror false ${{ parameters.sendParams }}
+ - script: $(Build.SourcesDirectory)/eng/common/msbuild.sh --restore --ci --warnaserror false ${{ parameters.sendParams }}
displayName: ${{ parameters.displayName }} (Unix)
condition: and(succeeded(), ${{ and(ne(parameters.condition, false), ne(parameters.sendParams, '')) }})
env: ${{ parameters.environment }}
diff --git a/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml b/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
index 37b455b19d83..4be1378ab3c4 100644
--- a/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
+++ b/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
@@ -17,7 +17,6 @@ parameters:
timeoutPerTestCollectionInMinutes: ''
timeoutPerTestInMinutes: ''
runCrossGen2: ''
- runPALTestsDir: ''
compositeBuildMode: false
helixProjectArguments: ''
runInUnloadableContext: ''
@@ -38,7 +37,6 @@ steps:
- template: send-to-helix-inner-step.yml
parameters:
osGroup: ${{ parameters.osGroup }}
- restoreParams: /p:DotNetPublishToBlobFeed=true -restore -projects $(Build.SourcesDirectory)$(dir)eng$(dir)empty.csproj
sendParams: ${{ parameters.helixProjectArguments }} ${{ parameters.msbuildParallelism }} /bl:$(Build.SourcesDirectory)/artifacts/log/SendToHelix.binlog /p:TargetArchitecture=${{ parameters.archType }} /p:TargetOS=${{ parameters.osGroup }} /p:TargetOSSubgroup=${{ parameters.osSubgroup }} /p:Configuration=${{ parameters.buildConfig }}
condition: and(succeeded(), ${{ parameters.condition }})
shouldContinueOnError: ${{ parameters.shouldContinueOnError }}
@@ -59,7 +57,6 @@ steps:
_LongRunningGcTests: ${{ parameters.longRunningGcTests }}
_GcSimulatorTests: ${{ parameters.gcSimulatorTests }}
_Scenarios: ${{ join(',', parameters.scenarios) }}
- _PALTestsDir: ${{ parameters.runPALTestsDir }}
_TimeoutPerTestCollectionInMinutes: ${{ parameters.timeoutPerTestCollectionInMinutes }}
_TimeoutPerTestInMinutes: ${{ parameters.timeoutPerTestInMinutes }}
RuntimeFlavor: ${{ parameters.runtimeFlavor }}
diff --git a/eng/pipelines/common/templates/runtimes/xplat-job.yml b/eng/pipelines/common/templates/runtimes/xplat-job.yml
index f4ac7e829571..625d88d63d3e 100644
--- a/eng/pipelines/common/templates/runtimes/xplat-job.yml
+++ b/eng/pipelines/common/templates/runtimes/xplat-job.yml
@@ -20,11 +20,12 @@ parameters:
enableMicrobuild: ''
gatherAssetManifests: false
disableComponentGovernance: false
+ templatePath: 'templates'
variables: {} ## any extra variables to add to the defaults defined below
jobs:
-- template: /eng/common/templates/job/job.yml
+- template: /eng/common/${{ parameters.templatePath }}/job/job.yml
parameters:
name: ${{ parameters.name }}
@@ -77,15 +78,6 @@ jobs:
- name: buildConfig
value: ${{ parameters.buildConfig }}
- - name: archType
- value: ${{ parameters.archType }}
-
- - name: osGroup
- value: ${{ parameters.osGroup }}
-
- - name: osSubgroup
- value: ${{ parameters.osSubgroup }}
-
- ${{ if and(eq(variables['System.TeamProject'], 'internal'), ne(variables['Build.Reason'], 'PullRequest')) }}:
- name: _HelixSource
value: official/dotnet/runtime/$(Build.SourceBranch)
diff --git a/eng/pipelines/common/templates/simple-wasm-build-tests.yml b/eng/pipelines/common/templates/simple-wasm-build-tests.yml
index dcba5522f441..7a593f4f9be2 100644
--- a/eng/pipelines/common/templates/simple-wasm-build-tests.yml
+++ b/eng/pipelines/common/templates/simple-wasm-build-tests.yml
@@ -41,11 +41,12 @@ jobs:
eq(variables['alwaysRunVar'], true),
eq(variables['isDefaultPipeline'], variables['shouldRunWasmBuildTestsOnDefaultPipeline']))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)_$(_hostedOs)
- extraHelixArguments: /p:BrowserHost=$(_hostedOs)
- scenarios:
- - buildwasmapps
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)_$(_hostedOs)
+ extraHelixArguments: /p:BrowserHost=$(_hostedOs)
+ scenarios:
+ - buildwasmapps
diff --git a/eng/pipelines/common/templates/template1es.yml b/eng/pipelines/common/templates/template1es.yml
new file mode 100644
index 000000000000..0770e37d6bd0
--- /dev/null
+++ b/eng/pipelines/common/templates/template1es.yml
@@ -0,0 +1,31 @@
+
+
+parameters:
+ - name: templatePath
+ type: string
+ default: 'templates-official'
+ - name: stages
+ type: stageList
+ - name: containers
+ type: object
+
+
+resources:
+ repositories:
+ - repository: 1ESPipelineTemplates
+ type: git
+ name: 1ESPipelineTemplates/1ESPipelineTemplates
+ ref: refs/tags/release
+
+extends:
+ template: v1/1ES.Official.PipelineTemplate.yml@1ESPipelineTemplates
+ parameters:
+ pool:
+ name: $(DncEngInternalBuildPool)
+ image: 1es-windows-2022
+ os: windows
+
+ containers:
+ ${{ parameters.containers }}
+
+ stages: ${{ parameters.stages }}
\ No newline at end of file
diff --git a/eng/pipelines/common/templates/templateDispatch.yml b/eng/pipelines/common/templates/templateDispatch.yml
new file mode 100644
index 000000000000..1860af47aeef
--- /dev/null
+++ b/eng/pipelines/common/templates/templateDispatch.yml
@@ -0,0 +1,13 @@
+parameters:
+ - name: templatePath
+ type: string
+ - name: stages
+ type: stageList
+ - name: containers
+ type: object
+
+extends:
+ template: ${{ parameters.templatePath }}
+ parameters:
+ stages: ${{ parameters.stages }}
+ containers: ${{ parameters.containers }}
\ No newline at end of file
diff --git a/eng/pipelines/common/templates/templatePublic.yml b/eng/pipelines/common/templates/templatePublic.yml
new file mode 100644
index 000000000000..cd7c02720167
--- /dev/null
+++ b/eng/pipelines/common/templates/templatePublic.yml
@@ -0,0 +1,21 @@
+
+parameters:
+ - name: templatePath
+ type: string
+ default: 'templates'
+ - name: stages
+ type: stageList
+ - name: containers
+ type: object
+
+resources:
+ containers:
+ - ${{ each container_pair in parameters.containers }}:
+ - ${{ if container_pair.value.image }}:
+ - container: ${{ container_pair.key }}
+ ${{ each pair in container_pair.value }}:
+ ${{ if notIn(pair.key, 'tenantId', 'identityType', 'registry') }}:
+ ${{ pair.key }}: ${{ pair.value }}
+
+
+stages: ${{ parameters.stages }}
\ No newline at end of file
diff --git a/eng/pipelines/common/templates/wasm-build-only.yml b/eng/pipelines/common/templates/wasm-build-only.yml
index 9e9b0cb332c8..4f2588e49856 100644
--- a/eng/pipelines/common/templates/wasm-build-only.yml
+++ b/eng/pipelines/common/templates/wasm-build-only.yml
@@ -38,7 +38,8 @@ jobs:
buildArgs: -s mono+libs+packs+libs.tests$(workloadSubsetArg) -c $(_BuildConfig) /p:BrowserHost=$(_hostedOs) ${{ parameters.extraBuildArgs }} /p:TestAssemblies=false $(extraBuildArgs)
timeoutInMinutes: 120
condition: ${{ parameters.condition }}
- extraStepsTemplate: /eng/pipelines/common/wasm-post-build-steps.yml
- extraStepsParameters:
- publishArtifactsForWorkload: ${{ parameters.publishArtifactsForWorkload }}
- publishWBT: ${{ parameters.publishWBT }}
+ postBuildSteps:
+ - template: /eng/pipelines/common/wasm-post-build-steps.yml
+ parameters:
+ publishArtifactsForWorkload: ${{ parameters.publishArtifactsForWorkload }}
+ publishWBT: ${{ parameters.publishWBT }}
diff --git a/eng/pipelines/common/templates/wasm-debugger-tests.yml b/eng/pipelines/common/templates/wasm-debugger-tests.yml
index fd19fe5385cb..17c0f415cf1b 100644
--- a/eng/pipelines/common/templates/wasm-debugger-tests.yml
+++ b/eng/pipelines/common/templates/wasm-debugger-tests.yml
@@ -52,10 +52,11 @@ jobs:
and(
eq(variables['isDefaultPipeline'], variables['shouldRunOnDefaultPipelines']),
eq(${{ parameters.isWasmOnlyBuild }}, ${{ parameters.runOnlyOnWasmOnlyPipelines }})))
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_${{ parameters.browser }}_$(_BuildConfig)
- extraHelixArguments: /p:BrowserHost=$(_hostedOs) /p:_DebuggerHosts=${{ parameters.browser }}
- scenarios:
- - wasmdebuggertests
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_${{ parameters.browser }}_$(_BuildConfig)
+ extraHelixArguments: /p:BrowserHost=$(_hostedOs) /p:_DebuggerHosts=${{ parameters.browser }}
+ scenarios:
+ - wasmdebuggertests
diff --git a/eng/pipelines/common/templates/wasm-library-tests.yml b/eng/pipelines/common/templates/wasm-library-tests.yml
index a848e25e1a28..4260e56ba2cb 100644
--- a/eng/pipelines/common/templates/wasm-library-tests.yml
+++ b/eng/pipelines/common/templates/wasm-library-tests.yml
@@ -62,9 +62,10 @@ jobs:
eq(variables['alwaysRunVar'], true),
eq(variables['isDefaultPipeline'], variables['shouldRunOnDefaultPipelines']))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- extraHelixArguments: /p:BrowserHost=$(_hostedOs) $(_wasmRunSmokeTestsOnlyArg) ${{ parameters.extraHelixArgs }}
- scenarios: ${{ parameters.scenarios }}
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ extraHelixArguments: /p:BrowserHost=$(_hostedOs) $(_wasmRunSmokeTestsOnlyArg) ${{ parameters.extraHelixArgs }}
+ scenarios: ${{ parameters.scenarios }}
diff --git a/eng/pipelines/common/templates/wasm-runtime-tests.yml b/eng/pipelines/common/templates/wasm-runtime-tests.yml
index 43671a546ba0..2b006bb2db3c 100644
--- a/eng/pipelines/common/templates/wasm-runtime-tests.yml
+++ b/eng/pipelines/common/templates/wasm-runtime-tests.yml
@@ -42,9 +42,10 @@ jobs:
or(
eq(variables['alwaysRunVar'], true),
eq(variables['isDefaultPipeline'], variables['shouldRunOnDefaultPipelines']))
- extraStepsTemplate: //eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
diff --git a/eng/pipelines/common/upload-artifact-step.yml b/eng/pipelines/common/upload-artifact-step.yml
index 249da066c7aa..d4091a7cc192 100644
--- a/eng/pipelines/common/upload-artifact-step.yml
+++ b/eng/pipelines/common/upload-artifact-step.yml
@@ -7,6 +7,7 @@ parameters:
artifactName: ''
displayName: ''
condition: succeeded()
+ isOfficialBuild: false
steps:
# Zip Artifact
@@ -20,9 +21,11 @@ steps:
includeRootFolder: ${{ parameters.includeRootFolder }}
condition: ${{ parameters.condition }}
- - task: PublishBuildArtifacts@1
- displayName: 'Publish ${{ parameters.displayName }}'
- inputs:
- pathtoPublish: $(Build.StagingDirectory)/${{ parameters.artifactName }}${{ parameters.archiveExtension }}
- artifactName: ${{ parameters.artifactName }}
- condition: ${{ parameters.condition }}
+ - template: /eng/pipelines/common/templates/publish-build-artifacts.yml
+ parameters:
+ isOfficialBuild: ${{ parameters.isOfficialBuild }}
+ displayName: 'Publish ${{ parameters.displayName }}'
+ inputs:
+ PathtoPublish: $(Build.StagingDirectory)/${{ parameters.artifactName }}${{ parameters.archiveExtension }}
+ artifactName: ${{ parameters.artifactName }}
+ condition: ${{ parameters.condition }}
\ No newline at end of file
diff --git a/eng/pipelines/common/upload-intermediate-artifacts-step.yml b/eng/pipelines/common/upload-intermediate-artifacts-step.yml
index bde6c61a0a04..da9b1ef0b627 100644
--- a/eng/pipelines/common/upload-intermediate-artifacts-step.yml
+++ b/eng/pipelines/common/upload-intermediate-artifacts-step.yml
@@ -25,9 +25,11 @@ steps:
TargetFolder: '$(Build.StagingDirectory)/IntermediateArtifacts/${{ parameters.name }}'
CleanTargetFolder: true
-- task: PublishBuildArtifacts@1
- displayName: Publish intermediate artifacts
- inputs:
- pathToPublish: '$(Build.StagingDirectory)/IntermediateArtifacts'
- artifactName: IntermediateArtifacts
- artifactType: container
+- template: /eng/pipelines/common/templates/publish-build-artifacts.yml
+ parameters:
+ isOfficialBuild: true
+ displayName: Publish intermediate artifacts
+ inputs:
+ PathtoPublish: '$(Build.StagingDirectory)/IntermediateArtifacts'
+ ArtifactName: IntermediateArtifacts
+ ArtifactType: container
diff --git a/eng/pipelines/common/variables.yml b/eng/pipelines/common/variables.yml
index 2a757f82572a..075dea178ab6 100644
--- a/eng/pipelines/common/variables.yml
+++ b/eng/pipelines/common/variables.yml
@@ -1,3 +1,8 @@
+parameters:
+ - name: templatePath
+ type: string
+ default: 'templates'
+
variables:
# These values enable longer delays, configurable number of retries, and special understanding of TCP hang-up
@@ -54,3 +59,5 @@ variables:
eq(variables['isRollingBuild'], true))) ]
- template: /eng/pipelines/common/perf-variables.yml
+
+- template: /eng/common/${{ parameters.templatePath }}/variables/pool-providers.yml
\ No newline at end of file
diff --git a/eng/pipelines/common/xplat-setup.yml b/eng/pipelines/common/xplat-setup.yml
index 1f0c10166950..794a23bb218b 100644
--- a/eng/pipelines/common/xplat-setup.yml
+++ b/eng/pipelines/common/xplat-setup.yml
@@ -22,7 +22,7 @@ jobs:
dependOnEvaluatePaths: ${{ and(eq(variables['Build.Reason'], 'PullRequest'), in(variables['Build.DefinitionName'], 'runtime', 'runtime-community', 'runtime-extra-platforms', 'runtime-wasm', 'runtime-wasm-libtests', 'runtime-wasm-non-libtests', 'dotnet-linker-tests', 'runtime-dev-innerloop', 'runtime-coreclr superpmi-replay', 'runtime-coreclr superpmi-diffs')) }}
variables:
- - template: /eng/common/templates/variables/pool-providers.yml
+ - template: /eng/common/${{ coalesce(parameters.jobParameters.templatePath, 'templates') }}/variables/pool-providers.yml
# Disable component governance in our CI builds. These builds are not shipping nor
# are they a service. Also the component governance jobs issue lots of inconsequential
# warnings and errors into our build timelines that make it hard to track down
@@ -45,6 +45,15 @@ jobs:
- name: _BuildConfig
value: $(buildConfigUpper)
+
+ - name: archType
+ value: ${{ parameters.archType }}
+
+ - name: osGroup
+ value: ${{ parameters.osGroup }}
+
+ - name: osSubgroup
+ value: ${{ parameters.osSubgroup }}
- name: _runSmokeTestsOnlyArg
value: '/p:RunSmokeTestsOnly=$(isRunSmokeTestsOnly)'
@@ -159,21 +168,28 @@ jobs:
# Official Build Linux Pool
${{ if and(or(in(parameters.osGroup, 'linux', 'freebsd', 'android', 'tizen'), eq(parameters.jobParameters.hostedOs, 'linux')), ne(variables['System.TeamProject'], 'public')) }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals Build.Ubuntu.1804.Amd64
+ demands: ImageOverride -equals 1es-ubuntu-2204
+ os: linux
- # OSX Build Pool (we don't have on-prem OSX BuildPool).
- ${{ if in(parameters.osGroup, 'osx', 'maccatalyst', 'ios', 'iossimulator', 'tvos', 'tvossimulator') }}:
+ # OSX Public Build Pool (we don't have on-prem OSX BuildPool).
+ ${{ if and(in(parameters.osGroup, 'osx', 'maccatalyst', 'ios', 'iossimulator', 'tvos', 'tvossimulator'), eq(variables['System.TeamProject'], 'public')) }}:
vmImage: 'macos-12'
+ # OSX Internal Pool
+ ${{ if and(in(parameters.osGroup, 'osx', 'maccatalyst', 'ios', 'iossimulator', 'tvos', 'tvossimulator'), ne(variables['System.TeamProject'], 'public')) }}:
+ name: "Azure Pipelines"
+ vmImage: 'macOS-12'
+ os: macOS
+
# Official Build Windows Pool
${{ if and(or(eq(parameters.osGroup, 'windows'), eq(parameters.jobParameters.hostedOs, 'windows')), ne(variables['System.TeamProject'], 'public')) }}:
name: $(DncEngInternalBuildPool)
- demands: ImageOverride -equals windows.vs2022preview.amd64
+ demands: ImageOverride -equals windows.vs2022.amd64
# Public Windows Build Pool
${{ if and(or(eq(parameters.osGroup, 'windows'), eq(parameters.jobParameters.hostedOs, 'windows')), eq(variables['System.TeamProject'], 'public')) }}:
name: $(DncEngPublicBuildPool)
- demands: ImageOverride -equals windows.vs2022preview.amd64.open
+ demands: ImageOverride -equals windows.vs2022.amd64.open
${{ if eq(parameters.helixQueuesTemplate, '') }}:
diff --git a/eng/pipelines/coreclr/ci.yml b/eng/pipelines/coreclr/ci.yml
index adf052b014f4..afff0abf00b5 100644
--- a/eng/pipelines/coreclr/ci.yml
+++ b/eng/pipelines/coreclr/ci.yml
@@ -155,3 +155,24 @@ extends:
readyToRun: true
displayNameArgs: R2R_CG2
liveLibrariesBuildConfig: Release
+
+ #
+ # PAL Tests
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: Debug
+ platforms:
+ - linux_x64
+ - linux_musl_x64
+ - linux_arm64
+ - linux_musl_arm64
+ - osx_x64
+ - osx_arm64
+ jobParameters:
+ buildArgs: -s clr.paltests+clr.paltestlist
+ nameSuffix: PALTests
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/templates/run-paltests-step.yml
diff --git a/eng/pipelines/coreclr/perf-non-wasm-jobs.yml b/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
index b7f87d7cf89d..3e28bb7bcae6 100644
--- a/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
+++ b/eng/pipelines/coreclr/perf-non-wasm-jobs.yml
@@ -48,15 +48,16 @@ jobs:
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig) /p:BuildMonoAOTCrossCompiler=true /p:MonoLibClang="/usr/local/lib/libclang.so.16" /p:AotHostArchitecture=x64 /p:AotHostOS=linux
nameSuffix: AOT
isOfficialBuild: false
- extraStepsTemplate: /eng/pipelines/common/upload-artifact-step.yml
- extraStepsParameters:
- rootFolder: '$(Build.SourcesDirectory)/artifacts/'
- includeRootFolder: true
- displayName: AOT Mono Artifacts
- artifactName: LinuxMonoAOTx64
- archiveExtension: '.tar.gz'
- archiveType: tar
- tarCompression: gz
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-artifact-step.yml
+ parameters:
+ rootFolder: '$(Build.SourcesDirectory)/artifacts/'
+ includeRootFolder: true
+ displayName: AOT Mono Artifacts
+ artifactName: LinuxMonoAOTx64
+ archiveExtension: '.tar.gz'
+ archiveType: tar
+ tarCompression: gz
# build mono Android scenarios
- template: /eng/pipelines/common/platform-matrix.yml
@@ -70,15 +71,16 @@ jobs:
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig)
nameSuffix: AndroidMono
isOfficialBuild: false
- extraStepsTemplate: /eng/pipelines/coreclr/templates/build-perf-sample-apps.yml
- extraStepsParameters:
- rootFolder: '$(Build.SourcesDirectory)/artifacts/'
- includeRootFolder: true
- displayName: Android Mono Artifacts
- artifactName: AndroidMonoarm64
- archiveExtension: '.tar.gz'
- archiveType: tar
- tarCompression: gz
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/templates/build-perf-sample-apps.yml
+ parameters:
+ rootFolder: '$(Build.SourcesDirectory)/artifacts/'
+ includeRootFolder: true
+ displayName: Android Mono Artifacts
+ artifactName: AndroidMonoarm64
+ archiveExtension: '.tar.gz'
+ archiveType: tar
+ tarCompression: gz
# build mono iOS scenarios
- template: /eng/pipelines/common/platform-matrix.yml
@@ -92,15 +94,16 @@ jobs:
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig)
nameSuffix: iOSMono
isOfficialBuild: false
- extraStepsTemplate: /eng/pipelines/coreclr/templates/build-perf-sample-apps.yml
- extraStepsParameters:
- rootFolder: '$(Build.SourcesDirectory)/artifacts/'
- includeRootFolder: true
- displayName: iOS Mono Artifacts
- artifactName: iOSMonoarm64
- archiveExtension: '.tar.gz'
- archiveType: tar
- tarCompression: gz
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/templates/build-perf-sample-apps.yml
+ parameters:
+ rootFolder: '$(Build.SourcesDirectory)/artifacts/'
+ includeRootFolder: true
+ displayName: iOS Mono Artifacts
+ artifactName: iOSMonoarm64
+ archiveExtension: '.tar.gz'
+ archiveType: tar
+ tarCompression: gz
# build NativeAOT iOS scenarios
- template: /eng/pipelines/common/platform-matrix.yml
@@ -114,15 +117,16 @@ jobs:
buildArgs: --cross -s clr.alljits+clr.tools+clr.nativeaotruntime+clr.nativeaotlibs+libs -c $(_BuildConfig)
nameSuffix: iOSNativeAOT
isOfficialBuild: false
- extraStepsTemplate: /eng/pipelines/coreclr/templates/build-perf-sample-apps.yml
- extraStepsParameters:
- rootFolder: '$(Build.SourcesDirectory)/artifacts/'
- includeRootFolder: true
- displayName: iOS NativeAOT Artifacts
- artifactName: iOSNativeAOTarm64
- archiveExtension: '.tar.gz'
- archiveType: tar
- tarCompression: gz
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/templates/build-perf-sample-apps.yml
+ parameters:
+ rootFolder: '$(Build.SourcesDirectory)/artifacts/'
+ includeRootFolder: true
+ displayName: iOS NativeAOT Artifacts
+ artifactName: iOSNativeAOTarm64
+ archiveExtension: '.tar.gz'
+ archiveType: tar
+ tarCompression: gz
# build mono
- template: /eng/pipelines/common/platform-matrix.yml
@@ -409,9 +413,10 @@ jobs:
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig)
nameSuffix: Mono_Packs
isOfficialBuild: false
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
# build PerfBDN app
- template: /eng/pipelines/common/platform-matrix.yml
@@ -429,12 +434,13 @@ jobs:
isOfficialBuild: false
pool:
vmImage: 'macos-12'
- extraStepsTemplate: /eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
- extraStepsParameters:
- rootFolder: '$(Build.SourcesDirectory)/artifacts/'
- includeRootFolder: true
- displayName: Android BDN App Artifacts
- artifactName: PerfBDNAppArm
- archiveExtension: '.tar.gz'
- archiveType: tar
- tarCompression: gz
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/templates/build-perf-bdn-app.yml
+ parameters:
+ rootFolder: '$(Build.SourcesDirectory)/artifacts/'
+ includeRootFolder: true
+ displayName: Android BDN App Artifacts
+ artifactName: PerfBDNAppArm
+ archiveExtension: '.tar.gz'
+ archiveType: tar
+ tarCompression: gz
diff --git a/eng/pipelines/coreclr/perf-wasm-jobs.yml b/eng/pipelines/coreclr/perf-wasm-jobs.yml
index 7758eac4d12c..bbfba4883475 100644
--- a/eng/pipelines/coreclr/perf-wasm-jobs.yml
+++ b/eng/pipelines/coreclr/perf-wasm-jobs.yml
@@ -25,9 +25,10 @@ jobs:
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig) /p:AotHostArchitecture=x64 /p:AotHostOS=$(_hostedOS)
nameSuffix: wasm
isOfficialBuild: false
- extraStepsTemplate: /eng/pipelines/coreclr/perf-wasm-prepare-artifacts-steps.yml
- extraStepsParameters:
- configForBuild: Release
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/perf-wasm-prepare-artifacts-steps.yml
+ parameters:
+ configForBuild: Release
#run mono wasm microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
@@ -94,9 +95,10 @@ jobs:
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig) /p:AotHostArchitecture=x64 /p:AotHostOS=$(_hostedOS)
nameSuffix: wasm
isOfficialBuild: false
- extraStepsTemplate: /eng/pipelines/coreclr/perf-wasm-prepare-artifacts-steps.yml
- extraStepsParameters:
- configForBuild: Release
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/perf-wasm-prepare-artifacts-steps.yml
+ parameters:
+ configForBuild: Release
# run mono wasm interpreter (default) microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
diff --git a/eng/pipelines/coreclr/perf_slow.yml b/eng/pipelines/coreclr/perf_slow.yml
index 91e3ad34f38c..d6124364554b 100644
--- a/eng/pipelines/coreclr/perf_slow.yml
+++ b/eng/pipelines/coreclr/perf_slow.yml
@@ -143,15 +143,16 @@ extends:
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig) /p:MonoAOTEnableLLVM=true /p:MonoBundleLLVMOptimizer=true /p:BuildMonoAOTCrossCompiler=true /p:MonoLibClang="/usr/local/lib/libclang.so.16" /p:AotHostArchitecture=arm64 /p:AotHostOS=linux
nameSuffix: AOT
isOfficialBuild: false
- extraStepsTemplate: /eng/pipelines/common/upload-artifact-step.yml
- extraStepsParameters:
- rootFolder: '$(Build.SourcesDirectory)/artifacts/'
- includeRootFolder: true
- displayName: AOT Mono Artifacts
- artifactName: LinuxMonoAOTarm64
- archiveExtension: '.tar.gz'
- archiveType: tar
- tarCompression: gz
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-artifact-step.yml
+ parameters:
+ rootFolder: '$(Build.SourcesDirectory)/artifacts/'
+ includeRootFolder: true
+ displayName: AOT Mono Artifacts
+ artifactName: LinuxMonoAOTarm64
+ archiveExtension: '.tar.gz'
+ archiveType: tar
+ tarCompression: gz
# run mono aot microbenchmarks perf job
- template: /eng/pipelines/common/platform-matrix.yml
diff --git a/eng/pipelines/coreclr/templates/build-jit-job.yml b/eng/pipelines/coreclr/templates/build-jit-job.yml
index 5566f228b2f0..2f78d87bf816 100644
--- a/eng/pipelines/coreclr/templates/build-jit-job.yml
+++ b/eng/pipelines/coreclr/templates/build-jit-job.yml
@@ -132,6 +132,6 @@ jobs:
displayName: Publish Logs
inputs:
targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: '$(publishLogsArtifactPrefix)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ artifactName: '$(publishLogsArtifactPrefix)_Attempt$(System.JobAttempt)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
continueOnError: true
condition: always()
diff --git a/eng/pipelines/coreclr/templates/build-job.yml b/eng/pipelines/coreclr/templates/build-job.yml
index 365c1432aa41..ebaeae5e1240 100644
--- a/eng/pipelines/coreclr/templates/build-job.yml
+++ b/eng/pipelines/coreclr/templates/build-job.yml
@@ -17,7 +17,6 @@ parameters:
testGroup: ''
timeoutInMinutes: ''
variables: {}
- pgoType: ''
### Product build
jobs:
@@ -36,21 +35,18 @@ jobs:
dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }}
disableComponentGovernance: ${{ parameters.disableComponentGovernance }}
disableClrTest: ${{ parameters.disableClrTest }}
- pgoType: ${{ parameters.pgoType }}
# Compute job name from template parameters
- name: ${{ format('coreclr_{0}_product_build_{1}{2}_{3}_{4}{5}',
+ name: ${{ format('coreclr_{0}_product_build_{1}{2}_{3}_{4}',
parameters.runtimeVariant,
parameters.osGroup,
parameters.osSubgroup,
parameters.archType,
- parameters.buildConfig,
- parameters.pgoType) }}
- displayName: ${{ format('CoreCLR {0} Product Build {1}{2} {3} {4} {5}',
+ parameters.buildConfig) }}
+ displayName: ${{ format('CoreCLR {0} Product Build {1}{2} {3} {4}',
parameters.runtimeVariant, parameters.osGroup, parameters.osSubgroup,
parameters.archType,
- parameters.buildConfig,
- parameters.pgoType) }}
+ parameters.buildConfig) }}
# Run all steps in the container.
# Note that the containers are defined in platform-matrix.yml
@@ -70,15 +66,6 @@ jobs:
value: ''
- name: publishLogsArtifactPrefix
value: 'BuildLogs_CoreCLR'
- - ${{ if and(ne(variables['System.TeamProject'], 'public'), ne(variables['Build.Reason'], 'PullRequest')) }}:
- # Variables used to publish packages to blob feed
- - name: dotnetfeedUrl
- value: https://dotnetfeed.blob.core.windows.net/dotnet-coreclr/index.json
- - name: dotnetfeedPAT
- value: $(dotnetfeed-storage-access-key-1)
- # Variables used by arcade to gather asset manifests
- - name: _DotNetPublishToBlobFeed
- value: true
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- group: AzureDevOps-Artifact-Feeds-Pats
- name: officialBuildIdArg
@@ -89,7 +76,7 @@ jobs:
- name: enforcePgoArg
value: ''
# The EnforcePGO script is only supported on Windows and is not supported on arm64.
- - ${{ if and(eq(parameters.buildConfig, 'Release'), and(eq(parameters.osGroup, 'windows'), ne(parameters.archType, 'arm64')), ne(parameters.pgoType, 'pgo')) }}:
+ - ${{ if and(eq(parameters.buildConfig, 'Release'), and(eq(parameters.osGroup, 'windows'), ne(parameters.archType, 'arm64'))) }}:
- name: enforcePgoArg
value: '-enforcepgo'
@@ -103,26 +90,11 @@ jobs:
value: ''
- ${{ if ne(parameters.testGroup, 'innerloop') }}:
- name: clrRuntimeComponentsBuildArg
- value: '-component runtime -component alljits -component paltests -component nativeaot -component spmi '
- - ${{ if and(eq(parameters.osGroup, 'linux'), eq(parameters.archType, 'x86')) }}:
- - name: clrRuntimeComponentsBuildArg
- value: '-component runtime -component jit -component iltools -component spmi '
-
- - name: pgoInstrumentArg
- value: ''
- - ${{ if eq(parameters.pgoType, 'PGO' )}}:
- - name: pgoInstrumentArg
- value: '-pgoinstrument '
+ value: '-component runtime -component alljits -component nativeaot -component spmi '
- name: SignType
value: $[ coalesce(variables.OfficialSignType, 'real') ]
- - name: clrRuntimePortableBuildArg
- value: ''
- - ${{ if eq(parameters.osGroup, 'tizen') }}:
- - name: clrRuntimePortableBuildArg
- value: '-portablebuild=false'
-
# Set a default empty argument for the pgo path.
# This will be set during the 'native prerequisites' step if PGO optimization is enabled.
- name: CoreClrPgoDataArg
@@ -137,7 +109,6 @@ jobs:
- ${{ parameters.variables }}
steps:
-
# Install native dependencies
# Linux builds use docker images with dependencies preinstalled,
# and FreeBSD builds use a build agent with dependencies
@@ -194,14 +165,14 @@ jobs:
# Build CoreCLR Runtime
- ${{ if ne(parameters.osGroup, 'windows') }}:
- - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) $(archType) $(crossArg) $(osArg) -ci $(compilerArg) $(clrRuntimeComponentsBuildArg) $(pgoInstrumentArg) $(officialBuildIdArg) $(clrInterpreterBuildArg) $(clrRuntimePortableBuildArg) $(CoreClrPgoDataArg) $(nativeSymbols)
+ - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) $(archType) $(crossArg) $(osArg) -ci $(compilerArg) $(clrRuntimeComponentsBuildArg) $(officialBuildIdArg) $(clrInterpreterBuildArg) $(CoreClrPgoDataArg) $(nativeSymbols)
displayName: Build CoreCLR Runtime
- ${{ if eq(parameters.osGroup, 'windows') }}:
- - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) $(archType) -ci $(enforcePgoArg) $(pgoInstrumentArg) $(officialBuildIdArg) $(clrInterpreterBuildArg) $(CoreClrPgoDataArg)
+ - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) $(archType) -ci $(enforcePgoArg) $(officialBuildIdArg) $(clrInterpreterBuildArg) $(CoreClrPgoDataArg)
displayName: Build CoreCLR Runtime
- ${{ if or(eq(parameters.crossBuild, 'true'), ne(parameters.archType, 'x64')) }}:
- - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) $(archType) -hostarch x64 $(osArg) -ci $(compilerArg) -component crosscomponents -cmakeargs "-DCLR_CROSS_COMPONENTS_BUILD=1" $(officialBuildIdArg) $(clrRuntimePortableBuildArg)
+ - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) $(archType) -hostarch x64 $(osArg) -ci $(compilerArg) -component crosscomponents -cmakeargs "-DCLR_CROSS_COMPONENTS_BUILD=1" $(officialBuildIdArg)
displayName: Build CoreCLR Cross-Arch Tools (Tools that run on x64 targeting x86)
- ${{ if in(parameters.osGroup, 'osx', 'ios', 'tvos') }}:
@@ -211,12 +182,8 @@ jobs:
displayName: Disk Usage after Build
# Build CoreCLR Managed Components
- - ${{ if or(ne(parameters.osGroup, 'linux'), ne(parameters.archType, 'x86')) }}:
- - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -subset clr.corelib+clr.nativecorelib+clr.nativeaotlibs+clr.tools+clr.packages+clr.paltestlist $(crossArg) $(compilerArg) -arch $(archType) $(osArg) -c $(buildConfig) $(pgoInstrumentArg) $(officialBuildIdArg) -ci
- displayName: Build managed product components and packages
- - ${{ if and(eq(parameters.osGroup, 'linux'), eq(parameters.archType, 'x86')) }}:
- - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -subset clr.corelib $(crossArg) -arch $(archType) $(osArg) -c $(buildConfig) $(pgoInstrumentArg) $(officialBuildIdArg) -ci
- displayName: Build managed product components and packages
+ - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -subset clr.corelib+clr.nativecorelib+clr.nativeaotlibs+clr.tools+clr.packages $(crossArg) $(compilerArg) -arch $(archType) $(osArg) -c $(buildConfig) $(officialBuildIdArg) -ci
+ displayName: Build managed product components and packages
# Build native test components
- ${{ if and(ne(parameters.isOfficialBuild, true), ne(parameters.disableClrTest, true)) }}:
@@ -264,30 +231,9 @@ jobs:
artifactName: $(buildProductArtifactName)
displayName: 'product build'
- - ${{ if and(in(parameters.osGroup, 'windows', 'linux'), ne(parameters.archType, 'x86'), eq(parameters.pgoType, '')) }}:
- - template: /eng/pipelines/coreclr/templates/crossdac-build.yml
- parameters:
- archType: ${{ parameters.archType }}
- osGroup: ${{ parameters.osGroup }}
- osSubgroup: ${{ parameters.osSubgroup }}
- isOfficialBuild: ${{ parameters.signBinaries }}
- ${{ if eq(parameters.archType, 'arm') }}:
- hostArchType: x86
- ${{ else }}:
- hostArchType: x64
-
- - ${{ if and(in(parameters.osGroup, 'windows'), eq(parameters.archType, 'x86'), eq(parameters.pgoType, '')) }}:
- - template: /eng/pipelines/coreclr/templates/crossdac-build.yml
- parameters:
- archType: arm
- osGroup: ${{ parameters.osGroup }}
- osSubgroup: ${{ parameters.osSubgroup }}
- isOfficialBuild: ${{ parameters.signBinaries }}
- hostArchType: x86
-
- ${{ if and(ne(parameters.testGroup, ''), ne(parameters.disableClrTest, true)) }}:
# Publish test native components for consumption by test execution.
- - ${{ if and(ne(parameters.isOfficialBuild, true), eq(parameters.pgoType, '')) }}:
+ - ${{ if ne(parameters.isOfficialBuild, true) }}:
- template: /eng/pipelines/common/upload-artifact-step.yml
parameters:
rootFolder: $(nativeTestArtifactRootFolderPath)
@@ -298,25 +244,23 @@ jobs:
artifactName: $(nativeTestArtifactName)
displayName: 'native test components'
- # Get key vault secrets for publishing
- - ${{ if and(ne(variables['System.TeamProject'], 'public'), ne(variables['Build.Reason'], 'PullRequest')) }}:
- - task: AzureKeyVault@1
- inputs:
- azureSubscription: 'DotNet-Engineering-Services_KeyVault'
- KeyVaultName: EngKeyVault
- SecretsFilter: 'dotnetfeed-storage-access-key-1,microsoft-symbol-server-pat,symweb-symbol-server-pat'
-
# Save packages using the prepare-signed-artifacts format.
- - ${{ if and(eq(parameters.isOfficialBuild, true), eq(parameters.pgoType, '')) }}:
+ - ${{ if eq(parameters.isOfficialBuild, true) }}:
- template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
parameters:
name: ${{ parameters.platform }}
+ - ${{ if and(eq(parameters.isOfficialBuild, true), eq(parameters.osGroup, 'windows')) }}:
+ - powershell: ./eng/collect_vsinfo.ps1 -ArchiveRunName postbuild_log
+ displayName: Collect vslogs on exit
+ condition: always()
+
+
# Publish Logs
- task: PublishPipelineArtifact@1
displayName: Publish Logs
inputs:
targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: '$(publishLogsArtifactPrefix)${{ parameters.pgoType }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ artifactName: '$(publishLogsArtifactPrefix)_Attempt$(System.JobAttempt)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
continueOnError: true
condition: always()
diff --git a/eng/pipelines/coreclr/templates/crossdac-build.yml b/eng/pipelines/coreclr/templates/crossdac-build.yml
deleted file mode 100644
index a8accb9d2e3d..000000000000
--- a/eng/pipelines/coreclr/templates/crossdac-build.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-parameters:
- archType: ''
- isOfficialBuild: false
- osGroup: ''
- osSubgroup: ''
- hostArchType: ''
-
-steps:
- # Always build the crossdac, that way we know in CI/PR if things break to build.
- - ${{ if and(eq(parameters.osGroup, 'windows'), notin(parameters.archType, 'x86')) }}:
- - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) ${{ parameters.archType }} -hostarch ${{ parameters.hostArchType }} -ci -os linux -cmakeargs "-DCLR_CROSS_COMPONENTS_BUILD=1" -ninja $(officialBuildIdArg) -component crosscomponents
- displayName: Build Cross OS Linux DAC for Windows
-
- - script: $(Build.SourcesDirectory)/src/coreclr/build-runtime$(scriptExt) $(buildConfig) ${{ parameters.archType }} -hostarch ${{ parameters.hostArchType }} -ci -os alpine -cmakeargs "-DCLR_CROSS_COMPONENTS_BUILD=1" -ninja $(officialBuildIdArg) -component crosscomponents
- displayName: Build Cross OS Linux-musl DAC for Windows
-
- - powershell: |
- function CopyAndVerifyCrossOsAssets {
- [CmdletBinding()]
- param (
- [Parameter(Mandatory)][string]$crossDacDir,
- [Parameter(Mandatory)][string]$targetDir
- )
-
- $crossDacDir = Join-Path $crossDacDir -ChildPath '${{ parameters.hostArchType }}'
-
- $availableFiles = ls -File $crossDacDir
-
- Write-Host "Probed for files in ""$crossDacDir"", found:"
- $availableFiles | fl
-
- if (-not ("mscordaccore.dll" -in $availableFiles.Name `
- -and "mscordaccore.pdb" -in $availableFiles.Name `
- -and "mscordbi.dll" -in $availableFiles.Name `
- -and "mscordbi.pdb" -in $availableFiles.Name`
- ))
- {
- Write-Error "Couldn't find one of the expected crossdac files."
- }
-
- New-Item $targetDir -ItemType 'Directory' -Force -ea 0
- $availableFiles | %{ cp $_.FullName $targetDir -v }
- }
-
- $buildMuslDacRootFolderPath = "$(Build.SourcesDirectory)/artifacts/bin/coreclr/alpine.${{ parameters.archType }}.$(buildConfigUpper)"
- $buildMuslStagingPath = "$(crossDacArtifactPath)/Linux_musl.${{ parameters.archType }}.$(buildConfigUpper)/${{ parameters.hostArchType }}"
-
- $buildLinuxDacRootFolderPath = "$(Build.SourcesDirectory)/artifacts/bin/coreclr/Linux.${{ parameters.archType }}.$(buildConfigUpper)"
- $buildLinuxDacStagingPath = "$(crossDacArtifactPath)/Linux.${{ parameters.archType }}.$(buildConfigUpper)/${{ parameters.hostArchType }}"
-
-
- CopyAndVerifyCrossOsAssets -CrossDacDir $buildMuslDacRootFolderPath -TargetDir $buildMuslStagingPath
- CopyAndVerifyCrossOsAssets -CrossDacDir $buildLinuxDacRootFolderPath -TargetDir $buildLinuxDacStagingPath
-
- Write-Host "Final directory contents:"
- ls -R $(crossDacArtifactPath)
-
- displayName: Gather CrossDac Artifacts
-
- - template: /eng/pipelines/coreclr/templates/sign-diagnostic-files.yml
- parameters:
- basePath: $(crossDacArtifactPath)
- isOfficialBuild: ${{ parameters.isOfficialBuild }}
- timeoutInMinutes: 30
-
- - ${{ if eq(parameters.osGroup, 'linux') }}:
- - task: CopyFiles@2
- displayName: Gather runtime for CrossDac
- inputs:
- SourceFolder: $(coreClrProductRootFolderPath)
- Contents: libcoreclr.so
- TargetFolder: '$(crossDacArtifactPath)/${{ parameters.osGroup }}${{ parameters.osSubgroup }}.$(archType).$(buildConfigUpper)/${{ parameters.hostArchType }}'
-
- # Make the assets available in a single container for the packaging job.
- - task: PublishBuildArtifacts@1
- displayName: Publish runtime for CrossDac
- inputs:
- pathtoPublish: $(crossDacArtifactPath)
- PublishLocation: Container
- artifactName: $(buildCrossDacArtifactName)
diff --git a/eng/pipelines/coreclr/templates/crossdac-hostarch.yml b/eng/pipelines/coreclr/templates/crossdac-hostarch.yml
new file mode 100644
index 000000000000..e100217a4815
--- /dev/null
+++ b/eng/pipelines/coreclr/templates/crossdac-hostarch.yml
@@ -0,0 +1,9 @@
+parameters:
+ archType: ''
+
+variables:
+ - name: crossDacHostArch
+ value: x64
+ - ${{ if eq(parameters.archType, 'arm') }}:
+ - name: crossDacHostArch
+ value: x86
\ No newline at end of file
diff --git a/eng/pipelines/coreclr/templates/crossdac-pack.yml b/eng/pipelines/coreclr/templates/crossdac-pack.yml
deleted file mode 100644
index 7489ceeedad0..000000000000
--- a/eng/pipelines/coreclr/templates/crossdac-pack.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-parameters:
- archType: ''
- buildConfig: ''
- container: ''
- crossDacPlatforms: {}
- dependOnEvaluatePaths: false
- isOfficialBuild: false
- osGroup: ''
- osSubgroup: ''
- platform: ''
- pool: ''
- runtimeVariant: ''
- testGroup: ''
- timeoutInMinutes: ''
- variables: {}
-
-jobs:
-- template: xplat-pipeline-job.yml
- parameters:
- archType: ${{ parameters.archType }}
- buildConfig: ${{ parameters.buildConfig }}
- container: ${{ parameters.container }}
- condition: ${{ parameters.isOfficialBuild }}
- helixType: 'build/product/'
- osGroup: ${{ parameters.osGroup }}
- osSubgroup: ${{ parameters.osSubgroup }}
- pool: ${{ parameters.pool }}
- runtimeVariant: ${{ parameters.runtimeVariant }}
- timeoutInMinutes: ${{ parameters.timeoutInMinutes }}
- dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }}
-
- name: crossdacpack
- displayName: CrossDac Packaging
-
- variables:
- - name: officialBuildIdArg
- value: ''
- - name: crossDacArgs
- value: '/p:CrossDacArtifactsDir=$(crossDacArtifactPath)/$(buildCrossDacArtifactName)'
- - ${{ if and(eq(variables['System.TeamProject'], 'internal'), ne(variables['Build.Reason'], 'PullRequest')) }}:
- - name: officialBuildIdArg
- value: '/p:OfficialBuildId=$(Build.BuildNumber)'
- - name: SignType
- value: $[ coalesce(variables.OfficialSignType, 'real') ]
- - ${{ parameters.variables }}
-
- dependsOn:
- - ${{ if ne(parameters.crossDacPlatforms, '') }}:
- - ${{ each platform in parameters.crossDacPlatforms }}:
- - ${{ parameters.runtimeFlavor }}_${{ parameters.runtimeVariant }}_product_build_${{ platform }}_${{ parameters.buildConfig }}
-
- steps:
- - task: DownloadBuildArtifacts@0
- displayName: Download CrossDac artifacts
- inputs:
- artifactName: $(buildCrossDacArtifactName)
- downloadPath: $(crossDacArtifactPath)
- checkDownloadedFiles: true
-
- - script: $(Build.SourcesDirectory)$(dir)build$(scriptExt) -subset crossdacpack -arch $(archType) $(osArg) -c $(buildConfig) $(officialBuildIdArg) $(crossDacArgs) -ci
- displayName: Build crossdac packaging
-
- # Save packages using the prepare-signed-artifacts format.
- - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- parameters:
- name: ${{ parameters.platform }}
-
- # Upload to artifacts to be signed
- - task: PublishPipelineArtifact@1
- displayName: Publish Logs
- inputs:
- targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: 'CrossDacPackagingLogs'
- continueOnError: true
- condition: always()
diff --git a/eng/pipelines/coreclr/templates/helix-queues-setup.yml b/eng/pipelines/coreclr/templates/helix-queues-setup.yml
index 8e5ddd367595..378396bc323a 100644
--- a/eng/pipelines/coreclr/templates/helix-queues-setup.yml
+++ b/eng/pipelines/coreclr/templates/helix-queues-setup.yml
@@ -52,7 +52,7 @@ jobs:
# Browser wasm
- ${{ if eq(parameters.platform, 'browser_wasm') }}:
- - (Ubuntu.1804.Amd64)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-webassembly
+ - (Ubuntu.1804.Amd64)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-webassembly
# iOS devices
- ${{ if in(parameters.platform, 'ios_arm64') }}:
@@ -65,44 +65,44 @@ jobs:
# Linux arm
- ${{ if eq(parameters.platform, 'linux_arm') }}:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- - (Ubuntu.1804.Arm32.Open)Ubuntu.1804.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7
+ - (Ubuntu.1804.Arm32.Open)Ubuntu.2204.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- - (Ubuntu.1804.Arm32)Ubuntu.1804.Armarch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7
+ - (Ubuntu.1804.Arm32)Ubuntu.2204.Armarch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7
# Linux arm64
- ${{ if eq(parameters.platform, 'linux_arm64') }}:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- - (Ubuntu.1804.Arm64.Open)Ubuntu.1804.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8
+ - (Ubuntu.2204.Arm64.Open)Ubuntu.2204.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-helix-arm64v8
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- - (Ubuntu.1804.Arm64)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8
+ - (Ubuntu.1804.Arm64)Ubuntu.2204.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8
# Linux musl x64
- ${{ if eq(parameters.platform, 'linux_musl_x64') }}:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- - (Alpine.315.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.15-helix-amd64
+ - (Alpine.317.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-amd64
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- - (Alpine.315.Amd64)ubuntu.1804.amd64@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.15-helix-amd64
+ - (Alpine.319.Amd64)Ubuntu.2204.Amd64@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-helix-amd64
# Linux musl arm32
- ${{ if eq(parameters.platform, 'linux_musl_arm') }}:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- - (Alpine.315.Arm32.Open)Ubuntu.1804.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.15-helix-arm32v7
+ - (Alpine.317.Arm32.Open)Ubuntu.2204.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-arm32v7
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- - (Alpine.315.Arm32)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.15-helix-arm32v7
+ - (Alpine.319.Arm32)Ubuntu.2204.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-helix-arm32v7
# Linux musl arm64
- ${{ if eq(parameters.platform, 'linux_musl_arm64') }}:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- - (Alpine.315.Arm64.Open)Ubuntu.1804.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.15-helix-arm64v8
+ - (Alpine.317.Arm64.Open)Ubuntu.2204.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-arm64v8
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- - (Alpine.315.Arm64)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.15-helix-arm64v8
+ - (Alpine.319.Arm64)Ubuntu.2204.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-helix-arm64v8
# Linux x64
- ${{ if eq(parameters.platform, 'linux_x64') }}:
- ${{ if eq(variables['System.TeamProject'], 'public') }}:
- - Ubuntu.1804.Amd64.Open
+ - Ubuntu.2004.Amd64.Open
- ${{ if eq(variables['System.TeamProject'], 'internal') }}:
- - Ubuntu.1804.Amd64
+ - Ubuntu.2004.Amd64
# OSX arm64
- ${{ if eq(parameters.platform, 'osx_arm64') }}:
diff --git a/eng/pipelines/coreclr/templates/run-paltests-step.yml b/eng/pipelines/coreclr/templates/run-paltests-step.yml
new file mode 100644
index 000000000000..3b3881986f7d
--- /dev/null
+++ b/eng/pipelines/coreclr/templates/run-paltests-step.yml
@@ -0,0 +1,23 @@
+parameters:
+ continueOnError: 'false' # optional -- determines whether to continue the build if the step errors
+ helixQueues: '' # required -- Helix queues
+ buildConfig: '' # required -- build configuration
+ archType: '' # required -- targeting CPU architecture
+ osGroup: '' # required -- operating system for the job
+ osSubgroup: '' # optional -- operating system subgroup
+ dependOnEvaluatePaths: false
+
+steps:
+ - template: /eng/pipelines/common/templates/runtimes/send-to-helix-step.yml
+ parameters:
+ displayName: 'Send job to Helix'
+ helixBuild: $(Build.BuildNumber)
+ helixSource: $(_HelixSource)
+ helixType: 'build/tests/'
+ helixQueues: ${{ parameters.helixQueues }}
+ creator: dotnet-bot
+ helixProjectArguments: '$(Build.SourcesDirectory)/src/coreclr/scripts/paltests.proj'
+ BuildConfig: ${{ parameters.buildConfig }}
+ osGroup: ${{ parameters.osGroup }}
+ archType: ${{ parameters.archType }}
+ shouldContinueOnError: ${{ parameters.continueOnError }}
diff --git a/eng/pipelines/coreclr/templates/sign-diagnostic-files.yml b/eng/pipelines/coreclr/templates/sign-diagnostic-files.yml
index 89cbbec6390d..ed1f85239258 100644
--- a/eng/pipelines/coreclr/templates/sign-diagnostic-files.yml
+++ b/eng/pipelines/coreclr/templates/sign-diagnostic-files.yml
@@ -12,10 +12,15 @@ steps:
version: '6.0.x'
installationPath: '$(Agent.TempDirectory)/dotnet'
- - task: EsrpCodeSigning@1
+ - task: EsrpCodeSigning@5
displayName: Sign Diagnostic Binaries
inputs:
- ConnectedServiceName: 'dotnetesrp-diagnostics-dnceng'
+ ConnectedServiceName: 'diagnostics-esrp-kvcertuser'
+ AppRegistrationClientId: '2234cdec-a13f-4bb2-aa63-04c57fd7a1f9'
+ AppRegistrationTenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47'
+ AuthAKVName: 'clrdiag-esrp-id'
+ AuthCertName: 'dotnetesrp-diagnostics-aad-ssl-cert'
+ AuthSignCertName: 'dotnet-diagnostics-esrp-pki-onecert'
FolderPath: ${{ parameters.basePath }}
Pattern: |
**/mscordaccore*.dll
@@ -48,6 +53,7 @@ steps:
SessionTimeout: ${{ parameters.timeoutInMinutes }}
MaxConcurrency: '50'
MaxRetryAttempts: '5'
+ PendingAnalysisWaitTimeoutMinutes: '5'
env:
DOTNET_MULTILEVEL_LOOKUP: 0
DOTNET_ROOT: '$(Agent.TempDirectory)/dotnet'
diff --git a/eng/pipelines/coreclr/templates/superpmi-asmdiffs-checked-release-job.yml b/eng/pipelines/coreclr/templates/superpmi-asmdiffs-checked-release-job.yml
index 396fe5077f59..659483c9bc30 100644
--- a/eng/pipelines/coreclr/templates/superpmi-asmdiffs-checked-release-job.yml
+++ b/eng/pipelines/coreclr/templates/superpmi-asmdiffs-checked-release-job.yml
@@ -34,7 +34,7 @@ jobs:
- name: releaseProductRootFolderPath
value: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).Release'
- name: releaseProductArtifactName
- value: 'CoreCLRProduct_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_release'
+ value: 'CoreCLRProduct_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_release'
steps:
diff --git a/eng/pipelines/coreclr/templates/superpmi-diffs-job.yml b/eng/pipelines/coreclr/templates/superpmi-diffs-job.yml
index aace201eb966..9c2290c43a3d 100644
--- a/eng/pipelines/coreclr/templates/superpmi-diffs-job.yml
+++ b/eng/pipelines/coreclr/templates/superpmi-diffs-job.yml
@@ -49,7 +49,7 @@ jobs:
- name: releaseProductRootFolderPath
value: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).Release'
- name: releaseProductArtifactName
- value: 'CoreCLRProduct_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_release'
+ value: 'CoreCLRProduct_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_release'
steps:
diff --git a/eng/pipelines/coreclr/templates/xplat-pipeline-job.yml b/eng/pipelines/coreclr/templates/xplat-pipeline-job.yml
index 83347d78b8b1..82d6346a60d7 100644
--- a/eng/pipelines/coreclr/templates/xplat-pipeline-job.yml
+++ b/eng/pipelines/coreclr/templates/xplat-pipeline-job.yml
@@ -11,6 +11,7 @@ parameters:
liveLibrariesBuildConfig: ''
strategy: ''
pool: ''
+ templatePath: 'templates'
# arcade-specific parameters
condition: true
@@ -28,6 +29,7 @@ parameters:
jobs:
- template: /eng/pipelines/common/templates/runtimes/xplat-job.yml
parameters:
+ templatePath: ${{ parameters.templatePath }}
buildConfig: ${{ parameters.buildConfig }}
archType: ${{ parameters.archType }}
osGroup: ${{ parameters.osGroup }}
@@ -38,7 +40,6 @@ jobs:
crossBuild: ${{ parameters.crossBuild }}
strategy: ${{ parameters.strategy }}
pool: ${{ parameters.pool }}
- pgoType: ${{ parameters.pgoType }}
# arcade-specific parameters
condition: and(succeeded(), ${{ parameters.condition }})
@@ -65,26 +66,20 @@ jobs:
# Build product defines what we are trying to build, either coreclr or mono
- name: buildProductArtifactName
- value: 'CoreCLRProduct_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ value: 'CoreCLRProduct_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
- name: buildProductRootFolderPath
value: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).$(buildConfigUpper)'
- - name: buildCrossDacArtifactName
- value: CoreCLRCrossDacArtifacts
-
- - name: crossDacArtifactPath
- value: $(Build.SourcesDirectory)/artifacts/$(buildCrossDacArtifactName)
-
# We need this because both mono and coreclr build currently depends on CoreClr
- name: coreClrProductArtifactName
- value: 'CoreCLRProduct_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ value: 'CoreCLRProduct_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
- name: coreClrProductRootFolderPath
value: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).$(buildConfigUpper)'
- name: corelibProductArtifactName
- value: 'CoreLib_${{ parameters.pgoType }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ value: 'CoreLib_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
- name: managedGenericTestArtifactName
value: 'CoreCLRManagedTestArtifacts_AnyOS_AnyCPU_$(buildConfig)'
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-android.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-android.yml
index 23c57b87fe53..9fd1769fe18f 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-android.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-android.yml
@@ -44,10 +44,11 @@ jobs:
# Turn off the testing for now, until https://github.com/dotnet/runtime/issues/60128 gets resolved
# ${{ if eq(variables['isRollingBuild'], true) }}:
# # extra steps, run tests
- # extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- # extraStepsParameters:
- # creator: dotnet-bot
- # testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ # postBuildSteps:
+ # - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ # parameters:
+ # creator: dotnet-bot
+ # testRunNamePrefixSuffix: Mono_$(_BuildConfig)
# extraVariablesTemplates:
# - template: /eng/pipelines/common/templates/runtimes/test-variables.yml
@@ -77,7 +78,8 @@ jobs:
buildArgs: -s mono+libs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true $(_runSmokeTestsOnlyArg) /p:EnableAdditionalTimezoneChecks=true
timeoutInMinutes: 480
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-androidemulator.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-androidemulator.yml
index 65a890976e73..a114b1b744a1 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-androidemulator.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-androidemulator.yml
@@ -40,10 +40,11 @@ jobs:
buildArgs: -s mono+libs -c $(_BuildConfig)
timeoutInMinutes: 240
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
@@ -78,10 +79,11 @@ jobs:
buildArgs: -s mono+libs -c $(_BuildConfig)
timeoutInMinutes: 240
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
@@ -111,7 +113,8 @@ jobs:
buildArgs: -s mono+libs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true $(_runSmokeTestsOnlyArg)
timeoutInMinutes: 180
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslike.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslike.yml
index 84477d2c25a8..ef0425042b5e 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslike.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslike.yml
@@ -38,11 +38,12 @@ jobs:
buildArgs: -s mono+libs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true /p:DevTeamProvisioning=- /p:RunAOTCompilation=true $(_runSmokeTestsOnlyArg) /p:BuildTestsOnHelix=true /p:EnableAdditionalTimezoneChecks=true /p:UsePortableRuntimePack=true /p:BuildDarwinFrameworks=true /p:IsManualOrRollingBuild=true
timeoutInMinutes: 480
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
#
# iOS/tvOS devices
@@ -80,14 +81,15 @@ jobs:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
testGroup: innerloop
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- compileOnHelix: true
- interpreter: true
- testBuildArgs: /p:ArchiveTests=true /p:DevTeamProvisioning=- /p:RunAOTCompilation=true /p:MonoForceInterpreter=true /p:BuildTestsOnHelix=true
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ compileOnHelix: true
+ interpreter: true
+ testBuildArgs: /p:ArchiveTests=true /p:DevTeamProvisioning=- /p:RunAOTCompilation=true /p:MonoForceInterpreter=true /p:BuildTestsOnHelix=true
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
#
# iOS/tvOS devices
@@ -116,11 +118,12 @@ jobs:
buildArgs: --cross -s clr.alljits+clr.tools+clr.nativeaotruntime+clr.nativeaotlibs+libs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true /p:RunSmokeTestsOnly=true /p:DevTeamProvisioning=- /p:BuildTestsOnHelix=true /p:UseNativeAOTRuntime=true /p:RunAOTCompilation=false /p:ContinuousIntegrationBuild=true
timeoutInMinutes: 180
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
- extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
#
# Build the whole product using NativeAOT for iOS/tvOS and run runtime tests with iOS/tvOS devices
@@ -151,8 +154,9 @@ jobs:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
testGroup: innerloop
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testBuildArgs: tree nativeaot/SmokeTests /p:BuildNativeAOTRuntimePack=true
- testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testBuildArgs: tree nativeaot/SmokeTests /p:BuildNativeAOTRuntimePack=true
+ testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslikesimulator.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslikesimulator.yml
index c7bcfc15e132..b11b4be72ed6 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslikesimulator.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-ioslikesimulator.yml
@@ -40,11 +40,12 @@ jobs:
buildArgs: -s mono+libs+host+packs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true $(_runSmokeTestsOnlyArg) /p:RunAOTCompilation=true /p:MonoForceInterpreter=true /p:BuildDarwinFrameworks=true
timeoutInMinutes: 180
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- interpreter: true
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ interpreter: true
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
#
# Build the whole product using Mono for iOSSimulator/tvOSSimulator and run runtime tests with iOS/tvOS simulators
@@ -84,14 +85,15 @@ jobs:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
testGroup: innerloop
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- compileOnHelix: true
- interpreter: true
- testBuildArgs: /p:ArchiveTests=true /p:DevTeamProvisioning=- /p:RunAOTCompilation=true /p:MonoForceInterpreter=true /p:BuildTestsOnHelix=true
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ compileOnHelix: true
+ interpreter: true
+ testBuildArgs: /p:ArchiveTests=true /p:DevTeamProvisioning=- /p:RunAOTCompilation=true /p:MonoForceInterpreter=true /p:BuildTestsOnHelix=true
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
#
# Build the whole product using Native AOT for iOSSimulator/tvOSSimulator and run runtime tests with iOS/tvOS simulators
@@ -131,8 +133,9 @@ jobs:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
testGroup: innerloop
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testBuildArgs: tree nativeaot/SmokeTests /p:BuildNativeAOTRuntimePack=true
- testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testBuildArgs: tree nativeaot/SmokeTests /p:BuildNativeAOTRuntimePack=true
+ testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-linuxbionic.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-linuxbionic.yml
index 352eabe9d22d..7eb5fa47f808 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-linuxbionic.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-linuxbionic.yml
@@ -42,7 +42,8 @@ jobs:
buildArgs: -s mono+libs+host+packs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true
timeoutInMinutes: 480
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)_LinuxBionic
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)_LinuxBionic
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-maccatalyst.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-maccatalyst.yml
index 736bf0516dfd..936fe60bb483 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-maccatalyst.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-maccatalyst.yml
@@ -37,10 +37,11 @@ jobs:
buildArgs: -s mono+libs+host+packs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true /p:DevTeamProvisioning=adhoc /p:RunAOTCompilation=true /p:MonoForceInterpreter=true /p:BuildDarwinFrameworks=true
timeoutInMinutes: 180
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
#
# MacCatalyst interp - requires AOT Compilation and Interp flags
@@ -70,8 +71,9 @@ jobs:
buildArgs: -s mono+libs+host+packs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true $(_runSmokeTestsOnlyArg) /p:DevTeamProvisioning=adhoc /p:RunAOTCompilation=true /p:MonoForceInterpreter=true /p:BuildDarwinFrameworks=true /p:EnableAppSandbox=true
timeoutInMinutes: 180
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- interpreter: true
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ interpreter: true
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
diff --git a/eng/pipelines/extra-platforms/runtime-extra-platforms-other.yml b/eng/pipelines/extra-platforms/runtime-extra-platforms-other.yml
index c279c318e34d..e2e95c6bac57 100644
--- a/eng/pipelines/extra-platforms/runtime-extra-platforms-other.yml
+++ b/eng/pipelines/extra-platforms/runtime-extra-platforms-other.yml
@@ -279,15 +279,16 @@ jobs:
eq(dependencies.evaluate_paths.outputs['SetPathVars_installer.containsChange'], true),
eq(variables['isRollingBuild'], true))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- condition: >-
- or(
- eq(variables['librariesContainsChange'], true),
- eq(variables['monoContainsChange'], true),
- eq(variables['isRollingBuild'], true))
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ condition: >-
+ or(
+ eq(variables['librariesContainsChange'], true),
+ eq(variables['monoContainsChange'], true),
+ eq(variables['isRollingBuild'], true))
#
# Build the whole product using Mono and run runtime tests
@@ -354,39 +355,41 @@ jobs:
# Mono CoreCLR runtime Test executions using live libraries and LLVM Full AOT
# Only when Mono is changed
#
-- template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/common/global-build-job.yml
- helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
- buildConfig: Release
- runtimeFlavor: mono
- platforms:
- - linux_x64
- # - linux_arm64
- variables:
- - name: timeoutPerTestInMinutes
- value: 60
- - name: timeoutPerTestCollectionInMinutes
- value: 180
- jobParameters:
- testGroup: innerloop
- nameSuffix: AllSubsets_Mono_LLVMFullAot_RuntimeTests
- runtimeVariant: llvmfullaot
- buildArgs: -s mono+libs+clr.hosts+clr.iltools -c Release /p:MonoEnableLLVM=true /p:MonoBundleLLVMOptimizer=true
- timeoutInMinutes: 300
+# Disabled due to OOM errors: https://github.com/dotnet/runtime/issues/90427
+# - template: /eng/pipelines/common/platform-matrix.yml
+# parameters:
+# jobTemplate: /eng/pipelines/common/global-build-job.yml
+# helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml
+# buildConfig: Release
+# runtimeFlavor: mono
+# platforms:
+# - linux_x64
+# # - linux_arm64
+# variables:
+# - name: timeoutPerTestInMinutes
+# value: 60
+# - name: timeoutPerTestCollectionInMinutes
+# value: 180
+# jobParameters:
+# testGroup: innerloop
+# nameSuffix: AllSubsets_Mono_LLVMFullAot_RuntimeTests
+# runtimeVariant: llvmfullaot
+# buildArgs: -s mono+libs+clr.hosts+clr.iltools -c Release /p:MonoEnableLLVM=true /p:MonoBundleLLVMOptimizer=true
+# timeoutInMinutes: 300
- condition: >-
- or(
- eq(dependencies.evaluate_paths.outputs['SetPathVars_mono_excluding_wasm.containsChange'], true),
- eq(dependencies.evaluate_paths.outputs['SetPathVars_runtimetests.containsChange'], true),
- eq(variables['isRollingBuild'], true))
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- llvmAotStepContainer: linux_x64_llvmaot
- testRunNamePrefixSuffix: Mono_Release
- extraVariablesTemplates:
- - template: /eng/pipelines/common/templates/runtimes/test-variables.yml
+# condition: >-
+# or(
+# eq(dependencies.evaluate_paths.outputs['SetPathVars_mono_excluding_wasm.containsChange'], true),
+# eq(dependencies.evaluate_paths.outputs['SetPathVars_runtimetests.containsChange'], true),
+# eq(variables['isRollingBuild'], true))
+# postBuildSteps:
+# - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+# parameters:
+# creator: dotnet-bot
+# llvmAotStepContainer: linux_x64_llvmaot
+# testRunNamePrefixSuffix: Mono_Release
+# extraVariablesTemplates:
+# - template: /eng/pipelines/common/templates/runtimes/test-variables.yml
#
# Mono CoreCLR runtime Test executions using live libraries in interpreter mode
diff --git a/eng/pipelines/global-build.yml b/eng/pipelines/global-build.yml
index 6f80e6ad1045..8b6291cd0db0 100644
--- a/eng/pipelines/global-build.yml
+++ b/eng/pipelines/global-build.yml
@@ -30,6 +30,7 @@ variables:
extends:
template: /eng/pipelines/common/templates/pipeline-with-resources.yml
parameters:
+ isOfficialBuild: false
stages:
- stage: Build
jobs:
@@ -177,3 +178,97 @@ extends:
timeoutInMinutes: 95
condition:
eq(dependencies.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true)
+
+ #
+ # Build CoreCLR as a non-portable build
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ helixQueuesTemplate: /eng/pipelines/libraries/helix-queues-setup.yml
+ buildConfig: checked
+ runtimeFlavor: coreclr
+ platforms:
+ - tizen_armel
+ jobParameters:
+ testScope: innerloop
+ nameSuffix: CoreCLR_NonPortable
+ buildArgs: -s clr.native+clr.tools+clr.corelib+clr.nativecorelib+clr.aot+clr.packages -c $(_BuildConfig) /p:PortableBuild=false
+ timeoutInMinutes: 120
+ condition: >-
+ or(
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true),
+ eq(variables['isRollingBuild'], true))
+
+ #
+ # Build CoreCLR with no R2R
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ helixQueuesTemplate: /eng/pipelines/libraries/helix-queues-setup.yml
+ buildConfig: checked
+ runtimeFlavor: coreclr
+ platforms:
+ - linux_x86
+ jobParameters:
+ testScope: innerloop
+ nameSuffix: CoreCLR_NoR2R
+ buildArgs: -s clr.runtime+clr.jit+clr.iltools+clr.spmi+clr.corelib -c $(_BuildConfig)
+ timeoutInMinutes: 120
+ condition: >-
+ or(
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true),
+ eq(variables['isRollingBuild'], true))
+
+ #
+ # Build CoreCLR release
+ # Always as they are needed by Installer and we always build and test the Installer.
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/coreclr/templates/build-job.yml
+ buildConfig: release
+ platforms:
+ - freebsd_x64
+ jobParameters:
+ testGroup: innerloop
+ # Mono/runtimetests also need this, but skip for wasm
+ condition:
+ or(
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true),
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_mono_excluding_wasm.containsChange'], true),
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_runtimetests.containsChange'], true),
+ eq(variables['isRollingBuild'], true))
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/libraries/build-job.yml
+ buildConfig: ${{ variables.debugOnPrReleaseOnRolling }}
+ platforms:
+ - freebsd_x64
+ jobParameters:
+ testScope: innerloop
+ condition:
+ or(
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true),
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_libraries.containsChange'], true),
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_installer.containsChange'], true),
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_runtimetests.containsChange'], true),
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_mono_excluding_wasm.containsChange'], true),
+ eq(variables['isRollingBuild'], true))
+
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/installer/jobs/build-job.yml
+ buildConfig: Release
+ platforms:
+ - freebsd_x64
+ jobParameters:
+ liveRuntimeBuildConfig: release
+ liveLibrariesBuildConfig: ${{ variables.debugOnPrReleaseOnRolling }}
+ runOnlyIfDependenciesSucceeded: true
+ condition:
+ or(
+ eq(dependencies.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true),
+ eq(variables['isRollingBuild'], true))
diff --git a/eng/pipelines/installer/jobs/build-job.yml b/eng/pipelines/installer/jobs/build-job.yml
index 1d89cfad70eb..43f19c69ccac 100644
--- a/eng/pipelines/installer/jobs/build-job.yml
+++ b/eng/pipelines/installer/jobs/build-job.yml
@@ -19,7 +19,6 @@ parameters:
displayName: ''
runtimeVariant: ''
pool: ''
- pgoType: ''
runOnlyIfDependenciesSucceeded: false
# The target names here should match container names in the resources section in our pipelines, like runtime.yml
@@ -59,11 +58,10 @@ jobs:
dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }}
disableClrTest: ${{ parameters.disableClrTest }}
- pgoType: ${{ parameters.pgoType }}
# Compute job name from template parameters
- name: ${{ format('installer_{0}_{1}_{2}_{3}_{4}_', parameters.pgoType, parameters.runtimeFlavor, parameters.runtimeVariant, coalesce(parameters.name, parameters.platform), parameters.buildConfig) }}
- displayName: ${{ format('{0} Installer Build and Test {1} {2} {3} {4}', parameters.pgoType, parameters.runtimeFlavor, parameters.runtimeVariant, coalesce(parameters.name, parameters.platform), parameters.buildConfig) }}
+ name: ${{ format('installer_{0}_{1}_{2}_{3}_', parameters.runtimeFlavor, parameters.runtimeVariant, coalesce(parameters.name, parameters.platform), parameters.buildConfig) }}
+ displayName: ${{ format('Installer Build and Test {0} {1} {2} {3}', parameters.runtimeFlavor, parameters.runtimeVariant, coalesce(parameters.name, parameters.platform), parameters.buildConfig) }}
# Run all steps in the container.
# Note that the containers are defined in platform-matrix.yml
@@ -86,30 +84,31 @@ jobs:
- name: OfficialBuildArg
value: ''
+ # Explicitly enable tests for linux even though it is a cross build using mariner
+ # They still work in this configuration and until they run on Helix, it is our only
+ # linux test coverage in the installer pipeline
- name: SkipTests
value: ${{ or(
not(in(parameters.archType, 'x64', 'x86')),
eq(parameters.runtimeFlavor, 'mono'),
eq(parameters.isOfficialBuild, true),
- eq(parameters.crossBuild, true),
- eq(parameters.pgoType, 'PGO')) }}
+ and(
+ eq(parameters.crossBuild, true),
+ not(and(
+ eq(parameters.osGroup, 'linux'),
+ eq(parameters.osSubgroup, ''))
+ ))) }}
- name: BuildAction
value: -test
- - ${{ if eq(or(not(in(parameters.archType, 'x64', 'x86')), eq(parameters.runtimeFlavor, 'mono'), eq(parameters.isOfficialBuild, true), eq(parameters.crossBuild, true), eq(parameters.pgoType, 'PGO')), true) }}:
+ - ${{ if eq(or(not(in(parameters.archType, 'x64', 'x86')), eq(parameters.runtimeFlavor, 'mono'), eq(parameters.isOfficialBuild, true), and(eq(parameters.crossBuild, true), not(and(eq(parameters.osGroup, 'linux'), eq(parameters.osSubgroup, ''))))), true) }}:
- name: BuildAction
value: ''
- name: SignType
value: test
- - name: pgoInstrumentArg
- value: ''
- - ${{ if eq(parameters.pgoType, 'PGO' )}}:
- - name: pgoInstrumentArg
- value: '-pgoinstrument '
-
# Set up non-PR build from internal project
- ${{ if eq(parameters.isOfficialBuild, true) }}:
- name: SignType
@@ -145,7 +144,6 @@ jobs:
build.cmd -subset host+packs -ci
$(BuildAction)
-configuration $(_BuildConfig)
- $(pgoInstrumentArg)
$(LiveOverridePathArgs)
$(CommonMSBuildArgs)
$(MsbuildSigningArguments)
@@ -217,7 +215,6 @@ jobs:
/p:CrossBuild=${{ parameters.crossBuild }}
/p:PortableBuild=$(_PortableBuild)
/p:SkipTests=$(SkipTests)
- $(pgoInstrumentArg)
$(LiveOverridePathArgs)
$(CommonMSBuildArgs)
@@ -257,7 +254,7 @@ jobs:
/p:RuntimeArtifactsPath=$(buildCommandSourcesDirectory)$(RuntimeDownloadPath)
/p:RuntimeConfiguration=${{ parameters.liveRuntimeBuildConfig }}
- name: RuntimeArtifactName
- value: $(runtimeFlavorName)Product_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(liveRuntimeLegName)
+ value: $(runtimeFlavorName)Product_${{ parameters.runtimeVariant }}_$(liveRuntimeLegName)
- ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}:
- name: liveLibrariesLegName
@@ -278,14 +275,13 @@ jobs:
- evaluate_paths
- ${{ parameters.dependsOn }}
- ${{ if ne(parameters.liveRuntimeBuildConfig, '') }}:
- - ${{ format('{0}_{1}_product_build_{2}{3}_{4}_{5}{6}',
+ - ${{ format('{0}_{1}_product_build_{2}{3}_{4}_{5}',
parameters.runtimeFlavor,
parameters.runtimeVariant,
parameters.osGroup,
parameters.osSubgroup,
parameters.archType,
- parameters.liveRuntimeBuildConfig,
- parameters.pgoType) }}
+ parameters.liveRuntimeBuildConfig) }}
- ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}:
- libraries_build_${{ format('{0}{1}_{2}_{3}',
parameters.osGroup,
@@ -343,32 +339,15 @@ jobs:
df -h
displayName: Disk Usage before Build
- # Build the default subset non-MacOS platforms
- - ${{ if ne(parameters.osGroup, 'osx') }}:
- - script: $(BaseJobBuildCommand)
- displayName: Build
- continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }}
-
- # Build corehost, sign and add entitlements to MacOS binaries
- - ${{ if eq(parameters.osGroup, 'osx') }}:
- - script: $(BaseJobBuildCommand) -subset host.native
- displayName: Build CoreHost
- continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }}
-
- - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
- - template: /eng/pipelines/common/macos-sign-with-entitlements.yml
- parameters:
- filesToSign:
- - name: dotnet
- path: $(Build.SourcesDirectory)/artifacts/bin/osx-${{ parameters.archType }}.$(_BuildConfig)/corehost
- entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist
- - name: apphost
- path: $(Build.SourcesDirectory)/artifacts/bin/osx-${{ parameters.archType }}.$(_BuildConfig)/corehost
- entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist
-
- - script: $(BaseJobBuildCommand) -subset host.pkg+host.tools+host.tests+packs
- displayName: Build and Package
- continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }}
+ - script: $(BaseJobBuildCommand)
+ displayName: Build
+ continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }}
+
+ - ${{ if and(eq(parameters.isOfficialBuild, true), eq(parameters.osGroup, 'windows')) }}:
+ - powershell: ./eng/collect_vsinfo.ps1 -ArchiveRunName postbuild_log
+ displayName: Collect vslogs on exit
+ condition: always()
+
- ${{ if in(parameters.osGroup, 'osx', 'ios', 'tvos') }}:
- script: |
@@ -377,7 +356,7 @@ jobs:
displayName: Disk Usage after Build
# Only in glibc leg, we produce RPMs and Debs
- - ${{ if and(eq(parameters.runtimeFlavor, 'coreclr'), or(eq(parameters.platform, 'linux_x64'), eq(parameters.platform, 'linux_arm64')), eq(parameters.osSubgroup, ''), eq(parameters.pgoType, ''))}}:
+ - ${{ if and(eq(parameters.runtimeFlavor, 'coreclr'), or(eq(parameters.platform, 'linux_x64'), eq(parameters.platform, 'linux_arm64')), eq(parameters.osSubgroup, ''))}}:
- ${{ each packageBuild in parameters.packageDistroList }}:
# This leg's RID matches the build image. Build its distro-dependent packages, as well as
# the distro-independent installers. (There's no particular reason to build the distro-
@@ -398,7 +377,7 @@ jobs:
runtimeFlavor: ${{ parameters.runtimeFlavor }}
runtimeVariant: ${{ parameters.runtimeVariant }}
isOfficialBuild: ${{ eq(parameters.isOfficialBuild, true) }}
- pgoType: ${{ parameters.pgoType }}
+ skipTests: ${{ eq(variables.SkipTests, true) }}
- ${{ if ne(parameters.osGroup, 'windows') }}:
- script: set -x && df -h
diff --git a/eng/pipelines/installer/jobs/steps/build-linux-package.yml b/eng/pipelines/installer/jobs/steps/build-linux-package.yml
index a5645af3d9cc..102eab770c27 100644
--- a/eng/pipelines/installer/jobs/steps/build-linux-package.yml
+++ b/eng/pipelines/installer/jobs/steps/build-linux-package.yml
@@ -4,6 +4,7 @@ parameters:
packageStepDescription: null
packagingArgs: ''
subsetArg: ''
+ condition: succeeded()
steps:
## Run NuGet Authentication for each of the side containers
@@ -20,6 +21,7 @@ steps:
/bl:artifacts/log/$(_BuildConfig)/msbuild.${{ parameters.packageType }}.installers.binlog
displayName: Package ${{ parameters.packageStepDescription }} - ${{ parameters.packageType }}
target: ${{ parameters.target }}
+ condition: ${{ parameters.condition }}
# Broken symbolic links break the SBOM processing
# We make some symlinks during the installer generation process,
# but they aren't always valid on disk afterwards. Some of our tooling,
diff --git a/eng/pipelines/installer/jobs/steps/upload-job-artifacts.yml b/eng/pipelines/installer/jobs/steps/upload-job-artifacts.yml
index 6029ba005719..4012b9a4fa34 100644
--- a/eng/pipelines/installer/jobs/steps/upload-job-artifacts.yml
+++ b/eng/pipelines/installer/jobs/steps/upload-job-artifacts.yml
@@ -3,7 +3,6 @@ parameters:
runtimeFlavor: 'coreclr'
runtimeVariant: ''
isOfficialBuild: false
- pgoType: ''
steps:
# Upload build artifacts (packages) to pipeline only if official, to save storage space.
@@ -51,7 +50,7 @@ steps:
displayName: Publish binaries
inputs:
pathtoPublish: '$(Build.StagingDirectory)/corehost-bin-${{ parameters.name }}-$(_BuildConfig)$(archiveExtension)'
- artifactName: Installer-Binaries-${{parameters.pgoType }}${{ parameters.runtimeFlavor }}-${{ parameters.runtimeVariant }}-${{ parameters.name }}-$(_BuildConfig)
+ artifactName: Installer-Binaries-${{ parameters.runtimeFlavor }}-${{ parameters.runtimeVariant }}-${{ parameters.name }}-$(_BuildConfig)
continueOnError: true
condition: failed()
@@ -71,6 +70,6 @@ steps:
displayName: Publish BuildLogs
inputs:
targetPath: '$(Build.StagingDirectory)/BuildLogs'
- artifactName: Installer-Logs-${{parameters.pgoType }}${{ parameters.runtimeFlavor }}-${{ parameters.runtimeVariant }}-${{ parameters.name }}-$(_BuildConfig)
+ artifactName: Installer-Logs_Attempt$(System.JobAttempt)-${{ parameters.runtimeFlavor }}-${{ parameters.runtimeVariant }}-${{ parameters.name }}-$(_BuildConfig)
continueOnError: true
condition: always()
diff --git a/eng/pipelines/libraries/base-job.yml b/eng/pipelines/libraries/base-job.yml
index 2448124a7bc6..54fe98e7d85b 100644
--- a/eng/pipelines/libraries/base-job.yml
+++ b/eng/pipelines/libraries/base-job.yml
@@ -23,7 +23,6 @@ parameters:
testScope: ''
pool: ''
runTests: false
- pgoType: ''
jobs:
- template: /eng/common/templates/job/job.yml
@@ -98,7 +97,7 @@ jobs:
- _runtimeDownloadPath: '$(Build.SourcesDirectory)/artifacts/transport/${{ parameters.runtimeFlavor }}'
- _runtimeConfigurationArg: -rc ${{ parameters.liveRuntimeBuildConfig }}
- ${{ if eq(parameters.runTests, true) }}:
- - _runtimeArtifactName: '$(runtimeFlavorName)Product_${{ parameters.pgoType }}_${{ parameters.runtimeVariant}}_${{ parameters.osGroup }}${{ parameters.osSubgroup }}_${{ parameters.archType }}_${{ parameters.liveRuntimeBuildConfig }}'
+ - _runtimeArtifactName: '$(runtimeFlavorName)Product_${{ parameters.runtimeVariant}}_${{ parameters.osGroup }}${{ parameters.osSubgroup }}_${{ parameters.archType }}_${{ parameters.liveRuntimeBuildConfig }}'
- _runtimeArtifactsPathArg: ' /p:RuntimeArtifactsPath=$(_runtimeDownloadPath)'
- ${{ if eq(parameters.testDisplayName, '') }}:
- _testRunNamePrefixSuffix: $(runtimeFlavorName)_${{ parameters.liveRuntimeBuildConfig }}
diff --git a/eng/pipelines/libraries/helix-queues-setup.yml b/eng/pipelines/libraries/helix-queues-setup.yml
index 5b37825df086..d3d7e7b2e64f 100644
--- a/eng/pipelines/libraries/helix-queues-setup.yml
+++ b/eng/pipelines/libraries/helix-queues-setup.yml
@@ -28,58 +28,58 @@ jobs:
# Linux arm
- ${{ if eq(parameters.platform, 'linux_arm') }}:
- ${{ if or(eq(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- - (Debian.11.Arm32.Open)Ubuntu.1804.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-arm32v7
+ - (Debian.11.Arm32.Open)Ubuntu.2204.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-arm32v7
# Linux armv6
- ${{ if eq(parameters.platform, 'linux_armv6') }}:
# - ${{ if eq(parameters.jobParameters.isFullMatrix, true) }}:
- - (Raspbian.10.Armv6.Open)Ubuntu.1804.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:raspbian-10-helix-arm32v6
+ - (Raspbian.10.Armv6.Open)Ubuntu.2204.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:raspbian-10-helix-arm32v6
# Linux arm64
- ${{ if eq(parameters.platform, 'linux_arm64') }}:
- ${{ if or(eq(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- - (Ubuntu.2204.Arm64.Open)Ubuntu.1804.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-helix-arm64v8
+ - (Ubuntu.2204.Arm64.Open)Ubuntu.2204.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-helix-arm64v8
- ${{ if or(ne(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- - (Ubuntu.1804.Arm64.Open)Ubuntu.1804.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8
+ - (Ubuntu.1804.Arm64.Open)Ubuntu.2204.ArmArch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8
- ${{ if or(ne(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- - (Debian.11.Arm64.Open)Ubuntu.1804.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-arm64v8
+ - (Debian.11.Arm64.Open)Ubuntu.2204.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-arm64v8
# Linux musl x64
- ${{ if eq(parameters.platform, 'linux_musl_x64') }}:
- ${{ if or(ne(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- - (Alpine.315.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.15-helix-amd64
+ - (Alpine.317.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-amd64
- ${{ if or(eq(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- - (Alpine.317.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-amd64
+ - (Alpine.319.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-helix-amd64
# Linux musl arm64
- ${{ if and(eq(parameters.platform, 'linux_musl_arm64'), or(eq(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true))) }}:
- - (Alpine.317.Arm64.Open)ubuntu.1804.armarch.open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-arm64v8
- - (Alpine.315.Arm64.Open)ubuntu.1804.armarch.open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.15-helix-arm64v8
+ - (Alpine.317.Arm64.Open)ubuntu.2204.armarch.open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.17-helix-arm64v8
+ - (Alpine.319.Arm64.Open)ubuntu.2204.armarch.open@mcr.microsoft.com/dotnet-buildtools/prereqs:alpine-3.19-helix-arm64v8
# Linux x64
- ${{ if eq(parameters.platform, 'linux_x64') }}:
- ${{ if and(eq(parameters.jobParameters.interpreter, ''), ne(parameters.jobParameters.isSingleFile, true)) }}:
- ${{ if and(eq(parameters.jobParameters.testScope, 'outerloop'), eq(parameters.jobParameters.runtimeFlavor, 'mono')) }}:
- SLES.15.Amd64.Open
- - (Centos.8.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-helix
- - (Fedora.38.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-38-helix
- - (Ubuntu.2204.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-helix-amd64
+ - (Centos.9.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9-helix
+ - (Fedora.39.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-39-helix
+ - (Ubuntu.2204.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-22.04-helix-amd64
- (Debian.11.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64
- ${{ if or(ne(parameters.jobParameters.testScope, 'outerloop'), ne(parameters.jobParameters.runtimeFlavor, 'mono')) }}:
- ${{ if or(eq(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- SLES.15.Amd64.Open
- - (Fedora.38.Amd64.Open)ubuntu.1804.amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-38-helix
+ - (Fedora.39.Amd64.Open)Ubuntu.2204.amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:fedora-39-helix
- Ubuntu.2204.Amd64.Open
- - (Debian.11.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64
- - (Mariner.2.0.Amd64.Open)ubuntu.1804.amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-helix-amd64
- - (openSUSE.15.2.Amd64.Open)ubuntu.1804.amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:opensuse-15.2-helix-amd64
+ - (Debian.11.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64
+ - (Mariner.2.0.Amd64.Open)Ubuntu.2204.amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:cbl-mariner-2.0-helix-amd64
+ - (openSUSE.15.2.Amd64.Open)Ubuntu.2204.amd64.open@mcr.microsoft.com/dotnet-buildtools/prereqs:opensuse-15.2-helix-amd64
- ${{ if or(ne(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- - (Centos.8.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream8-helix
- - (Debian.11.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64
+ - (Centos.9.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-stream9-helix
+ - (Debian.11.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64
- Ubuntu.1804.Amd64.Open
- ${{ if or(eq(parameters.jobParameters.interpreter, 'true'), eq(parameters.jobParameters.isSingleFile, true)) }}:
# Limiting interp runs as we don't need as much coverage.
- - (Debian.11.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64
+ - (Debian.11.Amd64.Open)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64
# Linux s390x
- ${{ if eq(parameters.platform, 'linux_s390x') }}:
@@ -152,7 +152,6 @@ jobs:
- ${{ if notIn(parameters.jobParameters.framework, 'net48') }}:
# mono outerloop
- ${{ if and(eq(parameters.jobParameters.testScope, 'outerloop'), eq(parameters.jobParameters.runtimeFlavor, 'mono')) }}:
- - Windows.7.Amd64.Open
- Windows.11.Amd64.Client.Open
# libraries on coreclr (outerloop and innerloop), or libraries on mono innerloop
- ${{ if or(ne(parameters.jobParameters.testScope, 'outerloop'), ne(parameters.jobParameters.runtimeFlavor, 'mono')) }}:
@@ -161,7 +160,6 @@ jobs:
- Windows.Amd64.Server2022.Open
- ${{ if or(ne(parameters.jobParameters.isExtraPlatforms, true), eq(parameters.jobParameters.includeAllPlatforms, true)) }}:
- Windows.Amd64.Server2022.Open
- - Windows.7.Amd64.Open
# .NETFramework
- ${{ if eq(parameters.jobParameters.framework, 'net48') }}:
@@ -173,15 +171,15 @@ jobs:
# WASI
- ${{ if eq(parameters.platform, 'wasi_wasm') }}:
- - (Ubuntu.2004.Amd64)Ubuntu.2004.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-20.04-helix-wasm-amd64
+ - (Ubuntu.2004.Amd64)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-20.04-helix-wasm-amd64
# Browser WebAssembly
- ${{ if eq(parameters.platform, 'browser_wasm') }}:
- - (Ubuntu.1804.Amd64)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-webassembly
+ - (Ubuntu.1804.Amd64)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-webassembly
# Browser WebAssembly Firefox
- ${{ if eq(parameters.platform, 'browser_wasm_firefox') }}:
- - (Ubuntu.1804.Amd64)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-webassembly
+ - (Ubuntu.1804.Amd64)Ubuntu.2204.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-webassembly
# Browser WebAssembly windows
- ${{ if in(parameters.platform, 'browser_wasm_win', 'wasi_wasm_win') }}:
diff --git a/eng/pipelines/libraries/outerloop-mono.yml b/eng/pipelines/libraries/outerloop-mono.yml
index e15fc35f9d41..34b1af3c71b5 100644
--- a/eng/pipelines/libraries/outerloop-mono.yml
+++ b/eng/pipelines/libraries/outerloop-mono.yml
@@ -39,11 +39,12 @@ extends:
timeoutInMinutes: 180
includeAllPlatforms: ${{ variables['isRollingBuild'] }}
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- testScope: outerloop
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ testScope: outerloop
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -60,10 +61,11 @@ extends:
timeoutInMinutes: 180
includeAllPlatforms: ${{ variables['isRollingBuild'] }}
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- scenarios:
- - normal
- testScope: outerloop
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ scenarios:
+ - normal
+ testScope: outerloop
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
diff --git a/eng/pipelines/libraries/outerloop.yml b/eng/pipelines/libraries/outerloop.yml
index 121d405fc7c7..597f298c37a3 100644
--- a/eng/pipelines/libraries/outerloop.yml
+++ b/eng/pipelines/libraries/outerloop.yml
@@ -45,11 +45,12 @@ extends:
timeoutInMinutes: 180
includeAllPlatforms: ${{ variables['isRollingBuild'] }}
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- testScope: outerloop
- creator: dotnet-bot
- testRunNamePrefixSuffix: CoreCLR_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ testScope: outerloop
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: CoreCLR_$(_BuildConfig)
- ${{ if eq(variables['isRollingBuild'], false) }}:
- template: /eng/pipelines/common/platform-matrix.yml
@@ -73,11 +74,12 @@ extends:
timeoutInMinutes: 180
includeAllPlatforms: ${{ variables['isRollingBuild'] }}
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- testScope: outerloop
- creator: dotnet-bot
- testRunNamePrefixSuffix: CoreCLR_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ testScope: outerloop
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: CoreCLR_$(_BuildConfig)
- ${{ if eq(variables['includeWindowsOuterloop'], true) }}:
- template: /eng/pipelines/common/platform-matrix.yml
@@ -97,8 +99,9 @@ extends:
timeoutInMinutes: 180
includeAllPlatforms: ${{ variables['isRollingBuild'] }}
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- testScope: outerloop
- creator: dotnet-bot
- extraHelixArguments: /p:BuildTargetFramework=net48
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ testScope: outerloop
+ creator: dotnet-bot
+ extraHelixArguments: /p:BuildTargetFramework=net48
diff --git a/eng/pipelines/libraries/stress/http.yml b/eng/pipelines/libraries/stress/http.yml
index f4f9c45de36e..68bfcef6c508 100644
--- a/eng/pipelines/libraries/stress/http.yml
+++ b/eng/pipelines/libraries/stress/http.yml
@@ -119,6 +119,9 @@ extends:
lfs: false
- powershell: |
+ # Workaround for https://github.com/microsoft/azure-pipelines-agent/issues/4554. Undo when the image bug is fixed.
+ Remove-Item -Path "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\Microsoft.VCToolsVersion.v143.default.txt"
+
$(dockerfilesFolder)/build-docker-sdk.ps1 -w -t $(sdkBaseImage) -c $(BUILD_CONFIGURATION)
echo "##vso[task.setvariable variable=succeeded;isOutput=true]true"
name: buildRuntime
diff --git a/eng/pipelines/libraries/stress/ssl.yml b/eng/pipelines/libraries/stress/ssl.yml
index ab93994400d3..a70a18e828f0 100644
--- a/eng/pipelines/libraries/stress/ssl.yml
+++ b/eng/pipelines/libraries/stress/ssl.yml
@@ -76,6 +76,9 @@ extends:
lfs: false
- powershell: |
+ # Workaround for https://github.com/microsoft/azure-pipelines-agent/issues/4554. Undo when the image bug is fixed.
+ Remove-Item -Path "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\Microsoft.VCToolsVersion.v143.default.txt"
+
$(dockerfilesFolder)/build-docker-sdk.ps1 -w -t $(sdkBaseImage) -c $(BUILD_CONFIGURATION)
displayName: Build CLR and Libraries
diff --git a/eng/pipelines/mono/templates/build-job.yml b/eng/pipelines/mono/templates/build-job.yml
index b1ac9c1cc655..86e0813c7c7e 100644
--- a/eng/pipelines/mono/templates/build-job.yml
+++ b/eng/pipelines/mono/templates/build-job.yml
@@ -15,7 +15,6 @@ parameters:
dependsOn: []
monoCrossAOTTargetOS: []
dependOnEvaluatePaths: false
- pgoType: ''
### Product build
jobs:
@@ -171,30 +170,15 @@ jobs:
- script: build$(scriptExt) -subset mono+clr.hosts -c $(buildConfig) -arch $(archType) $(osOverride) -ci $(officialBuildIdArg) $(aotCrossParameter) $(llvmParameter) -pack $(OutputRidArg)
displayName: Build nupkg
- # Publish official build
- - ${{ if eq(parameters.publishToBlobFeed, 'true') }}:
- - ${{ if ne(parameters.osGroup, 'windows') }}:
- - script: $(Build.SourcesDirectory)/eng/common/build.sh --ci --restore --publish --configuration $(_BuildConfig) /p:DotNetPublishUsingPipelines=true /p:DotNetPublishToBlobFeed=true /p:DotNetPublishBlobFeedUrl=$(dotnetfeedUrl) /p:DotNetPublishBlobFeedKey=$(dotnetfeedPAT) /p:Configuration=$(_BuildConfig) /p:TargetArchitecture=$(archType) /p:TargetOS=$(osGroup) /p:OSIdentifier=$(osGroup)$(osSubgroup) /bl:"$(Build.SourcesDirectory)/artifacts/log/publish-pkgs.binlog" --projects $(Build.SourcesDirectory)/eng/empty.csproj
- displayName: Publish packages to blob feed
- env:
- # TODO: remove NUGET_PACKAGES once https://github.com/dotnet/arcade/issues/1578 is fixed
- NUGET_PACKAGES: $(Build.SourcesDirectory)/.packages
- ${{ if eq(parameters.osGroup, 'freebsd') }}:
- # Arcade uses this SDK instead of trying to restore one.
- DotNetCoreSdkDir: /usr/local/dotnet
- - ${{ if eq(parameters.osGroup, 'windows') }}:
- # TODO: pass publish feed url and access token in from the internal pipeline
- - powershell: eng\common\build.ps1 -ci -restore -publish -configuration $(_BuildConfig) /p:DotNetPublishUsingPipelines=true /p:DotNetPublishToBlobFeed=true /p:DotNetPublishBlobFeedUrl=$(dotnetfeedUrl) /p:DotNetPublishBlobFeedKey=$(dotnetfeedPAT) /p:Configuration=$(_BuildConfig) /p:TargetArchitecture=$(archType) /p:TargetOS=$(osGroup) /p:OSIdentifier=$(osGroup)$(osSubgroup) /bl:"$(Build.SourcesDirectory)\artifacts\log\publish-pkgs.binlog" -projects $(Build.SourcesDirectory)\eng\empty.csproj
- displayName: Publish packages to blob feed
- env:
- # TODO: remove NUGET_PACKAGES once https://github.com/dotnet/arcade/issues/1578 is fixed
- NUGET_PACKAGES: $(Build.SourcesDirectory)\.packages
-
+ - ${{ if and(eq(parameters.isOfficialBuild, true), eq(parameters.osGroup, 'windows')) }}:
+ - powershell: ./eng/collect_vsinfo.ps1 -ArchiveRunName postbuild_log
+ displayName: Collect vslogs on exit
+ condition: always()
# Publish Logs
- task: PublishPipelineArtifact@1
displayName: Publish Logs
inputs:
targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: 'BuildLogs_Mono_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ artifactName: 'BuildLogs_Attempt$(System.JobAttempt)_Mono_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
continueOnError: true
condition: always()
diff --git a/eng/pipelines/mono/templates/generate-offsets.yml b/eng/pipelines/mono/templates/generate-offsets.yml
index ddc67f0cc0b7..909ca9a99c54 100644
--- a/eng/pipelines/mono/templates/generate-offsets.yml
+++ b/eng/pipelines/mono/templates/generate-offsets.yml
@@ -9,11 +9,13 @@ parameters:
pool: ''
condition: true
isOfficialBuild: false
+ templatePath: 'templates'
### Product build
jobs:
- template: xplat-pipeline-job.yml
parameters:
+ templatePath: ${{ parameters.templatePath }}
buildConfig: ${{ parameters.buildConfig }}
osGroup: ${{ parameters.osGroup }}
osSubGroup: ${{ parameters.osSubGroup }}
@@ -76,17 +78,21 @@ jobs:
contents: '**/offsets-*.h'
targetFolder: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles/'
- - task: PublishPipelineArtifact@1
- displayName: Upload offset files
- inputs:
- targetPath: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles'
- artifactName: 'Mono_Offsets_$(osGroup)$(osSubGroup)'
+ - template: /eng/pipelines/common/templates/publish-pipeline-artifacts.yml
+ parameters:
+ displayName: Upload offset files
+ isOfficialBuild: ${{ parameters.isOfficialBuild }}
+ inputs:
+ targetPath: '$(Build.SourcesDirectory)/artifacts/obj/mono/offsetfiles'
+ artifactName: 'Mono_Offsets_$(osGroup)$(osSubGroup)'
# Publish Logs
- - task: PublishPipelineArtifact@1
- displayName: Publish Logs
- inputs:
- targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: 'BuildLogs_Mono_Offsets_$(osGroup)$(osSubGroup)'
- continueOnError: true
- condition: always()
+ - template: /eng/pipelines/common/templates/publish-pipeline-artifacts.yml
+ parameters:
+ displayName: Publish Logs
+ isOfficialBuild: ${{ parameters.isOfficialBuild }}
+ inputs:
+ targetPath: $(Build.SourcesDirectory)/artifacts/log
+ artifactName: 'BuildLogs_Attempt$(System.JobAttempt)_Mono_Offsets_$(osGroup)$(osSubGroup)'
+ continueOnError: true
+ condition: always()
diff --git a/eng/pipelines/mono/templates/workloads-build.yml b/eng/pipelines/mono/templates/workloads-build.yml
index b962dcb65d17..a10bf343fa4f 100644
--- a/eng/pipelines/mono/templates/workloads-build.yml
+++ b/eng/pipelines/mono/templates/workloads-build.yml
@@ -12,11 +12,13 @@ parameters:
runtimeVariant: ''
testGroup: ''
timeoutInMinutes: ''
+ templatePath: 'templates'
variables: {}
jobs:
- template: xplat-pipeline-job.yml
parameters:
+ templatePath: ${{ parameters.templatePath }}
archType: ${{ parameters.archType }}
buildConfig: ${{ parameters.buildConfig }}
container: ${{ parameters.container }}
@@ -92,13 +94,15 @@ jobs:
name: workloads
# Publish Logs
- - task: PublishPipelineArtifact@1
- displayName: Publish Logs
- inputs:
- targetPath: $(Build.SourcesDirectory)/artifacts/log
- artifactName: 'WorkloadLogs'
- continueOnError: true
- condition: always()
+ - template: /eng/pipelines/common/templates/publish-pipeline-artifacts.yml
+ parameters:
+ displayName: Publish Logs
+ isOfficialBuild: ${{ parameters.isOfficialBuild }}
+ inputs:
+ targetPath: $(Build.SourcesDirectory)/artifacts/log
+ artifactName: 'WorkloadLogs_Attempt$(System.JobAttempt)'
+ continueOnError: true
+ condition: always()
# Delete wixpdb files before they are uploaded to artifacts
- task: DeleteFiles@1
diff --git a/eng/pipelines/mono/templates/xplat-pipeline-job.yml b/eng/pipelines/mono/templates/xplat-pipeline-job.yml
index 2c369f71f30b..67b43722e0c5 100644
--- a/eng/pipelines/mono/templates/xplat-pipeline-job.yml
+++ b/eng/pipelines/mono/templates/xplat-pipeline-job.yml
@@ -12,6 +12,7 @@ parameters:
pool: ''
runtimeVariant: ''
liveRuntimeBuildConfig: 'release'
+ templatePath: 'templates'
# arcade-specific parameters
condition: true
@@ -28,6 +29,7 @@ parameters:
jobs:
- template: /eng/pipelines/common/templates/runtimes/xplat-job.yml
parameters:
+ templatePath: ${{ parameters.templatePath }}
buildConfig: ${{ parameters.buildConfig }}
archType: ${{ parameters.archType }}
osGroup: ${{ parameters.osGroup }}
@@ -52,22 +54,22 @@ jobs:
variables:
- name: coreClrProductArtifactName
- value: 'CoreCLRProduct___$(osGroup)$(osSubgroup)_$(archType)_${{ parameters.liveRuntimeBuildConfig }}'
+ value: 'CoreCLRProduct__$(osGroup)$(osSubgroup)_$(archType)_${{ parameters.liveRuntimeBuildConfig }}'
- name: coreClrProductRootFolderPath
value: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).$(liveRuntimeBuildConfigUpper)'
- name: buildProductArtifactName
- value: 'MonoProduct__${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ value: 'MonoProduct_${{ parameters.runtimeVariant }}_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
# minijit and monointerpreter do not use separate product builds.
- ${{ if or(eq(parameters.runtimeVariant, 'minijit'), eq(parameters.runtimeVariant, 'monointerpreter')) }}:
- name : buildProductArtifactName
- value : 'MonoProduct___$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ value : 'MonoProduct__$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
- ${{ if eq(parameters.runtimeVariant, 'llvmfullaot') }}:
- name : buildProductArtifactName
- value : 'MonoProduct__llvmaot_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
+ value : 'MonoProduct_llvmaot_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)'
- name: binTestsPath
value: '$(Build.SourcesDirectory)/artifacts/tests/coreclr'
diff --git a/eng/pipelines/official/jobs/prepare-signed-artifacts.yml b/eng/pipelines/official/jobs/prepare-signed-artifacts.yml
index 908f2b64c71c..24fd2df48d74 100644
--- a/eng/pipelines/official/jobs/prepare-signed-artifacts.yml
+++ b/eng/pipelines/official/jobs/prepare-signed-artifacts.yml
@@ -20,6 +20,14 @@ jobs:
- name: SignType
value: $[ coalesce(variables.OfficialSignType, 'real') ]
+ templateContext:
+ outputs:
+ - output: pipelineArtifact
+ displayName: 'Publish BuildLogs'
+ condition: succeededOrFailed()
+ targetPath: '$(Build.StagingDirectory)\BuildLogs'
+ artifactName: ${{ parameters.logArtifactName }}
+
steps:
- checkout: self
clean: true
@@ -65,11 +73,4 @@ jobs:
**/*.binlog
TargetFolder: '$(Build.StagingDirectory)\BuildLogs'
continueOnError: true
- condition: succeededOrFailed()
-
- - task: PublishPipelineArtifact@1
- displayName: Publish BuildLogs
- inputs:
- targetPath: '$(Build.StagingDirectory)\BuildLogs'
- artifactName: ${{ parameters.logArtifactName }}
- condition: succeededOrFailed()
+ condition: succeededOrFailed()
\ No newline at end of file
diff --git a/eng/pipelines/official/stages/publish.yml b/eng/pipelines/official/stages/publish.yml
index d23afa7003b2..9553baae305f 100644
--- a/eng/pipelines/official/stages/publish.yml
+++ b/eng/pipelines/official/stages/publish.yml
@@ -7,7 +7,7 @@ stages:
- stage: PrepareForPublish
displayName: Prepare for Publish
variables:
- - template: /eng/common/templates/variables/pool-providers.yml
+ - template: /eng/common/templates-official/variables/pool-providers.yml
jobs:
# Prep artifacts: sign them and upload pipeline artifacts expected by stages-based publishing.
- template: /eng/pipelines/official/jobs/prepare-signed-artifacts.yml
@@ -15,7 +15,7 @@ stages:
PublishRidAgnosticPackagesFromPlatform: ${{ parameters.PublishRidAgnosticPackagesFromPlatform }}
# Publish to Build Asset Registry in order to generate the ReleaseConfigs artifact.
- - template: /eng/common/templates/job/publish-build-assets.yml
+ - template: /eng/common/templates-official/job/publish-build-assets.yml
parameters:
publishUsingPipelines: true
publishAssetsImmediately: true
@@ -26,7 +26,7 @@ stages:
symbolPublishingAdditionalParameters: '/p:PublishSpecialClrFiles=true'
# Stages-based publishing entry point
-- template: /eng/common/templates/post-build/post-build.yml
+- template: /eng/common/templates-official/post-build/post-build.yml
parameters:
publishingInfraVersion: ${{ parameters.publishingInfraVersion }}
validateDependsOn:
diff --git a/eng/pipelines/runtime-android-grpc-client-tests.yml b/eng/pipelines/runtime-android-grpc-client-tests.yml
index 00e51f766d95..707b1d78ebc0 100644
--- a/eng/pipelines/runtime-android-grpc-client-tests.yml
+++ b/eng/pipelines/runtime-android-grpc-client-tests.yml
@@ -43,8 +43,9 @@ extends:
buildArgs: -s mono+libs+host+packs+libs.tests -c $(_BuildConfig) /p:ArchiveTests=true /p:RunGrpcTestsOnly=true /p:BuildGrpcServerDockerImage=true
timeoutInMinutes: 180
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- extraHelixArguments: /p:RunGrpcTestsOnly=true /p:BuildGrpcServerDockerImage=true
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ extraHelixArguments: /p:RunGrpcTestsOnly=true /p:BuildGrpcServerDockerImage=true
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
diff --git a/eng/pipelines/runtime-community.yml b/eng/pipelines/runtime-community.yml
index bab086f75c23..a91388e244b0 100644
--- a/eng/pipelines/runtime-community.yml
+++ b/eng/pipelines/runtime-community.yml
@@ -71,15 +71,16 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_mono_excluding_wasm.containsChange'], true),
eq(variables['isRollingBuild'], true))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- condition: >-
- or(
- eq(variables['librariesContainsChange'], true),
- eq(variables['monoContainsChange'], true),
- eq(variables['isRollingBuild'], true))
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ condition: >-
+ or(
+ eq(variables['librariesContainsChange'], true),
+ eq(variables['monoContainsChange'], true),
+ eq(variables['isRollingBuild'], true))
#
# Build the whole product using Mono
@@ -138,7 +139,8 @@ extends:
eq(variables['isRollingBuild'], true))
${{ if eq(variables['isRollingBuild'], true) }}:
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
diff --git a/eng/pipelines/runtime-linker-tests.yml b/eng/pipelines/runtime-linker-tests.yml
index 9db2062a953d..1cb0f8428ef7 100644
--- a/eng/pipelines/runtime-linker-tests.yml
+++ b/eng/pipelines/runtime-linker-tests.yml
@@ -103,7 +103,8 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true),
eq(variables['isRollingBuild'], true))
buildArgs: -s clr+libs+tools.illink -c $(_BuildConfig)
- extraStepsTemplate: /eng/pipelines/libraries/execute-trimming-tests-steps.yml
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/execute-trimming-tests-steps.yml
#
# Build Release config vertical for Browser-wasm
@@ -126,6 +127,7 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_wasm_specific_except_wbt_dbg.containsChange'], true),
eq(dependencies.evaluate_paths.outputs['SetPathVars_tools_illink.containsChange'], true),
eq(dependencies.evaluate_paths.outputs['DarcDependenciesChanged.Microsoft_NET_ILLink_Tasks'], true))
- extraStepsTemplate: /eng/pipelines/libraries/execute-trimming-tests-steps.yml
- extraStepsParameters:
- extraTestArgs: '/p:WasmBuildNative=false'
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/execute-trimming-tests-steps.yml
+ parameters:
+ extraTestArgs: '/p:WasmBuildNative=false'
diff --git a/eng/pipelines/runtime-official.yml b/eng/pipelines/runtime-official.yml
index 3a9fd8d89ac4..7cadf6a800f6 100644
--- a/eng/pipelines/runtime-official.yml
+++ b/eng/pipelines/runtime-official.yml
@@ -23,6 +23,8 @@ pr: none
variables:
- template: /eng/pipelines/common/variables.yml
+ parameters:
+ templatePath: 'templates-official'
- template: /eng/pipelines/common/internal-variables.yml
parameters:
teamName: dotnet-core-acquisition
@@ -33,6 +35,7 @@ variables:
extends:
template: /eng/pipelines/common/templates/pipeline-with-resources.yml
parameters:
+ isOfficialBuild: true
stages:
- stage: Build
jobs:
@@ -41,64 +44,267 @@ extends:
# Localization build
#
- - ${{ if eq(variables['Build.SourceBranch'], 'refs/heads/release/8.0') }}:
- - template: /eng/common/templates/job/onelocbuild.yml
- parameters:
- MirrorRepo: runtime
- MirrorBranch: release/8.0
- LclSource: lclFilesfromPackage
- LclPackageId: 'LCL-JUNO-PROD-RUNTIME'
+ - template: /eng/common/templates-official/job/onelocbuild.yml
+ parameters:
+ MirrorRepo: runtime
+ MirrorBranch: main
+ LclSource: lclFilesfromPackage
+ LclPackageId: 'LCL-JUNO-PROD-RUNTIME'
#
# Source Index Build
#
- ${{ if eq(variables['Build.SourceBranch'], 'refs/heads/main') }}:
- - template: /eng/common/templates/job/source-index-stage1.yml
+ - template: /eng/common/templates-official/job/source-index-stage1.yml
parameters:
sourceIndexBuildCommand: build.cmd -subset libs.sfx+libs.oob -binarylog -os linux -ci /p:SkipLibrariesNativeRuntimePackages=true
#
- # Build CoreCLR
+ # Build CoreCLR runtime packs
+ # Windows x64/arm64
+ # Sign diagnostic files after native build
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/coreclr/templates/build-job.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: release
+ platforms:
+ - windows_x64
+ - windows_arm64
+ jobParameters:
+ templatePath: 'templates-official'
+ buildArgs: -s clr.runtime+clr.alljits+clr.nativeaotruntime -c $(_BuildConfig) /bl:$(Build.SourcesDirectory)/artifacts/logs/$(_BuildConfig)/CoreClrNativeBuild.binlog
+ nameSuffix: CoreCLR
+ isOfficialBuild: ${{ variables.isOfficialBuild }}
+ timeoutInMinutes: 120
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/templates/sign-diagnostic-files.yml
+ parameters:
+ basePath: $(Build.SourcesDirectory)/artifacts/bin/coreclr
+ isOfficialBuild: ${{ variables.isOfficialBuild }}
+ timeoutInMinutes: 30
+ # Now that we've signed the diagnostic files, do the rest of the build.
+ - template: /eng/pipelines/common/templates/global-build-step.yml
+ parameters:
+ buildArgs: -s clr.corelib+clr.nativecorelib+clr.nativeaotlibs+clr.tools+clr.packages+libs+host+packs -c $(_BuildConfig)
+ displayName: Build managed CoreCLR components, all libraries, hosts, and packs
+
+ # Upload the results.
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: $(osGroup)$(osSubgroup)_$(archType)
+
+
+ #
+ # Build CoreCLR runtime packs
+ # Windows x86
+ # No NativeAOT as NativeAOT is not supported on x86
+ # Sign diagnostic files after native build
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: release
+ platforms:
+ - windows_x86
+ jobParameters:
+ templatePath: 'templates-official'
+ buildArgs: -s clr.runtime+clr.alljits -c $(_BuildConfig) /bl:$(Build.SourcesDirectory)/artifacts/logs/$(_BuildConfig)/CoreClrNativeBuild.binlog
+ nameSuffix: CoreCLR
+ isOfficialBuild: ${{ variables.isOfficialBuild }}
+ timeoutInMinutes: 120
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/templates/sign-diagnostic-files.yml
+ parameters:
+ basePath: $(Build.SourcesDirectory)/artifacts/bin/coreclr
+ isOfficialBuild: ${{ variables.isOfficialBuild }}
+ timeoutInMinutes: 30
+ # Now that we've signed the diagnostic files, do the rest of the build.
+ - template: /eng/pipelines/common/templates/global-build-step.yml
+ parameters:
+ buildArgs: -s clr.corelib+clr.nativecorelib+clr.tools+clr.packages+libs+host+packs -c $(_BuildConfig)
+ displayName: Build managed CoreCLR components, all libraries, hosts, and packs
+
+ # Upload the results.
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: $(osGroup)$(osSubgroup)_$(archType)
+ #
+ # Build CoreCLR runtime packs
+ # Mac x64/arm64
+ # Sign and entitle createdump and corerun after native build.
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: release
platforms:
- osx_arm64
- osx_x64
+ jobParameters:
+ templatePath: 'templates-official'
+ buildArgs: -s clr.runtime+clr.alljits+clr.nativeaotruntime+host.native -c $(_BuildConfig) /bl:$(Build.SourcesDirectory)/artifacts/logs/$(_BuildConfig)/CoreClrNativeBuild.binlog
+ nameSuffix: CoreCLR
+ isOfficialBuild: ${{ variables.isOfficialBuild }}
+ timeoutInMinutes: 120
+ postBuildSteps:
+ - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
+ - template: /eng/pipelines/common/macos-sign-with-entitlements.yml
+ parameters:
+ filesToSign:
+ - name: createdump
+ path: $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).$(_BuildConfig)
+ entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/createdump-entitlements.plist
+ - name: corerun
+ path: $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).$(_BuildConfig)
+ entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist
+ - name: dotnet
+ path: $(Build.SourcesDirectory)/artifacts/bin/$(osGroup)-$(archType).$(_BuildConfig)/corehost
+ entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist
+ - name: apphost
+ path: $(Build.SourcesDirectory)/artifacts/bin/$(osGroup)-$(archType).$(_BuildConfig)/corehost
+ entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist
+
+ - task: CopyFiles@2
+ displayName: 'Copy signed createdump to sharedFramework'
+ inputs:
+ contents: createdump
+ sourceFolder: $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).$(_BuildConfig)
+ targetFolder: $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).$(_BuildConfig)/sharedFramework
+ overWrite: true
+
+ # Now that we've entitled and signed createdump, we can build the rest.
+ - template: /eng/pipelines/common/templates/global-build-step.yml
+ parameters:
+ buildArgs: -s clr.corelib+clr.nativecorelib+clr.nativeaotlibs+clr.tools+clr.packages+libs+host.tools+host.pkg+packs -c $(_BuildConfig)
+ displayName: Build managed CoreCLR and host components, all libraries, and packs
+
+ # Upload the results.
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: $(osGroup)$(osSubgroup)_$(archType)
+
+ #
+ # Build CoreCLR runtime packs
+ # Linux and Linux_musl
+ # CoreCLR runtime for CrossDac packaging
+ # Create Linux installers
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: release
+ platforms:
- linux_x64
- linux_arm
- linux_arm64
- linux_musl_x64
- linux_musl_arm
- linux_musl_arm64
- - windows_x86
- - windows_x64
- - windows_arm64
jobParameters:
+ templatePath: 'templates-official'
+ buildArgs: -s clr.runtime+clr.alljits+clr.corelib+clr.nativecorelib+clr.tools+clr.aot+clr.packages+libs+host+packs -c $(_BuildConfig)
+ nameSuffix: CoreCLR
isOfficialBuild: ${{ variables.isOfficialBuild }}
- signBinaries: ${{ variables.isOfficialBuild }}
timeoutInMinutes: 120
+ postBuildSteps:
+ # Upload libcoreclr.so for CrossDac packaging
+ - task: CopyFiles@2
+ displayName: Gather runtime for CrossDac
+ inputs:
+ SourceFolder: $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).$(archType).$(_BuildConfig)
+ Contents: libcoreclr.so
+ TargetFolder: $(Build.SourcesDirectory)/artifacts/CoreCLRCrossDacArtifacts/$(osGroup)$(osSubgroup).$(archType).$(_BuildConfig)/$(crossDacHostArch)
+ - task: 1ES.PublishBuildArtifacts@1
+ displayName: Publish runtime for CrossDac
+ inputs:
+ PathtoPublish: $(Build.SourcesDirectory)/artifacts/CoreCLRCrossDacArtifacts
+ PublishLocation: Container
+ ArtifactName: CoreCLRCrossDacArtifacts
+ # Create RPMs and DEBs
+ - template: /eng/pipelines/installer/jobs/steps/build-linux-package.yml
+ parameters:
+ packageType: deb
+ target: debpkg
+ packageStepDescription: Runtime Deps, Runtime, Framework Packs Deb installers
+ subsetArg: -s packs.installers
+ packagingArgs: -c $(_BuildConfig) --arch $(archType) --os $(osGroup) --ci /p:OfficialBuildId=$(Build.BuildNumber) /p:BuildDebPackage=true
+ condition: and(succeeded(), eq(variables.osSubgroup, ''), eq(variables.archType, 'x64'))
+ - template: /eng/pipelines/installer/jobs/steps/build-linux-package.yml
+ parameters:
+ packageType: rpm
+ target: rpmpkg
+ packageStepDescription: Runtime Deps, Runtime, Framework Packs RPM installers
+ subsetArg: -s packs.installers
+ packagingArgs: -c $(_BuildConfig) --arch $(archType) --os $(osGroup) --ci /p:OfficialBuildId=$(Build.BuildNumber) /p:BuildRpmPackage=true
+ condition: and(succeeded(), eq(variables.osSubgroup, ''), in(variables.archType, 'x64', 'arm64'))
+
+ # Upload the results.
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: $(osGroup)$(osSubgroup)_$(archType)
+ extraVariablesTemplates:
+ - template: /eng/pipelines/coreclr/templates/crossdac-hostarch.yml
+ #
+ # Build and Pack CrossDac
+ #
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/coreclr/templates/crossdac-pack.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: release
platforms:
- windows_x64
jobParameters:
+ templatePath: 'templates-official'
+ buildArgs: -s crossdacpack -c $(_BuildConfig) /p:CrossDacArtifactsDir=$(crossDacArtifactsPath)
+ nameSuffix: CrossDac
isOfficialBuild: ${{ variables.isOfficialBuild }}
timeoutInMinutes: 120
- crossDacPlatforms:
- - linux_x64
- - linux_arm
- - linux_arm64
- - linux_musl_x64
- - linux_musl_arm
- - linux_musl_arm64
- - windows_x64
- - windows_arm64
+ preBuildSteps:
+ - task: DownloadBuildArtifacts@0
+ displayName: Download Runtimes for CrossDac packaging
+ inputs:
+ artifactName: $(crossDacArtifactsContainer)
+ downloadPath: $(crossDacArtifactsBasePath)
+ checkDownloadedFiles: true
+ - template: /eng/pipelines/common/templates/global-build-step.yml
+ parameters:
+ buildArgs: -s linuxdac+alpinedac -c $(_BuildConfig)
+ archParameter: -arch x64,x86,arm,arm64
+ - task: CopyFiles@2
+ displayName: Gather CrossDacs
+ inputs:
+ SourceFolder: $(Build.SourcesDirectory)/artifacts/bin/coreclr
+ Contents: |
+ **
+ !**\sharedFramework\**
+ TargetFolder: $(crossDacArtifactsPath)
+ - template: /eng/pipelines/coreclr/templates/sign-diagnostic-files.yml
+ parameters:
+ basePath: $(crossDacArtifactsPath)
+ isOfficialBuild: ${{ variables.isOfficialBuild }}
+ timeoutInMinutes: 30
+ postBuildSteps:
+ # Save packages using the prepare-signed-artifacts format.
+ # CrossDac packages are expected to be in the windows_x64 folder.
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: windows_x64
+ dependsOn:
+ - build_linux_x64_release_CoreCLR
+ - build_linux_arm_release_CoreCLR
+ - build_linux_arm64_release_CoreCLR
+ - build_linux_musl_x64_release_CoreCLR
+ - build_linux_musl_arm_release_CoreCLR
+ - build_linux_musl_arm64_release_CoreCLR
+ variables:
+ - name: crossDacArtifactsContainer
+ value: CoreCLRCrossDacArtifacts
+ - name: crossDacArtifactsBasePath
+ value: $(Build.StagingDirectory)/CrossDac
+ - name: crossDacArtifactsPath
+ value: $(crossDacArtifactsBasePath)/$(crossDacArtifactsContainer)
#
# Build NativeAOT runtime packs
@@ -121,12 +327,14 @@ extends:
- linux_bionic_arm64
- linux_bionic_x64
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s clr.nativeaotlibs+clr.nativeaotruntime+libs+packs -c $(_BuildConfig) /p:BuildNativeAOTRuntimePack=true /p:SkipLibrariesNativeRuntimePackages=true
- nameSuffix: AllSubsets_NativeAOT
+ nameSuffix: NativeAOT
isOfficialBuild: ${{ variables.isOfficialBuild }}
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: NativeAOTRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: NativeAOTRuntimePacks
#
# Build Mono runtime packs
@@ -163,12 +371,14 @@ extends:
- windows_x86
# - windows_arm64
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig) /p:BuildMonoAOTCrossCompiler=false
- nameSuffix: AllSubsets_Mono
+ nameSuffix: Mono
isOfficialBuild: ${{ variables.isOfficialBuild }}
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -179,12 +389,14 @@ extends:
- browser_wasm
- wasi_wasm
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig) /p:AotHostArchitecture=x64 /p:AotHostOS=$(_hostedOS)
- nameSuffix: AllSubsets_Mono
+ nameSuffix: Mono
isOfficialBuild: ${{ variables.isOfficialBuild }}
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -194,13 +406,15 @@ extends:
platforms:
- browser_wasm
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig) /p:MonoWasmBuildVariant=multithread /p:AotHostArchitecture=x64 /p:AotHostOS=$(_hostedOS)
- nameSuffix: AllSubsets_Mono_multithread
+ nameSuffix: Mono_multithread
isOfficialBuild: ${{ variables.isOfficialBuild }}
runtimeVariant: multithread
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
# Build Mono AOT offset headers once, for consumption elsewhere
#
@@ -215,6 +429,7 @@ extends:
- ios_arm64
- maccatalyst_x64
jobParameters:
+ templatePath: 'templates-official'
isOfficialBuild: ${{ variables.isOfficialBuild }}
#
@@ -231,6 +446,7 @@ extends:
- linux_arm64
- linux_musl_arm64
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s mono+packs -c $(_BuildConfig)
/p:MonoCrossAOTTargetOS=android+browser /p:SkipMonoCrossJitConfigure=true /p:BuildMonoAOTCrossCompilerOnly=true
nameSuffix: CrossAOT_Mono
@@ -242,9 +458,10 @@ extends:
- android
- browser
isOfficialBuild: ${{ variables.isOfficialBuild }}
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -254,6 +471,7 @@ extends:
platforms:
- windows_x64
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s mono+packs -c $(_BuildConfig)
/p:MonoCrossAOTTargetOS=android+browser /p:SkipMonoCrossJitConfigure=true /p:BuildMonoAOTCrossCompilerOnly=true
nameSuffix: CrossAOT_Mono
@@ -265,9 +483,10 @@ extends:
- android
- browser
isOfficialBuild: ${{ variables.isOfficialBuild }}
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
@@ -278,6 +497,7 @@ extends:
- osx_x64
- osx_arm64
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s mono+packs -c $(_BuildConfig)
/p:MonoCrossAOTTargetOS=android+browser+tvos+ios+maccatalyst /p:SkipMonoCrossJitConfigure=true /p:BuildMonoAOTCrossCompilerOnly=true
nameSuffix: CrossAOT_Mono
@@ -295,9 +515,10 @@ extends:
- ios
- maccatalyst
isOfficialBuild: ${{ variables.isOfficialBuild }}
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
#
# Build Mono LLVM runtime packs
@@ -320,72 +541,54 @@ extends:
buildConfig: release
runtimeFlavor: mono
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig)
/p:MonoEnableLLVM=true /p:MonoBundleLLVMOptimizer=false
- nameSuffix: AllSubsets_Mono_LLVMJIT
+ nameSuffix: Mono_LLVMJIT
runtimeVariant: LLVMJIT
isOfficialBuild: ${{ variables.isOfficialBuild }}
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
#LLVMAOT
- jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: release
runtimeFlavor: mono
jobParameters:
+ templatePath: 'templates-official'
buildArgs: -s mono+libs+host+packs -c $(_BuildConfig)
/p:MonoEnableLLVM=true /p:MonoAOTEnableLLVM=true /p:MonoBundleLLVMOptimizer=true
- nameSuffix: AllSubsets_Mono_LLVMAOT
+ nameSuffix: Mono_LLVMAOT
runtimeVariant: LLVMAOT
isOfficialBuild: ${{ variables.isOfficialBuild }}
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: MonoRuntimePacks
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: MonoRuntimePacks
#
- # Build libraries using live CoreLib from CoreCLR
+ # Build libraries AllConfigurations for packages
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/libraries/build-job.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: Release
platforms:
- - osx_arm64
- - osx_x64
- - linux_x64
- - linux_arm
- - linux_arm64
- - linux_musl_x64
- - linux_musl_arm
- - linux_musl_arm64
- - windows_x86
- windows_x64
- - windows_arm64
jobParameters:
+ templatePath: 'templates-official'
+ buildArgs: -s tools+libs -allConfigurations -c $(_BuildConfig) /p:TestAssemblies=false /p:TestPackages=true
+ nameSuffix: Libraries_AllConfigurations
isOfficialBuild: ${{ variables.isOfficialBuild }}
- liveRuntimeBuildConfig: release
- # Official builds don't run tests, locally or on Helix
- runTests: false
- useHelix: false
-
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: Libraries_AllConfigurations
+ timeoutInMinutes: 95
#
- # Build libraries AllConfigurations for packages
+ # Build SourceBuild packages
#
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/libraries/build-job.yml
- buildConfig: Release
- platforms:
- - windows_x64
- jobParameters:
- framework: allConfigurations
- isOfficialBuild: ${{ variables.isOfficialBuild }}
- isOfficialAllConfigurations: true
- liveRuntimeBuildConfig: release
- # Official builds don't run tests, locally or on Helix
- runTests: false
- useHelix: false
-
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/common/global-build-job.yml
@@ -394,43 +597,23 @@ extends:
platforms:
- SourceBuild_linux_x64
jobParameters:
+ templatePath: 'templates-official'
nameSuffix: PortableSourceBuild
- extraStepsTemplate: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
- extraStepsParameters:
- name: SourceBuildPackages
+ isOfficialBuild: ${{ variables.isOfficialBuild }}
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: SourceBuildPackages
timeoutInMinutes: 95
#
- # Installer Build
+ # Build PGO Instrumented CoreCLR Release
#
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/installer/jobs/build-job.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: Release
- jobParameters:
- liveRuntimeBuildConfig: release
- liveLibrariesBuildConfig: Release
- isOfficialBuild: ${{ variables.isOfficialBuild }}
- platforms:
- - osx_arm64
- - osx_x64
- - linux_x64
- - linux_arm
- - linux_arm64
- - linux_musl_x64
- - linux_musl_arm
- - linux_musl_arm64
- - windows_x86
- - windows_x64
- - windows_arm64
-
- #
- # Build PGO CoreCLR release
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/coreclr/templates/build-job.yml
- buildConfig: release
+ helixQueueGroup: ci
platforms:
- windows_x64
- windows_x86
@@ -438,29 +621,15 @@ extends:
- windows_arm64
- linux_arm64
jobParameters:
+ templatePath: 'templates-official'
+ buildArgs: -s clr.native+clr.corelib+clr.tools+clr.nativecorelib+libs+host+packs -c $(_BuildConfig) -pgoinstrument /p:SkipLibrariesNativeRuntimePackages=true
isOfficialBuild: ${{ variables.isOfficialBuild }}
- signBinaries: false
- testGroup: innerloop
- pgoType: 'PGO'
-
- #
- # PGO Build
- #
- - template: /eng/pipelines/common/platform-matrix.yml
- parameters:
- jobTemplate: /eng/pipelines/installer/jobs/build-job.yml
- buildConfig: Release
- jobParameters:
- isOfficialBuild: ${{ variables.isOfficialBuild }}
- liveRuntimeBuildConfig: release
- liveLibrariesBuildConfig: Release
- pgoType: 'PGO'
- platforms:
- - windows_x64
- - windows_x86
- - linux_x64
- - windows_arm64
- - linux_arm64
+ nameSuffix: PGO
+ postBuildSteps:
+ - template: /eng/pipelines/common/upload-intermediate-artifacts-step.yml
+ parameters:
+ name: PGO
+ timeoutInMinutes: 95
#
# Build Workloads
@@ -472,27 +641,28 @@ extends:
platforms:
- windows_x64
jobParameters:
+ templatePath: 'templates-official'
isOfficialBuild: ${{ variables.isOfficialBuild }}
timeoutInMinutes: 120
dependsOn:
- - Build_android_arm_release_AllSubsets_Mono
- - Build_android_arm64_release_AllSubsets_Mono
- - Build_android_x86_release_AllSubsets_Mono
- - Build_android_x64_release_AllSubsets_Mono
- - Build_browser_wasm_Linux_release_AllSubsets_Mono
- - Build_wasi_wasm_linux_release_AllSubsets_Mono
- - Build_ios_arm64_release_AllSubsets_Mono
- - Build_iossimulator_x64_release_AllSubsets_Mono
- - Build_iossimulator_arm64_release_AllSubsets_Mono
- - Build_maccatalyst_arm64_release_AllSubsets_Mono
- - Build_maccatalyst_x64_release_AllSubsets_Mono
- - Build_tvos_arm64_release_AllSubsets_Mono
- - Build_tvossimulator_arm64_release_AllSubsets_Mono
- - Build_tvossimulator_x64_release_AllSubsets_Mono
+ - Build_android_arm_release_Mono
+ - Build_android_arm64_release_Mono
+ - Build_android_x86_release_Mono
+ - Build_android_x64_release_Mono
+ - Build_browser_wasm_Linux_release_Mono
+ - Build_wasi_wasm_linux_release_Mono
+ - Build_ios_arm64_release_Mono
+ - Build_iossimulator_x64_release_Mono
+ - Build_iossimulator_arm64_release_Mono
+ - Build_maccatalyst_arm64_release_Mono
+ - Build_maccatalyst_x64_release_Mono
+ - Build_tvos_arm64_release_Mono
+ - Build_tvossimulator_arm64_release_Mono
+ - Build_tvossimulator_x64_release_Mono
- Build_windows_x64_release_CrossAOT_Mono
- - installer__coreclr__windows_x64_Release_
- - installer__coreclr__windows_x86_Release_
- - installer__coreclr__windows_arm64_Release_
+ - Build_windows_x64_release_CoreCLR
+ - Build_windows_x86_release_CoreCLR
+ - Build_windows_arm64_release_CoreCLR
- ${{ if eq(variables.isOfficialBuild, true) }}:
- template: /eng/pipelines/official/stages/publish.yml
diff --git a/eng/pipelines/runtime-sanitized.yml b/eng/pipelines/runtime-sanitized.yml
index 3bc49fec6908..5db421762ed0 100644
--- a/eng/pipelines/runtime-sanitized.yml
+++ b/eng/pipelines/runtime-sanitized.yml
@@ -38,13 +38,14 @@ extends:
buildArgs: -s clr+libs -c $(_BuildConfig) $(_nativeSanitizersArg)
timeoutInMinutes: 300
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: CoreCLR_$(_BuildConfig)
- scenarios:
- - normal
- - no_tiered_compilation
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: CoreCLR_$(_BuildConfig)
+ scenarios:
+ - normal
+ - no_tiered_compilation
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
@@ -72,12 +73,13 @@ extends:
buildArgs: -s clr+libs+libs.tests -c $(_BuildConfig) -rc Checked $(_nativeSanitizersArg) /p:ArchiveTests=true
timeoutInMinutes: 180
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Libraries_$(_BuildConfig)
- scenarios:
- - normal
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Libraries_$(_BuildConfig)
+ scenarios:
+ - normal
#
# NativeAOT release build and smoke tests with AddressSanitizer
@@ -98,11 +100,12 @@ extends:
timeoutInMinutes: 120
nameSuffix: NativeAOT
buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release $(_nativeSanitizersArg)
- extraStepsTemplate: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
- extraStepsParameters:
- creator: dotnet-bot
- testBuildArgs: nativeaot tree nativeaot
- liveLibrariesBuildConfig: Release
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
+ parameters:
+ creator: dotnet-bot
+ testBuildArgs: nativeaot tree nativeaot
+ liveLibrariesBuildConfig: Release
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
diff --git a/eng/pipelines/runtime.yml b/eng/pipelines/runtime.yml
index 3aa0b6504819..8f001c490300 100644
--- a/eng/pipelines/runtime.yml
+++ b/eng/pipelines/runtime.yml
@@ -52,6 +52,7 @@ variables:
extends:
template: /eng/pipelines/common/templates/pipeline-with-resources.yml
parameters:
+ isOfficialBuild: false
stages:
- stage: Build
jobs:
@@ -70,7 +71,6 @@ extends:
jobTemplate: /eng/pipelines/coreclr/templates/build-job.yml
buildConfig: checked
platforms:
- - linux_x86
- linux_x64
- linux_arm
- linux_arm64
@@ -79,7 +79,6 @@ extends:
- linux_musl_arm64
- linux_musl_x64
- osx_arm64
- - tizen_armel
- windows_x86
- windows_x64
- windows_arm64
@@ -105,9 +104,10 @@ extends:
testGroup: innerloop
nameSuffix: Native_GCC
buildArgs: -s clr.native+libs.native+mono+host.native -c $(_BuildConfig) -gcc
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests.yml
- extraStepsParameters:
- testBuildArgs: skipmanaged skipgeneratelayout skiprestorepackages -gcc
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests.yml
+ parameters:
+ testBuildArgs: skipmanaged skipgeneratelayout skiprestorepackages -gcc
condition: >-
or(
eq(dependencies.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true),
@@ -156,7 +156,6 @@ extends:
- windows_x64
- windows_x86
- windows_arm64
- - freebsd_x64
jobParameters:
testGroup: innerloop
# Mono/runtimetests also need this, but skip for wasm
@@ -209,12 +208,13 @@ extends:
timeoutInMinutes: 120
nameSuffix: NativeAOT
buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release
- extraStepsTemplate: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
- extraStepsParameters:
- creator: dotnet-bot
- testBuildArgs: nativeaot tree nativeaot
- liveLibrariesBuildConfig: Release
- testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
+ parameters:
+ creator: dotnet-bot
+ testBuildArgs: nativeaot tree nativeaot
+ liveLibrariesBuildConfig: Release
+ testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
@@ -246,12 +246,13 @@ extends:
timeoutInMinutes: 180
nameSuffix: NativeAOT
buildArgs: -s clr.aot+host.native+libs -rc $(_BuildConfig) -lc Release -hc Release
- extraStepsTemplate: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
- extraStepsParameters:
- creator: dotnet-bot
- testBuildArgs: 'nativeaot tree ";nativeaot;Loader;Interop;tracing;" /p:BuildNativeAotFrameworkObjects=true'
- liveLibrariesBuildConfig: Release
- testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
+ parameters:
+ creator: dotnet-bot
+ testBuildArgs: 'nativeaot tree ";nativeaot;Loader;Interop;tracing;" /p:BuildNativeAotFrameworkObjects=true'
+ liveLibrariesBuildConfig: Release
+ testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
@@ -289,12 +290,13 @@ extends:
timeoutInMinutes: 120
nameSuffix: NativeAOT
buildArgs: -s clr.aot+host.native+libs+tools.illink -c $(_BuildConfig) -rc $(_BuildConfig) -lc Release -hc Release
- extraStepsTemplate: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
- extraStepsParameters:
- creator: dotnet-bot
- testBuildArgs: 'nativeaot tree ";nativeaot;tracing/eventpipe/providervalidation;"'
- liveLibrariesBuildConfig: Release
- testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/coreclr/nativeaot-post-build-steps.yml
+ parameters:
+ creator: dotnet-bot
+ testBuildArgs: 'nativeaot tree ";nativeaot;tracing/eventpipe/providervalidation;"'
+ liveLibrariesBuildConfig: Release
+ testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
parameters:
@@ -326,10 +328,11 @@ extends:
buildArgs: -s clr.aot+host.native+libs+libs.tests -c $(_BuildConfig) /p:TestNativeAot=true /p:RunSmokeTestsOnly=true /p:ArchiveTests=true
timeoutInMinutes: 240 # Doesn't actually take long, but we've seen the ARM64 Helix queue often get backlogged for 2+ hours
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
condition: >-
or(
eq(dependencies.evaluate_paths.outputs['SetPathVars_libraries.containsChange'], true),
@@ -355,6 +358,27 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true),
eq(dependencies.evaluate_paths.outputs['SetPathVars_tools_illink.containsChange'], true),
eq(variables['isRollingBuild'], true))
+ #
+ # Build CrossDacs
+ #
+ - template: /eng/pipelines/common/platform-matrix.yml
+ parameters:
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
+ buildConfig: release
+ platforms:
+ - windows_x64
+ variables:
+ - name: _archParameter
+ value: -arch x64,x86,arm,arm64
+ jobParameters:
+ buildArgs: -s linuxdac+alpinedac -c Checked,$(_BuildConfig)
+ nameSuffix: CrossDac
+ isOfficialBuild: false
+ timeoutInMinutes: 60
+ postBuildSteps:
+ - publish: $(Build.SourcesDirectory)/artifacts/bin/coreclr
+ displayName: Publish CrossDacs for diagnostics
+ artifact: CoreCLRCrossDacArtifacts
# Build Mono AOT offset headers once, for consumption elsewhere
# Only when mono changed
@@ -362,6 +386,7 @@ extends:
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
jobTemplate: /eng/pipelines/mono/templates/generate-offsets.yml
+ templatePath: 'templates'
buildConfig: release
platforms:
- android_x64
@@ -370,7 +395,7 @@ extends:
- ios_arm64
- maccatalyst_x64
jobParameters:
- isOfficialBuild: ${{ variables.isOfficialBuild }}
+ isOfficialBuild: false
# needed by crossaot
condition: >-
or(
@@ -587,15 +612,16 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_installer.containsChange'], true),
eq(variables['isRollingBuild'], true))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- condition: >-
- or(
- eq(variables['librariesContainsChange'], true),
- eq(variables['monoContainsChange'], true),
- eq(variables['isRollingBuild'], true))
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ condition: >-
+ or(
+ eq(variables['librariesContainsChange'], true),
+ eq(variables['monoContainsChange'], true),
+ eq(variables['isRollingBuild'], true))
#
# iOS/tvOS devices - Full AOT + AggressiveTrimming to reduce size
@@ -628,16 +654,17 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_installer.containsChange'], true),
eq(variables['isRollingBuild'], true))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
- condition: >-
- or(
- eq(variables['librariesContainsChange'], true),
- eq(variables['monoContainsChange'], true),
- eq(variables['isRollingBuild'], true))
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
+ condition: >-
+ or(
+ eq(variables['librariesContainsChange'], true),
+ eq(variables['monoContainsChange'], true),
+ eq(variables['isRollingBuild'], true))
#
# iOS/tvOS devices
@@ -670,16 +697,17 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_installer.containsChange'], true),
eq(variables['isRollingBuild'], true))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
- extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
- condition: >-
- or(
- eq(variables['librariesContainsChange'], true),
- eq(variables['coreclrContainsChange'], true),
- eq(variables['isRollingBuild'], true))
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: NativeAOT_$(_BuildConfig)
+ extraHelixArguments: /p:NeedsToBuildAppsOnHelix=true
+ condition: >-
+ or(
+ eq(variables['librariesContainsChange'], true),
+ eq(variables['coreclrContainsChange'], true),
+ eq(variables['isRollingBuild'], true))
#
# MacCatalyst interp - requires AOT Compilation and Interp flags
@@ -713,15 +741,16 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_installer.containsChange'], true),
eq(variables['isRollingBuild'], true))
# extra steps, run tests
- extraStepsTemplate: /eng/pipelines/libraries/helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_$(_BuildConfig)
- condition: >-
- or(
- eq(variables['librariesContainsChange'], true),
- eq(variables['monoContainsChange'], true),
- eq(variables['isRollingBuild'], true))
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_$(_BuildConfig)
+ condition: >-
+ or(
+ eq(variables['librariesContainsChange'], true),
+ eq(variables['monoContainsChange'], true),
+ eq(variables['isRollingBuild'], true))
#
# Build Mono and Installer on LLVMJIT mode
@@ -989,7 +1018,6 @@ extends:
- osx_arm64
- osx_x64
- windows_x64
- - freebsd_x64
jobParameters:
testScope: innerloop
condition:
@@ -1033,32 +1061,45 @@ extends:
condition: >-
eq(dependencies.evaluate_paths.outputs['SetPathVars_coreclr.containsChange'], true)
+ #
+ # Build and test libraries for .NET Framework
+ #
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/libraries/build-job.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: Release
platforms:
- windows_x86
helixQueuesTemplate: /eng/pipelines/libraries/helix-queues-setup.yml
jobParameters:
framework: net48
- runTests: true
- testScope: innerloop
+ buildArgs: -s tools+libs+libs.tests -framework net48 -c $(_BuildConfig) -testscope innerloop /p:ArchiveTests=true
+ nameSuffix: Libraries_NET48
+ timeoutInMinutes: 150
+ postBuildSteps:
+ - template: /eng/pipelines/libraries/helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: NET48_$(_BuildConfig)
+ extraHelixArguments: /p:BuildTargetFramework=net48
condition: >-
or(
eq(dependencies.evaluate_paths.outputs['SetPathVars_libraries.containsChange'], true),
eq(variables['isRollingBuild'], true))
+ #
+ # Build and test libraries AllConfigurations
+ #
- template: /eng/pipelines/common/platform-matrix.yml
parameters:
- jobTemplate: /eng/pipelines/libraries/build-job.yml
+ jobTemplate: /eng/pipelines/common/global-build-job.yml
buildConfig: ${{ variables.debugOnPrReleaseOnRolling }}
platforms:
- windows_x64
jobParameters:
- framework: allConfigurations
- runTests: true
- useHelix: false
+ buildArgs: -test -s tools+libs+libs.tests -allConfigurations -c $(_BuildConfig) /p:TestAssemblies=false /p:TestPackages=true
+ nameSuffix: Libraries_AllConfigurations
+ timeoutInMinutes: 150
condition: >-
or(
eq(dependencies.evaluate_paths.outputs['SetPathVars_libraries.containsChange'], true),
@@ -1100,7 +1141,6 @@ extends:
- linux_arm64
- linux_musl_x64
- windows_x64
- - freebsd_x64
jobParameters:
liveRuntimeBuildConfig: release
liveLibrariesBuildConfig: ${{ variables.debugOnPrReleaseOnRolling }}
@@ -1254,10 +1294,11 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_runtimetests.containsChange'], true),
eq(variables['isRollingBuild'], true))
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_Release
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_Release
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
@@ -1289,10 +1330,11 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_mono_excluding_wasm.containsChange'], true),
eq(dependencies.evaluate_paths.outputs['SetPathVars_runtimetests.containsChange'], true),
eq(variables['isRollingBuild'], true))
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- testRunNamePrefixSuffix: Mono_Release
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ testRunNamePrefixSuffix: Mono_Release
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
#
@@ -1326,11 +1368,12 @@ extends:
eq(dependencies.evaluate_paths.outputs['SetPathVars_mono_excluding_wasm.containsChange'], true),
eq(dependencies.evaluate_paths.outputs['SetPathVars_runtimetests.containsChange'], true),
eq(variables['isRollingBuild'], true))
- extraStepsTemplate: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
- extraStepsParameters:
- creator: dotnet-bot
- llvmAotStepContainer: linux_x64_llvmaot
- testRunNamePrefixSuffix: Mono_Release
+ postBuildSteps:
+ - template: /eng/pipelines/common/templates/runtimes/build-runtime-tests-and-send-to-helix.yml
+ parameters:
+ creator: dotnet-bot
+ llvmAotStepContainer: linux_x64_llvmaot
+ testRunNamePrefixSuffix: Mono_Release
extraVariablesTemplates:
- template: /eng/pipelines/common/templates/runtimes/test-variables.yml
@@ -1525,8 +1568,6 @@ extends:
- SourceBuild_centos8_x64
jobParameters:
nameSuffix: centos8SourceBuild
- extraStepsParameters:
- name: SourceBuildPackages
timeoutInMinutes: 95
condition: eq(variables['isRollingBuild'], true)
@@ -1539,7 +1580,5 @@ extends:
- SourceBuild_banana24_x64
jobParameters:
nameSuffix: banana24SourceBuild
- extraStepsParameters:
- name: SourceBuildPackages
timeoutInMinutes: 95
condition: eq(variables['isRollingBuild'], true)
diff --git a/eng/pipelines/runtimelab.yml b/eng/pipelines/runtimelab.yml
index a5c4e0334331..7c34126757d7 100644
--- a/eng/pipelines/runtimelab.yml
+++ b/eng/pipelines/runtimelab.yml
@@ -65,9 +65,10 @@ extends:
timeoutInMinutes: 100
testGroup: innerloop
buildArgs: -s clr+libs+host+packs -c debug -runtimeConfiguration Checked
- extraStepsTemplate: /eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
- extraStepsParameters:
- uploadRuntimeTests: true
+ postBuildSteps:
+ - template: /eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
+ parameters:
+ uploadRuntimeTests: true
#
# Build with Release config and Release runtimeConfiguration
@@ -83,10 +84,11 @@ extends:
timeoutInMinutes: 100
isOfficialBuild: ${{ variables.isOfficialBuild }}
testGroup: innerloop
- extraStepsTemplate: /eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
- extraStepsParameters:
- uploadLibrariesTests: ${{ eq(variables.isOfficialBuild, false) }}
- uploadIntermediateArtifacts: false
+ postBuildSteps:
+ - template: /eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
+ parameters:
+ uploadLibrariesTests: ${{ eq(variables.isOfficialBuild, false) }}
+ uploadIntermediateArtifacts: false
${{ if eq(variables.isOfficialBuild, false) }}:
buildArgs: -s clr+libs+libs.tests+host+packs -c $(_BuildConfig) /p:ArchiveTests=true
${{ if eq(variables.isOfficialBuild, true) }}:
@@ -107,11 +109,12 @@ extends:
nameSuffix: AllConfigurations
buildArgs: -s libs -c $(_BuildConfig) -allConfigurations
${{ if eq(variables.isOfficialBuild, true) }}:
- extraStepsTemplate: /eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
- extraStepsParameters:
- uploadIntermediateArtifacts: true
- isOfficialBuild: true
- librariesBinArtifactName: libraries_bin_official_allconfigurations
+ postBuildSteps:
+ - template: /eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
+ parameters:
+ uploadIntermediateArtifacts: true
+ isOfficialBuild: true
+ librariesBinArtifactName: libraries_bin_official_allconfigurations
# Installer official builds need to build installers and need the libraries all configurations build
- ${{ if eq(variables.isOfficialBuild, true) }}:
diff --git a/eng/pipelines/runtimelab/runtimelab-post-build-steps.yml b/eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
index a6187638cd34..97c007ba4b30 100644
--- a/eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
+++ b/eng/pipelines/runtimelab/runtimelab-post-build-steps.yml
@@ -5,7 +5,6 @@ parameters:
osSubgroup: ''
nameSuffix: ''
platform: ''
- pgoType: ''
runtimeVariant: ''
librariesBinArtifactName: ''
isOfficialBuild: false
@@ -32,7 +31,7 @@ steps:
tarCompression: $(tarCompression)
includeRootFolder: false
archiveExtension: $(archiveExtension)
- artifactName: CoreCLRProduct_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_${{ parameters.osGroup }}${{ parameters.osSubgroup }}_${{ parameters.archType }}_${{ parameters.buildConfig }}
+ artifactName: CoreCLRProduct_${{ parameters.runtimeVariant }}_${{ parameters.osGroup }}${{ parameters.osSubgroup }}_${{ parameters.archType }}_${{ parameters.buildConfig }}
displayName: 'CoreCLR product build'
# Zip Test Build
diff --git a/eng/targetingpacks.targets b/eng/targetingpacks.targets
index b3ad8560d63b..e9ca4e116152 100644
--- a/eng/targetingpacks.targets
+++ b/eng/targetingpacks.targets
@@ -35,7 +35,7 @@
LatestRuntimeFrameworkVersion="$(ProductVersion)"
RuntimeFrameworkName="$(LocalFrameworkOverrideName)"
RuntimePackNamePatterns="$(LocalFrameworkOverrideName).Runtime.**RID**"
- RuntimePackRuntimeIdentifiers="linux-arm;linux-arm64;linux-musl-arm64;linux-musl-x64;linux-x64;osx-x64;rhel.6-x64;tizen.4.0.0-armel;tizen.5.0.0-armel;win-arm64;win-x64;win-x86;linux-musl-arm;osx-arm64;maccatalyst-x64;maccatalyst-arm64;linux-s390x;linux-bionic-arm;linux-bionic-arm64;linux-bionic-x64;linux-bionic-x86"
+ RuntimePackRuntimeIdentifiers="linux-arm;linux-arm64;linux-musl-arm64;linux-musl-x64;linux-x64;osx-x64;rhel.6-x64;tizen.4.0.0-armel;tizen.5.0.0-armel;win-arm64;win-x64;win-x86;linux-musl-arm;osx-arm64;maccatalyst-x64;maccatalyst-arm64;linux-s390x;linux-bionic-arm;linux-bionic-arm64;linux-bionic-x64;linux-bionic-x86;freebsd-x64;freebsd-arm64"
TargetFramework="$(NetCoreAppCurrent)"
TargetingPackName="$(LocalFrameworkOverrideName).Ref"
TargetingPackVersion="$(ProductVersion)"
@@ -104,6 +104,10 @@
Condition="'$(UsePackageDownload)' == 'true' and $([System.String]::Copy('%(Identity)').StartsWith('$(LocalFrameworkOverrideName).Runtime'))" />
+
+
m_copyConstructor;
+
+ public delegate* m_destructor;
+
+ public CopyConstructorCookie* m_next;
+
+ [StackTraceHidden]
+ public void ExecuteCopy(void* destinationBase)
+ {
+ if (m_copyConstructor != null)
+ {
+ m_copyConstructor((byte*)destinationBase + m_destinationOffset, m_source);
+ }
+
+ if (m_destructor != null)
+ {
+ m_destructor(m_source);
+ }
+ }
+ }
+
+ internal unsafe struct CopyConstructorChain
+ {
+ public void* m_realTarget;
+ public CopyConstructorCookie* m_head;
+
+ public void Add(CopyConstructorCookie* cookie)
+ {
+ cookie->m_next = m_head;
+ m_head = cookie;
+ }
+
+ [ThreadStatic]
+ private static CopyConstructorChain s_copyConstructorChain;
+
+ public void Install(void* realTarget)
+ {
+ m_realTarget = realTarget;
+ s_copyConstructorChain = this;
+ }
+
+ [StackTraceHidden]
+ private void ExecuteCopies(void* destinationBase)
+ {
+ for (CopyConstructorCookie* current = m_head; current != null; current = current->m_next)
+ {
+ current->ExecuteCopy(destinationBase);
+ }
+ }
+
+ [UnmanagedCallersOnly]
+ [StackTraceHidden]
+ public static void* ExecuteCurrentCopiesAndGetTarget(void* destinationBase)
+ {
+ void* target = s_copyConstructorChain.m_realTarget;
+ s_copyConstructorChain.ExecuteCopies(destinationBase);
+ // Reset this instance to ensure we don't accidentally execute the copies again.
+ // All of the pointers point to the stack, so we don't need to free any memory.
+ s_copyConstructorChain = default;
+ return target;
+ }
+ }
+
internal static class StubHelpers
{
[MethodImpl(MethodImplOptions.InternalCall)]
diff --git a/src/coreclr/build-runtime.cmd b/src/coreclr/build-runtime.cmd
index ec6887c78d84..d85e6c95909f 100644
--- a/src/coreclr/build-runtime.cmd
+++ b/src/coreclr/build-runtime.cmd
@@ -211,8 +211,13 @@ if NOT "%__BuildType%"=="Release" (
set __PgoOptimize=0
)
-set "__BinDir=%__RootBinDir%\bin\coreclr\%__TargetOS%.%__TargetArch%.%__BuildType%"
-set "__IntermediatesDir=%__RootBinDir%\obj\coreclr\%__TargetOS%.%__TargetArch%.%__BuildType%"
+set __TargetOSDirName=%__TargetOS%
+if "%__TargetOS%"=="alpine" (
+ set __TargetOSDirName=linux_musl
+)
+
+set "__BinDir=%__RootBinDir%\bin\coreclr\%__TargetOSDirName%.%__TargetArch%.%__BuildType%"
+set "__IntermediatesDir=%__RootBinDir%\obj\coreclr\%__TargetOSDirName%.%__TargetArch%.%__BuildType%"
set "__LogsDir=%__RootBinDir%\log\!__BuildType!"
set "__MsbuildDebugLogsDir=%__LogsDir%\MsbuildDebugLogs"
set "__ArtifactsIntermediatesDir=%__RepoRootDir%\artifacts\obj\coreclr\"
diff --git a/src/coreclr/crossgen-corelib.proj b/src/coreclr/crossgen-corelib.proj
index 2dc62b8151a3..12d2f17ddcc5 100644
--- a/src/coreclr/crossgen-corelib.proj
+++ b/src/coreclr/crossgen-corelib.proj
@@ -5,8 +5,7 @@
-
-
+
diff --git a/src/coreclr/debug/createdump/crashinfo.cpp b/src/coreclr/debug/createdump/crashinfo.cpp
index 996f3a81935d..d1d1b0ea0218 100644
--- a/src/coreclr/debug/createdump/crashinfo.cpp
+++ b/src/coreclr/debug/createdump/crashinfo.cpp
@@ -803,7 +803,7 @@ CrashInfo::PageMappedToPhysicalMemory(uint64_t start)
}
uint64_t pagemapOffset = (start / PAGE_SIZE) * sizeof(uint64_t);
- uint64_t seekResult = lseek64(m_fdPagemap, (off64_t) pagemapOffset, SEEK_SET);
+ uint64_t seekResult = lseek(m_fdPagemap, (off_t) pagemapOffset, SEEK_SET);
if (seekResult != pagemapOffset)
{
int seekErrno = errno;
diff --git a/src/coreclr/debug/createdump/crashinfounix.cpp b/src/coreclr/debug/createdump/crashinfounix.cpp
index 24b975e3d655..2f4ea079de3b 100644
--- a/src/coreclr/debug/createdump/crashinfounix.cpp
+++ b/src/coreclr/debug/createdump/crashinfounix.cpp
@@ -516,7 +516,7 @@ CrashInfo::ReadProcessMemory(void* address, void* buffer, size_t size, size_t* r
// performance optimization.
m_canUseProcVmReadSyscall = false;
assert(m_fdMem != -1);
- *read = pread64(m_fdMem, buffer, size, (off64_t)address);
+ *read = pread(m_fdMem, buffer, size, (off_t)address);
}
if (*read == (size_t)-1)
diff --git a/src/coreclr/debug/daccess/CMakeLists.txt b/src/coreclr/debug/daccess/CMakeLists.txt
index 9a867c078019..5332e957c9ec 100644
--- a/src/coreclr/debug/daccess/CMakeLists.txt
+++ b/src/coreclr/debug/daccess/CMakeLists.txt
@@ -53,7 +53,7 @@ if(CLR_CMAKE_HOST_FREEBSD OR CLR_CMAKE_HOST_NETBSD OR CLR_CMAKE_HOST_SUNOS)
DEPENDS coreclr
VERBATIM
COMMAND_EXPAND_LISTS
- COMMAND ${CLR_DIR}/pal/tools/gen-dactable-rva.sh ${args}
+ COMMAND ${CMAKE_COMMAND} -E env NM=${CMAKE_NM} ${CLR_DIR}/pal/tools/gen-dactable-rva.sh ${args}
COMMENT Generating ${GENERATED_INCLUDE_DIR}/dactablerva.h
)
diff --git a/src/coreclr/gc/env/gcenv.os.h b/src/coreclr/gc/env/gcenv.os.h
index c38892b0cadc..753b751f798c 100644
--- a/src/coreclr/gc/env/gcenv.os.h
+++ b/src/coreclr/gc/env/gcenv.os.h
@@ -417,7 +417,7 @@ class GCToOSInterface
// Remarks:
// If a process runs with a restricted memory limit, it returns the limit. If there's no limit
// specified, it returns amount of actual physical memory.
- static uint64_t GetPhysicalMemoryLimit(bool* is_restricted=NULL);
+ static uint64_t GetPhysicalMemoryLimit(bool* is_restricted=NULL, bool refresh=false);
// Get memory status
// Parameters:
diff --git a/src/coreclr/gc/gc.cpp b/src/coreclr/gc/gc.cpp
index 4be93e48e902..4913ff3cd018 100644
--- a/src/coreclr/gc/gc.cpp
+++ b/src/coreclr/gc/gc.cpp
@@ -52808,7 +52808,7 @@ int gc_heap::refresh_memory_limit()
size_t old_heap_hard_limit_poh = heap_hard_limit_oh[poh];
bool old_hard_limit_config_p = hard_limit_config_p;
- total_physical_mem = GCToOSInterface::GetPhysicalMemoryLimit (&is_restricted_physical_mem);
+ total_physical_mem = GCToOSInterface::GetPhysicalMemoryLimit (&is_restricted_physical_mem, true);
bool succeed = true;
diff --git a/src/coreclr/gc/unix/gcenv.unix.cpp b/src/coreclr/gc/unix/gcenv.unix.cpp
index b45cd40d8073..0dc9790e0662 100644
--- a/src/coreclr/gc/unix/gcenv.unix.cpp
+++ b/src/coreclr/gc/unix/gcenv.unix.cpp
@@ -792,28 +792,30 @@ bool ReadMemoryValueFromFile(const char* filename, uint64_t* val)
return result;
}
-#define UPDATE_CACHE_SIZE_AND_LEVEL(NEW_CACHE_SIZE, NEW_CACHE_LEVEL) if (NEW_CACHE_SIZE > cacheSize) { cacheSize = NEW_CACHE_SIZE; cacheLevel = NEW_CACHE_LEVEL; }
+#define UPDATE_CACHE_SIZE_AND_LEVEL(NEW_CACHE_SIZE, NEW_CACHE_LEVEL) if (NEW_CACHE_SIZE > ((long)cacheSize)) { cacheSize = NEW_CACHE_SIZE; cacheLevel = NEW_CACHE_LEVEL; }
static size_t GetLogicalProcessorCacheSizeFromOS()
{
size_t cacheLevel = 0;
size_t cacheSize = 0;
- size_t size;
+ long size;
+ // sysconf can return -1 if the cache size is unavailable in some distributions and 0 in others.
+ // UPDATE_CACHE_SIZE_AND_LEVEL should handle both the cases by not updating cacheSize if either of cases are met.
#ifdef _SC_LEVEL1_DCACHE_SIZE
- size = ( size_t) sysconf(_SC_LEVEL1_DCACHE_SIZE);
+ size = sysconf(_SC_LEVEL1_DCACHE_SIZE);
UPDATE_CACHE_SIZE_AND_LEVEL(size, 1)
#endif
#ifdef _SC_LEVEL2_CACHE_SIZE
- size = ( size_t) sysconf(_SC_LEVEL2_CACHE_SIZE);
+ size = sysconf(_SC_LEVEL2_CACHE_SIZE);
UPDATE_CACHE_SIZE_AND_LEVEL(size, 2)
#endif
#ifdef _SC_LEVEL3_CACHE_SIZE
- size = ( size_t) sysconf(_SC_LEVEL3_CACHE_SIZE);
+ size = sysconf(_SC_LEVEL3_CACHE_SIZE);
UPDATE_CACHE_SIZE_AND_LEVEL(size, 3)
#endif
#ifdef _SC_LEVEL4_CACHE_SIZE
- size = ( size_t) sysconf(_SC_LEVEL4_CACHE_SIZE);
+ size = sysconf(_SC_LEVEL4_CACHE_SIZE);
UPDATE_CACHE_SIZE_AND_LEVEL(size, 4)
#endif
@@ -836,17 +838,22 @@ static size_t GetLogicalProcessorCacheSizeFromOS()
{
path_to_size_file[index] = (char)(48 + i);
- if (ReadMemoryValueFromFile(path_to_size_file, &size))
+ uint64_t cache_size_from_sys_file = 0;
+
+ if (ReadMemoryValueFromFile(path_to_size_file, &cache_size_from_sys_file))
{
+ // uint64_t to long conversion as ReadMemoryValueFromFile takes a uint64_t* as an argument for the val argument.
+ size = (long)cache_size_from_sys_file;
path_to_level_file[index] = (char)(48 + i);
if (ReadMemoryValueFromFile(path_to_level_file, &level))
{
UPDATE_CACHE_SIZE_AND_LEVEL(size, level)
}
+
else
{
- cacheSize = std::max(cacheSize, size);
+ cacheSize = std::max((long)cacheSize, size);
}
}
}
@@ -1114,14 +1121,13 @@ size_t GCToOSInterface::GetVirtualMemoryLimit()
// Remarks:
// If a process runs with a restricted memory limit, it returns the limit. If there's no limit
// specified, it returns amount of actual physical memory.
-uint64_t GCToOSInterface::GetPhysicalMemoryLimit(bool* is_restricted)
+uint64_t GCToOSInterface::GetPhysicalMemoryLimit(bool* is_restricted, bool refresh)
{
size_t restricted_limit;
if (is_restricted)
*is_restricted = false;
- // The limit was not cached
- if (g_RestrictedPhysicalMemoryLimit == 0)
+ if (g_RestrictedPhysicalMemoryLimit == 0 || refresh)
{
restricted_limit = GetRestrictedPhysicalMemoryLimit();
VolatileStore(&g_RestrictedPhysicalMemoryLimit, restricted_limit);
diff --git a/src/coreclr/gc/windows/gcenv.windows.cpp b/src/coreclr/gc/windows/gcenv.windows.cpp
index f12a64d7ed1a..0d2af7904c89 100644
--- a/src/coreclr/gc/windows/gcenv.windows.cpp
+++ b/src/coreclr/gc/windows/gcenv.windows.cpp
@@ -960,7 +960,7 @@ size_t GCToOSInterface::GetVirtualMemoryLimit()
// Remarks:
// If a process runs with a restricted memory limit, it returns the limit. If there's no limit
// specified, it returns amount of actual physical memory.
-uint64_t GCToOSInterface::GetPhysicalMemoryLimit(bool* is_restricted)
+uint64_t GCToOSInterface::GetPhysicalMemoryLimit(bool* is_restricted, bool refresh)
{
if (is_restricted)
*is_restricted = false;
diff --git a/src/coreclr/jit/codegencommon.cpp b/src/coreclr/jit/codegencommon.cpp
index 72e31c388154..c6892747e054 100644
--- a/src/coreclr/jit/codegencommon.cpp
+++ b/src/coreclr/jit/codegencommon.cpp
@@ -6154,8 +6154,14 @@ void CodeGen::genFnProlog()
};
#if defined(TARGET_AMD64) || defined(TARGET_ARM64) || defined(TARGET_ARM)
- assignIncomingRegisterArgs(&intRegState);
+ // Handle float parameters first; in the presence of struct promotion
+ // we can have parameters that are homed into float registers but
+ // passed in integer registers. So make sure we get those out of the
+ // integer registers before we potentially override those as part of
+ // handling integer parameters.
+
assignIncomingRegisterArgs(&floatRegState);
+ assignIncomingRegisterArgs(&intRegState);
#else
assignIncomingRegisterArgs(&intRegState);
#endif
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h
index 5a06113fa8d7..2720a1e2b183 100644
--- a/src/coreclr/jit/gentree.h
+++ b/src/coreclr/jit/gentree.h
@@ -8635,7 +8635,7 @@ struct GenCondition
NONE, NONE, SGE, SGT, SLT, SLE, NS, S,
NE, EQ, UGE, UGT, ULT, ULE, NC, C,
FNEU, FEQU, FGEU, FGTU, FLTU, FLEU, NO, O,
- FNE, FEQ, FGE, FGT, FLT, FGT, NP, P
+ FNE, FEQ, FGE, FGT, FLT, FLE, NP, P
};
// clang-format on
diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp
index 6087b1499301..9bf654ffc35a 100644
--- a/src/coreclr/jit/importer.cpp
+++ b/src/coreclr/jit/importer.cpp
@@ -9393,7 +9393,8 @@ void Compiler::impImportBlockCode(BasicBlock* block)
CORINFO_FIELD_INFO fi;
eeGetFieldInfo(&fldToken, CORINFO_ACCESS_SET, &fi);
unsigned flagsToCheck = CORINFO_FLG_FIELD_STATIC | CORINFO_FLG_FIELD_FINAL;
- if ((fi.fieldFlags & flagsToCheck) == flagsToCheck)
+ if (((fi.fieldFlags & flagsToCheck) == flagsToCheck) &&
+ ((info.compCompHnd->getClassAttribs(info.compClassHnd) & CORINFO_FLG_SHAREDINST) == 0))
{
#ifdef FEATURE_READYTORUN
if (opts.IsReadyToRun())
diff --git a/src/coreclr/jit/lowerxarch.cpp b/src/coreclr/jit/lowerxarch.cpp
index f0e7fc6322f2..dad1df9ffcfb 100644
--- a/src/coreclr/jit/lowerxarch.cpp
+++ b/src/coreclr/jit/lowerxarch.cpp
@@ -7652,15 +7652,17 @@ bool Lowering::IsContainableHWIntrinsicOp(GenTreeHWIntrinsic* parentNode, GenTre
}
case NI_SSE2_ConvertToVector128Double:
- case NI_SSE3_MoveAndDuplicate:
case NI_AVX_ConvertToVector256Double:
+ case NI_AVX512F_ConvertToVector512Double:
+ case NI_AVX512F_VL_ConvertToVector128Double:
+ case NI_AVX512F_VL_ConvertToVector256Double:
{
assert(!supportsSIMDScalarLoads);
// Most instructions under the non-VEX encoding require aligned operands.
// Those used for Sse2.ConvertToVector128Double (CVTDQ2PD and CVTPS2PD)
- // and Sse3.MoveAndDuplicate (MOVDDUP) are exceptions and don't fail for
- // unaligned inputs as they read mem64 (half the vector width) instead
+ // are exceptions and don't fail for unaligned inputs as they read half
+ // the vector width instead
supportsAlignedSIMDLoads = !comp->opts.MinOpts();
supportsUnalignedSIMDLoads = true;
@@ -7668,10 +7670,29 @@ bool Lowering::IsContainableHWIntrinsicOp(GenTreeHWIntrinsic* parentNode, GenTre
const unsigned expectedSize = genTypeSize(parentNode->TypeGet()) / 2;
const unsigned operandSize = genTypeSize(childNode->TypeGet());
- // For broadcasts we can only optimize constants and memory operands
- const bool broadcastIsContainable = childNode->OperIsConst() || childNode->isMemoryOp();
- supportsGeneralLoads =
- broadcastIsContainable && supportsUnalignedSIMDLoads && (operandSize >= expectedSize);
+ if (childNode->OperIsConst() || childNode->isMemoryOp())
+ {
+ // For broadcasts we can only optimize constants and memory operands
+ // since we're going from a smaller base type to a larger base type
+ supportsGeneralLoads = supportsUnalignedSIMDLoads && (operandSize >= expectedSize);
+ }
+ break;
+ }
+
+ case NI_SSE3_MoveAndDuplicate:
+ {
+ // Most instructions under the non-VEX encoding require aligned operands.
+ // Those used for Sse3.MoveAndDuplicate (MOVDDUP) are exceptions and don't
+ // fail for unaligned inputs as they read half the vector width instead
+
+ supportsAlignedSIMDLoads = !comp->opts.MinOpts();
+ supportsUnalignedSIMDLoads = true;
+
+ const unsigned expectedSize = genTypeSize(parentNode->TypeGet()) / 2;
+ const unsigned operandSize = genTypeSize(childNode->TypeGet());
+
+ supportsGeneralLoads = supportsUnalignedSIMDLoads && (operandSize >= expectedSize);
+ supportsSIMDScalarLoads = true;
break;
}
@@ -7697,8 +7718,6 @@ bool Lowering::IsContainableHWIntrinsicOp(GenTreeHWIntrinsic* parentNode, GenTre
break;
}
}
-
- assert(supportsSIMDScalarLoads == false);
break;
}
diff --git a/src/coreclr/jit/lsra.cpp b/src/coreclr/jit/lsra.cpp
index 0a33ba3faba9..c4f2d68e00d7 100644
--- a/src/coreclr/jit/lsra.cpp
+++ b/src/coreclr/jit/lsra.cpp
@@ -1663,6 +1663,21 @@ bool LinearScan::isRegCandidate(LclVarDsc* varDsc)
return false;
}
+ // Avoid allocating parameters that are passed in float regs into integer
+ // registers. We currently home float registers before integer registers,
+ // so that kind of enregistration can trash integer registers containing
+ // other parameters.
+ // We assume that these cases will be homed to float registers if they are
+ // promoted.
+ // TODO-CQ: Combine integer and float register homing to handle these kinds
+ // of conflicts.
+ if ((varDsc->TypeGet() == TYP_STRUCT) && varDsc->lvIsRegArg && !varDsc->lvPromoted &&
+ varTypeUsesIntReg(varDsc->GetRegisterType()) && genIsValidFloatReg(varDsc->GetArgReg()))
+ {
+ compiler->lvaSetVarDoNotEnregister(lclNum DEBUGARG(DoNotEnregisterReason::IsStructArg));
+ return false;
+ }
+
// Are we not optimizing and we have exception handlers?
// if so mark all args and locals as volatile, so that they
// won't ever get enregistered.
diff --git a/src/coreclr/jit/optcse.cpp b/src/coreclr/jit/optcse.cpp
index 4e7fc81df64b..505352250f4e 100644
--- a/src/coreclr/jit/optcse.cpp
+++ b/src/coreclr/jit/optcse.cpp
@@ -3066,8 +3066,11 @@ class CSE_Heuristic
assert(vnStore->IsVNCompareCheckedBoundArith(oldCmpVN));
vnStore->GetCompareCheckedBoundArithInfo(oldCmpVN, &info);
- newCmpArgVN = vnStore->VNForFunc(vnStore->TypeOfVN(info.arrOp), (VNFunc)info.arrOper,
- info.arrOp, theConservativeVN);
+ ValueNum arrOp1 = info.arrOpLHS ? info.arrOp : theConservativeVN;
+ ValueNum arrOp2 = info.arrOpLHS ? theConservativeVN : info.arrOp;
+
+ newCmpArgVN =
+ vnStore->VNForFunc(vnStore->TypeOfVN(info.arrOp), (VNFunc)info.arrOper, arrOp1, arrOp2);
}
ValueNum newCmpVN = vnStore->VNForFunc(vnStore->TypeOfVN(oldCmpVN), (VNFunc)info.cmpOper,
info.cmpOp, newCmpArgVN);
diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp
index fa60633afbc2..fbc80330eb92 100644
--- a/src/coreclr/jit/valuenum.cpp
+++ b/src/coreclr/jit/valuenum.cpp
@@ -6576,15 +6576,17 @@ void ValueNumStore::GetCheckedBoundArithInfo(ValueNum vn, CompareCheckedBoundAri
bool isOp1CheckedBound = IsVNCheckedBound(funcArith.m_args[1]);
if (isOp1CheckedBound)
{
- info->arrOper = funcArith.m_func;
- info->arrOp = funcArith.m_args[0];
- info->vnBound = funcArith.m_args[1];
+ info->arrOper = funcArith.m_func;
+ info->arrOp = funcArith.m_args[0];
+ info->vnBound = funcArith.m_args[1];
+ info->arrOpLHS = true;
}
else
{
- info->arrOper = funcArith.m_func;
- info->arrOp = funcArith.m_args[1];
- info->vnBound = funcArith.m_args[0];
+ info->arrOper = funcArith.m_func;
+ info->arrOp = funcArith.m_args[1];
+ info->vnBound = funcArith.m_args[0];
+ info->arrOpLHS = false;
}
}
diff --git a/src/coreclr/jit/valuenum.h b/src/coreclr/jit/valuenum.h
index 04fed7bfbc1f..af6ba5290148 100644
--- a/src/coreclr/jit/valuenum.h
+++ b/src/coreclr/jit/valuenum.h
@@ -920,9 +920,10 @@ class ValueNumStore
ValueNum vnBound;
unsigned arrOper;
ValueNum arrOp;
+ bool arrOpLHS; // arrOp is on the left side of cmpOp expression
unsigned cmpOper;
ValueNum cmpOp;
- CompareCheckedBoundArithInfo() : vnBound(NoVN), arrOper(GT_NONE), arrOp(NoVN), cmpOper(GT_NONE), cmpOp(NoVN)
+ CompareCheckedBoundArithInfo() : vnBound(NoVN), arrOper(GT_NONE), arrOp(NoVN), arrOpLHS(false), cmpOper(GT_NONE), cmpOp(NoVN)
{
}
#ifdef DEBUG
diff --git a/src/coreclr/nativeaot/BuildIntegration/Microsoft.DotNet.ILCompiler.SingleEntry.targets b/src/coreclr/nativeaot/BuildIntegration/Microsoft.DotNet.ILCompiler.SingleEntry.targets
index 178d8aaa93aa..299840345a79 100644
--- a/src/coreclr/nativeaot/BuildIntegration/Microsoft.DotNet.ILCompiler.SingleEntry.targets
+++ b/src/coreclr/nativeaot/BuildIntegration/Microsoft.DotNet.ILCompiler.SingleEntry.targets
@@ -5,8 +5,7 @@
<_hostOS>$(NETCoreSdkPortableRuntimeIdentifier.SubString(0, $(NETCoreSdkPortableRuntimeIdentifier.LastIndexOf('-'))))
<_targetOS>$(RuntimeIdentifier.SubString(0, $(RuntimeIdentifier.LastIndexOf('-'))))
- <_indexOfPeriod>$(_targetOS.IndexOf('.'))
- <_targetOS Condition="'$(_indexOfPeriod)' > -1">$(_targetOS.SubString(0, $(_indexOfPeriod)))
+ <_targetOS Condition="$(_targetOS.Contains('.'))">$(_targetOS.SubString(0, $(_targetOS.IndexOf('.'))))
<_targetOS Condition="$(_targetOS.StartsWith('win'))">win
diff --git a/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets b/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets
index 82df158c9e29..25ae0efcfb8f 100644
--- a/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets
+++ b/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.Unix.targets
@@ -166,7 +166,7 @@ The .NET Foundation licenses this file to you under the MIT license.
-
+
diff --git a/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.targets b/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.targets
index 7922c74747b5..20ae554a6c08 100644
--- a/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.targets
+++ b/src/coreclr/nativeaot/BuildIntegration/Microsoft.NETCore.Native.targets
@@ -356,7 +356,6 @@ The .NET Foundation licenses this file to you under the MIT license.
<_IgnoreLinkerWarnings>false
<_IgnoreLinkerWarnings Condition="'$(_IsApplePlatform)' == 'true'">true
- <_StripFlag Condition="'$(_IsApplePlatform)' == 'true' and '$(IlcExportUnmanagedEntrypoints)' == 'true'">-x
@@ -384,7 +383,7 @@ The .NET Foundation licenses this file to you under the MIT license.
+ strip -no_code_signature_warning -x "$(NativeBinary)"" />
IsDoNotTriggerGcSet());
ASSERT(pThread->IsCurrentThreadInCooperativeMode());
+ if (pEEType->ContainsPointers())
+ {
+ uFlags |= GC_ALLOC_CONTAINS_REF;
+ uFlags &= ~GC_ALLOC_ZEROING_OPTIONAL;
+ }
+
size_t cbSize = pEEType->get_BaseSize();
if (pEEType->HasComponentSize())
diff --git a/src/coreclr/pal/src/include/pal/palinternal.h b/src/coreclr/pal/src/include/pal/palinternal.h
index a7c5ba129c90..3b8a55a94496 100644
--- a/src/coreclr/pal/src/include/pal/palinternal.h
+++ b/src/coreclr/pal/src/include/pal/palinternal.h
@@ -426,6 +426,7 @@ function_name() to call the system's implementation
#undef va_start
#undef va_end
#undef va_copy
+#undef va_arg
#undef stdin
#undef stdout
#undef stderr
diff --git a/src/coreclr/pal/src/safecrt/vsprintf.cpp b/src/coreclr/pal/src/safecrt/vsprintf.cpp
index b8ff745f563c..360222d5dc67 100644
--- a/src/coreclr/pal/src/safecrt/vsprintf.cpp
+++ b/src/coreclr/pal/src/safecrt/vsprintf.cpp
@@ -95,7 +95,7 @@ DLLEXPORT int __cdecl _vsnprintf_s (
retvalue = vsnprintf(string, sizeInBytes, format, ap);
string[sizeInBytes - 1] = '\0';
/* we allow truncation if count == _TRUNCATE */
- if (retvalue > (int)sizeInBytes && count == _TRUNCATE)
+ if (retvalue >= (int)sizeInBytes && count == _TRUNCATE)
{
if (errno == ERANGE)
{
diff --git a/src/coreclr/pal/tests/palsuite/c_runtime/_vsnprintf_s/test1/test1.cpp b/src/coreclr/pal/tests/palsuite/c_runtime/_vsnprintf_s/test1/test1.cpp
index fb5ab3a2d7af..62b725208769 100644
--- a/src/coreclr/pal/tests/palsuite/c_runtime/_vsnprintf_s/test1/test1.cpp
+++ b/src/coreclr/pal/tests/palsuite/c_runtime/_vsnprintf_s/test1/test1.cpp
@@ -49,6 +49,18 @@ PALTEST(c_runtime__vsnprintf_s_test1_paltest_vsnprintf_test1, "c_runtime/_vsnpri
Fail("ERROR: expected %s (up to %d chars), got %s\n", checkstr, 8, buf);
}
+ char buf8[8] = {0};
+
+ ret = Testvsnprintf(buf8, 8, "abcdefgh");
+ if (ret >= 0)
+ {
+ Fail("ERROR: expected negative return value, got %d", ret);
+ }
+ if (memcmp(buf8, "abcdefg\0", 8) != 0)
+ {
+ Fail("ERROR: Expected 7 chars + null terminator");
+ }
+
PAL_Terminate();
return PASS;
}
diff --git a/src/coreclr/scripts/paltests.proj b/src/coreclr/scripts/paltests.proj
new file mode 100644
index 000000000000..167208f4ecb0
--- /dev/null
+++ b/src/coreclr/scripts/paltests.proj
@@ -0,0 +1,44 @@
+
+
+ true
+ $(_Creator)
+ $(_HelixAccessToken)
+ $(_HelixBuild)
+ $(_HelixSource)
+ $(_HelixTargetQueues)
+ $(_HelixType)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ @(PalTestArchive)
+ ./runpaltestshelix.sh
+ 0:10
+
+
+
+
diff --git a/src/coreclr/tools/aot/ILCompiler/ILCompiler.csproj b/src/coreclr/tools/aot/ILCompiler/ILCompiler.csproj
index 7c25444f209c..c0a239225fa3 100644
--- a/src/coreclr/tools/aot/ILCompiler/ILCompiler.csproj
+++ b/src/coreclr/tools/aot/ILCompiler/ILCompiler.csproj
@@ -12,6 +12,7 @@
$(RuntimeBinDir)ilc-published/
false
+ false
false
true
diff --git a/src/coreclr/tools/aot/crossgen2/crossgen2.csproj b/src/coreclr/tools/aot/crossgen2/crossgen2.csproj
index ef1b82f537fb..6f8578b26b96 100644
--- a/src/coreclr/tools/aot/crossgen2/crossgen2.csproj
+++ b/src/coreclr/tools/aot/crossgen2/crossgen2.csproj
@@ -1,113 +1,14 @@
-
-
-
-
+
- $(RuntimeBinDir)crossgen2
-
- false
-
- true
- linux-x64;linux-musl-x64;linux-arm;linux-musl-arm;linux-arm64;linux-musl-arm64;freebsd-x64;freebsd-arm64;osx-x64;osx-arm64;win-x64;win-x86;win-arm64
- $(PackageRID)
- false
- true
+ $(RuntimeBinDir)/crossgen2
+ false
+
+ $(NetCoreAppToolCurrent)
-
-
-
- true
- true
-
- false
-
- false
- true
-
-
-
-
-
- $(CoreCLRILCompilerDir)
- $(CoreCLRCrossILCompilerDir)
- $(ROOTFS_DIR)
- $(CoreCLRILCompilerDir)netstandard/ILCompiler.Build.Tasks.dll
- $(CoreCLRAotSdkDir)
- $(MicrosoftNetCoreAppRuntimePackRidLibTfmDir)
- $(MicrosoftNetCoreAppRuntimePackNativeDir)
- false
-
- .dwarf
- --flat
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- $(MicrosoftNetCoreAppRuntimePackDir)
-
-
-
-
-
-
-
-
- $(RuntimeIdentifier)
-
-
- x86_64
- aarch64
- arm64
-
-
- $(CrossCompileArch)-linux-gnu
- $(CrossCompileArch)-alpine-linux-musl
- $(CrossCompileArch)-unknown-freebsd12
-
-
-
-
-
-
-
-
-
- clang
-
-
-
-
-
-
-
- $(_CC_LDFLAGS.SubString(0, $(_CC_LDFLAGS.IndexOf(';'))))
- <_LDFLAGS>$(_CC_LDFLAGS.SubString($([MSBuild]::Add($(_CC_LDFLAGS.IndexOf(';')), 1))))
- lld
-
-
-
diff --git a/src/coreclr/tools/aot/crossgen2/crossgen2.props b/src/coreclr/tools/aot/crossgen2/crossgen2.props
index 0f2f954c6a15..4d5cc30c3bb6 100644
--- a/src/coreclr/tools/aot/crossgen2/crossgen2.props
+++ b/src/coreclr/tools/aot/crossgen2/crossgen2.props
@@ -3,7 +3,6 @@
crossgen2
true
Exe
- $(NetCoreAppToolCurrent)
8002,NU1701
x64;x86;arm64;arm;loongarch64
AnyCPU
diff --git a/src/coreclr/tools/aot/crossgen2/crossgen2_crossarch.csproj b/src/coreclr/tools/aot/crossgen2/crossgen2_crossarch.csproj
deleted file mode 100644
index 544b4271117b..000000000000
--- a/src/coreclr/tools/aot/crossgen2/crossgen2_crossarch.csproj
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
- $(BuildArchitecture)
- $(RuntimeBinDir)/$(CrossHostArch)/crossgen2
- false
-
-
-
diff --git a/src/coreclr/tools/aot/crossgen2/crossgen2_inbuild.csproj b/src/coreclr/tools/aot/crossgen2/crossgen2_inbuild.csproj
new file mode 100644
index 000000000000..75766fd75eab
--- /dev/null
+++ b/src/coreclr/tools/aot/crossgen2/crossgen2_inbuild.csproj
@@ -0,0 +1,9 @@
+
+
+ $(BuildArchitecture)
+ $(RuntimeBinDir)/$(BuildArchitecture)/crossgen2
+ false
+ $(NetCoreAppToolCurrent)
+
+
+
diff --git a/src/coreclr/tools/aot/crossgen2/crossgen2_publish.csproj b/src/coreclr/tools/aot/crossgen2/crossgen2_publish.csproj
new file mode 100644
index 000000000000..d5105eccff36
--- /dev/null
+++ b/src/coreclr/tools/aot/crossgen2/crossgen2_publish.csproj
@@ -0,0 +1,105 @@
+
+
+
+
+
+ false
+ false
+ true
+ $(PackageRID)
+ true
+ $(NetCoreAppCurrent)
+ true
+ true
+
+
+
+
+
+ true
+ true
+
+ false
+
+ false
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+ $(CoreCLRILCompilerDir)
+ $(CoreCLRCrossILCompilerDir)
+ $(ROOTFS_DIR)
+ $(CoreCLRILCompilerDir)netstandard/ILCompiler.Build.Tasks.dll
+ $(CoreCLRAotSdkDir)
+ $(MicrosoftNetCoreAppRuntimePackRidLibTfmDir)
+ $(MicrosoftNetCoreAppRuntimePackNativeDir)
+ false
+
+ .dwarf
+ --flat
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ $(RuntimeIdentifier)
+
+
+ x86_64
+ aarch64
+ arm64
+
+
+ $(CrossCompileArch)-linux-gnu
+ $(CrossCompileArch)-alpine-linux-musl
+ $(CrossCompileArch)-unknown-freebsd12
+
+
+
+
+
+
+
+
+
+ clang
+
+
+
+
+
+
+
+ $(_CC_LDFLAGS.SubString(0, $(_CC_LDFLAGS.IndexOf(';'))))
+ <_LDFLAGS>$(_CC_LDFLAGS.SubString($([MSBuild]::Add($(_CC_LDFLAGS.IndexOf(';')), 1))))
+ lld
+
+
+
+
diff --git a/src/coreclr/vm/arm/thunktemplates.S b/src/coreclr/vm/arm/thunktemplates.S
index 0686bb2ed4b7..8744c8ebb632 100644
--- a/src/coreclr/vm/arm/thunktemplates.S
+++ b/src/coreclr/vm/arm/thunktemplates.S
@@ -11,7 +11,7 @@
PAGE_SIZE = 4096
-#define DATA_SLOT(stub, field) stub##Code + PAGE_SIZE + stub##Data__##field
+#define DATA_SLOT(stub, field) . - (. - stub##Code) + PAGE_SIZE + stub##Data__##field
LEAF_ENTRY StubPrecodeCode
ldr r12, DATA_SLOT(StubPrecode, MethodDesc)
diff --git a/src/coreclr/vm/ceemain.cpp b/src/coreclr/vm/ceemain.cpp
index 218cda11d7f0..680a9a2fb249 100644
--- a/src/coreclr/vm/ceemain.cpp
+++ b/src/coreclr/vm/ceemain.cpp
@@ -1331,9 +1331,6 @@ void STDMETHODCALLTYPE EEShutDownHelper(BOOL fIsDllUnloading)
// Shutdown finalizer before we suspend all background threads. Otherwise we
// never get to finalize anything.
- // No longer process exceptions
- g_fNoExceptions = true;
-
// @TODO: This does things which shouldn't occur in part 2. Namely,
// calling managed dll main callbacks (AppDomain::SignalProcessDetach), and
// RemoveAppDomainFromIPC.
diff --git a/src/coreclr/vm/corelib.h b/src/coreclr/vm/corelib.h
index 96576a858131..cf059679bd08 100644
--- a/src/coreclr/vm/corelib.h
+++ b/src/coreclr/vm/corelib.h
@@ -1068,6 +1068,21 @@ DEFINE_METHOD(HANDLE_MARSHALER, CONVERT_SAFEHANDLE_TO_NATIVE,ConvertSaf
DEFINE_METHOD(HANDLE_MARSHALER, THROW_SAFEHANDLE_FIELD_CHANGED, ThrowSafeHandleFieldChanged, SM_RetVoid)
DEFINE_METHOD(HANDLE_MARSHALER, THROW_CRITICALHANDLE_FIELD_CHANGED, ThrowCriticalHandleFieldChanged, SM_RetVoid)
+#ifdef TARGET_WINDOWS
+#ifdef TARGET_X86
+DEFINE_CLASS(COPY_CONSTRUCTOR_CHAIN, StubHelpers, CopyConstructorChain)
+DEFINE_METHOD(COPY_CONSTRUCTOR_CHAIN, EXECUTE_CURRENT_COPIES_AND_GET_TARGET, ExecuteCurrentCopiesAndGetTarget, SM_PtrVoid_RetPtrVoid)
+DEFINE_METHOD(COPY_CONSTRUCTOR_CHAIN, INSTALL, Install, IM_PtrVoid_RetVoid)
+DEFINE_METHOD(COPY_CONSTRUCTOR_CHAIN, ADD, Add, IM_PtrCopyConstructorCookie_RetVoid)
+
+DEFINE_CLASS(COPY_CONSTRUCTOR_COOKIE, StubHelpers, CopyConstructorCookie)
+DEFINE_FIELD(COPY_CONSTRUCTOR_COOKIE, SOURCE, m_source)
+DEFINE_FIELD(COPY_CONSTRUCTOR_COOKIE, DESTINATION_OFFSET, m_destinationOffset)
+DEFINE_FIELD(COPY_CONSTRUCTOR_COOKIE, COPY_CONSTRUCTOR, m_copyConstructor)
+DEFINE_FIELD(COPY_CONSTRUCTOR_COOKIE, DESTRUCTOR, m_destructor)
+#endif // TARGET_X86
+#endif // TARGET_WINDOWS
+
DEFINE_CLASS(NATIVEVARIANT, StubHelpers, NativeVariant)
DEFINE_CLASS(NATIVEDECIMAL, StubHelpers, NativeDecimal)
diff --git a/src/coreclr/vm/dllimport.cpp b/src/coreclr/vm/dllimport.cpp
index 37aab6a3f530..31c4467b7982 100644
--- a/src/coreclr/vm/dllimport.cpp
+++ b/src/coreclr/vm/dllimport.cpp
@@ -1623,6 +1623,10 @@ NDirectStubLinker::NDirectStubLinker(
m_pcsSetup->EmitSTLOC(m_dwTargetInterfacePointerLocalNum);
}
#endif // FEATURE_COMINTEROP
+
+#if defined(TARGET_X86) && defined(TARGET_WINDOWS)
+ m_dwCopyCtorChainLocalNum = (DWORD)-1;
+#endif // defined(TARGET_X86) && defined(TARGET_WINDOWS)
}
void NDirectStubLinker::SetCallingConvention(CorInfoCallConvExtension unmngCallConv, BOOL fIsVarArg)
@@ -1835,6 +1839,23 @@ DWORD NDirectStubLinker::GetReturnValueLocalNum()
return m_dwRetValLocalNum;
}
+#if defined(TARGET_X86) && defined(TARGET_WINDOWS)
+DWORD NDirectStubLinker::GetCopyCtorChainLocalNum()
+{
+ STANDARD_VM_CONTRACT;
+
+ if (m_dwCopyCtorChainLocalNum == (DWORD)-1)
+ {
+ // The local is created and initialized lazily when first asked.
+ m_dwCopyCtorChainLocalNum = NewLocal(CoreLibBinder::GetClass(CLASS__COPY_CONSTRUCTOR_CHAIN));
+ m_pcsSetup->EmitLDLOCA(m_dwCopyCtorChainLocalNum);
+ m_pcsSetup->EmitINITOBJ(m_pcsSetup->GetToken(CoreLibBinder::GetClass(CLASS__COPY_CONSTRUCTOR_CHAIN)));
+ }
+
+ return m_dwCopyCtorChainLocalNum;
+}
+#endif // defined(TARGET_X86) && defined(TARGET_WINDOWS)
+
BOOL NDirectStubLinker::IsCleanupNeeded()
{
LIMITED_METHOD_CONTRACT;
@@ -2064,6 +2085,10 @@ void NDirectStubLinker::End(DWORD dwStubFlags)
}
}
+#if defined(TARGET_X86) && defined(TARGET_WINDOWS)
+EXTERN_C void STDCALL CopyConstructorCallStub(void);
+#endif // defined(TARGET_X86) && defined(TARGET_WINDOWS)
+
void NDirectStubLinker::DoNDirect(ILCodeStream *pcsEmit, DWORD dwStubFlags, MethodDesc * pStubMD)
{
STANDARD_VM_CONTRACT;
@@ -2150,6 +2175,21 @@ void NDirectStubLinker::DoNDirect(ILCodeStream *pcsEmit, DWORD dwStubFlags, Meth
}
}
+#if defined(TARGET_X86) && defined(TARGET_WINDOWS)
+ if (m_dwCopyCtorChainLocalNum != (DWORD)-1)
+ {
+ // If we have a copy constructor chain local, we need to call the copy constructor stub
+ // to ensure that the chain is called correctly.
+ // Let's install the stub chain here and redirect the call to the stub.
+ DWORD targetLoc = NewLocal(ELEMENT_TYPE_I);
+ pcsEmit->EmitSTLOC(targetLoc);
+ pcsEmit->EmitLDLOCA(m_dwCopyCtorChainLocalNum);
+ pcsEmit->EmitLDLOC(targetLoc);
+ pcsEmit->EmitCALL(METHOD__COPY_CONSTRUCTOR_CHAIN__INSTALL, 2, 0);
+ pcsEmit->EmitLDC((DWORD_PTR)&CopyConstructorCallStub);
+ }
+#endif // defined(TARGET_X86) && defined(TARGET_WINDOWS)
+
// For managed-to-native calls, the rest of the work is done by the JIT. It will
// erect InlinedCallFrame, flip GC mode, and use the specified calling convention
// to call the target. For native-to-managed calls, this is an ordinary managed
@@ -6078,5 +6118,21 @@ PCODE GetILStubForCalli(VASigCookie *pVASigCookie, MethodDesc *pMD)
RETURN pVASigCookie->pNDirectILStub;
}
+#if defined(TARGET_X86) && defined(TARGET_WINDOWS)
+// Copy constructor support for C++/CLI
+EXTERN_C void* STDCALL CallCopyConstructorsWorker(void* esp)
+{
+ STATIC_CONTRACT_THROWS;
+ STATIC_CONTRACT_GC_TRIGGERS;
+ STATIC_CONTRACT_MODE_PREEMPTIVE; // we've already switched to preemptive
+
+ using ExecuteCallback = void*(STDMETHODCALLTYPE*)(void*);
+
+ MethodDesc* pMD = CoreLibBinder::GetMethod(METHOD__COPY_CONSTRUCTOR_CHAIN__EXECUTE_CURRENT_COPIES_AND_GET_TARGET);
+ ExecuteCallback pExecute = (ExecuteCallback)pMD->GetMultiCallableAddrOfCode();
+
+ return pExecute(esp);
+}
+#endif // defined(TARGET_X86) && defined(TARGET_WINDOWS)
#endif // #ifndef DACCESS_COMPILE
diff --git a/src/coreclr/vm/dllimport.h b/src/coreclr/vm/dllimport.h
index 256b95079933..a89a01bc7003 100644
--- a/src/coreclr/vm/dllimport.h
+++ b/src/coreclr/vm/dllimport.h
@@ -484,6 +484,9 @@ class NDirectStubLinker : public ILStubLinker
DWORD GetCleanupWorkListLocalNum();
DWORD GetThreadLocalNum();
DWORD GetReturnValueLocalNum();
+#if defined(TARGET_X86) && defined(TARGET_WINDOWS)
+ DWORD GetCopyCtorChainLocalNum();
+#endif // defined(TARGET_X86) && defined(TARGET_WINDOWS)
void SetCleanupNeeded();
void SetExceptionCleanupNeeded();
BOOL IsCleanupWorkListSetup();
@@ -553,6 +556,10 @@ class NDirectStubLinker : public ILStubLinker
DWORD m_dwTargetEntryPointLocalNum;
#endif // FEATURE_COMINTEROP
+#if defined(TARGET_X86) && defined(TARGET_WINDOWS)
+ DWORD m_dwCopyCtorChainLocalNum;
+#endif // defined(TARGET_X86) && defined(TARGET_WINDOWS)
+
BOOL m_fHasCleanupCode;
BOOL m_fHasExceptionCleanupCode;
BOOL m_fCleanupWorkListIsSetup;
diff --git a/src/coreclr/vm/eepolicy.cpp b/src/coreclr/vm/eepolicy.cpp
index af27338544e8..6069f0ec1247 100644
--- a/src/coreclr/vm/eepolicy.cpp
+++ b/src/coreclr/vm/eepolicy.cpp
@@ -59,13 +59,6 @@ void SafeExitProcess(UINT exitCode, ShutdownCompleteAction sca = SCA_ExitProcess
}
}
- // Turn off exception processing, because if some other random DLL has a
- // fault in DLL_PROCESS_DETACH, we could get called for exception handling.
- // Since we've turned off part of the runtime, we can't, for instance,
- // properly execute the GC that handling an exception might trigger.
- g_fNoExceptions = true;
- LOG((LF_EH, LL_INFO10, "SafeExitProcess: turning off exceptions\n"));
-
if (sca == SCA_TerminateProcessWhenShutdownComplete)
{
// disabled because if we fault in this code path we will trigger our Watson code
diff --git a/src/coreclr/vm/excep.cpp b/src/coreclr/vm/excep.cpp
index ce8b325a5f41..9fe9376fd58e 100644
--- a/src/coreclr/vm/excep.cpp
+++ b/src/coreclr/vm/excep.cpp
@@ -4623,12 +4623,6 @@ LONG InternalUnhandledExceptionFilter_Worker(
}
#endif
- // This shouldn't be possible, but MSVC re-installs us... for now, just bail if this happens.
- if (g_fNoExceptions)
- {
- return EXCEPTION_CONTINUE_SEARCH;
- }
-
// Are we looking at a stack overflow here?
if ((pThread != NULL) && !pThread->DetermineIfGuardPagePresent())
{
@@ -5533,8 +5527,6 @@ static LONG ThreadBaseExceptionFilter_Worker(PEXCEPTION_POINTERS pExceptionInfo,
ThreadBaseExceptionFilterParam *pParam = (ThreadBaseExceptionFilterParam *) pvParam;
UnhandledExceptionLocation location = pParam->location;
- _ASSERTE(!g_fNoExceptions);
-
Thread* pThread = GetThread();
#ifdef _DEBUG
@@ -6704,14 +6696,6 @@ VEH_ACTION WINAPI CLRVectoredExceptionHandlerPhase3(PEXCEPTION_POINTERS pExcepti
VEH_ACTION WINAPI CLRVectoredExceptionHandler(PEXCEPTION_POINTERS pExceptionInfo)
{
- // It is not safe to execute code inside VM after we shutdown EE. One example is DisablePreemptiveGC
- // will block forever.
- if (g_fForbidEnterEE)
- {
- return VEH_CONTINUE_SEARCH;
- }
-
-
//
// DO NOT USE CONTRACTS HERE AS THIS ROUTINE MAY NEVER RETURN. You can use
// static contracts, but currently this is all WRAPPER_NO_CONTRACT.
@@ -7393,12 +7377,6 @@ LONG WINAPI CLRVectoredExceptionHandlerShim(PEXCEPTION_POINTERS pExceptionInfo)
// WARNING WARNING WARNING WARNING WARNING WARNING WARNING
//
- // If exceptions (or runtime) have been disabled, then simply return.
- if (g_fForbidEnterEE || g_fNoExceptions)
- {
- return EXCEPTION_CONTINUE_SEARCH;
- }
-
// WARNING
//
// We must preserve this so that GCStress=4 eh processing doesnt kill last error.
diff --git a/src/coreclr/vm/frozenobjectheap.cpp b/src/coreclr/vm/frozenobjectheap.cpp
index 8f11f3c8c74d..bd25b4ba7b3e 100644
--- a/src/coreclr/vm/frozenobjectheap.cpp
+++ b/src/coreclr/vm/frozenobjectheap.cpp
@@ -47,6 +47,7 @@ Object* FrozenObjectHeapManager::TryAllocateObject(PTR_MethodTable type, size_t
_ASSERT(type != nullptr);
_ASSERT(FOH_COMMIT_SIZE >= MIN_OBJECT_SIZE);
+ _ASSERT(!type->Collectible());
// Currently we don't support frozen objects with special alignment requirements
// TODO: We should also give up on arrays of doubles on 32-bit platforms.
diff --git a/src/coreclr/vm/i386/asmhelpers.asm b/src/coreclr/vm/i386/asmhelpers.asm
index e03ffa9544f2..1d02fc48f8d8 100644
--- a/src/coreclr/vm/i386/asmhelpers.asm
+++ b/src/coreclr/vm/i386/asmhelpers.asm
@@ -41,6 +41,7 @@ EXTERN _NDirectImportWorker@4:PROC
EXTERN _VarargPInvokeStubWorker@12:PROC
EXTERN _GenericPInvokeCalliStubWorker@12:PROC
+EXTERN _CallCopyConstructorsWorker@4:PROC
EXTERN _PreStubWorker@8:PROC
EXTERN _TheUMEntryPrestubWorker@4:PROC
@@ -1062,6 +1063,29 @@ GoCallCalliWorker:
_GenericPInvokeCalliHelper@0 endp
+;==========================================================================
+; This is small stub whose purpose is to record current stack pointer and
+; call CallCopyConstructorsWorker to invoke copy constructors and destructors
+; as appropriate. This stub operates on arguments already pushed to the
+; stack by JITted IL stub and must not create a new frame, i.e. it must tail
+; call to the target for it to see the arguments that copy ctors have been
+; called on.
+;
+_CopyConstructorCallStub@0 proc public
+ ; there may be an argument in ecx - save it
+ push ecx
+
+ ; push pointer to arguments
+ lea edx, [esp + 8]
+ push edx
+
+ call _CallCopyConstructorsWorker@4
+
+ ; restore ecx and tail call to the target
+ pop ecx
+ jmp eax
+_CopyConstructorCallStub@0 endp
+
ifdef FEATURE_COMINTEROP
;==========================================================================
diff --git a/src/coreclr/vm/i386/excepx86.cpp b/src/coreclr/vm/i386/excepx86.cpp
index f0c4dcc8b2a2..616cbaea8248 100644
--- a/src/coreclr/vm/i386/excepx86.cpp
+++ b/src/coreclr/vm/i386/excepx86.cpp
@@ -1571,9 +1571,6 @@ EXCEPTION_HANDLER_IMPL(COMPlusFrameHandler)
_ASSERTE((pContext == NULL) || ((pContext->ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL));
- if (g_fNoExceptions)
- return ExceptionContinueSearch; // No EH during EE shutdown.
-
// Check if the exception represents a GCStress Marker. If it does,
// we shouldnt record its entry in the TLS as such exceptions are
// continuable and can confuse the VM to treat them as CSE,
diff --git a/src/coreclr/vm/ilmarshalers.cpp b/src/coreclr/vm/ilmarshalers.cpp
index 431064f4e121..2e622bd725ed 100644
--- a/src/coreclr/vm/ilmarshalers.cpp
+++ b/src/coreclr/vm/ilmarshalers.cpp
@@ -3459,6 +3459,38 @@ MarshalerOverrideStatus ILBlittableValueClassWithCopyCtorMarshaler::ArgumentOver
#ifdef TARGET_X86
pslIL->SetStubTargetArgType(&locDesc); // native type is the value type
pslILDispatch->EmitLDLOC(dwNewValueTypeLocal); // we load the local directly
+
+#if defined(TARGET_WINDOWS)
+ // Record this argument's stack slot in the copy constructor chain so we can correctly invoke the copy constructor.
+ DWORD ctorCookie = pslIL->NewLocal(CoreLibBinder::GetClass(CLASS__COPY_CONSTRUCTOR_COOKIE));
+ pslIL->EmitLDLOCA(ctorCookie);
+ pslIL->EmitINITOBJ(pslIL->GetToken(CoreLibBinder::GetClass(CLASS__COPY_CONSTRUCTOR_COOKIE)));
+ pslIL->EmitLDLOCA(ctorCookie);
+ pslIL->EmitLDLOCA(dwNewValueTypeLocal);
+ pslIL->EmitSTFLD(pslIL->GetToken(CoreLibBinder::GetField(FIELD__COPY_CONSTRUCTOR_COOKIE__SOURCE)));
+ pslIL->EmitLDLOCA(ctorCookie);
+ pslIL->EmitLDC(nativeStackOffset);
+ pslIL->EmitSTFLD(pslIL->GetToken(CoreLibBinder::GetField(FIELD__COPY_CONSTRUCTOR_COOKIE__DESTINATION_OFFSET)));
+
+ if (pargs->mm.m_pCopyCtor)
+ {
+ pslIL->EmitLDLOCA(ctorCookie);
+ pslIL->EmitLDFTN(pslIL->GetToken(pargs->mm.m_pCopyCtor));
+ pslIL->EmitSTFLD(pslIL->GetToken(CoreLibBinder::GetField(FIELD__COPY_CONSTRUCTOR_COOKIE__COPY_CONSTRUCTOR)));
+ }
+
+ if (pargs->mm.m_pDtor)
+ {
+ pslIL->EmitLDLOCA(ctorCookie);
+ pslIL->EmitLDFTN(pslIL->GetToken(pargs->mm.m_pDtor));
+ pslIL->EmitSTFLD(pslIL->GetToken(CoreLibBinder::GetField(FIELD__COPY_CONSTRUCTOR_COOKIE__DESTRUCTOR)));
+ }
+
+ pslIL->EmitLDLOCA(psl->GetCopyCtorChainLocalNum());
+ pslIL->EmitLDLOCA(ctorCookie);
+ pslIL->EmitCALL(METHOD__COPY_CONSTRUCTOR_CHAIN__ADD, 2, 0);
+#endif // defined(TARGET_WINDOWS)
+
#else
pslIL->SetStubTargetArgType(ELEMENT_TYPE_I); // native type is a pointer
EmitLoadNativeLocalAddrForByRefDispatch(pslILDispatch, dwNewValueTypeLocal);
@@ -3477,9 +3509,13 @@ MarshalerOverrideStatus ILBlittableValueClassWithCopyCtorMarshaler::ArgumentOver
DWORD dwNewValueTypeLocal;
dwNewValueTypeLocal = pslIL->NewLocal(locDesc);
+#ifdef TARGET_WINDOWS
+ pslILDispatch->EmitLDARGA(argidx);
+#else // !TARGET_WINDOWS
pslILDispatch->EmitLDARG(argidx);
pslILDispatch->EmitSTLOC(dwNewValueTypeLocal);
pslILDispatch->EmitLDLOCA(dwNewValueTypeLocal);
+#endif // TARGET_WINDOWS
#else
LocalDesc locDesc(pargs->mm.m_pMT);
locDesc.MakeCopyConstructedPointer();
diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp
index 1cc6c37e09c2..67e127bf91ea 100644
--- a/src/coreclr/vm/jitinterface.cpp
+++ b/src/coreclr/vm/jitinterface.cpp
@@ -11881,23 +11881,6 @@ HRESULT CEEJitInfo::allocPgoInstrumentationBySchema(
JIT_TO_EE_TRANSITION();
- // We need to know the code size. Typically we can get the code size
- // from m_ILHeader. For dynamic methods, m_ILHeader will be NULL, so
- // for that case we need to use DynamicResolver to get the code size.
-
- unsigned codeSize = 0;
- if (m_pMethodBeingCompiled->IsDynamicMethod())
- {
- unsigned stackSize, ehSize;
- CorInfoOptions options;
- DynamicResolver * pResolver = m_pMethodBeingCompiled->AsDynamicMethodDesc()->GetResolver();
- pResolver->GetCodeInfo(&codeSize, &stackSize, &options, &ehSize);
- }
- else
- {
- codeSize = m_ILHeader->GetCodeSize();
- }
-
#ifdef FEATURE_PGO
hr = PgoManager::allocPgoInstrumentationBySchema(m_pMethodBeingCompiled, pSchema, countSchemaItems, pInstrumentationData);
#else
diff --git a/src/coreclr/vm/metasig.h b/src/coreclr/vm/metasig.h
index 6adf044a0170..ed277458c9b2 100644
--- a/src/coreclr/vm/metasig.h
+++ b/src/coreclr/vm/metasig.h
@@ -578,6 +578,13 @@ DEFINE_METASIG_T(SM(RefCleanupWorkListElement_RetVoid, r(C(CLEANUP_WORK_LIST_ELE
DEFINE_METASIG_T(SM(RefCleanupWorkListElement_SafeHandle_RetIntPtr, r(C(CLEANUP_WORK_LIST_ELEMENT)) C(SAFE_HANDLE), I))
DEFINE_METASIG_T(SM(RefCleanupWorkListElement_Obj_RetVoid, r(C(CLEANUP_WORK_LIST_ELEMENT)) j, v))
+DEFINE_METASIG(SM(PtrVoid_RetPtrVoid, P(v), P(v)))
+DEFINE_METASIG(IM(PtrVoid_RetVoid, P(v), v))
+#if defined(TARGET_X86) && defined(TARGET_WINDOWS)
+DEFINE_METASIG_T(IM(PtrCopyConstructorCookie_RetVoid, P(g(COPY_CONSTRUCTOR_COOKIE)), v))
+#endif // defined(TARGET_X86) && defined(TARGET_WINDOWS)
+
+
#ifdef FEATURE_ICASTABLE
DEFINE_METASIG_T(SM(ICastable_RtType_RefException_RetBool, C(ICASTABLE) C(CLASS) r(C(EXCEPTION)), F))
DEFINE_METASIG_T(SM(ICastable_RtType_RetRtType, C(ICASTABLE) C(CLASS), C(CLASS)))
diff --git a/src/coreclr/vm/threadstatics.cpp b/src/coreclr/vm/threadstatics.cpp
index e3430acf8e4f..0216ae054a18 100644
--- a/src/coreclr/vm/threadstatics.cpp
+++ b/src/coreclr/vm/threadstatics.cpp
@@ -52,13 +52,22 @@ void ThreadLocalBlock::FreeTLM(SIZE_T i, BOOL isThreadShuttingdown)
ThreadLocalModule::CollectibleDynamicEntry *entry = (ThreadLocalModule::CollectibleDynamicEntry*)pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry;
PTR_LoaderAllocator pLoaderAllocator = entry->m_pLoaderAllocator;
- if (entry->m_hGCStatics != NULL)
- {
- pLoaderAllocator->FreeHandle(entry->m_hGCStatics);
- }
- if (entry->m_hNonGCStatics != NULL)
+ // LoaderAllocator may be collected when the thread is shutting down.
+ // We enter coop mode to ensure that we get a valid value of the exposed object and
+ // can safely clean up handles if it is not yet collected.
+ GCX_COOP();
+
+ LOADERALLOCATORREF loaderAllocator = pLoaderAllocator->GetExposedObject();
+ if (loaderAllocator != NULL)
{
- pLoaderAllocator->FreeHandle(entry->m_hNonGCStatics);
+ if (entry->m_hGCStatics != NULL)
+ {
+ pLoaderAllocator->FreeHandle(entry->m_hGCStatics);
+ }
+ if (entry->m_hNonGCStatics != NULL)
+ {
+ pLoaderAllocator->FreeHandle(entry->m_hNonGCStatics);
+ }
}
}
delete pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry;
diff --git a/src/coreclr/vm/vars.cpp b/src/coreclr/vm/vars.cpp
index 00840f919565..6baded4ca7dd 100644
--- a/src/coreclr/vm/vars.cpp
+++ b/src/coreclr/vm/vars.cpp
@@ -193,7 +193,6 @@ GVAL_IMPL(SIZE_T, g_runtimeVirtualSize);
Volatile g_fForbidEnterEE = false;
bool g_fManagedAttach = false;
-bool g_fNoExceptions = false;
DWORD g_FinalizerWaiterStatus = 0;
diff --git a/src/coreclr/vm/vars.hpp b/src/coreclr/vm/vars.hpp
index 03762a24e695..652c8cb7fd50 100644
--- a/src/coreclr/vm/vars.hpp
+++ b/src/coreclr/vm/vars.hpp
@@ -477,7 +477,6 @@ GVAL_DECL(bool, g_fProcessDetach);
GVAL_DECL(bool, g_metadataUpdatesApplied);
#endif
EXTERN bool g_fManagedAttach;
-EXTERN bool g_fNoExceptions;
// Indicates whether we're executing shut down as a result of DllMain
// (DLL_PROCESS_DETACH). See comments at code:EEShutDown for details.
diff --git a/src/coreclr/vm/zapsig.cpp b/src/coreclr/vm/zapsig.cpp
index bbbab9a51c84..dfd447f94eaa 100644
--- a/src/coreclr/vm/zapsig.cpp
+++ b/src/coreclr/vm/zapsig.cpp
@@ -1350,6 +1350,9 @@ BOOL ZapSig::EncodeMethod(
else
{
Instantiation inst = pMethod->GetMethodInstantiation();
+
+ pSigBuilder->AppendData(inst.GetNumArgs());
+
for (DWORD i = 0; i < inst.GetNumArgs(); i++)
{
TypeHandle t = inst[i];
diff --git a/src/installer/pkg/projects/Microsoft.DotNet.ILCompiler/ILCompilerRIDs.props b/src/installer/pkg/projects/Microsoft.DotNet.ILCompiler/ILCompilerRIDs.props
index 1d51a78880ae..7cbe74757e28 100644
--- a/src/installer/pkg/projects/Microsoft.DotNet.ILCompiler/ILCompilerRIDs.props
+++ b/src/installer/pkg/projects/Microsoft.DotNet.ILCompiler/ILCompilerRIDs.props
@@ -2,7 +2,6 @@
-
diff --git a/src/installer/pkg/sfx/Microsoft.NETCore.App/Microsoft.NETCore.App.Crossgen2.sfxproj b/src/installer/pkg/sfx/Microsoft.NETCore.App/Microsoft.NETCore.App.Crossgen2.sfxproj
index e8e8591bbaf8..a95eb81b9317 100644
--- a/src/installer/pkg/sfx/Microsoft.NETCore.App/Microsoft.NETCore.App.Crossgen2.sfxproj
+++ b/src/installer/pkg/sfx/Microsoft.NETCore.App/Microsoft.NETCore.App.Crossgen2.sfxproj
@@ -28,36 +28,22 @@
+
+
+
+
-
-
-
-
-
-
-
-
-
-
<_CrossgenPublishFiles Include="@(_RawCrossgenPublishFiles->'%(OutputPath)')"
KeepMetadata="REMOVE_ALL" />
diff --git a/src/installer/pkg/sfx/Microsoft.NETCore.App/ReadyToRun.targets b/src/installer/pkg/sfx/Microsoft.NETCore.App/ReadyToRun.targets
index 0b82891e6c42..6b24c54f4c1e 100644
--- a/src/installer/pkg/sfx/Microsoft.NETCore.App/ReadyToRun.targets
+++ b/src/installer/pkg/sfx/Microsoft.NETCore.App/ReadyToRun.targets
@@ -14,9 +14,7 @@
-
- $(BuildArchitecture)
- $(CoreCLRArtifactsPath)\$(CrossDir)\crossgen2\crossgen2.dll
+ $(CoreCLRArtifactsPath)\$(BuildArchitecture)\crossgen2\crossgen2.dll
true
@(PublishReadyToRunCrossgen2ExtraArgsList)
diff --git a/src/installer/pkg/sfx/installers.proj b/src/installer/pkg/sfx/installers.proj
index 06e366db911d..2f947e6d9e29 100644
--- a/src/installer/pkg/sfx/installers.proj
+++ b/src/installer/pkg/sfx/installers.proj
@@ -10,6 +10,7 @@
+
@@ -21,6 +22,10 @@
+
+
+
+
+
+ false
+ azl.3
+
+
+
+
+
+
diff --git a/src/installer/pkg/sfx/installers/netstandard2.1.proj b/src/installer/pkg/sfx/installers/netstandard2.1.proj
new file mode 100644
index 000000000000..3d3758cb4c79
--- /dev/null
+++ b/src/installer/pkg/sfx/installers/netstandard2.1.proj
@@ -0,0 +1,60 @@
+
+
+ true
+ false
+ netstandard-targeting-pack-2.1
+ NETStandard.Library.Ref
+ NETStandard.Library.Ref 2.1.0
+ false
+ ToolPack
+ true
+ netstandard-targeting-pack-2.1.0-x64.rpm
+ https://dotnetcli.blob.core.windows.net/dotnet/Runtime/3.1.0/$(OriginalNETStandard21PkgFilename)
+ $([MSBuild]::NormalizeDirectory('$(BaseIntermediateOutputPath)', 'download'))
+ $(NETStandard21TempDir)$(OriginalNETStandard21PkgFilename)
+ $(NETStandard21TempDir)netstandard21.semaphore
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/installer/prepare-artifacts.proj b/src/installer/prepare-artifacts.proj
index 5413e825c951..a9e0931c1160 100644
--- a/src/installer/prepare-artifacts.proj
+++ b/src/installer/prepare-artifacts.proj
@@ -23,6 +23,8 @@
+
+
@@ -56,6 +58,16 @@
+
+
+
+ DotNetReleaseShipping=true
+
+
+
+ WasmTestOnBrowser
+ $(TestArchiveRoot)browserornodejs/
+ $(TestArchiveTestsRoot)$(OSPlatformConfig)/
+ $(DefineConstants);TARGET_BROWSER
+ true
+
@@ -106,4 +114,4 @@
-
\ No newline at end of file
+
diff --git a/src/libraries/System.Globalization/tests/DateTimeFormatInfo/DateTimeFormatInfoAbbreviatedMonthGenitiveNames.cs b/src/libraries/System.Globalization/tests/DateTimeFormatInfo/DateTimeFormatInfoAbbreviatedMonthGenitiveNames.cs
index 8c23d077f050..c3e2df1adbbd 100644
--- a/src/libraries/System.Globalization/tests/DateTimeFormatInfo/DateTimeFormatInfoAbbreviatedMonthGenitiveNames.cs
+++ b/src/libraries/System.Globalization/tests/DateTimeFormatInfo/DateTimeFormatInfoAbbreviatedMonthGenitiveNames.cs
@@ -170,8 +170,11 @@ public static IEnumerable
diff --git a/src/mono/wasm/debugger/DebuggerTestSuite/MiscTests.cs b/src/mono/wasm/debugger/DebuggerTestSuite/MiscTests.cs
index 35fd867cb7ab..5a65521a1374 100644
--- a/src/mono/wasm/debugger/DebuggerTestSuite/MiscTests.cs
+++ b/src/mono/wasm/debugger/DebuggerTestSuite/MiscTests.cs
@@ -1188,5 +1188,24 @@ public async Task TestDebugUsingMultiThreadedRuntime()
Assert.Equal(locals[1]["value"]["type"], "number");
Assert.Equal(locals[1]["name"], "currentThread");
}
+
+ [Fact]
+ public async Task InspectSpanByte()
+ {
+ var expression = $"{{ invoke_static_method('[debugger-test] SpanByte:Run'); }}";
+
+ await EvaluateAndCheck(
+ "window.setTimeout(function() {" + expression + "; }, 1);",
+ "dotnet://debugger-test.dll/debugger-test.cs", 1664, 8,
+ "SpanByte.Run",
+ wait_for_event_fn: async (pause_location) =>
+ {
+ var id = pause_location["callFrames"][0]["callFrameId"].Value();
+ await EvaluateOnCallFrameAndCheck(id,
+ ("span", TObject("System.Span", null))
+ );
+ }
+ );
+ }
}
}
diff --git a/src/mono/wasm/debugger/tests/debugger-test/debugger-test.cs b/src/mono/wasm/debugger/tests/debugger-test/debugger-test.cs
index 4a43032a4b39..b69baf08552d 100644
--- a/src/mono/wasm/debugger/tests/debugger-test/debugger-test.cs
+++ b/src/mono/wasm/debugger/tests/debugger-test/debugger-test.cs
@@ -1656,3 +1656,12 @@ public void CallMethod()
public int myField;
}
+
+public class SpanByte
+{
+ public static void Run()
+ {
+ System.Span span = new ();
+ System.Diagnostics.Debugger.Break();
+ }
+}
\ No newline at end of file
diff --git a/src/mono/wasm/runtime/exports-internal.ts b/src/mono/wasm/runtime/exports-internal.ts
index 826180282e3b..b009da17c6c9 100644
--- a/src/mono/wasm/runtime/exports-internal.ts
+++ b/src/mono/wasm/runtime/exports-internal.ts
@@ -8,7 +8,7 @@ import { http_wasm_supports_streaming_response, http_wasm_create_abort_controler
import { exportedRuntimeAPI, Module, runtimeHelpers } from "./globals";
import { get_property, set_property, has_property, get_typeof_property, get_global_this, dynamic_import } from "./invoke-js";
import { mono_wasm_stringify_as_error_with_stack } from "./logging";
-import { ws_wasm_create, ws_wasm_open, ws_wasm_send, ws_wasm_receive, ws_wasm_close, ws_wasm_abort } from "./web-socket";
+import { ws_wasm_create, ws_wasm_open, ws_wasm_send, ws_wasm_receive, ws_wasm_close, ws_wasm_abort, ws_get_state } from "./web-socket";
import { mono_wasm_get_loaded_files } from "./assets";
import { jiterpreter_dump_stats } from "./jiterpreter";
import { getOptions, applyOptions } from "./jiterpreter-support";
@@ -62,6 +62,7 @@ export function export_internal(): any {
ws_wasm_receive,
ws_wasm_close,
ws_wasm_abort,
+ ws_get_state,
// BrowserHttpHandler
http_wasm_supports_streaming_response,
diff --git a/src/mono/wasm/runtime/hybrid-globalization/culture-info.ts b/src/mono/wasm/runtime/hybrid-globalization/culture-info.ts
index 226cfcb54173..825ff9d709f4 100644
--- a/src/mono/wasm/runtime/hybrid-globalization/culture-info.ts
+++ b/src/mono/wasm/runtime/hybrid-globalization/culture-info.ts
@@ -47,8 +47,8 @@ export function mono_wasm_get_culture_info(culture: MonoStringRef, dst: number,
function getAmPmDesignators(locale: any)
{
- const pmTime = new Date("August 19, 1975 12:15:30"); // do not change, some PM hours result in hour digits change, e.g. 13 -> 01 or 1
- const amTime = new Date("August 19, 1975 11:15:30"); // do not change, some AM hours result in hour digits change, e.g. 9 -> 09
+ const pmTime = new Date("August 19, 1975 12:15:33"); // do not change, some PM hours result in hour digits change, e.g. 13 -> 01 or 1
+ const amTime = new Date("August 19, 1975 11:15:33"); // do not change, some AM hours result in hour digits change, e.g. 9 -> 09
const pmDesignator = getDesignator(pmTime, locale);
const amDesignator = getDesignator(amTime, locale);
return {
@@ -59,7 +59,14 @@ function getAmPmDesignators(locale: any)
function getDesignator(time: Date, locale: string)
{
- const withDesignator = time.toLocaleTimeString(locale, { hourCycle: "h12"});
+ let withDesignator = time.toLocaleTimeString(locale, { hourCycle: "h12"});
+ const localizedZero = (0).toLocaleString(locale);
+ if (withDesignator.includes(localizedZero))
+ {
+ // in v8>=11.8 "12" changes to "0" for ja-JP
+ const localizedTwelve = (12).toLocaleString(locale);
+ withDesignator = withDesignator.replace(localizedZero, localizedTwelve);
+ }
const withoutDesignator = time.toLocaleTimeString(locale, { hourCycle: "h24"});
const designator = withDesignator.replace(withoutDesignator, "").trim();
if (new RegExp("[0-9]$").test(designator)){
diff --git a/src/mono/wasm/runtime/loader/assets.ts b/src/mono/wasm/runtime/loader/assets.ts
index 8730988193ff..5e9eb8a8d3b7 100644
--- a/src/mono/wasm/runtime/loader/assets.ts
+++ b/src/mono/wasm/runtime/loader/assets.ts
@@ -12,6 +12,7 @@ import { mono_exit } from "./exit";
import { addCachedReponse, findCachedResponse } from "./assetsCache";
import { getIcuResourceName } from "./icu";
import { makeURLAbsoluteWithApplicationBase } from "./polyfills";
+import { mono_log_info } from "./logging";
let throttlingPromise: PromiseAndController | undefined;
@@ -545,7 +546,7 @@ async function start_asset_download_sources(asset: AssetEntryInternal): Promise<
err.status = response.status;
throw err;
} else {
- loaderHelpers.out(`optional download '${response.url}' for ${asset.name} failed ${response.status} ${response.statusText}`);
+ mono_log_info(`optional download '${response.url}' for ${asset.name} failed ${response.status} ${response.statusText}`);
return undefined;
}
}
diff --git a/src/mono/wasm/runtime/loader/icu.ts b/src/mono/wasm/runtime/loader/icu.ts
index 927672f5db99..bb7213b22a88 100644
--- a/src/mono/wasm/runtime/loader/icu.ts
+++ b/src/mono/wasm/runtime/loader/icu.ts
@@ -1,6 +1,7 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
+import { mono_log_error } from "./logging";
import { GlobalizationMode, MonoConfig } from "../types";
import { ENVIRONMENT_IS_WEB, loaderHelpers } from "./globals";
import { mono_log_info, mono_log_debug } from "./logging";
@@ -18,7 +19,7 @@ export function init_globalization() {
loaderHelpers.preferredIcuAsset = null;
} else {
const msg = "invariant globalization mode is inactive and no ICU data archives are available";
- loaderHelpers.err(`ERROR: ${msg}`);
+ mono_log_error(`ERROR: ${msg}`);
throw new Error(msg);
}
}
diff --git a/src/mono/wasm/runtime/loader/run.ts b/src/mono/wasm/runtime/loader/run.ts
index b3437a5a81d6..112d66edab94 100644
--- a/src/mono/wasm/runtime/loader/run.ts
+++ b/src/mono/wasm/runtime/loader/run.ts
@@ -458,7 +458,7 @@ async function initializeModules(es6Modules: [RuntimeModuleExportsInternal, Nati
const { default: emscriptenFactory } = es6Modules[1];
setRuntimeGlobals(globalObjectsRoot);
initializeExports(globalObjectsRoot);
- await configureRuntimeStartup();
+ await configureRuntimeStartup(globalObjectsRoot.module);
loaderHelpers.runtimeModuleLoaded.promise_control.resolve();
emscriptenFactory((originalModule: EmscriptenModuleInternal) => {
diff --git a/src/mono/wasm/runtime/startup.ts b/src/mono/wasm/runtime/startup.ts
index c385844dfd30..324bdc78b043 100644
--- a/src/mono/wasm/runtime/startup.ts
+++ b/src/mono/wasm/runtime/startup.ts
@@ -39,7 +39,23 @@ import { assertNoProxies } from "./gc-handles";
// default size if MonoConfig.pthreadPoolSize is undefined
const MONO_PTHREAD_POOL_SIZE = 4;
-export async function configureRuntimeStartup(): Promise {
+export async function configureRuntimeStartup (module: DotnetModuleInternal): Promise {
+ if (!module.out) {
+ // eslint-disable-next-line no-console
+ module.out = console.log.bind(console);
+ }
+ if (!module.err) {
+ // eslint-disable-next-line no-console
+ module.err = console.error.bind(console);
+ }
+ if (!module.print) {
+ module.print = module.out;
+ }
+ if (!module.printErr) {
+ module.printErr = module.err;
+ }
+ loaderHelpers.out = module.print;
+ loaderHelpers.err = module.printErr;
await init_polyfills_async();
await checkMemorySnapshotSize();
}
@@ -54,17 +70,6 @@ export function configureEmscriptenStartup(module: DotnetModuleInternal): void {
module.locateFile = module.__locateFile = (path) => loaderHelpers.scriptDirectory + path;
}
- if (!module.out) {
- // eslint-disable-next-line no-console
- module.out = console.log.bind(console);
- }
-
- if (!module.err) {
- // eslint-disable-next-line no-console
- module.err = console.error.bind(console);
- }
- loaderHelpers.out = module.out;
- loaderHelpers.err = module.err;
module.mainScriptUrlOrBlob = loaderHelpers.scriptUrl;// this is needed by worker threads
// these all could be overridden on DotnetModuleConfig, we are chaing them to async below, as opposed to emscripten
diff --git a/src/mono/wasm/runtime/types/internal.ts b/src/mono/wasm/runtime/types/internal.ts
index a91b21973c8d..c43443a48da6 100644
--- a/src/mono/wasm/runtime/types/internal.ts
+++ b/src/mono/wasm/runtime/types/internal.ts
@@ -464,6 +464,8 @@ export declare interface EmscriptenModuleInternal {
runtimeKeepalivePush(): void;
runtimeKeepalivePop(): void;
maybeExit(): void;
+ print(message: string): void;
+ printErr(message: string): void;
}
/// A PromiseController encapsulates a Promise together with easy access to its resolve and reject functions.
@@ -492,7 +494,7 @@ export type setGlobalObjectsType = (globalObjects: GlobalObjects) => void;
export type initializeExportsType = (globalObjects: GlobalObjects) => RuntimeAPI;
export type initializeReplacementsType = (replacements: EmscriptenReplacements) => void;
export type configureEmscriptenStartupType = (module: DotnetModuleInternal) => void;
-export type configureRuntimeStartupType = () => Promise;
+export type configureRuntimeStartupType = (module: DotnetModuleInternal) => Promise;
export type configureWorkerStartupType = (module: DotnetModuleInternal) => Promise
@@ -508,4 +510,4 @@ export type RuntimeModuleExportsInternal = {
export type NativeModuleExportsInternal = {
default: (unificator: Function) => EmscriptenModuleInternal
-}
\ No newline at end of file
+}
diff --git a/src/mono/wasm/runtime/web-socket.ts b/src/mono/wasm/runtime/web-socket.ts
index 5cf400aca536..0f99b7e2eaf2 100644
--- a/src/mono/wasm/runtime/web-socket.ts
+++ b/src/mono/wasm/runtime/web-socket.ts
@@ -43,6 +43,17 @@ function verifyEnvironment() {
}
}
+export function ws_get_state(ws: WebSocketExtension) : number
+{
+ if (ws.readyState != WebSocket.CLOSED)
+ return ws.readyState ?? -1;
+ const receive_event_queue = ws[wasm_ws_pending_receive_event_queue];
+ const queued_events_count = receive_event_queue.getLength();
+ if (queued_events_count == 0)
+ return ws.readyState ?? -1;
+ return WebSocket.OPEN;
+}
+
export function ws_wasm_create(uri: string, sub_protocols: string[] | null, receive_status_ptr: VoidPtr, onClosed: (code: number, reason: string) => void): WebSocketExtension {
verifyEnvironment();
mono_assert(uri && typeof uri === "string", () => `ERR12: Invalid uri ${typeof uri}`);
@@ -175,8 +186,7 @@ export function ws_wasm_receive(ws: WebSocketExtension, buffer_ptr: VoidPtr, buf
return null;
}
- const readyState = ws.readyState;
- if (readyState == WebSocket.CLOSED) {
+ if (ws[wasm_ws_close_received]) {
const receive_status_ptr = ws[wasm_ws_receive_status_ptr];
setI32(receive_status_ptr, 0); // count
setI32(receive_status_ptr + 4, 2); // type:close
diff --git a/src/mono/wasm/testassets/WasmBasicTestApp/App/Common/Program.cs b/src/mono/wasm/testassets/WasmBasicTestApp/App/Common/Program.cs
index 4f38c02031c7..eda19b81f506 100644
--- a/src/mono/wasm/testassets/WasmBasicTestApp/App/Common/Program.cs
+++ b/src/mono/wasm/testassets/WasmBasicTestApp/App/Common/Program.cs
@@ -2,3 +2,4 @@
// The .NET Foundation licenses this file to you under the MIT license.
System.Console.WriteLine("WasmBasicTestApp");
+System.Console.Error.WriteLine("WasmBasicTestApp stderr");
diff --git a/src/mono/wasm/testassets/WasmBasicTestApp/App/wwwroot/main.js b/src/mono/wasm/testassets/WasmBasicTestApp/App/wwwroot/main.js
index 076f37a62a6d..8e3485bc2e67 100644
--- a/src/mono/wasm/testassets/WasmBasicTestApp/App/wwwroot/main.js
+++ b/src/mono/wasm/testassets/WasmBasicTestApp/App/wwwroot/main.js
@@ -63,6 +63,18 @@ switch (testCase) {
}
});
break;
+ case "OutErrOverrideWorks":
+ dotnet.withModuleConfig({
+ out: (message) => {
+ console.log("Emscripten out override works!");
+ console.log(message)
+ },
+ err: (message) => {
+ console.error("Emscripten err override works!");
+ console.error(message)
+ },
+ });
+ break;
}
const { getAssemblyExports, getConfig, INTERNAL } = await dotnet.create();
@@ -94,6 +106,9 @@ try {
case "DownloadResourceProgressTest":
exit(0);
break;
+ case "OutErrOverrideWorks":
+ dotnet.run();
+ break;
default:
console.error(`Unknown test case: ${testCase}`);
exit(3);
diff --git a/src/mono/wasm/wasm.proj b/src/mono/wasm/wasm.proj
index 3b75fa9feeb1..1790445cbc67 100644
--- a/src/mono/wasm/wasm.proj
+++ b/src/mono/wasm/wasm.proj
@@ -370,7 +370,8 @@
$(CMakeConfigurationEmccFlags) -s ASSERTIONS=1
-O2
- $(CMakeConfigurationLinkFlags) -s EXPORT_ES6=1
+
+ $(CMakeConfigurationLinkFlags) -s EXPORT_ES6=1 -lexports.js
$(CMakeConfigurationLinkFlags) -msimd128
$(CMakeConfigurationLinkFlags) -Wno-pthreads-mem-growth
$(CMakeConfigurationLinkFlags) --emit-symbol-map
diff --git a/src/native/eventpipe/ds-ipc-pal-namedpipe.c b/src/native/eventpipe/ds-ipc-pal-namedpipe.c
index 01a12275a421..151b3d95cc46 100644
--- a/src/native/eventpipe/ds-ipc-pal-namedpipe.c
+++ b/src/native/eventpipe/ds-ipc-pal-namedpipe.c
@@ -173,6 +173,27 @@ ds_ipc_free (DiagnosticsIpc *ipc)
ep_rt_object_free (ipc);
}
+void
+ds_ipc_reset (DiagnosticsIpc *ipc)
+{
+ if (!ipc)
+ return;
+
+ if (ipc->pipe != INVALID_HANDLE_VALUE) {
+ DisconnectNamedPipe (ipc->pipe);
+ CloseHandle (ipc->pipe);
+ ipc->pipe = INVALID_HANDLE_VALUE;
+ }
+
+ if (ipc->overlap.hEvent != INVALID_HANDLE_VALUE) {
+ CloseHandle (ipc->overlap.hEvent);
+ }
+
+ memset(&ipc->overlap, 0, sizeof(OVERLAPPED)); // clear the overlapped objects state
+ ipc->overlap.hEvent = INVALID_HANDLE_VALUE;
+ ipc->is_listening = false;
+}
+
int32_t
ds_ipc_poll (
DiagnosticsIpcPollHandle *poll_handles_data,
@@ -192,6 +213,10 @@ ds_ipc_poll (
// SERVER
EP_ASSERT (poll_handles_data [i].ipc->mode == DS_IPC_CONNECTION_MODE_LISTEN);
handles [i] = poll_handles_data [i].ipc->overlap.hEvent;
+ if (handles [i] == INVALID_HANDLE_VALUE) {
+ // Invalid handle, wait will fail. Signal error
+ poll_handles_data [i].events = DS_IPC_POLL_EVENTS_ERR;
+ }
} else {
// CLIENT
bool success = true;
diff --git a/src/native/eventpipe/ds-ipc-pal-socket.c b/src/native/eventpipe/ds-ipc-pal-socket.c
index d93233c506b7..7ad0b0f5d485 100644
--- a/src/native/eventpipe/ds-ipc-pal-socket.c
+++ b/src/native/eventpipe/ds-ipc-pal-socket.c
@@ -1064,6 +1064,11 @@ ds_ipc_free (DiagnosticsIpc *ipc)
ep_rt_object_free (ipc);
}
+void
+ds_ipc_reset (DiagnosticsIpc *ipc)
+{
+}
+
int32_t
ds_ipc_poll (
DiagnosticsIpcPollHandle *poll_handles_data,
diff --git a/src/native/eventpipe/ds-ipc-pal.h b/src/native/eventpipe/ds-ipc-pal.h
index 98e0fba180e6..8e246ed67195 100644
--- a/src/native/eventpipe/ds-ipc-pal.h
+++ b/src/native/eventpipe/ds-ipc-pal.h
@@ -35,6 +35,9 @@ ds_ipc_alloc (
void
ds_ipc_free (DiagnosticsIpc *ipc);
+void
+ds_ipc_reset (DiagnosticsIpc *ipc);
+
// Poll
// Parameters:
// - IpcPollHandle * poll_handles_data: Array of IpcPollHandles to poll
diff --git a/src/native/eventpipe/ds-ipc.c b/src/native/eventpipe/ds-ipc.c
index 1256e3b00333..d3b7292514dd 100644
--- a/src/native/eventpipe/ds-ipc.c
+++ b/src/native/eventpipe/ds-ipc.c
@@ -839,7 +839,11 @@ listen_port_reset (
ds_ipc_error_callback_func callback)
{
EP_ASSERT (object != NULL);
- return;
+#ifdef _WIN32
+ DiagnosticsListenPort *listen_port = (DiagnosticsListenPort *)object;
+ ds_ipc_reset (listen_port->port.ipc);
+ ds_ipc_listen (listen_port->port.ipc, callback);
+#endif // _WIN32
}
static DiagnosticsPortVtable listen_port_vtable = {
diff --git a/src/native/external/cgmanifest.json b/src/native/external/cgmanifest.json
index 4f6264b3c7ab..e45d9d900296 100644
--- a/src/native/external/cgmanifest.json
+++ b/src/native/external/cgmanifest.json
@@ -46,7 +46,7 @@
"Type": "git",
"Git": {
"RepositoryUrl": "https://github.com/madler/zlib",
- "CommitHash": "04f42ceca40f73e2978b50e93806c2a18c1281fc"
+ "CommitHash": "51b7f2abdade71cd9bb0e7a373ef2610ec6f9daf"
}
},
"DevelopmentDependency": false
diff --git a/src/native/external/zlib-version.txt b/src/native/external/zlib-version.txt
index fcac66cc4645..5da9869df33f 100644
--- a/src/native/external/zlib-version.txt
+++ b/src/native/external/zlib-version.txt
@@ -1,17 +1,9 @@
-v1.2.13
-(04f42ceca40f73e2978b50e93806c2a18c1281fc)
+v1.3.1
+(51b7f2abdade71cd9bb0e7a373ef2610ec6f9daf)
-https://github.com/madler/zlib/releases/tag/v1.2.13
+https://github.com/madler/zlib/releases/tag/v1.3.1
We have removed zlib.3.pdf from our local copy, as it is a binary file which is
not needed for our compilation.
-We have also cherry-picked into our local copy:
-
-- https://github.com/madler/zlib/commit/e554695638228b846d49657f31eeff0ca4680e8a
-
- This patch only affects memLevel 9 compression. .NET doesn't currently use this
- memLevel, but we'll take this patch out of an abundance of caution just in case
- we enable this functionality in a future release.
-
We have also applied the custom patches under the patches/zlib folder.
diff --git a/src/native/external/zlib/CMakeLists.txt b/src/native/external/zlib/CMakeLists.txt
index b412dc7feb73..15ceebe787e7 100644
--- a/src/native/external/zlib/CMakeLists.txt
+++ b/src/native/external/zlib/CMakeLists.txt
@@ -1,9 +1,11 @@
-cmake_minimum_required(VERSION 2.4.4)
+cmake_minimum_required(VERSION 2.4.4...3.15.0)
set(CMAKE_ALLOW_LOOSE_LOOP_CONSTRUCTS ON)
project(zlib C)
-set(VERSION "1.2.13")
+set(VERSION "1.3.1")
+
+option(ZLIB_BUILD_EXAMPLES "Enable Zlib Examples" ON)
set(INSTALL_BIN_DIR "${CMAKE_INSTALL_PREFIX}/bin" CACHE PATH "Installation directory for executables")
set(INSTALL_LIB_DIR "${CMAKE_INSTALL_PREFIX}/lib" CACHE PATH "Installation directory for libraries")
@@ -148,7 +150,9 @@ if(MINGW)
endif(MINGW)
add_library(zlib SHARED ${ZLIB_SRCS} ${ZLIB_DLL_SRCS} ${ZLIB_PUBLIC_HDRS} ${ZLIB_PRIVATE_HDRS})
+target_include_directories(zlib PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR})
add_library(zlibstatic STATIC ${ZLIB_SRCS} ${ZLIB_PUBLIC_HDRS} ${ZLIB_PRIVATE_HDRS})
+target_include_directories(zlibstatic PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_SOURCE_DIR})
set_target_properties(zlib PROPERTIES DEFINE_SYMBOL ZLIB_DLL)
set_target_properties(zlib PROPERTIES SOVERSION 1)
@@ -166,7 +170,7 @@ endif()
if(UNIX)
# On unix-like platforms the library is almost always called libz
set_target_properties(zlib zlibstatic PROPERTIES OUTPUT_NAME z)
- if(NOT APPLE)
+ if(NOT APPLE AND NOT(CMAKE_SYSTEM_NAME STREQUAL AIX))
set_target_properties(zlib PROPERTIES LINK_FLAGS "-Wl,--version-script,\"${CMAKE_CURRENT_SOURCE_DIR}/zlib.map\"")
endif()
elseif(BUILD_SHARED_LIBS AND WIN32)
@@ -193,21 +197,22 @@ endif()
#============================================================================
# Example binaries
#============================================================================
-
-add_executable(example test/example.c)
-target_link_libraries(example zlib)
-add_test(example example)
-
-add_executable(minigzip test/minigzip.c)
-target_link_libraries(minigzip zlib)
-
-if(HAVE_OFF64_T)
- add_executable(example64 test/example.c)
- target_link_libraries(example64 zlib)
- set_target_properties(example64 PROPERTIES COMPILE_FLAGS "-D_FILE_OFFSET_BITS=64")
- add_test(example64 example64)
-
- add_executable(minigzip64 test/minigzip.c)
- target_link_libraries(minigzip64 zlib)
- set_target_properties(minigzip64 PROPERTIES COMPILE_FLAGS "-D_FILE_OFFSET_BITS=64")
+if(ZLIB_BUILD_EXAMPLES)
+ add_executable(example test/example.c)
+ target_link_libraries(example zlib)
+ add_test(example example)
+
+ add_executable(minigzip test/minigzip.c)
+ target_link_libraries(minigzip zlib)
+
+ if(HAVE_OFF64_T)
+ add_executable(example64 test/example.c)
+ target_link_libraries(example64 zlib)
+ set_target_properties(example64 PROPERTIES COMPILE_FLAGS "-D_FILE_OFFSET_BITS=64")
+ add_test(example64 example64)
+
+ add_executable(minigzip64 test/minigzip.c)
+ target_link_libraries(minigzip64 zlib)
+ set_target_properties(minigzip64 PROPERTIES COMPILE_FLAGS "-D_FILE_OFFSET_BITS=64")
+ endif()
endif()
diff --git a/src/native/external/zlib/ChangeLog b/src/native/external/zlib/ChangeLog
index 457526bc6a51..b801a1031ec0 100644
--- a/src/native/external/zlib/ChangeLog
+++ b/src/native/external/zlib/ChangeLog
@@ -1,6 +1,34 @@
ChangeLog file for zlib
+Changes in 1.3.1 (22 Jan 2024)
+- Reject overflows of zip header fields in minizip
+- Fix bug in inflateSync() for data held in bit buffer
+- Add LIT_MEM define to use more memory for a small deflate speedup
+- Fix decision on the emission of Zip64 end records in minizip
+- Add bounds checking to ERR_MSG() macro, used by zError()
+- Neutralize zip file traversal attacks in miniunz
+- Fix a bug in ZLIB_DEBUG compiles in check_match()
+- Various portability and appearance improvements
+
+Changes in 1.3 (18 Aug 2023)
+- Remove K&R function definitions and zlib2ansi
+- Fix bug in deflateBound() for level 0 and memLevel 9
+- Fix bug when gzungetc() is used immediately after gzopen()
+- Fix bug when using gzflush() with a very small buffer
+- Fix crash when gzsetparams() attempted for transparent write
+- Fix test/example.c to work with FORCE_STORED
+- Rewrite of zran in examples (see zran.c version history)
+- Fix minizip to allow it to open an empty zip file
+- Fix reading disk number start on zip64 files in minizip
+- Fix logic error in minizip argument processing
+- Add minizip testing to Makefile
+- Read multiple bytes instead of byte-by-byte in minizip unzip.c
+- Add memory sanitizer to configure (--memory)
+- Various portability improvements
+- Various documentation improvements
+- Various spelling and typo corrections
+
Changes in 1.2.13 (13 Oct 2022)
- Fix configure issue that discarded provided CC definition
- Correct incorrect inputs provided to the CRC functions
@@ -1445,7 +1473,7 @@ Changes in 0.99 (27 Jan 96)
- fix typo in Make_vms.com (f$trnlnm -> f$getsyi)
- in fcalloc, normalize pointer if size > 65520 bytes
- don't use special fcalloc for 32 bit Borland C++
-- use STDC instead of __GO32__ to avoid redeclaring exit, calloc, etc...
+- use STDC instead of __GO32__ to avoid redeclaring exit, calloc, etc.
- use Z_BINARY instead of BINARY
- document that gzclose after gzdopen will close the file
- allow "a" as mode in gzopen
diff --git a/src/native/external/zlib/FAQ b/src/native/external/zlib/FAQ
index 99b7cf92e454..92f5d3e29fab 100644
--- a/src/native/external/zlib/FAQ
+++ b/src/native/external/zlib/FAQ
@@ -4,7 +4,7 @@
If your question is not there, please check the zlib home page
http://zlib.net/ which may have more recent information.
-The lastest zlib FAQ is at http://zlib.net/zlib_faq.html
+The latest zlib FAQ is at http://zlib.net/zlib_faq.html
1. Is zlib Y2K-compliant?
@@ -14,8 +14,7 @@ The lastest zlib FAQ is at http://zlib.net/zlib_faq.html
2. Where can I get a Windows DLL version?
The zlib sources can be compiled without change to produce a DLL. See the
- file win32/DLL_FAQ.txt in the zlib distribution. Pointers to the
- precompiled DLL are found in the zlib web site at http://zlib.net/ .
+ file win32/DLL_FAQ.txt in the zlib distribution.
3. Where can I get a Visual Basic interface to zlib?
diff --git a/src/native/external/zlib/Makefile.in b/src/native/external/zlib/Makefile.in
index 7d2713f4c574..cb8b00a9b078 100644
--- a/src/native/external/zlib/Makefile.in
+++ b/src/native/external/zlib/Makefile.in
@@ -1,5 +1,5 @@
# Makefile for zlib
-# Copyright (C) 1995-2017 Jean-loup Gailly, Mark Adler
+# Copyright (C) 1995-2024 Jean-loup Gailly, Mark Adler
# For conditions of distribution and use, see copyright notice in zlib.h
# To compile and test, type:
@@ -22,13 +22,13 @@ CFLAGS=-O
SFLAGS=-O
LDFLAGS=
-TEST_LDFLAGS=$(LDFLAGS) -L. libz.a
+TEST_LIBS=-L. libz.a
LDSHARED=$(CC)
CPP=$(CC) -E
STATICLIB=libz.a
SHAREDLIB=libz.so
-SHAREDLIBV=libz.so.1.2.13
+SHAREDLIBV=libz.so.1.3.1
SHAREDLIBM=libz.so.1
LIBS=$(STATICLIB) $(SHAREDLIBV)
@@ -282,10 +282,10 @@ placebo $(SHAREDLIBV): $(PIC_OBJS) libz.a
-@rmdir objs
example$(EXE): example.o $(STATICLIB)
- $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS)
+ $(CC) $(CFLAGS) $(LDFLAGS) -o $@ example.o $(TEST_LIBS)
minigzip$(EXE): minigzip.o $(STATICLIB)
- $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS)
+ $(CC) $(CFLAGS) $(LDFLAGS) -o $@ minigzip.o $(TEST_LIBS)
examplesh$(EXE): example.o $(SHAREDLIBV)
$(CC) $(CFLAGS) -o $@ example.o $(LDFLAGS) -L. $(SHAREDLIBV)
@@ -294,10 +294,10 @@ minigzipsh$(EXE): minigzip.o $(SHAREDLIBV)
$(CC) $(CFLAGS) -o $@ minigzip.o $(LDFLAGS) -L. $(SHAREDLIBV)
example64$(EXE): example64.o $(STATICLIB)
- $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS)
+ $(CC) $(CFLAGS) $(LDFLAGS) -o $@ example64.o $(TEST_LIBS)
minigzip64$(EXE): minigzip64.o $(STATICLIB)
- $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS)
+ $(CC) $(CFLAGS) $(LDFLAGS) -o $@ minigzip64.o $(TEST_LIBS)
install-libs: $(LIBS)
-@if [ ! -d $(DESTDIR)$(exec_prefix) ]; then mkdir -p $(DESTDIR)$(exec_prefix); fi
@@ -359,8 +359,14 @@ zconf.h.cmakein: $(SRCDIR)zconf.h.in
zconf: $(SRCDIR)zconf.h.in
cp -p $(SRCDIR)zconf.h.in zconf.h
+minizip-test: static
+ cd contrib/minizip && { CC="$(CC)" CFLAGS="$(CFLAGS)" $(MAKE) test ; cd ../.. ; }
+
+minizip-clean:
+ cd contrib/minizip && { $(MAKE) clean ; cd ../.. ; }
+
mostlyclean: clean
-clean:
+clean: minizip-clean
rm -f *.o *.lo *~ \
example$(EXE) minigzip$(EXE) examplesh$(EXE) minigzipsh$(EXE) \
example64$(EXE) minigzip64$(EXE) \
diff --git a/src/native/external/zlib/README b/src/native/external/zlib/README
index ba34d1894a9b..c5f917540b6f 100644
--- a/src/native/external/zlib/README
+++ b/src/native/external/zlib/README
@@ -1,6 +1,6 @@
ZLIB DATA COMPRESSION LIBRARY
-zlib 1.2.13 is a general purpose data compression library. All the code is
+zlib 1.3.1 is a general purpose data compression library. All the code is
thread safe. The data format used by the zlib library is described by RFCs
(Request for Comments) 1950 to 1952 in the files
http://tools.ietf.org/html/rfc1950 (zlib format), rfc1951 (deflate format) and
@@ -29,18 +29,17 @@ PLEASE read the zlib FAQ http://zlib.net/zlib_faq.html before asking for help.
Mark Nelson wrote an article about zlib for the Jan. 1997
issue of Dr. Dobb's Journal; a copy of the article is available at
-http://marknelson.us/1997/01/01/zlib-engine/ .
+https://marknelson.us/posts/1997/01/01/zlib-engine.html .
-The changes made in version 1.2.13 are documented in the file ChangeLog.
+The changes made in version 1.3.1 are documented in the file ChangeLog.
Unsupported third party contributions are provided in directory contrib/ .
-zlib is available in Java using the java.util.zip package, documented at
-http://java.sun.com/developer/technicalArticles/Programming/compression/ .
+zlib is available in Java using the java.util.zip package. Follow the API
+Documentation link at: https://docs.oracle.com/search/?q=java.util.zip .
-A Perl interface to zlib written by Paul Marquess is available
-at CPAN (Comprehensive Perl Archive Network) sites, including
-http://search.cpan.org/~pmqs/IO-Compress-Zlib/ .
+A Perl interface to zlib and bzip2 written by Paul Marquess
+can be found at https://github.com/pmqs/IO-Compress .
A Python interface to zlib written by A.M. Kuchling is
available in Python 1.5 and later versions, see
@@ -64,7 +63,7 @@ Notes for some targets:
- zlib doesn't work with gcc 2.6.3 on a DEC 3000/300LX under OSF/1 2.1 it works
when compiled with cc.
-- On Digital Unix 4.0D (formely OSF/1) on AlphaServer, the cc option -std1 is
+- On Digital Unix 4.0D (formerly OSF/1) on AlphaServer, the cc option -std1 is
necessary to get gzprintf working correctly. This is done by configure.
- zlib doesn't work on HP-UX 9.05 with some versions of /bin/cc. It works with
@@ -84,7 +83,7 @@ Acknowledgments:
Copyright notice:
- (C) 1995-2022 Jean-loup Gailly and Mark Adler
+ (C) 1995-2024 Jean-loup Gailly and Mark Adler
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
diff --git a/src/native/external/zlib/adler32.c b/src/native/external/zlib/adler32.c
index d0be4380a39c..04b81d29bad1 100644
--- a/src/native/external/zlib/adler32.c
+++ b/src/native/external/zlib/adler32.c
@@ -7,8 +7,6 @@
#include "zutil.h"
-local uLong adler32_combine_ OF((uLong adler1, uLong adler2, z_off64_t len2));
-
#define BASE 65521U /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
@@ -60,11 +58,7 @@ local uLong adler32_combine_ OF((uLong adler1, uLong adler2, z_off64_t len2));
#endif
/* ========================================================================= */
-uLong ZEXPORT adler32_z(adler, buf, len)
- uLong adler;
- const Bytef *buf;
- z_size_t len;
-{
+uLong ZEXPORT adler32_z(uLong adler, const Bytef *buf, z_size_t len) {
unsigned long sum2;
unsigned n;
@@ -131,20 +125,12 @@ uLong ZEXPORT adler32_z(adler, buf, len)
}
/* ========================================================================= */
-uLong ZEXPORT adler32(adler, buf, len)
- uLong adler;
- const Bytef *buf;
- uInt len;
-{
+uLong ZEXPORT adler32(uLong adler, const Bytef *buf, uInt len) {
return adler32_z(adler, buf, len);
}
/* ========================================================================= */
-local uLong adler32_combine_(adler1, adler2, len2)
- uLong adler1;
- uLong adler2;
- z_off64_t len2;
-{
+local uLong adler32_combine_(uLong adler1, uLong adler2, z_off64_t len2) {
unsigned long sum1;
unsigned long sum2;
unsigned rem;
@@ -169,18 +155,10 @@ local uLong adler32_combine_(adler1, adler2, len2)
}
/* ========================================================================= */
-uLong ZEXPORT adler32_combine(adler1, adler2, len2)
- uLong adler1;
- uLong adler2;
- z_off_t len2;
-{
+uLong ZEXPORT adler32_combine(uLong adler1, uLong adler2, z_off_t len2) {
return adler32_combine_(adler1, adler2, len2);
}
-uLong ZEXPORT adler32_combine64(adler1, adler2, len2)
- uLong adler1;
- uLong adler2;
- z_off64_t len2;
-{
+uLong ZEXPORT adler32_combine64(uLong adler1, uLong adler2, z_off64_t len2) {
return adler32_combine_(adler1, adler2, len2);
}
diff --git a/src/native/external/zlib/compress.c b/src/native/external/zlib/compress.c
index 2ad5326c14ec..f43bacf7ab97 100644
--- a/src/native/external/zlib/compress.c
+++ b/src/native/external/zlib/compress.c
@@ -19,13 +19,8 @@
memory, Z_BUF_ERROR if there was not enough room in the output buffer,
Z_STREAM_ERROR if the level parameter is invalid.
*/
-int ZEXPORT compress2(dest, destLen, source, sourceLen, level)
- Bytef *dest;
- uLongf *destLen;
- const Bytef *source;
- uLong sourceLen;
- int level;
-{
+int ZEXPORT compress2(Bytef *dest, uLongf *destLen, const Bytef *source,
+ uLong sourceLen, int level) {
z_stream stream;
int err;
const uInt max = (uInt)-1;
@@ -65,12 +60,8 @@ int ZEXPORT compress2(dest, destLen, source, sourceLen, level)
/* ===========================================================================
*/
-int ZEXPORT compress(dest, destLen, source, sourceLen)
- Bytef *dest;
- uLongf *destLen;
- const Bytef *source;
- uLong sourceLen;
-{
+int ZEXPORT compress(Bytef *dest, uLongf *destLen, const Bytef *source,
+ uLong sourceLen) {
return compress2(dest, destLen, source, sourceLen, Z_DEFAULT_COMPRESSION);
}
@@ -78,9 +69,7 @@ int ZEXPORT compress(dest, destLen, source, sourceLen)
If the default memLevel or windowBits for deflateInit() is changed, then
this function needs to be updated.
*/
-uLong ZEXPORT compressBound(sourceLen)
- uLong sourceLen;
-{
+uLong ZEXPORT compressBound(uLong sourceLen) {
return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
(sourceLen >> 25) + 13;
}
diff --git a/src/native/external/zlib/configure b/src/native/external/zlib/configure
index fa4d5daaba99..c55098afc4ae 100644
--- a/src/native/external/zlib/configure
+++ b/src/native/external/zlib/configure
@@ -25,7 +25,7 @@ if test $SRCDIR = "."; then
ZINCOUT="-I."
SRCDIR=""
else
- ZINC='-include zconf.h'
+ ZINC='-I. -include zconf.h'
ZINCOUT='-I. -I$(SRCDIR)'
SRCDIR="$SRCDIR/"
fi
@@ -44,9 +44,8 @@ STATICLIB=libz.a
# extract zlib version numbers from zlib.h
VER=`sed -n -e '/VERSION "/s/.*"\(.*\)".*/\1/p' < ${SRCDIR}zlib.h`
-VER3=`sed -n -e '/VERSION "/s/.*"\([0-9]*\\.[0-9]*\\.[0-9]*\).*/\1/p' < ${SRCDIR}zlib.h`
-VER2=`sed -n -e '/VERSION "/s/.*"\([0-9]*\\.[0-9]*\)\\..*/\1/p' < ${SRCDIR}zlib.h`
-VER1=`sed -n -e '/VERSION "/s/.*"\([0-9]*\)\\..*/\1/p' < ${SRCDIR}zlib.h`
+VER3=`echo ${VER}|sed -n -e 's/\([0-9]\{1,\}\(\\.[0-9]\{1,\}\)\{1,2\}\).*/\1/p'`
+VER1=`echo ${VER}|sed -n -e 's/\([0-9]\{1,\}\)\\..*/\1/p'`
# establish commands for library building
if "${CROSS_PREFIX}ar" --version >/dev/null 2>/dev/null || test $? -lt 126; then
@@ -90,7 +89,8 @@ build64=0
gcc=0
warn=0
debug=0
-sanitize=0
+address=0
+memory=0
old_cc="$CC"
old_cflags="$CFLAGS"
OBJC='$(OBJZ) $(OBJG)'
@@ -102,7 +102,7 @@ leave()
if test "$*" != "0"; then
echo "** $0 aborting." | tee -a configure.log
fi
- rm -f $test.[co] $test $test$shared_ext $test.gcno ./--version
+ rm -rf $test.[co] $test $test$shared_ext $test.gcno $test.dSYM ./--version
echo -------------------- >> configure.log
echo >> configure.log
echo >> configure.log
@@ -141,7 +141,9 @@ case "$1" in
-c* | --const) zconst=1; shift ;;
-w* | --warn) warn=1; shift ;;
-d* | --debug) debug=1; shift ;;
- --sanitize) sanitize=1; shift ;;
+ --sanitize) address=1; shift ;;
+ --address) address=1; shift ;;
+ --memory) memory=1; shift ;;
*)
echo "unknown option: $1" | tee -a configure.log
echo "$0 --help for help" | tee -a configure.log
@@ -211,8 +213,11 @@ if test "$gcc" -eq 1 && ($cc -c $test.c) >> configure.log 2>&1; then
CFLAGS="${CFLAGS} -Wall -Wextra"
fi
fi
- if test $sanitize -eq 1; then
- CFLAGS="${CFLAGS} -g -fsanitize=address"
+ if test $address -eq 1; then
+ CFLAGS="${CFLAGS} -g -fsanitize=address -fno-omit-frame-pointer"
+ fi
+ if test $memory -eq 1; then
+ CFLAGS="${CFLAGS} -g -fsanitize=memory -fno-omit-frame-pointer"
fi
if test $debug -eq 1; then
CFLAGS="${CFLAGS} -DZLIB_DEBUG"
@@ -260,7 +265,9 @@ if test "$gcc" -eq 1 && ($cc -c $test.c) >> configure.log 2>&1; then
SHAREDLIBV=libz.$VER$shared_ext
SHAREDLIBM=libz.$VER1$shared_ext
LDSHARED=${LDSHARED-"$cc -dynamiclib -install_name $libdir/$SHAREDLIBM -compatibility_version $VER1 -current_version $VER3"}
- if libtool -V 2>&1 | grep Apple > /dev/null; then
+ if "${CROSS_PREFIX}libtool" -V 2>&1 | grep Apple > /dev/null; then
+ AR="${CROSS_PREFIX}libtool"
+ elif libtool -V 2>&1 | grep Apple > /dev/null; then
AR="libtool"
else
AR="/usr/bin/libtool"
@@ -435,7 +442,7 @@ EOF
if test $shared -eq 1; then
echo Checking for shared library support... | tee -a configure.log
# we must test in two steps (cc then ld), required at least on SunOS 4.x
- if try $CC -w -c $SFLAGS $test.c &&
+ if try $CC -c $SFLAGS $test.c &&
try $LDSHARED $SFLAGS -o $test$shared_ext $test.o; then
echo Building shared library $SHAREDLIBV with $CC. | tee -a configure.log
elif test -z "$old_cc" -a -z "$old_cflags"; then
@@ -860,7 +867,7 @@ echo prefix = $prefix >> configure.log
echo sharedlibdir = $sharedlibdir >> configure.log
echo uname = $uname >> configure.log
-# udpate Makefile with the configure results
+# update Makefile with the configure results
sed < ${SRCDIR}Makefile.in "
/^CC *=/s#=.*#=$CC#
/^CFLAGS *=/s#=.*#=$CFLAGS#
diff --git a/src/native/external/zlib/crc32.c b/src/native/external/zlib/crc32.c
index f8357b083f76..6c38f5c04c6a 100644
--- a/src/native/external/zlib/crc32.c
+++ b/src/native/external/zlib/crc32.c
@@ -103,19 +103,6 @@
# define ARMCRC32
#endif
-/* Local functions. */
-local z_crc_t multmodp OF((z_crc_t a, z_crc_t b));
-local z_crc_t x2nmodp OF((z_off64_t n, unsigned k));
-
-#if defined(W) && (!defined(ARMCRC32) || defined(DYNAMIC_CRC_TABLE))
- local z_word_t byte_swap OF((z_word_t word));
-#endif
-
-#if defined(W) && !defined(ARMCRC32)
- local z_crc_t crc_word OF((z_word_t data));
- local z_word_t crc_word_big OF((z_word_t data));
-#endif
-
#if defined(W) && (!defined(ARMCRC32) || defined(DYNAMIC_CRC_TABLE))
/*
Swap the bytes in a z_word_t to convert between little and big endian. Any
@@ -123,9 +110,7 @@ local z_crc_t x2nmodp OF((z_off64_t n, unsigned k));
instruction, if one is available. This assumes that word_t is either 32 bits
or 64 bits.
*/
-local z_word_t byte_swap(word)
- z_word_t word;
-{
+local z_word_t byte_swap(z_word_t word) {
# if W == 8
return
(word & 0xff00000000000000) >> 56 |
@@ -146,24 +131,77 @@ local z_word_t byte_swap(word)
}
#endif
+#ifdef DYNAMIC_CRC_TABLE
+/* =========================================================================
+ * Table of powers of x for combining CRC-32s, filled in by make_crc_table()
+ * below.
+ */
+ local z_crc_t FAR x2n_table[32];
+#else
+/* =========================================================================
+ * Tables for byte-wise and braided CRC-32 calculations, and a table of powers
+ * of x for combining CRC-32s, all made by make_crc_table().
+ */
+# include "crc32.h"
+#endif
+
/* CRC polynomial. */
#define POLY 0xedb88320 /* p(x) reflected, with x^32 implied */
-#ifdef DYNAMIC_CRC_TABLE
+/*
+ Return a(x) multiplied by b(x) modulo p(x), where p(x) is the CRC polynomial,
+ reflected. For speed, this requires that a not be zero.
+ */
+local z_crc_t multmodp(z_crc_t a, z_crc_t b) {
+ z_crc_t m, p;
+
+ m = (z_crc_t)1 << 31;
+ p = 0;
+ for (;;) {
+ if (a & m) {
+ p ^= b;
+ if ((a & (m - 1)) == 0)
+ break;
+ }
+ m >>= 1;
+ b = b & 1 ? (b >> 1) ^ POLY : b >> 1;
+ }
+ return p;
+}
+/*
+ Return x^(n * 2^k) modulo p(x). Requires that x2n_table[] has been
+ initialized.
+ */
+local z_crc_t x2nmodp(z_off64_t n, unsigned k) {
+ z_crc_t p;
+
+ p = (z_crc_t)1 << 31; /* x^0 == 1 */
+ while (n) {
+ if (n & 1)
+ p = multmodp(x2n_table[k & 31], p);
+ n >>= 1;
+ k++;
+ }
+ return p;
+}
+
+#ifdef DYNAMIC_CRC_TABLE
+/* =========================================================================
+ * Build the tables for byte-wise and braided CRC-32 calculations, and a table
+ * of powers of x for combining CRC-32s.
+ */
local z_crc_t FAR crc_table[256];
-local z_crc_t FAR x2n_table[32];
-local void make_crc_table OF((void));
#ifdef W
local z_word_t FAR crc_big_table[256];
local z_crc_t FAR crc_braid_table[W][256];
local z_word_t FAR crc_braid_big_table[W][256];
- local void braid OF((z_crc_t [][256], z_word_t [][256], int, int));
+ local void braid(z_crc_t [][256], z_word_t [][256], int, int);
#endif
#ifdef MAKECRCH
- local void write_table OF((FILE *, const z_crc_t FAR *, int));
- local void write_table32hi OF((FILE *, const z_word_t FAR *, int));
- local void write_table64 OF((FILE *, const z_word_t FAR *, int));
+ local void write_table(FILE *, const z_crc_t FAR *, int);
+ local void write_table32hi(FILE *, const z_word_t FAR *, int);
+ local void write_table64(FILE *, const z_word_t FAR *, int);
#endif /* MAKECRCH */
/*
@@ -176,7 +214,6 @@ local void make_crc_table OF((void));
/* Definition of once functionality. */
typedef struct once_s once_t;
-local void once OF((once_t *, void (*)(void)));
/* Check for the availability of atomics. */
#if defined(__STDC__) && __STDC_VERSION__ >= 201112L && \
@@ -196,10 +233,7 @@ struct once_s {
invoke once() at the same time. The state must be a once_t initialized with
ONCE_INIT.
*/
-local void once(state, init)
- once_t *state;
- void (*init)(void);
-{
+local void once(once_t *state, void (*init)(void)) {
if (!atomic_load(&state->done)) {
if (atomic_flag_test_and_set(&state->begun))
while (!atomic_load(&state->done))
@@ -222,10 +256,7 @@ struct once_s {
/* Test and set. Alas, not atomic, but tries to minimize the period of
vulnerability. */
-local int test_and_set OF((int volatile *));
-local int test_and_set(flag)
- int volatile *flag;
-{
+local int test_and_set(int volatile *flag) {
int was;
was = *flag;
@@ -234,10 +265,7 @@ local int test_and_set(flag)
}
/* Run the provided init() function once. This is not thread-safe. */
-local void once(state, init)
- once_t *state;
- void (*init)(void);
-{
+local void once(once_t *state, void (*init)(void)) {
if (!state->done) {
if (test_and_set(&state->begun))
while (!state->done)
@@ -279,8 +307,7 @@ local once_t made = ONCE_INIT;
combinations of CRC register values and incoming bytes.
*/
-local void make_crc_table()
-{
+local void make_crc_table(void) {
unsigned i, j, n;
z_crc_t p;
@@ -447,11 +474,7 @@ local void make_crc_table()
Write the 32-bit values in table[0..k-1] to out, five per line in
hexadecimal separated by commas.
*/
-local void write_table(out, table, k)
- FILE *out;
- const z_crc_t FAR *table;
- int k;
-{
+local void write_table(FILE *out, const z_crc_t FAR *table, int k) {
int n;
for (n = 0; n < k; n++)
@@ -464,11 +487,7 @@ local void write_table(out, table, k)
Write the high 32-bits of each value in table[0..k-1] to out, five per line
in hexadecimal separated by commas.
*/
-local void write_table32hi(out, table, k)
-FILE *out;
-const z_word_t FAR *table;
-int k;
-{
+local void write_table32hi(FILE *out, const z_word_t FAR *table, int k) {
int n;
for (n = 0; n < k; n++)
@@ -484,11 +503,7 @@ int k;
bits. If not, then the type cast and format string can be adjusted
accordingly.
*/
-local void write_table64(out, table, k)
- FILE *out;
- const z_word_t FAR *table;
- int k;
-{
+local void write_table64(FILE *out, const z_word_t FAR *table, int k) {
int n;
for (n = 0; n < k; n++)
@@ -498,8 +513,7 @@ local void write_table64(out, table, k)
}
/* Actually do the deed. */
-int main()
-{
+int main(void) {
make_crc_table();
return 0;
}
@@ -511,12 +525,7 @@ int main()
Generate the little and big-endian braid tables for the given n and z_word_t
size w. Each array must have room for w blocks of 256 elements.
*/
-local void braid(ltl, big, n, w)
- z_crc_t ltl[][256];
- z_word_t big[][256];
- int n;
- int w;
-{
+local void braid(z_crc_t ltl[][256], z_word_t big[][256], int n, int w) {
int k;
z_crc_t i, p, q;
for (k = 0; k < w; k++) {
@@ -531,69 +540,13 @@ local void braid(ltl, big, n, w)
}
#endif
-#else /* !DYNAMIC_CRC_TABLE */
-/* ========================================================================
- * Tables for byte-wise and braided CRC-32 calculations, and a table of powers
- * of x for combining CRC-32s, all made by make_crc_table().
- */
-#include "crc32.h"
#endif /* DYNAMIC_CRC_TABLE */
-/* ========================================================================
- * Routines used for CRC calculation. Some are also required for the table
- * generation above.
- */
-
-/*
- Return a(x) multiplied by b(x) modulo p(x), where p(x) is the CRC polynomial,
- reflected. For speed, this requires that a not be zero.
- */
-local z_crc_t multmodp(a, b)
- z_crc_t a;
- z_crc_t b;
-{
- z_crc_t m, p;
-
- m = (z_crc_t)1 << 31;
- p = 0;
- for (;;) {
- if (a & m) {
- p ^= b;
- if ((a & (m - 1)) == 0)
- break;
- }
- m >>= 1;
- b = b & 1 ? (b >> 1) ^ POLY : b >> 1;
- }
- return p;
-}
-
-/*
- Return x^(n * 2^k) modulo p(x). Requires that x2n_table[] has been
- initialized.
- */
-local z_crc_t x2nmodp(n, k)
- z_off64_t n;
- unsigned k;
-{
- z_crc_t p;
-
- p = (z_crc_t)1 << 31; /* x^0 == 1 */
- while (n) {
- if (n & 1)
- p = multmodp(x2n_table[k & 31], p);
- n >>= 1;
- k++;
- }
- return p;
-}
-
/* =========================================================================
* This function can be used by asm versions of crc32(), and to force the
* generation of the CRC tables in a threaded application.
*/
-const z_crc_t FAR * ZEXPORT get_crc_table()
-{
+const z_crc_t FAR * ZEXPORT get_crc_table(void) {
#ifdef DYNAMIC_CRC_TABLE
once(&made, make_crc_table);
#endif /* DYNAMIC_CRC_TABLE */
@@ -619,11 +572,8 @@ const z_crc_t FAR * ZEXPORT get_crc_table()
#define Z_BATCH_ZEROS 0xa10d3d0c /* computed from Z_BATCH = 3990 */
#define Z_BATCH_MIN 800 /* fewest words in a final batch */
-unsigned long ZEXPORT crc32_z(crc, buf, len)
- unsigned long crc;
- const unsigned char FAR *buf;
- z_size_t len;
-{
+unsigned long ZEXPORT crc32_z(unsigned long crc, const unsigned char FAR *buf,
+ z_size_t len) {
z_crc_t val;
z_word_t crc1, crc2;
const z_word_t *word;
@@ -723,18 +673,14 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
least-significant byte of the word as the first byte of data, without any pre
or post conditioning. This is used to combine the CRCs of each braid.
*/
-local z_crc_t crc_word(data)
- z_word_t data;
-{
+local z_crc_t crc_word(z_word_t data) {
int k;
for (k = 0; k < W; k++)
data = (data >> 8) ^ crc_table[data & 0xff];
return (z_crc_t)data;
}
-local z_word_t crc_word_big(data)
- z_word_t data;
-{
+local z_word_t crc_word_big(z_word_t data) {
int k;
for (k = 0; k < W; k++)
data = (data << 8) ^
@@ -745,11 +691,8 @@ local z_word_t crc_word_big(data)
#endif
/* ========================================================================= */
-unsigned long ZEXPORT crc32_z(crc, buf, len)
- unsigned long crc;
- const unsigned char FAR *buf;
- z_size_t len;
-{
+unsigned long ZEXPORT crc32_z(unsigned long crc, const unsigned char FAR *buf,
+ z_size_t len) {
/* Return initial CRC, if requested. */
if (buf == Z_NULL) return 0;
@@ -781,8 +724,8 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
words = (z_word_t const *)buf;
/* Do endian check at execution time instead of compile time, since ARM
- processors can change the endianess at execution time. If the
- compiler knows what the endianess will be, it can optimize out the
+ processors can change the endianness at execution time. If the
+ compiler knows what the endianness will be, it can optimize out the
check and the unused branch. */
endian = 1;
if (*(unsigned char *)&endian) {
@@ -1069,20 +1012,13 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
#endif
/* ========================================================================= */
-unsigned long ZEXPORT crc32(crc, buf, len)
- unsigned long crc;
- const unsigned char FAR *buf;
- uInt len;
-{
+unsigned long ZEXPORT crc32(unsigned long crc, const unsigned char FAR *buf,
+ uInt len) {
return crc32_z(crc, buf, len);
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine64(crc1, crc2, len2)
- uLong crc1;
- uLong crc2;
- z_off64_t len2;
-{
+uLong ZEXPORT crc32_combine64(uLong crc1, uLong crc2, z_off64_t len2) {
#ifdef DYNAMIC_CRC_TABLE
once(&made, make_crc_table);
#endif /* DYNAMIC_CRC_TABLE */
@@ -1090,18 +1026,12 @@ uLong ZEXPORT crc32_combine64(crc1, crc2, len2)
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine(crc1, crc2, len2)
- uLong crc1;
- uLong crc2;
- z_off_t len2;
-{
+uLong ZEXPORT crc32_combine(uLong crc1, uLong crc2, z_off_t len2) {
return crc32_combine64(crc1, crc2, (z_off64_t)len2);
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine_gen64(len2)
- z_off64_t len2;
-{
+uLong ZEXPORT crc32_combine_gen64(z_off64_t len2) {
#ifdef DYNAMIC_CRC_TABLE
once(&made, make_crc_table);
#endif /* DYNAMIC_CRC_TABLE */
@@ -1109,17 +1039,11 @@ uLong ZEXPORT crc32_combine_gen64(len2)
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine_gen(len2)
- z_off_t len2;
-{
+uLong ZEXPORT crc32_combine_gen(z_off_t len2) {
return crc32_combine_gen64((z_off64_t)len2);
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine_op(crc1, crc2, op)
- uLong crc1;
- uLong crc2;
- uLong op;
-{
+uLong ZEXPORT crc32_combine_op(uLong crc1, uLong crc2, uLong op) {
return multmodp(op, crc1) ^ (crc2 & 0xffffffff);
}
diff --git a/src/native/external/zlib/deflate.c b/src/native/external/zlib/deflate.c
index b76366397545..ca2fc59a1b54 100644
--- a/src/native/external/zlib/deflate.c
+++ b/src/native/external/zlib/deflate.c
@@ -1,5 +1,5 @@
/* deflate.c -- compress data using the deflation algorithm
- * Copyright (C) 1995-2022 Jean-loup Gailly and Mark Adler
+ * Copyright (C) 1995-2024 Jean-loup Gailly and Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -52,7 +52,7 @@
#include "deflate.h"
const char deflate_copyright[] =
- " deflate 1.2.13 Copyright 1995-2022 Jean-loup Gailly and Mark Adler ";
+ " deflate 1.3.1 Copyright 1995-2024 Jean-loup Gailly and Mark Adler ";
/*
If you use the zlib library in a product, an acknowledgment is welcome
in the documentation of your product. If for some reason you cannot
@@ -60,9 +60,6 @@ const char deflate_copyright[] =
copyright string in the executable of your product.
*/
-/* ===========================================================================
- * Function prototypes.
- */
typedef enum {
need_more, /* block not completed, need more input or more output */
block_done, /* block flush performed */
@@ -70,29 +67,16 @@ typedef enum {
finish_done /* finish done, accept no more input or output */
} block_state;
-typedef block_state (*compress_func) OF((deflate_state *s, int flush));
+typedef block_state (*compress_func)(deflate_state *s, int flush);
/* Compression function. Returns the block state after the call. */
-local int deflateStateCheck OF((z_streamp strm));
-local void slide_hash OF((deflate_state *s));
-local void fill_window OF((deflate_state *s));
-local block_state deflate_stored OF((deflate_state *s, int flush));
-local block_state deflate_fast OF((deflate_state *s, int flush));
+local block_state deflate_stored(deflate_state *s, int flush);
+local block_state deflate_fast(deflate_state *s, int flush);
#ifndef FASTEST
-local block_state deflate_slow OF((deflate_state *s, int flush));
-#endif
-local block_state deflate_rle OF((deflate_state *s, int flush));
-local block_state deflate_huff OF((deflate_state *s, int flush));
-local void lm_init OF((deflate_state *s));
-local void putShortMSB OF((deflate_state *s, uInt b));
-local void flush_pending OF((z_streamp strm));
-local unsigned read_buf OF((z_streamp strm, Bytef *buf, unsigned size));
-local uInt longest_match OF((deflate_state *s, IPos cur_match));
-
-#ifdef ZLIB_DEBUG
-local void check_match OF((deflate_state *s, IPos start, IPos match,
- int length));
+local block_state deflate_slow(deflate_state *s, int flush);
#endif
+local block_state deflate_rle(deflate_state *s, int flush);
+local block_state deflate_huff(deflate_state *s, int flush);
/* ===========================================================================
* Local data
@@ -195,9 +179,12 @@ local const config configuration_table[10] = {
* bit values at the expense of memory usage). We slide even when level == 0 to
* keep the hash table consistent if we switch back to level > 0 later.
*/
-local void slide_hash(s)
- deflate_state *s;
-{
+#if defined(__has_feature)
+# if __has_feature(memory_sanitizer)
+ __attribute__((no_sanitize("memory")))
+# endif
+#endif
+local void slide_hash(deflate_state *s) {
unsigned n, m;
Posf *p;
uInt wsize = s->w_size;
@@ -221,30 +208,177 @@ local void slide_hash(s)
#endif
}
+/* ===========================================================================
+ * Read a new buffer from the current input stream, update the adler32
+ * and total number of bytes read. All deflate() input goes through
+ * this function so some applications may wish to modify it to avoid
+ * allocating a large strm->next_in buffer and copying from it.
+ * (See also flush_pending()).
+ */
+local unsigned read_buf(z_streamp strm, Bytef *buf, unsigned size) {
+ unsigned len = strm->avail_in;
+
+ if (len > size) len = size;
+ if (len == 0) return 0;
+
+ strm->avail_in -= len;
+
+ zmemcpy(buf, strm->next_in, len);
+ if (strm->state->wrap == 1) {
+ strm->adler = adler32(strm->adler, buf, len);
+ }
+#ifdef GZIP
+ else if (strm->state->wrap == 2) {
+ strm->adler = crc32(strm->adler, buf, len);
+ }
+#endif
+ strm->next_in += len;
+ strm->total_in += len;
+
+ return len;
+}
+
+/* ===========================================================================
+ * Fill the window when the lookahead becomes insufficient.
+ * Updates strstart and lookahead.
+ *
+ * IN assertion: lookahead < MIN_LOOKAHEAD
+ * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
+ * At least one byte has been read, or avail_in == 0; reads are
+ * performed for at least two bytes (required for the zip translate_eol
+ * option -- not supported here).
+ */
+local void fill_window(deflate_state *s) {
+ unsigned n;
+ unsigned more; /* Amount of free space at the end of the window. */
+ uInt wsize = s->w_size;
+
+ Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");
+
+ do {
+ more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
+
+ /* Deal with !@#$% 64K limit: */
+ if (sizeof(int) <= 2) {
+ if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
+ more = wsize;
+
+ } else if (more == (unsigned)(-1)) {
+ /* Very unlikely, but possible on 16 bit machine if
+ * strstart == 0 && lookahead == 1 (input done a byte at time)
+ */
+ more--;
+ }
+ }
+
+ /* If the window is almost full and there is insufficient lookahead,
+ * move the upper half to the lower one to make room in the upper half.
+ */
+ if (s->strstart >= wsize + MAX_DIST(s)) {
+
+ zmemcpy(s->window, s->window + wsize, (unsigned)wsize - more);
+ s->match_start -= wsize;
+ s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
+ s->block_start -= (long) wsize;
+ if (s->insert > s->strstart)
+ s->insert = s->strstart;
+ slide_hash(s);
+ more += wsize;
+ }
+ if (s->strm->avail_in == 0) break;
+
+ /* If there was no sliding:
+ * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
+ * more == window_size - lookahead - strstart
+ * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
+ * => more >= window_size - 2*WSIZE + 2
+ * In the BIG_MEM or MMAP case (not yet supported),
+ * window_size == input_size + MIN_LOOKAHEAD &&
+ * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
+ * Otherwise, window_size == 2*WSIZE so more >= 2.
+ * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
+ */
+ Assert(more >= 2, "more < 2");
+
+ n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
+ s->lookahead += n;
+
+ /* Initialize the hash value now that we have some input: */
+ if (s->lookahead + s->insert >= MIN_MATCH) {
+ uInt str = s->strstart - s->insert;
+ s->ins_h = s->window[str];
+ UPDATE_HASH(s, s->ins_h, s->window[str + 1]);
+#if MIN_MATCH != 3
+ Call UPDATE_HASH() MIN_MATCH-3 more times
+#endif
+ while (s->insert) {
+ UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]);
+#ifndef FASTEST
+ s->prev[str & s->w_mask] = s->head[s->ins_h];
+#endif
+ s->head[s->ins_h] = (Pos)str;
+ str++;
+ s->insert--;
+ if (s->lookahead + s->insert < MIN_MATCH)
+ break;
+ }
+ }
+ /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
+ * but this is not important since only literal bytes will be emitted.
+ */
+
+ } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
+
+ /* If the WIN_INIT bytes after the end of the current data have never been
+ * written, then zero those bytes in order to avoid memory check reports of
+ * the use of uninitialized (or uninitialised as Julian writes) bytes by
+ * the longest match routines. Update the high water mark for the next
+ * time through here. WIN_INIT is set to MAX_MATCH since the longest match
+ * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
+ */
+ if (s->high_water < s->window_size) {
+ ulg curr = s->strstart + (ulg)(s->lookahead);
+ ulg init;
+
+ if (s->high_water < curr) {
+ /* Previous high water mark below current data -- zero WIN_INIT
+ * bytes or up to end of window, whichever is less.
+ */
+ init = s->window_size - curr;
+ if (init > WIN_INIT)
+ init = WIN_INIT;
+ zmemzero(s->window + curr, (unsigned)init);
+ s->high_water = curr + init;
+ }
+ else if (s->high_water < (ulg)curr + WIN_INIT) {
+ /* High water mark at or above current data, but below current data
+ * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
+ * to end of window, whichever is less.
+ */
+ init = (ulg)curr + WIN_INIT - s->high_water;
+ if (init > s->window_size - s->high_water)
+ init = s->window_size - s->high_water;
+ zmemzero(s->window + s->high_water, (unsigned)init);
+ s->high_water += init;
+ }
+ }
+
+ Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
+ "not enough room for search");
+}
+
/* ========================================================================= */
-int ZEXPORT deflateInit_(strm, level, version, stream_size)
- z_streamp strm;
- int level;
- const char *version;
- int stream_size;
-{
+int ZEXPORT deflateInit_(z_streamp strm, int level, const char *version,
+ int stream_size) {
return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
Z_DEFAULT_STRATEGY, version, stream_size);
/* To do: ignore strm->next_in if we use it as window */
}
/* ========================================================================= */
-int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
- version, stream_size)
- z_streamp strm;
- int level;
- int method;
- int windowBits;
- int memLevel;
- int strategy;
- const char *version;
- int stream_size;
-{
+int ZEXPORT deflateInit2_(z_streamp strm, int level, int method,
+ int windowBits, int memLevel, int strategy,
+ const char *version, int stream_size) {
deflate_state *s;
int wrap = 1;
static const char my_version[] = ZLIB_VERSION;
@@ -359,7 +493,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
* symbols from which it is being constructed.
*/
- s->pending_buf = (uchf *) ZALLOC(strm, s->lit_bufsize, 4);
+ s->pending_buf = (uchf *) ZALLOC(strm, s->lit_bufsize, LIT_BUFS);
s->pending_buf_size = (ulg)s->lit_bufsize * 4;
if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
@@ -369,8 +503,14 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
deflateEnd (strm);
return Z_MEM_ERROR;
}
+#ifdef LIT_MEM
+ s->d_buf = (ushf *)(s->pending_buf + (s->lit_bufsize << 1));
+ s->l_buf = s->pending_buf + (s->lit_bufsize << 2);
+ s->sym_end = s->lit_bufsize - 1;
+#else
s->sym_buf = s->pending_buf + s->lit_bufsize;
s->sym_end = (s->lit_bufsize - 1) * 3;
+#endif
/* We avoid equality with lit_bufsize*3 because of wraparound at 64K
* on 16 bit machines and because stored blocks are restricted to
* 64K-1 bytes.
@@ -386,9 +526,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
/* =========================================================================
* Check for a valid deflate stream state. Return 0 if ok, 1 if not.
*/
-local int deflateStateCheck(strm)
- z_streamp strm;
-{
+local int deflateStateCheck(z_streamp strm) {
deflate_state *s;
if (strm == Z_NULL ||
strm->zalloc == (alloc_func)0 || strm->zfree == (free_func)0)
@@ -409,11 +547,8 @@ local int deflateStateCheck(strm)
}
/* ========================================================================= */
-int ZEXPORT deflateSetDictionary(strm, dictionary, dictLength)
- z_streamp strm;
- const Bytef *dictionary;
- uInt dictLength;
-{
+int ZEXPORT deflateSetDictionary(z_streamp strm, const Bytef *dictionary,
+ uInt dictLength) {
deflate_state *s;
uInt str, n;
int wrap;
@@ -478,11 +613,8 @@ int ZEXPORT deflateSetDictionary(strm, dictionary, dictLength)
}
/* ========================================================================= */
-int ZEXPORT deflateGetDictionary(strm, dictionary, dictLength)
- z_streamp strm;
- Bytef *dictionary;
- uInt *dictLength;
-{
+int ZEXPORT deflateGetDictionary(z_streamp strm, Bytef *dictionary,
+ uInt *dictLength) {
deflate_state *s;
uInt len;
@@ -500,9 +632,7 @@ int ZEXPORT deflateGetDictionary(strm, dictionary, dictLength)
}
/* ========================================================================= */
-int ZEXPORT deflateResetKeep(strm)
- z_streamp strm;
-{
+int ZEXPORT deflateResetKeep(z_streamp strm) {
deflate_state *s;
if (deflateStateCheck(strm)) {
@@ -537,10 +667,32 @@ int ZEXPORT deflateResetKeep(strm)
return Z_OK;
}
+/* ===========================================================================
+ * Initialize the "longest match" routines for a new zlib stream
+ */
+local void lm_init(deflate_state *s) {
+ s->window_size = (ulg)2L*s->w_size;
+
+ CLEAR_HASH(s);
+
+ /* Set the default configuration parameters:
+ */
+ s->max_lazy_match = configuration_table[s->level].max_lazy;
+ s->good_match = configuration_table[s->level].good_length;
+ s->nice_match = configuration_table[s->level].nice_length;
+ s->max_chain_length = configuration_table[s->level].max_chain;
+
+ s->strstart = 0;
+ s->block_start = 0L;
+ s->lookahead = 0;
+ s->insert = 0;
+ s->match_length = s->prev_length = MIN_MATCH-1;
+ s->match_available = 0;
+ s->ins_h = 0;
+}
+
/* ========================================================================= */
-int ZEXPORT deflateReset(strm)
- z_streamp strm;
-{
+int ZEXPORT deflateReset(z_streamp strm) {
int ret;
ret = deflateResetKeep(strm);
@@ -550,10 +702,7 @@ int ZEXPORT deflateReset(strm)
}
/* ========================================================================= */
-int ZEXPORT deflateSetHeader(strm, head)
- z_streamp strm;
- gz_headerp head;
-{
+int ZEXPORT deflateSetHeader(z_streamp strm, gz_headerp head) {
if (deflateStateCheck(strm) || strm->state->wrap != 2)
return Z_STREAM_ERROR;
strm->state->gzhead = head;
@@ -561,11 +710,7 @@ int ZEXPORT deflateSetHeader(strm, head)
}
/* ========================================================================= */
-int ZEXPORT deflatePending(strm, pending, bits)
- unsigned *pending;
- int *bits;
- z_streamp strm;
-{
+int ZEXPORT deflatePending(z_streamp strm, unsigned *pending, int *bits) {
if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
if (pending != Z_NULL)
*pending = strm->state->pending;
@@ -575,19 +720,21 @@ int ZEXPORT deflatePending(strm, pending, bits)
}
/* ========================================================================= */
-int ZEXPORT deflatePrime(strm, bits, value)
- z_streamp strm;
- int bits;
- int value;
-{
+int ZEXPORT deflatePrime(z_streamp strm, int bits, int value) {
deflate_state *s;
int put;
if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
s = strm->state;
+#ifdef LIT_MEM
+ if (bits < 0 || bits > 16 ||
+ (uchf *)s->d_buf < s->pending_out + ((Buf_size + 7) >> 3))
+ return Z_BUF_ERROR;
+#else
if (bits < 0 || bits > 16 ||
s->sym_buf < s->pending_out + ((Buf_size + 7) >> 3))
return Z_BUF_ERROR;
+#endif
do {
put = Buf_size - s->bi_valid;
if (put > bits)
@@ -602,11 +749,7 @@ int ZEXPORT deflatePrime(strm, bits, value)
}
/* ========================================================================= */
-int ZEXPORT deflateParams(strm, level, strategy)
- z_streamp strm;
- int level;
- int strategy;
-{
+int ZEXPORT deflateParams(z_streamp strm, int level, int strategy) {
deflate_state *s;
compress_func func;
@@ -651,13 +794,8 @@ int ZEXPORT deflateParams(strm, level, strategy)
}
/* ========================================================================= */
-int ZEXPORT deflateTune(strm, good_length, max_lazy, nice_length, max_chain)
- z_streamp strm;
- int good_length;
- int max_lazy;
- int nice_length;
- int max_chain;
-{
+int ZEXPORT deflateTune(z_streamp strm, int good_length, int max_lazy,
+ int nice_length, int max_chain) {
deflate_state *s;
if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -693,10 +831,7 @@ int ZEXPORT deflateTune(strm, good_length, max_lazy, nice_length, max_chain)
*
* Shifts are used to approximate divisions, for speed.
*/
-uLong ZEXPORT deflateBound(strm, sourceLen)
- z_streamp strm;
- uLong sourceLen;
-{
+uLong ZEXPORT deflateBound(z_streamp strm, uLong sourceLen) {
deflate_state *s;
uLong fixedlen, storelen, wraplen;
@@ -766,10 +901,7 @@ uLong ZEXPORT deflateBound(strm, sourceLen)
* IN assertion: the stream state is correct and there is enough room in
* pending_buf.
*/
-local void putShortMSB(s, b)
- deflate_state *s;
- uInt b;
-{
+local void putShortMSB(deflate_state *s, uInt b) {
put_byte(s, (Byte)(b >> 8));
put_byte(s, (Byte)(b & 0xff));
}
@@ -780,9 +912,7 @@ local void putShortMSB(s, b)
* applications may wish to modify it to avoid allocating a large
* strm->next_out buffer and copying into it. (See also read_buf()).
*/
-local void flush_pending(strm)
- z_streamp strm;
-{
+local void flush_pending(z_streamp strm) {
unsigned len;
deflate_state *s = strm->state;
@@ -813,10 +943,7 @@ local void flush_pending(strm)
} while (0)
/* ========================================================================= */
-int ZEXPORT deflate(strm, flush)
- z_streamp strm;
- int flush;
-{
+int ZEXPORT deflate(z_streamp strm, int flush) {
int old_flush; /* value of flush param for previous deflate call */
deflate_state *s;
@@ -1128,9 +1255,7 @@ int ZEXPORT deflate(strm, flush)
}
/* ========================================================================= */
-int ZEXPORT deflateEnd(strm)
- z_streamp strm;
-{
+int ZEXPORT deflateEnd(z_streamp strm) {
int status;
if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1154,11 +1279,10 @@ int ZEXPORT deflateEnd(strm)
* To simplify the source, this is not supported for 16-bit MSDOS (which
* doesn't have enough memory anyway to duplicate compression states).
*/
-int ZEXPORT deflateCopy(dest, source)
- z_streamp dest;
- z_streamp source;
-{
+int ZEXPORT deflateCopy(z_streamp dest, z_streamp source) {
#ifdef MAXSEG_64K
+ (void)dest;
+ (void)source;
return Z_STREAM_ERROR;
#else
deflate_state *ds;
@@ -1182,7 +1306,7 @@ int ZEXPORT deflateCopy(dest, source)
ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
ds->prev = (Posf *) ZALLOC(dest, ds->w_size, sizeof(Pos));
ds->head = (Posf *) ZALLOC(dest, ds->hash_size, sizeof(Pos));
- ds->pending_buf = (uchf *) ZALLOC(dest, ds->lit_bufsize, 4);
+ ds->pending_buf = (uchf *) ZALLOC(dest, ds->lit_bufsize, LIT_BUFS);
if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
ds->pending_buf == Z_NULL) {
@@ -1193,10 +1317,15 @@ int ZEXPORT deflateCopy(dest, source)
zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
zmemcpy((voidpf)ds->prev, (voidpf)ss->prev, ds->w_size * sizeof(Pos));
zmemcpy((voidpf)ds->head, (voidpf)ss->head, ds->hash_size * sizeof(Pos));
- zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);
+ zmemcpy(ds->pending_buf, ss->pending_buf, ds->lit_bufsize * LIT_BUFS);
ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
+#ifdef LIT_MEM
+ ds->d_buf = (ushf *)(ds->pending_buf + (ds->lit_bufsize << 1));
+ ds->l_buf = ds->pending_buf + (ds->lit_bufsize << 2);
+#else
ds->sym_buf = ds->pending_buf + ds->lit_bufsize;
+#endif
ds->l_desc.dyn_tree = ds->dyn_ltree;
ds->d_desc.dyn_tree = ds->dyn_dtree;
@@ -1206,66 +1335,6 @@ int ZEXPORT deflateCopy(dest, source)
#endif /* MAXSEG_64K */
}
-/* ===========================================================================
- * Read a new buffer from the current input stream, update the adler32
- * and total number of bytes read. All deflate() input goes through
- * this function so some applications may wish to modify it to avoid
- * allocating a large strm->next_in buffer and copying from it.
- * (See also flush_pending()).
- */
-local unsigned read_buf(strm, buf, size)
- z_streamp strm;
- Bytef *buf;
- unsigned size;
-{
- unsigned len = strm->avail_in;
-
- if (len > size) len = size;
- if (len == 0) return 0;
-
- strm->avail_in -= len;
-
- zmemcpy(buf, strm->next_in, len);
- if (strm->state->wrap == 1) {
- strm->adler = adler32(strm->adler, buf, len);
- }
-#ifdef GZIP
- else if (strm->state->wrap == 2) {
- strm->adler = crc32(strm->adler, buf, len);
- }
-#endif
- strm->next_in += len;
- strm->total_in += len;
-
- return len;
-}
-
-/* ===========================================================================
- * Initialize the "longest match" routines for a new zlib stream
- */
-local void lm_init(s)
- deflate_state *s;
-{
- s->window_size = (ulg)2L*s->w_size;
-
- CLEAR_HASH(s);
-
- /* Set the default configuration parameters:
- */
- s->max_lazy_match = configuration_table[s->level].max_lazy;
- s->good_match = configuration_table[s->level].good_length;
- s->nice_match = configuration_table[s->level].nice_length;
- s->max_chain_length = configuration_table[s->level].max_chain;
-
- s->strstart = 0;
- s->block_start = 0L;
- s->lookahead = 0;
- s->insert = 0;
- s->match_length = s->prev_length = MIN_MATCH-1;
- s->match_available = 0;
- s->ins_h = 0;
-}
-
#ifndef FASTEST
/* ===========================================================================
* Set match_start to the longest match starting at the given string and
@@ -1276,10 +1345,7 @@ local void lm_init(s)
* string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
* OUT assertion: the match length is not greater than s->lookahead.
*/
-local uInt longest_match(s, cur_match)
- deflate_state *s;
- IPos cur_match; /* current match */
-{
+local uInt longest_match(deflate_state *s, IPos cur_match) {
unsigned chain_length = s->max_chain_length;/* max hash chain length */
register Bytef *scan = s->window + s->strstart; /* current string */
register Bytef *match; /* matched string */
@@ -1427,10 +1493,7 @@ local uInt longest_match(s, cur_match)
/* ---------------------------------------------------------------------------
* Optimized version for FASTEST only
*/
-local uInt longest_match(s, cur_match)
- deflate_state *s;
- IPos cur_match; /* current match */
-{
+local uInt longest_match(deflate_state *s, IPos cur_match) {
register Bytef *scan = s->window + s->strstart; /* current string */
register Bytef *match; /* matched string */
register int len; /* length of current match */
@@ -1491,19 +1554,23 @@ local uInt longest_match(s, cur_match)
/* ===========================================================================
* Check that the match at match_start is indeed a match.
*/
-local void check_match(s, start, match, length)
- deflate_state *s;
- IPos start, match;
- int length;
-{
+local void check_match(deflate_state *s, IPos start, IPos match, int length) {
/* check that the match is indeed a match */
- if (zmemcmp(s->window + match,
- s->window + start, length) != EQUAL) {
- fprintf(stderr, " start %u, match %u, length %d\n",
- start, match, length);
+ Bytef *back = s->window + (int)match, *here = s->window + start;
+ IPos len = length;
+ if (match == (IPos)-1) {
+ /* match starts one byte before the current window -- just compare the
+ subsequent length-1 bytes */
+ back++;
+ here++;
+ len--;
+ }
+ if (zmemcmp(back, here, len) != EQUAL) {
+ fprintf(stderr, " start %u, match %d, length %d\n",
+ start, (int)match, length);
do {
- fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
- } while (--length != 0);
+ fprintf(stderr, "(%02x %02x)", *back++, *here++);
+ } while (--len != 0);
z_error("invalid match");
}
if (z_verbose > 1) {
@@ -1515,137 +1582,6 @@ local void check_match(s, start, match, length)
# define check_match(s, start, match, length)
#endif /* ZLIB_DEBUG */
-/* ===========================================================================
- * Fill the window when the lookahead becomes insufficient.
- * Updates strstart and lookahead.
- *
- * IN assertion: lookahead < MIN_LOOKAHEAD
- * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
- * At least one byte has been read, or avail_in == 0; reads are
- * performed for at least two bytes (required for the zip translate_eol
- * option -- not supported here).
- */
-local void fill_window(s)
- deflate_state *s;
-{
- unsigned n;
- unsigned more; /* Amount of free space at the end of the window. */
- uInt wsize = s->w_size;
-
- Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");
-
- do {
- more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
-
- /* Deal with !@#$% 64K limit: */
- if (sizeof(int) <= 2) {
- if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
- more = wsize;
-
- } else if (more == (unsigned)(-1)) {
- /* Very unlikely, but possible on 16 bit machine if
- * strstart == 0 && lookahead == 1 (input done a byte at time)
- */
- more--;
- }
- }
-
- /* If the window is almost full and there is insufficient lookahead,
- * move the upper half to the lower one to make room in the upper half.
- */
- if (s->strstart >= wsize + MAX_DIST(s)) {
-
- zmemcpy(s->window, s->window + wsize, (unsigned)wsize - more);
- s->match_start -= wsize;
- s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
- s->block_start -= (long) wsize;
- if (s->insert > s->strstart)
- s->insert = s->strstart;
- slide_hash(s);
- more += wsize;
- }
- if (s->strm->avail_in == 0) break;
-
- /* If there was no sliding:
- * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
- * more == window_size - lookahead - strstart
- * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
- * => more >= window_size - 2*WSIZE + 2
- * In the BIG_MEM or MMAP case (not yet supported),
- * window_size == input_size + MIN_LOOKAHEAD &&
- * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
- * Otherwise, window_size == 2*WSIZE so more >= 2.
- * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
- */
- Assert(more >= 2, "more < 2");
-
- n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
- s->lookahead += n;
-
- /* Initialize the hash value now that we have some input: */
- if (s->lookahead + s->insert >= MIN_MATCH) {
- uInt str = s->strstart - s->insert;
- s->ins_h = s->window[str];
- UPDATE_HASH(s, s->ins_h, s->window[str + 1]);
-#if MIN_MATCH != 3
- Call UPDATE_HASH() MIN_MATCH-3 more times
-#endif
- while (s->insert) {
- UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]);
-#ifndef FASTEST
- s->prev[str & s->w_mask] = s->head[s->ins_h];
-#endif
- s->head[s->ins_h] = (Pos)str;
- str++;
- s->insert--;
- if (s->lookahead + s->insert < MIN_MATCH)
- break;
- }
- }
- /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
- * but this is not important since only literal bytes will be emitted.
- */
-
- } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
-
- /* If the WIN_INIT bytes after the end of the current data have never been
- * written, then zero those bytes in order to avoid memory check reports of
- * the use of uninitialized (or uninitialised as Julian writes) bytes by
- * the longest match routines. Update the high water mark for the next
- * time through here. WIN_INIT is set to MAX_MATCH since the longest match
- * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
- */
- if (s->high_water < s->window_size) {
- ulg curr = s->strstart + (ulg)(s->lookahead);
- ulg init;
-
- if (s->high_water < curr) {
- /* Previous high water mark below current data -- zero WIN_INIT
- * bytes or up to end of window, whichever is less.
- */
- init = s->window_size - curr;
- if (init > WIN_INIT)
- init = WIN_INIT;
- zmemzero(s->window + curr, (unsigned)init);
- s->high_water = curr + init;
- }
- else if (s->high_water < (ulg)curr + WIN_INIT) {
- /* High water mark at or above current data, but below current data
- * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
- * to end of window, whichever is less.
- */
- init = (ulg)curr + WIN_INIT - s->high_water;
- if (init > s->window_size - s->high_water)
- init = s->window_size - s->high_water;
- zmemzero(s->window + s->high_water, (unsigned)init);
- s->high_water += init;
- }
- }
-
- Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
- "not enough room for search");
-}
-
/* ===========================================================================
* Flush the current block, with given end-of-file flag.
* IN assertion: strstart is set to the end of the current match.
@@ -1688,10 +1624,7 @@ local void fill_window(s)
* copied. It is most efficient with large input and output buffers, which
* maximizes the opportunities to have a single copy from next_in to next_out.
*/
-local block_state deflate_stored(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_stored(deflate_state *s, int flush) {
/* Smallest worthy block size when not flushing or finishing. By default
* this is 32K. This can be as small as 507 bytes for memLevel == 1. For
* large input and output buffers, the stored block size will be larger.
@@ -1875,10 +1808,7 @@ local block_state deflate_stored(s, flush)
* new strings in the dictionary only for unmatched strings or for short
* matches. It is used only for the fast compression options.
*/
-local block_state deflate_fast(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_fast(deflate_state *s, int flush) {
IPos hash_head; /* head of the hash chain */
int bflush; /* set if current block must be flushed */
@@ -1977,10 +1907,7 @@ local block_state deflate_fast(s, flush)
* evaluation for matches: a match is finally adopted only if there is
* no better match at the next window position.
*/
-local block_state deflate_slow(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_slow(deflate_state *s, int flush) {
IPos hash_head; /* head of hash chain */
int bflush; /* set if current block must be flushed */
@@ -2108,10 +2035,7 @@ local block_state deflate_slow(s, flush)
* one. Do not maintain a hash table. (It will be regenerated if this run of
* deflate switches away from Z_RLE.)
*/
-local block_state deflate_rle(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_rle(deflate_state *s, int flush) {
int bflush; /* set if current block must be flushed */
uInt prev; /* byte at distance one to match */
Bytef *scan, *strend; /* scan goes up to strend for length of run */
@@ -2182,10 +2106,7 @@ local block_state deflate_rle(s, flush)
* For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table.
* (It will be regenerated if this run of deflate switches away from Huffman.)
*/
-local block_state deflate_huff(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_huff(deflate_state *s, int flush) {
int bflush; /* set if current block must be flushed */
for (;;) {
diff --git a/src/native/external/zlib/deflate.h b/src/native/external/zlib/deflate.h
index 1a06cd5f25d1..300c6ada62b8 100644
--- a/src/native/external/zlib/deflate.h
+++ b/src/native/external/zlib/deflate.h
@@ -1,5 +1,5 @@
/* deflate.h -- internal compression state
- * Copyright (C) 1995-2018 Jean-loup Gailly
+ * Copyright (C) 1995-2024 Jean-loup Gailly
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -23,6 +23,10 @@
# define GZIP
#endif
+/* define LIT_MEM to slightly increase the speed of deflate (order 1% to 2%) at
+ the cost of a larger memory footprint */
+/* #define LIT_MEM */
+
/* ===========================================================================
* Internal compression state.
*/
@@ -217,7 +221,14 @@ typedef struct internal_state {
/* Depth of each subtree used as tie breaker for trees of equal frequency
*/
+#ifdef LIT_MEM
+# define LIT_BUFS 5
+ ushf *d_buf; /* buffer for distances */
+ uchf *l_buf; /* buffer for literals/lengths */
+#else
+# define LIT_BUFS 4
uchf *sym_buf; /* buffer for distances and literals/lengths */
+#endif
uInt lit_bufsize;
/* Size of match buffer for literals/lengths. There are 4 reasons for
@@ -239,7 +250,7 @@ typedef struct internal_state {
* - I can't count above 4
*/
- uInt sym_next; /* running index in sym_buf */
+ uInt sym_next; /* running index in symbol buffer */
uInt sym_end; /* symbol table full when sym_next reaches this */
ulg opt_len; /* bit length of current block with optimal trees */
@@ -291,14 +302,14 @@ typedef struct internal_state {
memory checker errors from longest match routines */
/* in trees.c */
-void ZLIB_INTERNAL _tr_init OF((deflate_state *s));
-int ZLIB_INTERNAL _tr_tally OF((deflate_state *s, unsigned dist, unsigned lc));
-void ZLIB_INTERNAL _tr_flush_block OF((deflate_state *s, charf *buf,
- ulg stored_len, int last));
-void ZLIB_INTERNAL _tr_flush_bits OF((deflate_state *s));
-void ZLIB_INTERNAL _tr_align OF((deflate_state *s));
-void ZLIB_INTERNAL _tr_stored_block OF((deflate_state *s, charf *buf,
- ulg stored_len, int last));
+void ZLIB_INTERNAL _tr_init(deflate_state *s);
+int ZLIB_INTERNAL _tr_tally(deflate_state *s, unsigned dist, unsigned lc);
+void ZLIB_INTERNAL _tr_flush_block(deflate_state *s, charf *buf,
+ ulg stored_len, int last);
+void ZLIB_INTERNAL _tr_flush_bits(deflate_state *s);
+void ZLIB_INTERNAL _tr_align(deflate_state *s);
+void ZLIB_INTERNAL _tr_stored_block(deflate_state *s, charf *buf,
+ ulg stored_len, int last);
#define d_code(dist) \
((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
@@ -318,6 +329,25 @@ void ZLIB_INTERNAL _tr_stored_block OF((deflate_state *s, charf *buf,
extern const uch ZLIB_INTERNAL _dist_code[];
#endif
+#ifdef LIT_MEM
+# define _tr_tally_lit(s, c, flush) \
+ { uch cc = (c); \
+ s->d_buf[s->sym_next] = 0; \
+ s->l_buf[s->sym_next++] = cc; \
+ s->dyn_ltree[cc].Freq++; \
+ flush = (s->sym_next == s->sym_end); \
+ }
+# define _tr_tally_dist(s, distance, length, flush) \
+ { uch len = (uch)(length); \
+ ush dist = (ush)(distance); \
+ s->d_buf[s->sym_next] = dist; \
+ s->l_buf[s->sym_next++] = len; \
+ dist--; \
+ s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
+ s->dyn_dtree[d_code(dist)].Freq++; \
+ flush = (s->sym_next == s->sym_end); \
+ }
+#else
# define _tr_tally_lit(s, c, flush) \
{ uch cc = (c); \
s->sym_buf[s->sym_next++] = 0; \
@@ -337,6 +367,7 @@ void ZLIB_INTERNAL _tr_stored_block OF((deflate_state *s, charf *buf,
s->dyn_dtree[d_code(dist)].Freq++; \
flush = (s->sym_next == s->sym_end); \
}
+#endif
#else
# define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
# define _tr_tally_dist(s, distance, length, flush) \
diff --git a/src/native/external/zlib/gzclose.c b/src/native/external/zlib/gzclose.c
index caeb99a3177f..48d6a86f04b6 100644
--- a/src/native/external/zlib/gzclose.c
+++ b/src/native/external/zlib/gzclose.c
@@ -8,9 +8,7 @@
/* gzclose() is in a separate file so that it is linked in only if it is used.
That way the other gzclose functions can be used instead to avoid linking in
unneeded compression or decompression routines. */
-int ZEXPORT gzclose(file)
- gzFile file;
-{
+int ZEXPORT gzclose(gzFile file) {
#ifndef NO_GZCOMPRESS
gz_statep state;
diff --git a/src/native/external/zlib/gzguts.h b/src/native/external/zlib/gzguts.h
index 57faf37165a3..eba72085bb75 100644
--- a/src/native/external/zlib/gzguts.h
+++ b/src/native/external/zlib/gzguts.h
@@ -1,5 +1,5 @@
/* gzguts.h -- zlib internal header definitions for gz* operations
- * Copyright (C) 2004-2019 Mark Adler
+ * Copyright (C) 2004-2024 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -7,9 +7,8 @@
# ifndef _LARGEFILE_SOURCE
# define _LARGEFILE_SOURCE 1
# endif
-# ifdef _FILE_OFFSET_BITS
-# undef _FILE_OFFSET_BITS
-# endif
+# undef _FILE_OFFSET_BITS
+# undef _TIME_BITS
#endif
#ifdef HAVE_HIDDEN
@@ -119,8 +118,8 @@
/* gz* functions always use library allocation functions */
#ifndef STDC
- extern voidp malloc OF((uInt size));
- extern void free OF((voidpf ptr));
+ extern voidp malloc(uInt size);
+ extern void free(voidpf ptr);
#endif
/* get errno and strerror definition */
@@ -138,10 +137,10 @@
/* provide prototypes for these when building zlib without LFS */
#if !defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off64_t ZEXPORT gzseek64 OF((gzFile, z_off64_t, int));
- ZEXTERN z_off64_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off64_t ZEXPORT gzoffset64 OF((gzFile));
+ ZEXTERN gzFile ZEXPORT gzopen64(const char *, const char *);
+ ZEXTERN z_off64_t ZEXPORT gzseek64(gzFile, z_off64_t, int);
+ ZEXTERN z_off64_t ZEXPORT gztell64(gzFile);
+ ZEXTERN z_off64_t ZEXPORT gzoffset64(gzFile);
#endif
/* default memLevel */
@@ -203,17 +202,13 @@ typedef struct {
typedef gz_state FAR *gz_statep;
/* shared functions */
-void ZLIB_INTERNAL gz_error OF((gz_statep, int, const char *));
+void ZLIB_INTERNAL gz_error(gz_statep, int, const char *);
#if defined UNDER_CE
-char ZLIB_INTERNAL *gz_strwinerror OF((DWORD error));
+char ZLIB_INTERNAL *gz_strwinerror(DWORD error);
#endif
/* GT_OFF(x), where x is an unsigned value, is true if x > maximum z_off64_t
value -- needed when comparing unsigned to z_off64_t, which is signed
(possible z_off64_t types off_t, off64_t, and long are all signed) */
-#ifdef INT_MAX
-# define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > INT_MAX)
-#else
-unsigned ZLIB_INTERNAL gz_intmax OF((void));
-# define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > gz_intmax())
-#endif
+unsigned ZLIB_INTERNAL gz_intmax(void);
+#define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > gz_intmax())
diff --git a/src/native/external/zlib/gzlib.c b/src/native/external/zlib/gzlib.c
index 55da46a453fd..983153cc8e49 100644
--- a/src/native/external/zlib/gzlib.c
+++ b/src/native/external/zlib/gzlib.c
@@ -1,5 +1,5 @@
/* gzlib.c -- zlib functions common to reading and writing gzip files
- * Copyright (C) 2004-2019 Mark Adler
+ * Copyright (C) 2004-2024 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -15,10 +15,6 @@
#endif
#endif
-/* Local functions */
-local void gz_reset OF((gz_statep));
-local gzFile gz_open OF((const void *, int, const char *));
-
#if defined UNDER_CE
/* Map the Windows error number in ERROR to a locale-dependent error message
@@ -30,9 +26,7 @@ local gzFile gz_open OF((const void *, int, const char *));
The gz_strwinerror function does not change the current setting of
GetLastError. */
-char ZLIB_INTERNAL *gz_strwinerror(error)
- DWORD error;
-{
+char ZLIB_INTERNAL *gz_strwinerror(DWORD error) {
static char buf[1024];
wchar_t *msgbuf;
@@ -72,9 +66,7 @@ char ZLIB_INTERNAL *gz_strwinerror(error)
#endif /* UNDER_CE */
/* Reset gzip file state */
-local void gz_reset(state)
- gz_statep state;
-{
+local void gz_reset(gz_statep state) {
state->x.have = 0; /* no output data available */
if (state->mode == GZ_READ) { /* for reading ... */
state->eof = 0; /* not at end of file */
@@ -90,11 +82,7 @@ local void gz_reset(state)
}
/* Open a gzip file either by name or file descriptor. */
-local gzFile gz_open(path, fd, mode)
- const void *path;
- int fd;
- const char *mode;
-{
+local gzFile gz_open(const void *path, int fd, const char *mode) {
gz_statep state;
z_size_t len;
int oflag;
@@ -269,26 +257,17 @@ local gzFile gz_open(path, fd, mode)
}
/* -- see zlib.h -- */
-gzFile ZEXPORT gzopen(path, mode)
- const char *path;
- const char *mode;
-{
+gzFile ZEXPORT gzopen(const char *path, const char *mode) {
return gz_open(path, -1, mode);
}
/* -- see zlib.h -- */
-gzFile ZEXPORT gzopen64(path, mode)
- const char *path;
- const char *mode;
-{
+gzFile ZEXPORT gzopen64(const char *path, const char *mode) {
return gz_open(path, -1, mode);
}
/* -- see zlib.h -- */
-gzFile ZEXPORT gzdopen(fd, mode)
- int fd;
- const char *mode;
-{
+gzFile ZEXPORT gzdopen(int fd, const char *mode) {
char *path; /* identifier for error messages */
gzFile gz;
@@ -306,19 +285,13 @@ gzFile ZEXPORT gzdopen(fd, mode)
/* -- see zlib.h -- */
#ifdef WIDECHAR
-gzFile ZEXPORT gzopen_w(path, mode)
- const wchar_t *path;
- const char *mode;
-{
+gzFile ZEXPORT gzopen_w(const wchar_t *path, const char *mode) {
return gz_open(path, -2, mode);
}
#endif
/* -- see zlib.h -- */
-int ZEXPORT gzbuffer(file, size)
- gzFile file;
- unsigned size;
-{
+int ZEXPORT gzbuffer(gzFile file, unsigned size) {
gz_statep state;
/* get internal structure and check integrity */
@@ -335,16 +308,14 @@ int ZEXPORT gzbuffer(file, size)
/* check and set requested size */
if ((size << 1) < size)
return -1; /* need to be able to double it */
- if (size < 2)
- size = 2; /* need two bytes to check magic header */
+ if (size < 8)
+ size = 8; /* needed to behave well with flushing */
state->want = size;
return 0;
}
/* -- see zlib.h -- */
-int ZEXPORT gzrewind(file)
- gzFile file;
-{
+int ZEXPORT gzrewind(gzFile file) {
gz_statep state;
/* get internal structure */
@@ -365,11 +336,7 @@ int ZEXPORT gzrewind(file)
}
/* -- see zlib.h -- */
-z_off64_t ZEXPORT gzseek64(file, offset, whence)
- gzFile file;
- z_off64_t offset;
- int whence;
-{
+z_off64_t ZEXPORT gzseek64(gzFile file, z_off64_t offset, int whence) {
unsigned n;
z_off64_t ret;
gz_statep state;
@@ -442,11 +409,7 @@ z_off64_t ZEXPORT gzseek64(file, offset, whence)
}
/* -- see zlib.h -- */
-z_off_t ZEXPORT gzseek(file, offset, whence)
- gzFile file;
- z_off_t offset;
- int whence;
-{
+z_off_t ZEXPORT gzseek(gzFile file, z_off_t offset, int whence) {
z_off64_t ret;
ret = gzseek64(file, (z_off64_t)offset, whence);
@@ -454,9 +417,7 @@ z_off_t ZEXPORT gzseek(file, offset, whence)
}
/* -- see zlib.h -- */
-z_off64_t ZEXPORT gztell64(file)
- gzFile file;
-{
+z_off64_t ZEXPORT gztell64(gzFile file) {
gz_statep state;
/* get internal structure and check integrity */
@@ -471,9 +432,7 @@ z_off64_t ZEXPORT gztell64(file)
}
/* -- see zlib.h -- */
-z_off_t ZEXPORT gztell(file)
- gzFile file;
-{
+z_off_t ZEXPORT gztell(gzFile file) {
z_off64_t ret;
ret = gztell64(file);
@@ -481,9 +440,7 @@ z_off_t ZEXPORT gztell(file)
}
/* -- see zlib.h -- */
-z_off64_t ZEXPORT gzoffset64(file)
- gzFile file;
-{
+z_off64_t ZEXPORT gzoffset64(gzFile file) {
z_off64_t offset;
gz_statep state;
@@ -504,9 +461,7 @@ z_off64_t ZEXPORT gzoffset64(file)
}
/* -- see zlib.h -- */
-z_off_t ZEXPORT gzoffset(file)
- gzFile file;
-{
+z_off_t ZEXPORT gzoffset(gzFile file) {
z_off64_t ret;
ret = gzoffset64(file);
@@ -514,9 +469,7 @@ z_off_t ZEXPORT gzoffset(file)
}
/* -- see zlib.h -- */
-int ZEXPORT gzeof(file)
- gzFile file;
-{
+int ZEXPORT gzeof(gzFile file) {
gz_statep state;
/* get internal structure and check integrity */
@@ -531,10 +484,7 @@ int ZEXPORT gzeof(file)
}
/* -- see zlib.h -- */
-const char * ZEXPORT gzerror(file, errnum)
- gzFile file;
- int *errnum;
-{
+const char * ZEXPORT gzerror(gzFile file, int *errnum) {
gz_statep state;
/* get internal structure and check integrity */
@@ -552,9 +502,7 @@ const char * ZEXPORT gzerror(file, errnum)
}
/* -- see zlib.h -- */
-void ZEXPORT gzclearerr(file)
- gzFile file;
-{
+void ZEXPORT gzclearerr(gzFile file) {
gz_statep state;
/* get internal structure and check integrity */
@@ -578,11 +526,7 @@ void ZEXPORT gzclearerr(file)
memory). Simply save the error message as a static string. If there is an
allocation failure constructing the error message, then convert the error to
out of memory. */
-void ZLIB_INTERNAL gz_error(state, err, msg)
- gz_statep state;
- int err;
- const char *msg;
-{
+void ZLIB_INTERNAL gz_error(gz_statep state, int err, const char *msg) {
/* free previously allocated message and clear */
if (state->msg != NULL) {
if (state->err != Z_MEM_ERROR)
@@ -619,21 +563,20 @@ void ZLIB_INTERNAL gz_error(state, err, msg)
#endif
}
-#ifndef INT_MAX
/* portably return maximum value for an int (when limits.h presumed not
available) -- we need to do this to cover cases where 2's complement not
used, since C standard permits 1's complement and sign-bit representations,
otherwise we could just use ((unsigned)-1) >> 1 */
-unsigned ZLIB_INTERNAL gz_intmax()
-{
- unsigned p, q;
-
- p = 1;
+unsigned ZLIB_INTERNAL gz_intmax(void) {
+#ifdef INT_MAX
+ return INT_MAX;
+#else
+ unsigned p = 1, q;
do {
q = p;
p <<= 1;
p++;
} while (p > q);
return q >> 1;
-}
#endif
+}
diff --git a/src/native/external/zlib/gzread.c b/src/native/external/zlib/gzread.c
index dd77381596cb..4168cbc88752 100644
--- a/src/native/external/zlib/gzread.c
+++ b/src/native/external/zlib/gzread.c
@@ -5,25 +5,12 @@
#include "gzguts.h"
-/* Local functions */
-local int gz_load OF((gz_statep, unsigned char *, unsigned, unsigned *));
-local int gz_avail OF((gz_statep));
-local int gz_look OF((gz_statep));
-local int gz_decomp OF((gz_statep));
-local int gz_fetch OF((gz_statep));
-local int gz_skip OF((gz_statep, z_off64_t));
-local z_size_t gz_read OF((gz_statep, voidp, z_size_t));
-
/* Use read() to load a buffer -- return -1 on error, otherwise 0. Read from
state->fd, and update state->eof, state->err, and state->msg as appropriate.
This function needs to loop on read(), since read() is not guaranteed to
read the number of bytes requested, depending on the type of descriptor. */
-local int gz_load(state, buf, len, have)
- gz_statep state;
- unsigned char *buf;
- unsigned len;
- unsigned *have;
-{
+local int gz_load(gz_statep state, unsigned char *buf, unsigned len,
+ unsigned *have) {
int ret;
unsigned get, max = ((unsigned)-1 >> 2) + 1;
@@ -53,9 +40,7 @@ local int gz_load(state, buf, len, have)
If strm->avail_in != 0, then the current data is moved to the beginning of
the input buffer, and then the remainder of the buffer is loaded with the
available data from the input file. */
-local int gz_avail(state)
- gz_statep state;
-{
+local int gz_avail(gz_statep state) {
unsigned got;
z_streamp strm = &(state->strm);
@@ -88,9 +73,7 @@ local int gz_avail(state)
case, all further file reads will be directly to either the output buffer or
a user buffer. If decompressing, the inflate state will be initialized.
gz_look() will return 0 on success or -1 on failure. */
-local int gz_look(state)
- gz_statep state;
-{
+local int gz_look(gz_statep state) {
z_streamp strm = &(state->strm);
/* allocate read buffers and inflate memory */
@@ -170,9 +153,7 @@ local int gz_look(state)
data. If the gzip stream completes, state->how is reset to LOOK to look for
the next gzip stream or raw data, once state->x.have is depleted. Returns 0
on success, -1 on failure. */
-local int gz_decomp(state)
- gz_statep state;
-{
+local int gz_decomp(gz_statep state) {
int ret = Z_OK;
unsigned had;
z_streamp strm = &(state->strm);
@@ -224,9 +205,7 @@ local int gz_decomp(state)
looked for to determine whether to copy or decompress. Returns -1 on error,
otherwise 0. gz_fetch() will leave state->how as COPY or GZIP unless the
end of the input file has been reached and all data has been processed. */
-local int gz_fetch(state)
- gz_statep state;
-{
+local int gz_fetch(gz_statep state) {
z_streamp strm = &(state->strm);
do {
@@ -254,10 +233,7 @@ local int gz_fetch(state)
}
/* Skip len uncompressed bytes of output. Return -1 on error, 0 on success. */
-local int gz_skip(state, len)
- gz_statep state;
- z_off64_t len;
-{
+local int gz_skip(gz_statep state, z_off64_t len) {
unsigned n;
/* skip over len bytes or reach end-of-file, whichever comes first */
@@ -289,11 +265,7 @@ local int gz_skip(state, len)
input. Return the number of bytes read. If zero is returned, either the
end of file was reached, or there was an error. state->err must be
consulted in that case to determine which. */
-local z_size_t gz_read(state, buf, len)
- gz_statep state;
- voidp buf;
- z_size_t len;
-{
+local z_size_t gz_read(gz_statep state, voidp buf, z_size_t len) {
z_size_t got;
unsigned n;
@@ -370,11 +342,7 @@ local z_size_t gz_read(state, buf, len)
}
/* -- see zlib.h -- */
-int ZEXPORT gzread(file, buf, len)
- gzFile file;
- voidp buf;
- unsigned len;
-{
+int ZEXPORT gzread(gzFile file, voidp buf, unsigned len) {
gz_statep state;
/* get internal structure */
@@ -406,12 +374,7 @@ int ZEXPORT gzread(file, buf, len)
}
/* -- see zlib.h -- */
-z_size_t ZEXPORT gzfread(buf, size, nitems, file)
- voidp buf;
- z_size_t size;
- z_size_t nitems;
- gzFile file;
-{
+z_size_t ZEXPORT gzfread(voidp buf, z_size_t size, z_size_t nitems, gzFile file) {
z_size_t len;
gz_statep state;
@@ -442,9 +405,7 @@ z_size_t ZEXPORT gzfread(buf, size, nitems, file)
#else
# undef gzgetc
#endif
-int ZEXPORT gzgetc(file)
- gzFile file;
-{
+int ZEXPORT gzgetc(gzFile file) {
unsigned char buf[1];
gz_statep state;
@@ -469,17 +430,12 @@ int ZEXPORT gzgetc(file)
return gz_read(state, buf, 1) < 1 ? -1 : buf[0];
}
-int ZEXPORT gzgetc_(file)
-gzFile file;
-{
+int ZEXPORT gzgetc_(gzFile file) {
return gzgetc(file);
}
/* -- see zlib.h -- */
-int ZEXPORT gzungetc(c, file)
- int c;
- gzFile file;
-{
+int ZEXPORT gzungetc(int c, gzFile file) {
gz_statep state;
/* get internal structure */
@@ -487,6 +443,10 @@ int ZEXPORT gzungetc(c, file)
return -1;
state = (gz_statep)file;
+ /* in case this was just opened, set up the input buffer */
+ if (state->mode == GZ_READ && state->how == LOOK && state->x.have == 0)
+ (void)gz_look(state);
+
/* check that we're reading and that there's no (serious) error */
if (state->mode != GZ_READ ||
(state->err != Z_OK && state->err != Z_BUF_ERROR))
@@ -536,11 +496,7 @@ int ZEXPORT gzungetc(c, file)
}
/* -- see zlib.h -- */
-char * ZEXPORT gzgets(file, buf, len)
- gzFile file;
- char *buf;
- int len;
-{
+char * ZEXPORT gzgets(gzFile file, char *buf, int len) {
unsigned left, n;
char *str;
unsigned char *eol;
@@ -600,9 +556,7 @@ char * ZEXPORT gzgets(file, buf, len)
}
/* -- see zlib.h -- */
-int ZEXPORT gzdirect(file)
- gzFile file;
-{
+int ZEXPORT gzdirect(gzFile file) {
gz_statep state;
/* get internal structure */
@@ -620,9 +574,7 @@ int ZEXPORT gzdirect(file)
}
/* -- see zlib.h -- */
-int ZEXPORT gzclose_r(file)
- gzFile file;
-{
+int ZEXPORT gzclose_r(gzFile file) {
int ret, err;
gz_statep state;
diff --git a/src/native/external/zlib/gzwrite.c b/src/native/external/zlib/gzwrite.c
index eb8a0e5893ff..435b4621b534 100644
--- a/src/native/external/zlib/gzwrite.c
+++ b/src/native/external/zlib/gzwrite.c
@@ -5,18 +5,10 @@
#include "gzguts.h"
-/* Local functions */
-local int gz_init OF((gz_statep));
-local int gz_comp OF((gz_statep, int));
-local int gz_zero OF((gz_statep, z_off64_t));
-local z_size_t gz_write OF((gz_statep, voidpc, z_size_t));
-
/* Initialize state for writing a gzip file. Mark initialization by setting
state->size to non-zero. Return -1 on a memory allocation failure, or 0 on
success. */
-local int gz_init(state)
- gz_statep state;
-{
+local int gz_init(gz_statep state) {
int ret;
z_streamp strm = &(state->strm);
@@ -70,10 +62,7 @@ local int gz_init(state)
deflate() flush value. If flush is Z_FINISH, then the deflate() state is
reset to start a new gzip stream. If gz->direct is true, then simply write
to the output file without compressing, and ignore flush. */
-local int gz_comp(state, flush)
- gz_statep state;
- int flush;
-{
+local int gz_comp(gz_statep state, int flush) {
int ret, writ;
unsigned have, put, max = ((unsigned)-1 >> 2) + 1;
z_streamp strm = &(state->strm);
@@ -151,10 +140,7 @@ local int gz_comp(state, flush)
/* Compress len zeros to output. Return -1 on a write error or memory
allocation failure by gz_comp(), or 0 on success. */
-local int gz_zero(state, len)
- gz_statep state;
- z_off64_t len;
-{
+local int gz_zero(gz_statep state, z_off64_t len) {
int first;
unsigned n;
z_streamp strm = &(state->strm);
@@ -184,11 +170,7 @@ local int gz_zero(state, len)
/* Write len bytes from buf to file. Return the number of bytes written. If
the returned value is less than len, then there was an error. */
-local z_size_t gz_write(state, buf, len)
- gz_statep state;
- voidpc buf;
- z_size_t len;
-{
+local z_size_t gz_write(gz_statep state, voidpc buf, z_size_t len) {
z_size_t put = len;
/* if len is zero, avoid unnecessary operations */
@@ -252,11 +234,7 @@ local z_size_t gz_write(state, buf, len)
}
/* -- see zlib.h -- */
-int ZEXPORT gzwrite(file, buf, len)
- gzFile file;
- voidpc buf;
- unsigned len;
-{
+int ZEXPORT gzwrite(gzFile file, voidpc buf, unsigned len) {
gz_statep state;
/* get internal structure */
@@ -280,12 +258,8 @@ int ZEXPORT gzwrite(file, buf, len)
}
/* -- see zlib.h -- */
-z_size_t ZEXPORT gzfwrite(buf, size, nitems, file)
- voidpc buf;
- z_size_t size;
- z_size_t nitems;
- gzFile file;
-{
+z_size_t ZEXPORT gzfwrite(voidpc buf, z_size_t size, z_size_t nitems,
+ gzFile file) {
z_size_t len;
gz_statep state;
@@ -310,10 +284,7 @@ z_size_t ZEXPORT gzfwrite(buf, size, nitems, file)
}
/* -- see zlib.h -- */
-int ZEXPORT gzputc(file, c)
- gzFile file;
- int c;
-{
+int ZEXPORT gzputc(gzFile file, int c) {
unsigned have;
unsigned char buf[1];
gz_statep state;
@@ -358,10 +329,7 @@ int ZEXPORT gzputc(file, c)
}
/* -- see zlib.h -- */
-int ZEXPORT gzputs(file, s)
- gzFile file;
- const char *s;
-{
+int ZEXPORT gzputs(gzFile file, const char *s) {
z_size_t len, put;
gz_statep state;
@@ -388,8 +356,7 @@ int ZEXPORT gzputs(file, s)
#include
/* -- see zlib.h -- */
-int ZEXPORTVA gzvprintf(gzFile file, const char *format, va_list va)
-{
+int ZEXPORTVA gzvprintf(gzFile file, const char *format, va_list va) {
int len;
unsigned left;
char *next;
@@ -460,8 +427,7 @@ int ZEXPORTVA gzvprintf(gzFile file, const char *format, va_list va)
return len;
}
-int ZEXPORTVA gzprintf(gzFile file, const char *format, ...)
-{
+int ZEXPORTVA gzprintf(gzFile file, const char *format, ...) {
va_list va;
int ret;
@@ -474,13 +440,10 @@ int ZEXPORTVA gzprintf(gzFile file, const char *format, ...)
#else /* !STDC && !Z_HAVE_STDARG_H */
/* -- see zlib.h -- */
-int ZEXPORTVA gzprintf(file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
- a11, a12, a13, a14, a15, a16, a17, a18, a19, a20)
- gzFile file;
- const char *format;
- int a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
- a11, a12, a13, a14, a15, a16, a17, a18, a19, a20;
-{
+int ZEXPORTVA gzprintf(gzFile file, const char *format, int a1, int a2, int a3,
+ int a4, int a5, int a6, int a7, int a8, int a9, int a10,
+ int a11, int a12, int a13, int a14, int a15, int a16,
+ int a17, int a18, int a19, int a20) {
unsigned len, left;
char *next;
gz_statep state;
@@ -562,10 +525,7 @@ int ZEXPORTVA gzprintf(file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
#endif
/* -- see zlib.h -- */
-int ZEXPORT gzflush(file, flush)
- gzFile file;
- int flush;
-{
+int ZEXPORT gzflush(gzFile file, int flush) {
gz_statep state;
/* get internal structure */
@@ -594,11 +554,7 @@ int ZEXPORT gzflush(file, flush)
}
/* -- see zlib.h -- */
-int ZEXPORT gzsetparams(file, level, strategy)
- gzFile file;
- int level;
- int strategy;
-{
+int ZEXPORT gzsetparams(gzFile file, int level, int strategy) {
gz_statep state;
z_streamp strm;
@@ -609,7 +565,7 @@ int ZEXPORT gzsetparams(file, level, strategy)
strm = &(state->strm);
/* check that we're writing and that there's no error */
- if (state->mode != GZ_WRITE || state->err != Z_OK)
+ if (state->mode != GZ_WRITE || state->err != Z_OK || state->direct)
return Z_STREAM_ERROR;
/* if no change is requested, then do nothing */
@@ -636,9 +592,7 @@ int ZEXPORT gzsetparams(file, level, strategy)
}
/* -- see zlib.h -- */
-int ZEXPORT gzclose_w(file)
- gzFile file;
-{
+int ZEXPORT gzclose_w(gzFile file) {
int ret = Z_OK;
gz_statep state;
diff --git a/src/native/external/zlib/infback.c b/src/native/external/zlib/infback.c
index babeaf1806f9..e7b25b307a30 100644
--- a/src/native/external/zlib/infback.c
+++ b/src/native/external/zlib/infback.c
@@ -15,9 +15,6 @@
#include "inflate.h"
#include "inffast.h"
-/* function prototypes */
-local void fixedtables OF((struct inflate_state FAR *state));
-
/*
strm provides memory allocation functions in zalloc and zfree, or
Z_NULL to use the library memory allocation functions.
@@ -25,13 +22,9 @@ local void fixedtables OF((struct inflate_state FAR *state));
windowBits is in the range 8..15, and window is a user-supplied
window and output buffer that is 2**windowBits bytes.
*/
-int ZEXPORT inflateBackInit_(strm, windowBits, window, version, stream_size)
-z_streamp strm;
-int windowBits;
-unsigned char FAR *window;
-const char *version;
-int stream_size;
-{
+int ZEXPORT inflateBackInit_(z_streamp strm, int windowBits,
+ unsigned char FAR *window, const char *version,
+ int stream_size) {
struct inflate_state FAR *state;
if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||
@@ -80,9 +73,7 @@ int stream_size;
used for threaded applications, since the rewriting of the tables and virgin
may not be thread-safe.
*/
-local void fixedtables(state)
-struct inflate_state FAR *state;
-{
+local void fixedtables(struct inflate_state FAR *state) {
#ifdef BUILDFIXED
static int virgin = 1;
static code *lenfix, *distfix;
@@ -248,13 +239,8 @@ struct inflate_state FAR *state;
inflateBack() can also return Z_STREAM_ERROR if the input parameters
are not correct, i.e. strm is Z_NULL or the state was not initialized.
*/
-int ZEXPORT inflateBack(strm, in, in_desc, out, out_desc)
-z_streamp strm;
-in_func in;
-void FAR *in_desc;
-out_func out;
-void FAR *out_desc;
-{
+int ZEXPORT inflateBack(z_streamp strm, in_func in, void FAR *in_desc,
+ out_func out, void FAR *out_desc) {
struct inflate_state FAR *state;
z_const unsigned char FAR *next; /* next input */
unsigned char FAR *put; /* next output */
@@ -632,9 +618,7 @@ void FAR *out_desc;
return ret;
}
-int ZEXPORT inflateBackEnd(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateBackEnd(z_streamp strm) {
if (strm == Z_NULL || strm->state == Z_NULL || strm->zfree == (free_func)0)
return Z_STREAM_ERROR;
ZFREE(strm, strm->state);
diff --git a/src/native/external/zlib/inffast.c b/src/native/external/zlib/inffast.c
index 1fec7f363fa6..9354676e786e 100644
--- a/src/native/external/zlib/inffast.c
+++ b/src/native/external/zlib/inffast.c
@@ -47,10 +47,7 @@
requires strm->avail_out >= 258 for each loop to avoid checking for
output space.
*/
-void ZLIB_INTERNAL inflate_fast(strm, start)
-z_streamp strm;
-unsigned start; /* inflate()'s starting value for strm->avail_out */
-{
+void ZLIB_INTERNAL inflate_fast(z_streamp strm, unsigned start) {
struct inflate_state FAR *state;
z_const unsigned char FAR *in; /* local strm->next_in */
z_const unsigned char FAR *last; /* have enough input while in < last */
diff --git a/src/native/external/zlib/inffast.h b/src/native/external/zlib/inffast.h
index e5c1aa4ca8cd..49c6d156c5c6 100644
--- a/src/native/external/zlib/inffast.h
+++ b/src/native/external/zlib/inffast.h
@@ -8,4 +8,4 @@
subject to change. Applications should only use zlib.h.
*/
-void ZLIB_INTERNAL inflate_fast OF((z_streamp strm, unsigned start));
+void ZLIB_INTERNAL inflate_fast(z_streamp strm, unsigned start);
diff --git a/src/native/external/zlib/inflate.c b/src/native/external/zlib/inflate.c
index 8acbef44e993..94ecff015a9b 100644
--- a/src/native/external/zlib/inflate.c
+++ b/src/native/external/zlib/inflate.c
@@ -91,20 +91,7 @@
# endif
#endif
-/* function prototypes */
-local int inflateStateCheck OF((z_streamp strm));
-local void fixedtables OF((struct inflate_state FAR *state));
-local int updatewindow OF((z_streamp strm, const unsigned char FAR *end,
- unsigned copy));
-#ifdef BUILDFIXED
- void makefixed OF((void));
-#endif
-local unsigned syncsearch OF((unsigned FAR *have, const unsigned char FAR *buf,
- unsigned len));
-
-local int inflateStateCheck(strm)
-z_streamp strm;
-{
+local int inflateStateCheck(z_streamp strm) {
struct inflate_state FAR *state;
if (strm == Z_NULL ||
strm->zalloc == (alloc_func)0 || strm->zfree == (free_func)0)
@@ -116,9 +103,7 @@ z_streamp strm;
return 0;
}
-int ZEXPORT inflateResetKeep(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateResetKeep(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -142,9 +127,7 @@ z_streamp strm;
return Z_OK;
}
-int ZEXPORT inflateReset(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateReset(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -155,10 +138,7 @@ z_streamp strm;
return inflateResetKeep(strm);
}
-int ZEXPORT inflateReset2(strm, windowBits)
-z_streamp strm;
-int windowBits;
-{
+int ZEXPORT inflateReset2(z_streamp strm, int windowBits) {
int wrap;
struct inflate_state FAR *state;
@@ -195,12 +175,8 @@ int windowBits;
return inflateReset(strm);
}
-int ZEXPORT inflateInit2_(strm, windowBits, version, stream_size)
-z_streamp strm;
-int windowBits;
-const char *version;
-int stream_size;
-{
+int ZEXPORT inflateInit2_(z_streamp strm, int windowBits,
+ const char *version, int stream_size) {
int ret;
struct inflate_state FAR *state;
@@ -239,22 +215,17 @@ int stream_size;
return ret;
}
-int ZEXPORT inflateInit_(strm, version, stream_size)
-z_streamp strm;
-const char *version;
-int stream_size;
-{
+int ZEXPORT inflateInit_(z_streamp strm, const char *version,
+ int stream_size) {
return inflateInit2_(strm, DEF_WBITS, version, stream_size);
}
-int ZEXPORT inflatePrime(strm, bits, value)
-z_streamp strm;
-int bits;
-int value;
-{
+int ZEXPORT inflatePrime(z_streamp strm, int bits, int value) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
+ if (bits == 0)
+ return Z_OK;
state = (struct inflate_state FAR *)strm->state;
if (bits < 0) {
state->hold = 0;
@@ -278,9 +249,7 @@ int value;
used for threaded applications, since the rewriting of the tables and virgin
may not be thread-safe.
*/
-local void fixedtables(state)
-struct inflate_state FAR *state;
-{
+local void fixedtables(struct inflate_state FAR *state) {
#ifdef BUILDFIXED
static int virgin = 1;
static code *lenfix, *distfix;
@@ -342,7 +311,7 @@ struct inflate_state FAR *state;
a.out > inffixed.h
*/
-void makefixed()
+void makefixed(void)
{
unsigned low, size;
struct inflate_state state;
@@ -396,11 +365,7 @@ void makefixed()
output will fall in the output data, making match copies simpler and faster.
The advantage may be dependent on the size of the processor's data caches.
*/
-local int updatewindow(strm, end, copy)
-z_streamp strm;
-const Bytef *end;
-unsigned copy;
-{
+local int updatewindow(z_streamp strm, const Bytef *end, unsigned copy) {
struct inflate_state FAR *state;
unsigned dist;
@@ -622,10 +587,7 @@ unsigned copy;
will return Z_BUF_ERROR if it has not reached the end of the stream.
*/
-int ZEXPORT inflate(strm, flush)
-z_streamp strm;
-int flush;
-{
+int ZEXPORT inflate(z_streamp strm, int flush) {
struct inflate_state FAR *state;
z_const unsigned char FAR *next; /* next input */
unsigned char FAR *put; /* next output */
@@ -1301,9 +1263,7 @@ int flush;
return ret;
}
-int ZEXPORT inflateEnd(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateEnd(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm))
return Z_STREAM_ERROR;
@@ -1315,11 +1275,8 @@ z_streamp strm;
return Z_OK;
}
-int ZEXPORT inflateGetDictionary(strm, dictionary, dictLength)
-z_streamp strm;
-Bytef *dictionary;
-uInt *dictLength;
-{
+int ZEXPORT inflateGetDictionary(z_streamp strm, Bytef *dictionary,
+ uInt *dictLength) {
struct inflate_state FAR *state;
/* check state */
@@ -1338,11 +1295,8 @@ uInt *dictLength;
return Z_OK;
}
-int ZEXPORT inflateSetDictionary(strm, dictionary, dictLength)
-z_streamp strm;
-const Bytef *dictionary;
-uInt dictLength;
-{
+int ZEXPORT inflateSetDictionary(z_streamp strm, const Bytef *dictionary,
+ uInt dictLength) {
struct inflate_state FAR *state;
unsigned long dictid;
int ret;
@@ -1373,10 +1327,7 @@ uInt dictLength;
return Z_OK;
}
-int ZEXPORT inflateGetHeader(strm, head)
-z_streamp strm;
-gz_headerp head;
-{
+int ZEXPORT inflateGetHeader(z_streamp strm, gz_headerp head) {
struct inflate_state FAR *state;
/* check state */
@@ -1401,11 +1352,8 @@ gz_headerp head;
called again with more data and the *have state. *have is initialized to
zero for the first call.
*/
-local unsigned syncsearch(have, buf, len)
-unsigned FAR *have;
-const unsigned char FAR *buf;
-unsigned len;
-{
+local unsigned syncsearch(unsigned FAR *have, const unsigned char FAR *buf,
+ unsigned len) {
unsigned got;
unsigned next;
@@ -1424,9 +1372,7 @@ unsigned len;
return next;
}
-int ZEXPORT inflateSync(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateSync(z_streamp strm) {
unsigned len; /* number of bytes to look at or looked at */
int flags; /* temporary to save header status */
unsigned long in, out; /* temporary to save total_in and total_out */
@@ -1441,7 +1387,7 @@ z_streamp strm;
/* if first time, start search in bit buffer */
if (state->mode != SYNC) {
state->mode = SYNC;
- state->hold <<= state->bits & 7;
+ state->hold >>= state->bits & 7;
state->bits -= state->bits & 7;
len = 0;
while (state->bits >= 8) {
@@ -1482,9 +1428,7 @@ z_streamp strm;
block. When decompressing, PPP checks that at the end of input packet,
inflate is waiting for these length bytes.
*/
-int ZEXPORT inflateSyncPoint(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateSyncPoint(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1492,10 +1436,7 @@ z_streamp strm;
return state->mode == STORED && state->bits == 0;
}
-int ZEXPORT inflateCopy(dest, source)
-z_streamp dest;
-z_streamp source;
-{
+int ZEXPORT inflateCopy(z_streamp dest, z_streamp source) {
struct inflate_state FAR *state;
struct inflate_state FAR *copy;
unsigned char FAR *window;
@@ -1539,10 +1480,7 @@ z_streamp source;
return Z_OK;
}
-int ZEXPORT inflateUndermine(strm, subvert)
-z_streamp strm;
-int subvert;
-{
+int ZEXPORT inflateUndermine(z_streamp strm, int subvert) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1557,10 +1495,7 @@ int subvert;
#endif
}
-int ZEXPORT inflateValidate(strm, check)
-z_streamp strm;
-int check;
-{
+int ZEXPORT inflateValidate(z_streamp strm, int check) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1572,9 +1507,7 @@ int check;
return Z_OK;
}
-long ZEXPORT inflateMark(strm)
-z_streamp strm;
-{
+long ZEXPORT inflateMark(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm))
@@ -1585,9 +1518,7 @@ z_streamp strm;
(state->mode == MATCH ? state->was - state->length : 0));
}
-unsigned long ZEXPORT inflateCodesUsed(strm)
-z_streamp strm;
-{
+unsigned long ZEXPORT inflateCodesUsed(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return (unsigned long)-1;
state = (struct inflate_state FAR *)strm->state;
diff --git a/src/native/external/zlib/inftrees.c b/src/native/external/zlib/inftrees.c
index 57d2793bec93..98cfe164458c 100644
--- a/src/native/external/zlib/inftrees.c
+++ b/src/native/external/zlib/inftrees.c
@@ -1,5 +1,5 @@
/* inftrees.c -- generate Huffman trees for efficient decoding
- * Copyright (C) 1995-2022 Mark Adler
+ * Copyright (C) 1995-2024 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -9,7 +9,7 @@
#define MAXBITS 15
const char inflate_copyright[] =
- " inflate 1.2.13 Copyright 1995-2022 Mark Adler ";
+ " inflate 1.3.1 Copyright 1995-2024 Mark Adler ";
/*
If you use the zlib library in a product, an acknowledgment is welcome
in the documentation of your product. If for some reason you cannot
@@ -29,14 +29,9 @@ const char inflate_copyright[] =
table index bits. It will differ if the request is greater than the
longest code or if it is less than the shortest code.
*/
-int ZLIB_INTERNAL inflate_table(type, lens, codes, table, bits, work)
-codetype type;
-unsigned short FAR *lens;
-unsigned codes;
-code FAR * FAR *table;
-unsigned FAR *bits;
-unsigned short FAR *work;
-{
+int ZLIB_INTERNAL inflate_table(codetype type, unsigned short FAR *lens,
+ unsigned codes, code FAR * FAR *table,
+ unsigned FAR *bits, unsigned short FAR *work) {
unsigned len; /* a code's length in bits */
unsigned sym; /* index of code symbols */
unsigned min, max; /* minimum and maximum code lengths */
@@ -62,7 +57,7 @@ unsigned short FAR *work;
35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
static const unsigned short lext[31] = { /* Length codes 257..285 extra */
16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,
- 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 194, 65};
+ 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 203, 77};
static const unsigned short dbase[32] = { /* Distance codes 0..29 base */
1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
diff --git a/src/native/external/zlib/inftrees.h b/src/native/external/zlib/inftrees.h
index f53665311c16..396f74b5da79 100644
--- a/src/native/external/zlib/inftrees.h
+++ b/src/native/external/zlib/inftrees.h
@@ -41,8 +41,8 @@ typedef struct {
examples/enough.c found in the zlib distribution. The arguments to that
program are the number of symbols, the initial root table size, and the
maximum bit length of a code. "enough 286 9 15" for literal/length codes
- returns returns 852, and "enough 30 6 15" for distance codes returns 592.
- The initial root table size (9 or 6) is found in the fifth argument of the
+ returns 852, and "enough 30 6 15" for distance codes returns 592. The
+ initial root table size (9 or 6) is found in the fifth argument of the
inflate_table() calls in inflate.c and infback.c. If the root table size is
changed, then these maximum sizes would be need to be recalculated and
updated. */
@@ -57,6 +57,6 @@ typedef enum {
DISTS
} codetype;
-int ZLIB_INTERNAL inflate_table OF((codetype type, unsigned short FAR *lens,
- unsigned codes, code FAR * FAR *table,
- unsigned FAR *bits, unsigned short FAR *work));
+int ZLIB_INTERNAL inflate_table(codetype type, unsigned short FAR *lens,
+ unsigned codes, code FAR * FAR *table,
+ unsigned FAR *bits, unsigned short FAR *work);
diff --git a/src/native/external/zlib/treebuild.xml b/src/native/external/zlib/treebuild.xml
index 0017a45d3c5c..930b00be4a85 100644
--- a/src/native/external/zlib/treebuild.xml
+++ b/src/native/external/zlib/treebuild.xml
@@ -1,6 +1,6 @@
-
-
+
+
zip compression library
diff --git a/src/native/external/zlib/trees.c b/src/native/external/zlib/trees.c
index 8a3eec559e55..979ae4100a02 100644
--- a/src/native/external/zlib/trees.c
+++ b/src/native/external/zlib/trees.c
@@ -1,5 +1,5 @@
/* trees.c -- output deflated data using Huffman coding
- * Copyright (C) 1995-2021 Jean-loup Gailly
+ * Copyright (C) 1995-2024 Jean-loup Gailly
* detect_data_type() function provided freely by Cosmin Truta, 2006
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -122,39 +122,116 @@ struct static_tree_desc_s {
int max_length; /* max bit length for the codes */
};
-local const static_tree_desc static_l_desc =
+#ifdef NO_INIT_GLOBAL_POINTERS
+# define TCONST
+#else
+# define TCONST const
+#endif
+
+local TCONST static_tree_desc static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
-local const static_tree_desc static_d_desc =
+local TCONST static_tree_desc static_d_desc =
{static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};
-local const static_tree_desc static_bl_desc =
+local TCONST static_tree_desc static_bl_desc =
{(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};
/* ===========================================================================
- * Local (static) routines in this file.
+ * Output a short LSB first on the stream.
+ * IN assertion: there is enough room in pendingBuf.
+ */
+#define put_short(s, w) { \
+ put_byte(s, (uch)((w) & 0xff)); \
+ put_byte(s, (uch)((ush)(w) >> 8)); \
+}
+
+/* ===========================================================================
+ * Reverse the first len bits of a code, using straightforward code (a faster
+ * method would use a table)
+ * IN assertion: 1 <= len <= 15
*/
+local unsigned bi_reverse(unsigned code, int len) {
+ register unsigned res = 0;
+ do {
+ res |= code & 1;
+ code >>= 1, res <<= 1;
+ } while (--len > 0);
+ return res >> 1;
+}
-local void tr_static_init OF((void));
-local void init_block OF((deflate_state *s));
-local void pqdownheap OF((deflate_state *s, ct_data *tree, int k));
-local void gen_bitlen OF((deflate_state *s, tree_desc *desc));
-local void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count));
-local void build_tree OF((deflate_state *s, tree_desc *desc));
-local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code));
-local void send_tree OF((deflate_state *s, ct_data *tree, int max_code));
-local int build_bl_tree OF((deflate_state *s));
-local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
- int blcodes));
-local void compress_block OF((deflate_state *s, const ct_data *ltree,
- const ct_data *dtree));
-local int detect_data_type OF((deflate_state *s));
-local unsigned bi_reverse OF((unsigned code, int len));
-local void bi_windup OF((deflate_state *s));
-local void bi_flush OF((deflate_state *s));
+/* ===========================================================================
+ * Flush the bit buffer, keeping at most 7 bits in it.
+ */
+local void bi_flush(deflate_state *s) {
+ if (s->bi_valid == 16) {
+ put_short(s, s->bi_buf);
+ s->bi_buf = 0;
+ s->bi_valid = 0;
+ } else if (s->bi_valid >= 8) {
+ put_byte(s, (Byte)s->bi_buf);
+ s->bi_buf >>= 8;
+ s->bi_valid -= 8;
+ }
+}
+
+/* ===========================================================================
+ * Flush the bit buffer and align the output on a byte boundary
+ */
+local void bi_windup(deflate_state *s) {
+ if (s->bi_valid > 8) {
+ put_short(s, s->bi_buf);
+ } else if (s->bi_valid > 0) {
+ put_byte(s, (Byte)s->bi_buf);
+ }
+ s->bi_buf = 0;
+ s->bi_valid = 0;
+#ifdef ZLIB_DEBUG
+ s->bits_sent = (s->bits_sent + 7) & ~7;
+#endif
+}
+
+/* ===========================================================================
+ * Generate the codes for a given tree and bit counts (which need not be
+ * optimal).
+ * IN assertion: the array bl_count contains the bit length statistics for
+ * the given tree and the field len is set for all tree elements.
+ * OUT assertion: the field code is set for all tree elements of non
+ * zero code length.
+ */
+local void gen_codes(ct_data *tree, int max_code, ushf *bl_count) {
+ ush next_code[MAX_BITS+1]; /* next code value for each bit length */
+ unsigned code = 0; /* running code value */
+ int bits; /* bit index */
+ int n; /* code index */
+
+ /* The distribution counts are first used to generate the code values
+ * without bit reversal.
+ */
+ for (bits = 1; bits <= MAX_BITS; bits++) {
+ code = (code + bl_count[bits - 1]) << 1;
+ next_code[bits] = (ush)code;
+ }
+ /* Check that the bit counts in bl_count are consistent. The last code
+ * must be all ones.
+ */
+ Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
+ "inconsistent bit counts");
+ Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
+
+ for (n = 0; n <= max_code; n++) {
+ int len = tree[n].Len;
+ if (len == 0) continue;
+ /* Now reverse the bits */
+ tree[n].Code = (ush)bi_reverse(next_code[len]++, len);
+
+ Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
+ n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1));
+ }
+}
#ifdef GEN_TREES_H
-local void gen_trees_header OF((void));
+local void gen_trees_header(void);
#endif
#ifndef ZLIB_DEBUG
@@ -167,27 +244,12 @@ local void gen_trees_header OF((void));
send_bits(s, tree[c].Code, tree[c].Len); }
#endif
-/* ===========================================================================
- * Output a short LSB first on the stream.
- * IN assertion: there is enough room in pendingBuf.
- */
-#define put_short(s, w) { \
- put_byte(s, (uch)((w) & 0xff)); \
- put_byte(s, (uch)((ush)(w) >> 8)); \
-}
-
/* ===========================================================================
* Send a value on a given number of bits.
* IN assertion: length <= 16 and value fits in length bits.
*/
#ifdef ZLIB_DEBUG
-local void send_bits OF((deflate_state *s, int value, int length));
-
-local void send_bits(s, value, length)
- deflate_state *s;
- int value; /* value to send */
- int length; /* number of bits */
-{
+local void send_bits(deflate_state *s, int value, int length) {
Tracevv((stderr," l %2d v %4x ", length, value));
Assert(length > 0 && length <= 15, "invalid length");
s->bits_sent += (ulg)length;
@@ -229,8 +291,7 @@ local void send_bits(s, value, length)
/* ===========================================================================
* Initialize the various 'constant' tables.
*/
-local void tr_static_init()
-{
+local void tr_static_init(void) {
#if defined(GEN_TREES_H) || !defined(STDC)
static int static_init_done = 0;
int n; /* iterates over tree elements */
@@ -323,8 +384,7 @@ local void tr_static_init()
((i) == (last)? "\n};\n\n" : \
((i) % (width) == (width) - 1 ? ",\n" : ", "))
-void gen_trees_header()
-{
+void gen_trees_header(void) {
FILE *header = fopen("trees.h", "w");
int i;
@@ -373,12 +433,26 @@ void gen_trees_header()
}
#endif /* GEN_TREES_H */
+/* ===========================================================================
+ * Initialize a new block.
+ */
+local void init_block(deflate_state *s) {
+ int n; /* iterates over tree elements */
+
+ /* Initialize the trees. */
+ for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
+ for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
+ for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
+
+ s->dyn_ltree[END_BLOCK].Freq = 1;
+ s->opt_len = s->static_len = 0L;
+ s->sym_next = s->matches = 0;
+}
+
/* ===========================================================================
* Initialize the tree data structures for a new zlib stream.
*/
-void ZLIB_INTERNAL _tr_init(s)
- deflate_state *s;
-{
+void ZLIB_INTERNAL _tr_init(deflate_state *s) {
tr_static_init();
s->l_desc.dyn_tree = s->dyn_ltree;
@@ -401,24 +475,6 @@ void ZLIB_INTERNAL _tr_init(s)
init_block(s);
}
-/* ===========================================================================
- * Initialize a new block.
- */
-local void init_block(s)
- deflate_state *s;
-{
- int n; /* iterates over tree elements */
-
- /* Initialize the trees. */
- for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
- for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
- for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
-
- s->dyn_ltree[END_BLOCK].Freq = 1;
- s->opt_len = s->static_len = 0L;
- s->sym_next = s->matches = 0;
-}
-
#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */
@@ -448,11 +504,7 @@ local void init_block(s)
* when the heap property is re-established (each father smaller than its
* two sons).
*/
-local void pqdownheap(s, tree, k)
- deflate_state *s;
- ct_data *tree; /* the tree to restore */
- int k; /* node to move down */
-{
+local void pqdownheap(deflate_state *s, ct_data *tree, int k) {
int v = s->heap[k];
int j = k << 1; /* left son of k */
while (j <= s->heap_len) {
@@ -483,10 +535,7 @@ local void pqdownheap(s, tree, k)
* The length opt_len is updated; static_len is also updated if stree is
* not null.
*/
-local void gen_bitlen(s, desc)
- deflate_state *s;
- tree_desc *desc; /* the tree descriptor */
-{
+local void gen_bitlen(deflate_state *s, tree_desc *desc) {
ct_data *tree = desc->dyn_tree;
int max_code = desc->max_code;
const ct_data *stree = desc->stat_desc->static_tree;
@@ -561,48 +610,9 @@ local void gen_bitlen(s, desc)
}
}
-/* ===========================================================================
- * Generate the codes for a given tree and bit counts (which need not be
- * optimal).
- * IN assertion: the array bl_count contains the bit length statistics for
- * the given tree and the field len is set for all tree elements.
- * OUT assertion: the field code is set for all tree elements of non
- * zero code length.
- */
-local void gen_codes(tree, max_code, bl_count)
- ct_data *tree; /* the tree to decorate */
- int max_code; /* largest code with non zero frequency */
- ushf *bl_count; /* number of codes at each bit length */
-{
- ush next_code[MAX_BITS+1]; /* next code value for each bit length */
- unsigned code = 0; /* running code value */
- int bits; /* bit index */
- int n; /* code index */
-
- /* The distribution counts are first used to generate the code values
- * without bit reversal.
- */
- for (bits = 1; bits <= MAX_BITS; bits++) {
- code = (code + bl_count[bits - 1]) << 1;
- next_code[bits] = (ush)code;
- }
- /* Check that the bit counts in bl_count are consistent. The last code
- * must be all ones.
- */
- Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
- "inconsistent bit counts");
- Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
-
- for (n = 0; n <= max_code; n++) {
- int len = tree[n].Len;
- if (len == 0) continue;
- /* Now reverse the bits */
- tree[n].Code = (ush)bi_reverse(next_code[len]++, len);
-
- Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
- n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1));
- }
-}
+#ifdef DUMP_BL_TREE
+# include
+#endif
/* ===========================================================================
* Construct one Huffman tree and assigns the code bit strings and lengths.
@@ -612,10 +622,7 @@ local void gen_codes(tree, max_code, bl_count)
* and corresponding code. The length opt_len is updated; static_len is
* also updated if stree is not null. The field max_code is set.
*/
-local void build_tree(s, desc)
- deflate_state *s;
- tree_desc *desc; /* the tree descriptor */
-{
+local void build_tree(deflate_state *s, tree_desc *desc) {
ct_data *tree = desc->dyn_tree;
const ct_data *stree = desc->stat_desc->static_tree;
int elems = desc->stat_desc->elems;
@@ -700,11 +707,7 @@ local void build_tree(s, desc)
* Scan a literal or distance tree to determine the frequencies of the codes
* in the bit length tree.
*/
-local void scan_tree(s, tree, max_code)
- deflate_state *s;
- ct_data *tree; /* the tree to be scanned */
- int max_code; /* and its largest code of non zero frequency */
-{
+local void scan_tree(deflate_state *s, ct_data *tree, int max_code) {
int n; /* iterates over all tree elements */
int prevlen = -1; /* last emitted length */
int curlen; /* length of current code */
@@ -745,11 +748,7 @@ local void scan_tree(s, tree, max_code)
* Send a literal or distance tree in compressed form, using the codes in
* bl_tree.
*/
-local void send_tree(s, tree, max_code)
- deflate_state *s;
- ct_data *tree; /* the tree to be scanned */
- int max_code; /* and its largest code of non zero frequency */
-{
+local void send_tree(deflate_state *s, ct_data *tree, int max_code) {
int n; /* iterates over all tree elements */
int prevlen = -1; /* last emitted length */
int curlen; /* length of current code */
@@ -796,9 +795,7 @@ local void send_tree(s, tree, max_code)
* Construct the Huffman tree for the bit lengths and return the index in
* bl_order of the last bit length code to send.
*/
-local int build_bl_tree(s)
- deflate_state *s;
-{
+local int build_bl_tree(deflate_state *s) {
int max_blindex; /* index of last bit length code of non zero freq */
/* Determine the bit length frequencies for literal and distance trees */
@@ -831,10 +828,8 @@ local int build_bl_tree(s)
* lengths of the bit length codes, the literal tree and the distance tree.
* IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
*/
-local void send_all_trees(s, lcodes, dcodes, blcodes)
- deflate_state *s;
- int lcodes, dcodes, blcodes; /* number of codes for each tree */
-{
+local void send_all_trees(deflate_state *s, int lcodes, int dcodes,
+ int blcodes) {
int rank; /* index in bl_order */
Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
@@ -860,12 +855,8 @@ local void send_all_trees(s, lcodes, dcodes, blcodes)
/* ===========================================================================
* Send a stored block
*/
-void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
- deflate_state *s;
- charf *buf; /* input block */
- ulg stored_len; /* length of input block */
- int last; /* one if this is the last block for a file */
-{
+void ZLIB_INTERNAL _tr_stored_block(deflate_state *s, charf *buf,
+ ulg stored_len, int last) {
send_bits(s, (STORED_BLOCK<<1) + last, 3); /* send block type */
bi_windup(s); /* align on byte boundary */
put_short(s, (ush)stored_len);
@@ -884,9 +875,7 @@ void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
/* ===========================================================================
* Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
*/
-void ZLIB_INTERNAL _tr_flush_bits(s)
- deflate_state *s;
-{
+void ZLIB_INTERNAL _tr_flush_bits(deflate_state *s) {
bi_flush(s);
}
@@ -894,9 +883,7 @@ void ZLIB_INTERNAL _tr_flush_bits(s)
* Send one empty static block to give enough lookahead for inflate.
* This takes 10 bits, of which 7 may remain in the bit buffer.
*/
-void ZLIB_INTERNAL _tr_align(s)
- deflate_state *s;
-{
+void ZLIB_INTERNAL _tr_align(deflate_state *s) {
send_bits(s, STATIC_TREES<<1, 3);
send_code(s, END_BLOCK, static_ltree);
#ifdef ZLIB_DEBUG
@@ -905,16 +892,108 @@ void ZLIB_INTERNAL _tr_align(s)
bi_flush(s);
}
+/* ===========================================================================
+ * Send the block data compressed using the given Huffman trees
+ */
+local void compress_block(deflate_state *s, const ct_data *ltree,
+ const ct_data *dtree) {
+ unsigned dist; /* distance of matched string */
+ int lc; /* match length or unmatched char (if dist == 0) */
+ unsigned sx = 0; /* running index in symbol buffers */
+ unsigned code; /* the code to send */
+ int extra; /* number of extra bits to send */
+
+ if (s->sym_next != 0) do {
+#ifdef LIT_MEM
+ dist = s->d_buf[sx];
+ lc = s->l_buf[sx++];
+#else
+ dist = s->sym_buf[sx++] & 0xff;
+ dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
+ lc = s->sym_buf[sx++];
+#endif
+ if (dist == 0) {
+ send_code(s, lc, ltree); /* send a literal byte */
+ Tracecv(isgraph(lc), (stderr," '%c' ", lc));
+ } else {
+ /* Here, lc is the match length - MIN_MATCH */
+ code = _length_code[lc];
+ send_code(s, code + LITERALS + 1, ltree); /* send length code */
+ extra = extra_lbits[code];
+ if (extra != 0) {
+ lc -= base_length[code];
+ send_bits(s, lc, extra); /* send the extra length bits */
+ }
+ dist--; /* dist is now the match distance - 1 */
+ code = d_code(dist);
+ Assert (code < D_CODES, "bad d_code");
+
+ send_code(s, code, dtree); /* send the distance code */
+ extra = extra_dbits[code];
+ if (extra != 0) {
+ dist -= (unsigned)base_dist[code];
+ send_bits(s, dist, extra); /* send the extra distance bits */
+ }
+ } /* literal or match pair ? */
+
+ /* Check for no overlay of pending_buf on needed symbols */
+#ifdef LIT_MEM
+ Assert(s->pending < 2 * (s->lit_bufsize + sx), "pendingBuf overflow");
+#else
+ Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");
+#endif
+
+ } while (sx < s->sym_next);
+
+ send_code(s, END_BLOCK, ltree);
+}
+
+/* ===========================================================================
+ * Check if the data type is TEXT or BINARY, using the following algorithm:
+ * - TEXT if the two conditions below are satisfied:
+ * a) There are no non-portable control characters belonging to the
+ * "block list" (0..6, 14..25, 28..31).
+ * b) There is at least one printable character belonging to the
+ * "allow list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
+ * - BINARY otherwise.
+ * - The following partially-portable control characters form a
+ * "gray list" that is ignored in this detection algorithm:
+ * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
+ * IN assertion: the fields Freq of dyn_ltree are set.
+ */
+local int detect_data_type(deflate_state *s) {
+ /* block_mask is the bit mask of block-listed bytes
+ * set bits 0..6, 14..25, and 28..31
+ * 0xf3ffc07f = binary 11110011111111111100000001111111
+ */
+ unsigned long block_mask = 0xf3ffc07fUL;
+ int n;
+
+ /* Check for non-textual ("block-listed") bytes. */
+ for (n = 0; n <= 31; n++, block_mask >>= 1)
+ if ((block_mask & 1) && (s->dyn_ltree[n].Freq != 0))
+ return Z_BINARY;
+
+ /* Check for textual ("allow-listed") bytes. */
+ if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
+ || s->dyn_ltree[13].Freq != 0)
+ return Z_TEXT;
+ for (n = 32; n < LITERALS; n++)
+ if (s->dyn_ltree[n].Freq != 0)
+ return Z_TEXT;
+
+ /* There are no "block-listed" or "allow-listed" bytes:
+ * this stream either is empty or has tolerated ("gray-listed") bytes only.
+ */
+ return Z_BINARY;
+}
+
/* ===========================================================================
* Determine the best encoding for the current block: dynamic trees, static
* trees or store, and write out the encoded block.
*/
-void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
- deflate_state *s;
- charf *buf; /* input block, or NULL if too old */
- ulg stored_len; /* length of input block */
- int last; /* one if this is the last block for a file */
-{
+void ZLIB_INTERNAL _tr_flush_block(deflate_state *s, charf *buf,
+ ulg stored_len, int last) {
ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
int max_blindex = 0; /* index of last bit length code of non zero freq */
@@ -1011,14 +1090,15 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
* Save the match info and tally the frequency counts. Return true if
* the current block must be flushed.
*/
-int ZLIB_INTERNAL _tr_tally(s, dist, lc)
- deflate_state *s;
- unsigned dist; /* distance of matched string */
- unsigned lc; /* match length - MIN_MATCH or unmatched char (dist==0) */
-{
+int ZLIB_INTERNAL _tr_tally(deflate_state *s, unsigned dist, unsigned lc) {
+#ifdef LIT_MEM
+ s->d_buf[s->sym_next] = (ush)dist;
+ s->l_buf[s->sym_next++] = (uch)lc;
+#else
s->sym_buf[s->sym_next++] = (uch)dist;
s->sym_buf[s->sym_next++] = (uch)(dist >> 8);
s->sym_buf[s->sym_next++] = (uch)lc;
+#endif
if (dist == 0) {
/* lc is the unmatched char */
s->dyn_ltree[lc].Freq++;
@@ -1035,147 +1115,3 @@ int ZLIB_INTERNAL _tr_tally(s, dist, lc)
}
return (s->sym_next == s->sym_end);
}
-
-/* ===========================================================================
- * Send the block data compressed using the given Huffman trees
- */
-local void compress_block(s, ltree, dtree)
- deflate_state *s;
- const ct_data *ltree; /* literal tree */
- const ct_data *dtree; /* distance tree */
-{
- unsigned dist; /* distance of matched string */
- int lc; /* match length or unmatched char (if dist == 0) */
- unsigned sx = 0; /* running index in sym_buf */
- unsigned code; /* the code to send */
- int extra; /* number of extra bits to send */
-
- if (s->sym_next != 0) do {
- dist = s->sym_buf[sx++] & 0xff;
- dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
- lc = s->sym_buf[sx++];
- if (dist == 0) {
- send_code(s, lc, ltree); /* send a literal byte */
- Tracecv(isgraph(lc), (stderr," '%c' ", lc));
- } else {
- /* Here, lc is the match length - MIN_MATCH */
- code = _length_code[lc];
- send_code(s, code + LITERALS + 1, ltree); /* send length code */
- extra = extra_lbits[code];
- if (extra != 0) {
- lc -= base_length[code];
- send_bits(s, lc, extra); /* send the extra length bits */
- }
- dist--; /* dist is now the match distance - 1 */
- code = d_code(dist);
- Assert (code < D_CODES, "bad d_code");
-
- send_code(s, code, dtree); /* send the distance code */
- extra = extra_dbits[code];
- if (extra != 0) {
- dist -= (unsigned)base_dist[code];
- send_bits(s, dist, extra); /* send the extra distance bits */
- }
- } /* literal or match pair ? */
-
- /* Check that the overlay between pending_buf and sym_buf is ok: */
- Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");
-
- } while (sx < s->sym_next);
-
- send_code(s, END_BLOCK, ltree);
-}
-
-/* ===========================================================================
- * Check if the data type is TEXT or BINARY, using the following algorithm:
- * - TEXT if the two conditions below are satisfied:
- * a) There are no non-portable control characters belonging to the
- * "block list" (0..6, 14..25, 28..31).
- * b) There is at least one printable character belonging to the
- * "allow list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
- * - BINARY otherwise.
- * - The following partially-portable control characters form a
- * "gray list" that is ignored in this detection algorithm:
- * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
- * IN assertion: the fields Freq of dyn_ltree are set.
- */
-local int detect_data_type(s)
- deflate_state *s;
-{
- /* block_mask is the bit mask of block-listed bytes
- * set bits 0..6, 14..25, and 28..31
- * 0xf3ffc07f = binary 11110011111111111100000001111111
- */
- unsigned long block_mask = 0xf3ffc07fUL;
- int n;
-
- /* Check for non-textual ("block-listed") bytes. */
- for (n = 0; n <= 31; n++, block_mask >>= 1)
- if ((block_mask & 1) && (s->dyn_ltree[n].Freq != 0))
- return Z_BINARY;
-
- /* Check for textual ("allow-listed") bytes. */
- if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
- || s->dyn_ltree[13].Freq != 0)
- return Z_TEXT;
- for (n = 32; n < LITERALS; n++)
- if (s->dyn_ltree[n].Freq != 0)
- return Z_TEXT;
-
- /* There are no "block-listed" or "allow-listed" bytes:
- * this stream either is empty or has tolerated ("gray-listed") bytes only.
- */
- return Z_BINARY;
-}
-
-/* ===========================================================================
- * Reverse the first len bits of a code, using straightforward code (a faster
- * method would use a table)
- * IN assertion: 1 <= len <= 15
- */
-local unsigned bi_reverse(code, len)
- unsigned code; /* the value to invert */
- int len; /* its bit length */
-{
- register unsigned res = 0;
- do {
- res |= code & 1;
- code >>= 1, res <<= 1;
- } while (--len > 0);
- return res >> 1;
-}
-
-/* ===========================================================================
- * Flush the bit buffer, keeping at most 7 bits in it.
- */
-local void bi_flush(s)
- deflate_state *s;
-{
- if (s->bi_valid == 16) {
- put_short(s, s->bi_buf);
- s->bi_buf = 0;
- s->bi_valid = 0;
- } else if (s->bi_valid >= 8) {
- put_byte(s, (Byte)s->bi_buf);
- s->bi_buf >>= 8;
- s->bi_valid -= 8;
- }
-}
-
-/* ===========================================================================
- * Flush the bit buffer and align the output on a byte boundary
- */
-local void bi_windup(s)
- deflate_state *s;
-{
- if (s->bi_valid > 8) {
- put_short(s, s->bi_buf);
- } else if (s->bi_valid > 0) {
- put_byte(s, (Byte)s->bi_buf);
- }
- s->bi_buf = 0;
- s->bi_valid = 0;
-#ifdef ZLIB_DEBUG
- s->bits_sent = (s->bits_sent + 7) & ~7;
-#endif
-}
diff --git a/src/native/external/zlib/uncompr.c b/src/native/external/zlib/uncompr.c
index f9532f46c1a6..5e256663b451 100644
--- a/src/native/external/zlib/uncompr.c
+++ b/src/native/external/zlib/uncompr.c
@@ -24,12 +24,8 @@
Z_DATA_ERROR if the input data was corrupted, including if the input data is
an incomplete zlib stream.
*/
-int ZEXPORT uncompress2(dest, destLen, source, sourceLen)
- Bytef *dest;
- uLongf *destLen;
- const Bytef *source;
- uLong *sourceLen;
-{
+int ZEXPORT uncompress2(Bytef *dest, uLongf *destLen, const Bytef *source,
+ uLong *sourceLen) {
z_stream stream;
int err;
const uInt max = (uInt)-1;
@@ -83,11 +79,7 @@ int ZEXPORT uncompress2(dest, destLen, source, sourceLen)
err;
}
-int ZEXPORT uncompress(dest, destLen, source, sourceLen)
- Bytef *dest;
- uLongf *destLen;
- const Bytef *source;
- uLong sourceLen;
-{
+int ZEXPORT uncompress(Bytef *dest, uLongf *destLen, const Bytef *source,
+ uLong sourceLen) {
return uncompress2(dest, destLen, source, &sourceLen);
}
diff --git a/src/native/external/zlib/zconf.h b/src/native/external/zlib/zconf.h
index bf977d3e70ad..62adc8d8431f 100644
--- a/src/native/external/zlib/zconf.h
+++ b/src/native/external/zlib/zconf.h
@@ -1,5 +1,5 @@
/* zconf.h -- configuration of the zlib compression library
- * Copyright (C) 1995-2016 Jean-loup Gailly, Mark Adler
+ * Copyright (C) 1995-2024 Jean-loup Gailly, Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -241,7 +241,11 @@
#endif
#ifdef Z_SOLO
- typedef unsigned long z_size_t;
+# ifdef _WIN64
+ typedef unsigned long long z_size_t;
+# else
+ typedef unsigned long z_size_t;
+# endif
#else
# define z_longlong long long
# if defined(NO_SIZE_T)
@@ -296,14 +300,6 @@
# endif
#endif
-#ifndef Z_ARG /* function prototypes for stdarg */
-# if defined(STDC) || defined(Z_HAVE_STDARG_H)
-# define Z_ARG(args) args
-# else
-# define Z_ARG(args) ()
-# endif
-#endif
-
/* The following definitions for FAR are needed only for MSDOS mixed
* model programming (small or medium model with some far allocations).
* This was tested only with MSC; for other MSDOS compilers you may have
@@ -520,7 +516,7 @@ typedef uLong FAR uLongf;
#if !defined(_WIN32) && defined(Z_LARGE64)
# define z_off64_t off64_t
#else
-# if defined(_WIN32) && !defined(__GNUC__) && !defined(Z_SOLO)
+# if defined(_WIN32) && !defined(__GNUC__)
# define z_off64_t __int64
# else
# define z_off64_t z_off_t
diff --git a/src/native/external/zlib/zconf.h.cmakein b/src/native/external/zlib/zconf.h.cmakein
index 247ba2461dd0..0abe3bc9d8fa 100644
--- a/src/native/external/zlib/zconf.h.cmakein
+++ b/src/native/external/zlib/zconf.h.cmakein
@@ -1,5 +1,5 @@
/* zconf.h -- configuration of the zlib compression library
- * Copyright (C) 1995-2016 Jean-loup Gailly, Mark Adler
+ * Copyright (C) 1995-2024 Jean-loup Gailly, Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -243,7 +243,11 @@
#endif
#ifdef Z_SOLO
- typedef unsigned long z_size_t;
+# ifdef _WIN64
+ typedef unsigned long long z_size_t;
+# else
+ typedef unsigned long z_size_t;
+# endif
#else
# define z_longlong long long
# if defined(NO_SIZE_T)
@@ -298,14 +302,6 @@
# endif
#endif
-#ifndef Z_ARG /* function prototypes for stdarg */
-# if defined(STDC) || defined(Z_HAVE_STDARG_H)
-# define Z_ARG(args) args
-# else
-# define Z_ARG(args) ()
-# endif
-#endif
-
/* The following definitions for FAR are needed only for MSDOS mixed
* model programming (small or medium model with some far allocations).
* This was tested only with MSC; for other MSDOS compilers you may have
@@ -522,7 +518,7 @@ typedef uLong FAR uLongf;
#if !defined(_WIN32) && defined(Z_LARGE64)
# define z_off64_t off64_t
#else
-# if defined(_WIN32) && !defined(__GNUC__) && !defined(Z_SOLO)
+# if defined(_WIN32) && !defined(__GNUC__)
# define z_off64_t __int64
# else
# define z_off64_t z_off_t
diff --git a/src/native/external/zlib/zconf.h.in b/src/native/external/zlib/zconf.h.in
index bf977d3e70ad..62adc8d8431f 100644
--- a/src/native/external/zlib/zconf.h.in
+++ b/src/native/external/zlib/zconf.h.in
@@ -1,5 +1,5 @@
/* zconf.h -- configuration of the zlib compression library
- * Copyright (C) 1995-2016 Jean-loup Gailly, Mark Adler
+ * Copyright (C) 1995-2024 Jean-loup Gailly, Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -241,7 +241,11 @@
#endif
#ifdef Z_SOLO
- typedef unsigned long z_size_t;
+# ifdef _WIN64
+ typedef unsigned long long z_size_t;
+# else
+ typedef unsigned long z_size_t;
+# endif
#else
# define z_longlong long long
# if defined(NO_SIZE_T)
@@ -296,14 +300,6 @@
# endif
#endif
-#ifndef Z_ARG /* function prototypes for stdarg */
-# if defined(STDC) || defined(Z_HAVE_STDARG_H)
-# define Z_ARG(args) args
-# else
-# define Z_ARG(args) ()
-# endif
-#endif
-
/* The following definitions for FAR are needed only for MSDOS mixed
* model programming (small or medium model with some far allocations).
* This was tested only with MSC; for other MSDOS compilers you may have
@@ -520,7 +516,7 @@ typedef uLong FAR uLongf;
#if !defined(_WIN32) && defined(Z_LARGE64)
# define z_off64_t off64_t
#else
-# if defined(_WIN32) && !defined(__GNUC__) && !defined(Z_SOLO)
+# if defined(_WIN32) && !defined(__GNUC__)
# define z_off64_t __int64
# else
# define z_off64_t z_off_t
diff --git a/src/native/external/zlib/zlib.3 b/src/native/external/zlib/zlib.3
index 6f6e91404dff..c716020ea9c4 100644
--- a/src/native/external/zlib/zlib.3
+++ b/src/native/external/zlib/zlib.3
@@ -1,4 +1,4 @@
-.TH ZLIB 3 "13 Oct 2022"
+.TH ZLIB 3 "22 Jan 2024"
.SH NAME
zlib \- compression/decompression library
.SH SYNOPSIS
@@ -105,9 +105,9 @@ before asking for help.
Send questions and/or comments to zlib@gzip.org,
or (for the Windows DLL version) to Gilles Vollant (info@winimage.com).
.SH AUTHORS AND LICENSE
-Version 1.2.13
+Version 1.3.1
.LP
-Copyright (C) 1995-2022 Jean-loup Gailly and Mark Adler
+Copyright (C) 1995-2024 Jean-loup Gailly and Mark Adler
.LP
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
diff --git a/src/native/external/zlib/zlib.h b/src/native/external/zlib/zlib.h
index 953cb5012dc2..8d4b932eaf6a 100644
--- a/src/native/external/zlib/zlib.h
+++ b/src/native/external/zlib/zlib.h
@@ -1,7 +1,7 @@
/* zlib.h -- interface of the 'zlib' general purpose compression library
- version 1.2.13, October 13th, 2022
+ version 1.3.1, January 22nd, 2024
- Copyright (C) 1995-2022 Jean-loup Gailly and Mark Adler
+ Copyright (C) 1995-2024 Jean-loup Gailly and Mark Adler
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
@@ -37,11 +37,11 @@
extern "C" {
#endif
-#define ZLIB_VERSION "1.2.13"
-#define ZLIB_VERNUM 0x12d0
+#define ZLIB_VERSION "1.3.1"
+#define ZLIB_VERNUM 0x1310
#define ZLIB_VER_MAJOR 1
-#define ZLIB_VER_MINOR 2
-#define ZLIB_VER_REVISION 13
+#define ZLIB_VER_MINOR 3
+#define ZLIB_VER_REVISION 1
#define ZLIB_VER_SUBREVISION 0
/*
@@ -78,8 +78,8 @@ extern "C" {
even in the case of corrupted input.
*/
-typedef voidpf (*alloc_func) OF((voidpf opaque, uInt items, uInt size));
-typedef void (*free_func) OF((voidpf opaque, voidpf address));
+typedef voidpf (*alloc_func)(voidpf opaque, uInt items, uInt size);
+typedef void (*free_func)(voidpf opaque, voidpf address);
struct internal_state;
@@ -217,7 +217,7 @@ typedef gz_header FAR *gz_headerp;
/* basic functions */
-ZEXTERN const char * ZEXPORT zlibVersion OF((void));
+ZEXTERN const char * ZEXPORT zlibVersion(void);
/* The application can compare zlibVersion and ZLIB_VERSION for consistency.
If the first character differs, the library code actually used is not
compatible with the zlib.h header file used by the application. This check
@@ -225,12 +225,12 @@ ZEXTERN const char * ZEXPORT zlibVersion OF((void));
*/
/*
-ZEXTERN int ZEXPORT deflateInit OF((z_streamp strm, int level));
+ZEXTERN int ZEXPORT deflateInit(z_streamp strm, int level);
Initializes the internal stream state for compression. The fields
zalloc, zfree and opaque must be initialized before by the caller. If
zalloc and zfree are set to Z_NULL, deflateInit updates them to use default
- allocation functions.
+ allocation functions. total_in, total_out, adler, and msg are initialized.
The compression level must be Z_DEFAULT_COMPRESSION, or between 0 and 9:
1 gives best speed, 9 gives best compression, 0 gives no compression at all
@@ -247,7 +247,7 @@ ZEXTERN int ZEXPORT deflateInit OF((z_streamp strm, int level));
*/
-ZEXTERN int ZEXPORT deflate OF((z_streamp strm, int flush));
+ZEXTERN int ZEXPORT deflate(z_streamp strm, int flush);
/*
deflate compresses as much data as possible, and stops when the input
buffer becomes empty or the output buffer becomes full. It may introduce
@@ -320,8 +320,8 @@ ZEXTERN int ZEXPORT deflate OF((z_streamp strm, int flush));
with the same value of the flush parameter and more output space (updated
avail_out), until the flush is complete (deflate returns with non-zero
avail_out). In the case of a Z_FULL_FLUSH or Z_SYNC_FLUSH, make sure that
- avail_out is greater than six to avoid repeated flush markers due to
- avail_out == 0 on return.
+ avail_out is greater than six when the flush marker begins, in order to avoid
+ repeated flush markers upon calling deflate() again when avail_out == 0.
If the parameter flush is set to Z_FINISH, pending input is processed,
pending output is flushed and deflate returns with Z_STREAM_END if there was
@@ -360,7 +360,7 @@ ZEXTERN int ZEXPORT deflate OF((z_streamp strm, int flush));
*/
-ZEXTERN int ZEXPORT deflateEnd OF((z_streamp strm));
+ZEXTERN int ZEXPORT deflateEnd(z_streamp strm);
/*
All dynamically allocated data structures for this stream are freed.
This function discards any unprocessed input and does not flush any pending
@@ -375,7 +375,7 @@ ZEXTERN int ZEXPORT deflateEnd OF((z_streamp strm));
/*
-ZEXTERN int ZEXPORT inflateInit OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateInit(z_streamp strm);
Initializes the internal stream state for decompression. The fields
next_in, avail_in, zalloc, zfree and opaque must be initialized before by
@@ -383,7 +383,8 @@ ZEXTERN int ZEXPORT inflateInit OF((z_streamp strm));
read or consumed. The allocation of a sliding window will be deferred to
the first call of inflate (if the decompression does not complete on the
first call). If zalloc and zfree are set to Z_NULL, inflateInit updates
- them to use default allocation functions.
+ them to use default allocation functions. total_in, total_out, adler, and
+ msg are initialized.
inflateInit returns Z_OK if success, Z_MEM_ERROR if there was not enough
memory, Z_VERSION_ERROR if the zlib library version is incompatible with the
@@ -397,7 +398,7 @@ ZEXTERN int ZEXPORT inflateInit OF((z_streamp strm));
*/
-ZEXTERN int ZEXPORT inflate OF((z_streamp strm, int flush));
+ZEXTERN int ZEXPORT inflate(z_streamp strm, int flush);
/*
inflate decompresses as much data as possible, and stops when the input
buffer becomes empty or the output buffer becomes full. It may introduce
@@ -517,7 +518,7 @@ ZEXTERN int ZEXPORT inflate OF((z_streamp strm, int flush));
*/
-ZEXTERN int ZEXPORT inflateEnd OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateEnd(z_streamp strm);
/*
All dynamically allocated data structures for this stream are freed.
This function discards any unprocessed input and does not flush any pending
@@ -535,12 +536,12 @@ ZEXTERN int ZEXPORT inflateEnd OF((z_streamp strm));
*/
/*
-ZEXTERN int ZEXPORT deflateInit2 OF((z_streamp strm,
- int level,
- int method,
- int windowBits,
- int memLevel,
- int strategy));
+ZEXTERN int ZEXPORT deflateInit2(z_streamp strm,
+ int level,
+ int method,
+ int windowBits,
+ int memLevel,
+ int strategy);
This is another version of deflateInit with more compression options. The
fields zalloc, zfree and opaque must be initialized before by the caller.
@@ -607,9 +608,9 @@ ZEXTERN int ZEXPORT deflateInit2 OF((z_streamp strm,
compression: this will be done by deflate().
*/
-ZEXTERN int ZEXPORT deflateSetDictionary OF((z_streamp strm,
- const Bytef *dictionary,
- uInt dictLength));
+ZEXTERN int ZEXPORT deflateSetDictionary(z_streamp strm,
+ const Bytef *dictionary,
+ uInt dictLength);
/*
Initializes the compression dictionary from the given byte sequence
without producing any compressed output. When using the zlib format, this
@@ -651,9 +652,9 @@ ZEXTERN int ZEXPORT deflateSetDictionary OF((z_streamp strm,
not perform any compression: this will be done by deflate().
*/
-ZEXTERN int ZEXPORT deflateGetDictionary OF((z_streamp strm,
- Bytef *dictionary,
- uInt *dictLength));
+ZEXTERN int ZEXPORT deflateGetDictionary(z_streamp strm,
+ Bytef *dictionary,
+ uInt *dictLength);
/*
Returns the sliding dictionary being maintained by deflate. dictLength is
set to the number of bytes in the dictionary, and that many bytes are copied
@@ -673,8 +674,8 @@ ZEXTERN int ZEXPORT deflateGetDictionary OF((z_streamp strm,
stream state is inconsistent.
*/
-ZEXTERN int ZEXPORT deflateCopy OF((z_streamp dest,
- z_streamp source));
+ZEXTERN int ZEXPORT deflateCopy(z_streamp dest,
+ z_streamp source);
/*
Sets the destination stream as a complete copy of the source stream.
@@ -691,20 +692,20 @@ ZEXTERN int ZEXPORT deflateCopy OF((z_streamp dest,
destination.
*/
-ZEXTERN int ZEXPORT deflateReset OF((z_streamp strm));
+ZEXTERN int ZEXPORT deflateReset(z_streamp strm);
/*
This function is equivalent to deflateEnd followed by deflateInit, but
does not free and reallocate the internal compression state. The stream
will leave the compression level and any other attributes that may have been
- set unchanged.
+ set unchanged. total_in, total_out, adler, and msg are initialized.
deflateReset returns Z_OK if success, or Z_STREAM_ERROR if the source
stream state was inconsistent (such as zalloc or state being Z_NULL).
*/
-ZEXTERN int ZEXPORT deflateParams OF((z_streamp strm,
- int level,
- int strategy));
+ZEXTERN int ZEXPORT deflateParams(z_streamp strm,
+ int level,
+ int strategy);
/*
Dynamically update the compression level and compression strategy. The
interpretation of level and strategy is as in deflateInit2(). This can be
@@ -729,7 +730,7 @@ ZEXTERN int ZEXPORT deflateParams OF((z_streamp strm,
Then no more input data should be provided before the deflateParams() call.
If this is done, the old level and strategy will be applied to the data
compressed before deflateParams(), and the new level and strategy will be
- applied to the the data compressed after deflateParams().
+ applied to the data compressed after deflateParams().
deflateParams returns Z_OK on success, Z_STREAM_ERROR if the source stream
state was inconsistent or if a parameter was invalid, or Z_BUF_ERROR if
@@ -740,11 +741,11 @@ ZEXTERN int ZEXPORT deflateParams OF((z_streamp strm,
retried with more output space.
*/
-ZEXTERN int ZEXPORT deflateTune OF((z_streamp strm,
- int good_length,
- int max_lazy,
- int nice_length,
- int max_chain));
+ZEXTERN int ZEXPORT deflateTune(z_streamp strm,
+ int good_length,
+ int max_lazy,
+ int nice_length,
+ int max_chain);
/*
Fine tune deflate's internal compression parameters. This should only be
used by someone who understands the algorithm used by zlib's deflate for
@@ -757,8 +758,8 @@ ZEXTERN int ZEXPORT deflateTune OF((z_streamp strm,
returns Z_OK on success, or Z_STREAM_ERROR for an invalid deflate stream.
*/
-ZEXTERN uLong ZEXPORT deflateBound OF((z_streamp strm,
- uLong sourceLen));
+ZEXTERN uLong ZEXPORT deflateBound(z_streamp strm,
+ uLong sourceLen);
/*
deflateBound() returns an upper bound on the compressed size after
deflation of sourceLen bytes. It must be called after deflateInit() or
@@ -772,9 +773,9 @@ ZEXTERN uLong ZEXPORT deflateBound OF((z_streamp strm,
than Z_FINISH or Z_NO_FLUSH are used.
*/
-ZEXTERN int ZEXPORT deflatePending OF((z_streamp strm,
- unsigned *pending,
- int *bits));
+ZEXTERN int ZEXPORT deflatePending(z_streamp strm,
+ unsigned *pending,
+ int *bits);
/*
deflatePending() returns the number of bytes and bits of output that have
been generated, but not yet provided in the available output. The bytes not
@@ -787,9 +788,9 @@ ZEXTERN int ZEXPORT deflatePending OF((z_streamp strm,
stream state was inconsistent.
*/
-ZEXTERN int ZEXPORT deflatePrime OF((z_streamp strm,
- int bits,
- int value));
+ZEXTERN int ZEXPORT deflatePrime(z_streamp strm,
+ int bits,
+ int value);
/*
deflatePrime() inserts bits in the deflate output stream. The intent
is that this function is used to start off the deflate output with the bits
@@ -804,8 +805,8 @@ ZEXTERN int ZEXPORT deflatePrime OF((z_streamp strm,
source stream state was inconsistent.
*/
-ZEXTERN int ZEXPORT deflateSetHeader OF((z_streamp strm,
- gz_headerp head));
+ZEXTERN int ZEXPORT deflateSetHeader(z_streamp strm,
+ gz_headerp head);
/*
deflateSetHeader() provides gzip header information for when a gzip
stream is requested by deflateInit2(). deflateSetHeader() may be called
@@ -821,16 +822,17 @@ ZEXTERN int ZEXPORT deflateSetHeader OF((z_streamp strm,
gzip file" and give up.
If deflateSetHeader is not used, the default gzip header has text false,
- the time set to zero, and os set to 255, with no extra, name, or comment
- fields. The gzip header is returned to the default state by deflateReset().
+ the time set to zero, and os set to the current operating system, with no
+ extra, name, or comment fields. The gzip header is returned to the default
+ state by deflateReset().
deflateSetHeader returns Z_OK if success, or Z_STREAM_ERROR if the source
stream state was inconsistent.
*/
/*
-ZEXTERN int ZEXPORT inflateInit2 OF((z_streamp strm,
- int windowBits));
+ZEXTERN int ZEXPORT inflateInit2(z_streamp strm,
+ int windowBits);
This is another version of inflateInit with an extra parameter. The
fields next_in, avail_in, zalloc, zfree and opaque must be initialized
@@ -883,9 +885,9 @@ ZEXTERN int ZEXPORT inflateInit2 OF((z_streamp strm,
deferred until inflate() is called.
*/
-ZEXTERN int ZEXPORT inflateSetDictionary OF((z_streamp strm,
- const Bytef *dictionary,
- uInt dictLength));
+ZEXTERN int ZEXPORT inflateSetDictionary(z_streamp strm,
+ const Bytef *dictionary,
+ uInt dictLength);
/*
Initializes the decompression dictionary from the given uncompressed byte
sequence. This function must be called immediately after a call of inflate,
@@ -906,9 +908,9 @@ ZEXTERN int ZEXPORT inflateSetDictionary OF((z_streamp strm,
inflate().
*/
-ZEXTERN int ZEXPORT inflateGetDictionary OF((z_streamp strm,
- Bytef *dictionary,
- uInt *dictLength));
+ZEXTERN int ZEXPORT inflateGetDictionary(z_streamp strm,
+ Bytef *dictionary,
+ uInt *dictLength);
/*
Returns the sliding dictionary being maintained by inflate. dictLength is
set to the number of bytes in the dictionary, and that many bytes are copied
@@ -921,7 +923,7 @@ ZEXTERN int ZEXPORT inflateGetDictionary OF((z_streamp strm,
stream state is inconsistent.
*/
-ZEXTERN int ZEXPORT inflateSync OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateSync(z_streamp strm);
/*
Skips invalid compressed data until a possible full flush point (see above
for the description of deflate with Z_FULL_FLUSH) can be found, or until all
@@ -934,14 +936,14 @@ ZEXTERN int ZEXPORT inflateSync OF((z_streamp strm));
inflateSync returns Z_OK if a possible full flush point has been found,
Z_BUF_ERROR if no more input was provided, Z_DATA_ERROR if no flush point
has been found, or Z_STREAM_ERROR if the stream structure was inconsistent.
- In the success case, the application may save the current current value of
- total_in which indicates where valid compressed data was found. In the
- error case, the application may repeatedly call inflateSync, providing more
- input each time, until success or end of the input data.
+ In the success case, the application may save the current value of total_in
+ which indicates where valid compressed data was found. In the error case,
+ the application may repeatedly call inflateSync, providing more input each
+ time, until success or end of the input data.
*/
-ZEXTERN int ZEXPORT inflateCopy OF((z_streamp dest,
- z_streamp source));
+ZEXTERN int ZEXPORT inflateCopy(z_streamp dest,
+ z_streamp source);
/*
Sets the destination stream as a complete copy of the source stream.
@@ -956,18 +958,19 @@ ZEXTERN int ZEXPORT inflateCopy OF((z_streamp dest,
destination.
*/
-ZEXTERN int ZEXPORT inflateReset OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateReset(z_streamp strm);
/*
This function is equivalent to inflateEnd followed by inflateInit,
but does not free and reallocate the internal decompression state. The
stream will keep attributes that may have been set by inflateInit2.
+ total_in, total_out, adler, and msg are initialized.
inflateReset returns Z_OK if success, or Z_STREAM_ERROR if the source
stream state was inconsistent (such as zalloc or state being Z_NULL).
*/
-ZEXTERN int ZEXPORT inflateReset2 OF((z_streamp strm,
- int windowBits));
+ZEXTERN int ZEXPORT inflateReset2(z_streamp strm,
+ int windowBits);
/*
This function is the same as inflateReset, but it also permits changing
the wrap and window size requests. The windowBits parameter is interpreted
@@ -980,9 +983,9 @@ ZEXTERN int ZEXPORT inflateReset2 OF((z_streamp strm,
the windowBits parameter is invalid.
*/
-ZEXTERN int ZEXPORT inflatePrime OF((z_streamp strm,
- int bits,
- int value));
+ZEXTERN int ZEXPORT inflatePrime(z_streamp strm,
+ int bits,
+ int value);
/*
This function inserts bits in the inflate input stream. The intent is
that this function is used to start inflating at a bit position in the
@@ -1001,7 +1004,7 @@ ZEXTERN int ZEXPORT inflatePrime OF((z_streamp strm,
stream state was inconsistent.
*/
-ZEXTERN long ZEXPORT inflateMark OF((z_streamp strm));
+ZEXTERN long ZEXPORT inflateMark(z_streamp strm);
/*
This function returns two values, one in the lower 16 bits of the return
value, and the other in the remaining upper bits, obtained by shifting the
@@ -1029,8 +1032,8 @@ ZEXTERN long ZEXPORT inflateMark OF((z_streamp strm));
source stream state was inconsistent.
*/
-ZEXTERN int ZEXPORT inflateGetHeader OF((z_streamp strm,
- gz_headerp head));
+ZEXTERN int ZEXPORT inflateGetHeader(z_streamp strm,
+ gz_headerp head);
/*
inflateGetHeader() requests that gzip header information be stored in the
provided gz_header structure. inflateGetHeader() may be called after
@@ -1070,8 +1073,8 @@ ZEXTERN int ZEXPORT inflateGetHeader OF((z_streamp strm,
*/
/*
-ZEXTERN int ZEXPORT inflateBackInit OF((z_streamp strm, int windowBits,
- unsigned char FAR *window));
+ZEXTERN int ZEXPORT inflateBackInit(z_streamp strm, int windowBits,
+ unsigned char FAR *window);
Initialize the internal stream state for decompression using inflateBack()
calls. The fields zalloc, zfree and opaque in strm must be initialized
@@ -1091,13 +1094,13 @@ ZEXTERN int ZEXPORT inflateBackInit OF((z_streamp strm, int windowBits,
the version of the header file.
*/
-typedef unsigned (*in_func) OF((void FAR *,
- z_const unsigned char FAR * FAR *));
-typedef int (*out_func) OF((void FAR *, unsigned char FAR *, unsigned));
+typedef unsigned (*in_func)(void FAR *,
+ z_const unsigned char FAR * FAR *);
+typedef int (*out_func)(void FAR *, unsigned char FAR *, unsigned);
-ZEXTERN int ZEXPORT inflateBack OF((z_streamp strm,
- in_func in, void FAR *in_desc,
- out_func out, void FAR *out_desc));
+ZEXTERN int ZEXPORT inflateBack(z_streamp strm,
+ in_func in, void FAR *in_desc,
+ out_func out, void FAR *out_desc);
/*
inflateBack() does a raw inflate with a single call using a call-back
interface for input and output. This is potentially more efficient than
@@ -1165,7 +1168,7 @@ ZEXTERN int ZEXPORT inflateBack OF((z_streamp strm,
cannot return Z_OK.
*/
-ZEXTERN int ZEXPORT inflateBackEnd OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateBackEnd(z_streamp strm);
/*
All memory allocated by inflateBackInit() is freed.
@@ -1173,7 +1176,7 @@ ZEXTERN int ZEXPORT inflateBackEnd OF((z_streamp strm));
state was inconsistent.
*/
-ZEXTERN uLong ZEXPORT zlibCompileFlags OF((void));
+ZEXTERN uLong ZEXPORT zlibCompileFlags(void);
/* Return flags indicating compile-time options.
Type sizes, two bits each, 00 = 16 bits, 01 = 32, 10 = 64, 11 = other:
@@ -1226,8 +1229,8 @@ ZEXTERN uLong ZEXPORT zlibCompileFlags OF((void));
you need special options.
*/
-ZEXTERN int ZEXPORT compress OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen));
+ZEXTERN int ZEXPORT compress(Bytef *dest, uLongf *destLen,
+ const Bytef *source, uLong sourceLen);
/*
Compresses the source buffer into the destination buffer. sourceLen is
the byte length of the source buffer. Upon entry, destLen is the total size
@@ -1241,9 +1244,9 @@ ZEXTERN int ZEXPORT compress OF((Bytef *dest, uLongf *destLen,
buffer.
*/
-ZEXTERN int ZEXPORT compress2 OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen,
- int level));
+ZEXTERN int ZEXPORT compress2(Bytef *dest, uLongf *destLen,
+ const Bytef *source, uLong sourceLen,
+ int level);
/*
Compresses the source buffer into the destination buffer. The level
parameter has the same meaning as in deflateInit. sourceLen is the byte
@@ -1257,15 +1260,15 @@ ZEXTERN int ZEXPORT compress2 OF((Bytef *dest, uLongf *destLen,
Z_STREAM_ERROR if the level parameter is invalid.
*/
-ZEXTERN uLong ZEXPORT compressBound OF((uLong sourceLen));
+ZEXTERN uLong ZEXPORT compressBound(uLong sourceLen);
/*
compressBound() returns an upper bound on the compressed size after
compress() or compress2() on sourceLen bytes. It would be used before a
compress() or compress2() call to allocate the destination buffer.
*/
-ZEXTERN int ZEXPORT uncompress OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen));
+ZEXTERN int ZEXPORT uncompress(Bytef *dest, uLongf *destLen,
+ const Bytef *source, uLong sourceLen);
/*
Decompresses the source buffer into the destination buffer. sourceLen is
the byte length of the source buffer. Upon entry, destLen is the total size
@@ -1282,8 +1285,8 @@ ZEXTERN int ZEXPORT uncompress OF((Bytef *dest, uLongf *destLen,
buffer with the uncompressed data up to that point.
*/
-ZEXTERN int ZEXPORT uncompress2 OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong *sourceLen));
+ZEXTERN int ZEXPORT uncompress2(Bytef *dest, uLongf *destLen,
+ const Bytef *source, uLong *sourceLen);
/*
Same as uncompress, except that sourceLen is a pointer, where the
length of the source is *sourceLen. On return, *sourceLen is the number of
@@ -1302,7 +1305,7 @@ ZEXTERN int ZEXPORT uncompress2 OF((Bytef *dest, uLongf *destLen,
typedef struct gzFile_s *gzFile; /* semi-opaque gzip file descriptor */
/*
-ZEXTERN gzFile ZEXPORT gzopen OF((const char *path, const char *mode));
+ZEXTERN gzFile ZEXPORT gzopen(const char *path, const char *mode);
Open the gzip (.gz) file at path for reading and decompressing, or
compressing and writing. The mode parameter is as in fopen ("rb" or "wb")
@@ -1339,7 +1342,7 @@ ZEXTERN gzFile ZEXPORT gzopen OF((const char *path, const char *mode));
file could not be opened.
*/
-ZEXTERN gzFile ZEXPORT gzdopen OF((int fd, const char *mode));
+ZEXTERN gzFile ZEXPORT gzdopen(int fd, const char *mode);
/*
Associate a gzFile with the file descriptor fd. File descriptors are
obtained from calls like open, dup, creat, pipe or fileno (if the file has
@@ -1362,7 +1365,7 @@ ZEXTERN gzFile ZEXPORT gzdopen OF((int fd, const char *mode));
will not detect if fd is invalid (unless fd is -1).
*/
-ZEXTERN int ZEXPORT gzbuffer OF((gzFile file, unsigned size));
+ZEXTERN int ZEXPORT gzbuffer(gzFile file, unsigned size);
/*
Set the internal buffer size used by this library's functions for file to
size. The default buffer size is 8192 bytes. This function must be called
@@ -1378,7 +1381,7 @@ ZEXTERN int ZEXPORT gzbuffer OF((gzFile file, unsigned size));
too late.
*/
-ZEXTERN int ZEXPORT gzsetparams OF((gzFile file, int level, int strategy));
+ZEXTERN int ZEXPORT gzsetparams(gzFile file, int level, int strategy);
/*
Dynamically update the compression level and strategy for file. See the
description of deflateInit2 for the meaning of these parameters. Previously
@@ -1389,7 +1392,7 @@ ZEXTERN int ZEXPORT gzsetparams OF((gzFile file, int level, int strategy));
or Z_MEM_ERROR if there is a memory allocation error.
*/
-ZEXTERN int ZEXPORT gzread OF((gzFile file, voidp buf, unsigned len));
+ZEXTERN int ZEXPORT gzread(gzFile file, voidp buf, unsigned len);
/*
Read and decompress up to len uncompressed bytes from file into buf. If
the input file is not in gzip format, gzread copies the given number of
@@ -1419,8 +1422,8 @@ ZEXTERN int ZEXPORT gzread OF((gzFile file, voidp buf, unsigned len));
Z_STREAM_ERROR.
*/
-ZEXTERN z_size_t ZEXPORT gzfread OF((voidp buf, z_size_t size, z_size_t nitems,
- gzFile file));
+ZEXTERN z_size_t ZEXPORT gzfread(voidp buf, z_size_t size, z_size_t nitems,
+ gzFile file);
/*
Read and decompress up to nitems items of size size from file into buf,
otherwise operating as gzread() does. This duplicates the interface of
@@ -1445,14 +1448,14 @@ ZEXTERN z_size_t ZEXPORT gzfread OF((voidp buf, z_size_t size, z_size_t nitems,
file, resetting and retrying on end-of-file, when size is not 1.
*/
-ZEXTERN int ZEXPORT gzwrite OF((gzFile file, voidpc buf, unsigned len));
+ZEXTERN int ZEXPORT gzwrite(gzFile file, voidpc buf, unsigned len);
/*
Compress and write the len uncompressed bytes at buf to file. gzwrite
returns the number of uncompressed bytes written or 0 in case of error.
*/
-ZEXTERN z_size_t ZEXPORT gzfwrite OF((voidpc buf, z_size_t size,
- z_size_t nitems, gzFile file));
+ZEXTERN z_size_t ZEXPORT gzfwrite(voidpc buf, z_size_t size,
+ z_size_t nitems, gzFile file);
/*
Compress and write nitems items of size size from buf to file, duplicating
the interface of stdio's fwrite(), with size_t request and return types. If
@@ -1465,7 +1468,7 @@ ZEXTERN z_size_t ZEXPORT gzfwrite OF((voidpc buf, z_size_t size,
is returned, and the error state is set to Z_STREAM_ERROR.
*/
-ZEXTERN int ZEXPORTVA gzprintf Z_ARG((gzFile file, const char *format, ...));
+ZEXTERN int ZEXPORTVA gzprintf(gzFile file, const char *format, ...);
/*
Convert, format, compress, and write the arguments (...) to file under
control of the string format, as in fprintf. gzprintf returns the number of
@@ -1480,7 +1483,7 @@ ZEXTERN int ZEXPORTVA gzprintf Z_ARG((gzFile file, const char *format, ...));
This can be determined using zlibCompileFlags().
*/
-ZEXTERN int ZEXPORT gzputs OF((gzFile file, const char *s));
+ZEXTERN int ZEXPORT gzputs(gzFile file, const char *s);
/*
Compress and write the given null-terminated string s to file, excluding
the terminating null character.
@@ -1488,7 +1491,7 @@ ZEXTERN int ZEXPORT gzputs OF((gzFile file, const char *s));
gzputs returns the number of characters written, or -1 in case of error.
*/
-ZEXTERN char * ZEXPORT gzgets OF((gzFile file, char *buf, int len));
+ZEXTERN char * ZEXPORT gzgets(gzFile file, char *buf, int len);
/*
Read and decompress bytes from file into buf, until len-1 characters are
read, or until a newline character is read and transferred to buf, or an
@@ -1502,13 +1505,13 @@ ZEXTERN char * ZEXPORT gzgets OF((gzFile file, char *buf, int len));
buf are indeterminate.
*/
-ZEXTERN int ZEXPORT gzputc OF((gzFile file, int c));
+ZEXTERN int ZEXPORT gzputc(gzFile file, int c);
/*
Compress and write c, converted to an unsigned char, into file. gzputc
returns the value that was written, or -1 in case of error.
*/
-ZEXTERN int ZEXPORT gzgetc OF((gzFile file));
+ZEXTERN int ZEXPORT gzgetc(gzFile file);
/*
Read and decompress one byte from file. gzgetc returns this byte or -1
in case of end of file or error. This is implemented as a macro for speed.
@@ -1517,7 +1520,7 @@ ZEXTERN int ZEXPORT gzgetc OF((gzFile file));
points to has been clobbered or not.
*/
-ZEXTERN int ZEXPORT gzungetc OF((int c, gzFile file));
+ZEXTERN int ZEXPORT gzungetc(int c, gzFile file);
/*
Push c back onto the stream for file to be read as the first character on
the next read. At least one character of push-back is always allowed.
@@ -1529,7 +1532,7 @@ ZEXTERN int ZEXPORT gzungetc OF((int c, gzFile file));
gzseek() or gzrewind().
*/
-ZEXTERN int ZEXPORT gzflush OF((gzFile file, int flush));
+ZEXTERN int ZEXPORT gzflush(gzFile file, int flush);
/*
Flush all pending output to file. The parameter flush is as in the
deflate() function. The return value is the zlib error number (see function
@@ -1545,8 +1548,8 @@ ZEXTERN int ZEXPORT gzflush OF((gzFile file, int flush));
*/
/*
-ZEXTERN z_off_t ZEXPORT gzseek OF((gzFile file,
- z_off_t offset, int whence));
+ZEXTERN z_off_t ZEXPORT gzseek(gzFile file,
+ z_off_t offset, int whence);
Set the starting position to offset relative to whence for the next gzread
or gzwrite on file. The offset represents a number of bytes in the
@@ -1564,7 +1567,7 @@ ZEXTERN z_off_t ZEXPORT gzseek OF((gzFile file,
would be before the current position.
*/
-ZEXTERN int ZEXPORT gzrewind OF((gzFile file));
+ZEXTERN int ZEXPORT gzrewind(gzFile file);
/*
Rewind file. This function is supported only for reading.
@@ -1572,7 +1575,7 @@ ZEXTERN int ZEXPORT gzrewind OF((gzFile file));
*/
/*
-ZEXTERN z_off_t ZEXPORT gztell OF((gzFile file));
+ZEXTERN z_off_t ZEXPORT gztell(gzFile file);
Return the starting position for the next gzread or gzwrite on file.
This position represents a number of bytes in the uncompressed data stream,
@@ -1583,7 +1586,7 @@ ZEXTERN z_off_t ZEXPORT gztell OF((gzFile file));
*/
/*
-ZEXTERN z_off_t ZEXPORT gzoffset OF((gzFile file));
+ZEXTERN z_off_t ZEXPORT gzoffset(gzFile file);
Return the current compressed (actual) read or write offset of file. This
offset includes the count of bytes that precede the gzip stream, for example
@@ -1592,7 +1595,7 @@ ZEXTERN z_off_t ZEXPORT gzoffset OF((gzFile file));
be used for a progress indicator. On error, gzoffset() returns -1.
*/
-ZEXTERN int ZEXPORT gzeof OF((gzFile file));
+ZEXTERN int ZEXPORT gzeof(gzFile file);
/*
Return true (1) if the end-of-file indicator for file has been set while
reading, false (0) otherwise. Note that the end-of-file indicator is set
@@ -1607,7 +1610,7 @@ ZEXTERN int ZEXPORT gzeof OF((gzFile file));
has grown since the previous end of file was detected.
*/
-ZEXTERN int ZEXPORT gzdirect OF((gzFile file));
+ZEXTERN int ZEXPORT gzdirect(gzFile file);
/*
Return true (1) if file is being copied directly while reading, or false
(0) if file is a gzip stream being decompressed.
@@ -1628,7 +1631,7 @@ ZEXTERN int ZEXPORT gzdirect OF((gzFile file));
gzip file reading and decompression, which may not be desired.)
*/
-ZEXTERN int ZEXPORT gzclose OF((gzFile file));
+ZEXTERN int ZEXPORT gzclose(gzFile file);
/*
Flush all pending output for file, if necessary, close file and
deallocate the (de)compression state. Note that once file is closed, you
@@ -1641,8 +1644,8 @@ ZEXTERN int ZEXPORT gzclose OF((gzFile file));
last read ended in the middle of a gzip stream, or Z_OK on success.
*/
-ZEXTERN int ZEXPORT gzclose_r OF((gzFile file));
-ZEXTERN int ZEXPORT gzclose_w OF((gzFile file));
+ZEXTERN int ZEXPORT gzclose_r(gzFile file);
+ZEXTERN int ZEXPORT gzclose_w(gzFile file);
/*
Same as gzclose(), but gzclose_r() is only for use when reading, and
gzclose_w() is only for use when writing or appending. The advantage to
@@ -1653,7 +1656,7 @@ ZEXTERN int ZEXPORT gzclose_w OF((gzFile file));
zlib library.
*/
-ZEXTERN const char * ZEXPORT gzerror OF((gzFile file, int *errnum));
+ZEXTERN const char * ZEXPORT gzerror(gzFile file, int *errnum);
/*
Return the error message for the last error which occurred on file.
errnum is set to zlib error number. If an error occurred in the file system
@@ -1669,7 +1672,7 @@ ZEXTERN const char * ZEXPORT gzerror OF((gzFile file, int *errnum));
functions above that do not distinguish those cases in their return values.
*/
-ZEXTERN void ZEXPORT gzclearerr OF((gzFile file));
+ZEXTERN void ZEXPORT gzclearerr(gzFile file);
/*
Clear the error and end-of-file flags for file. This is analogous to the
clearerr() function in stdio. This is useful for continuing to read a gzip
@@ -1686,7 +1689,7 @@ ZEXTERN void ZEXPORT gzclearerr OF((gzFile file));
library.
*/
-ZEXTERN uLong ZEXPORT adler32 OF((uLong adler, const Bytef *buf, uInt len));
+ZEXTERN uLong ZEXPORT adler32(uLong adler, const Bytef *buf, uInt len);
/*
Update a running Adler-32 checksum with the bytes buf[0..len-1] and
return the updated checksum. An Adler-32 value is in the range of a 32-bit
@@ -1706,15 +1709,15 @@ ZEXTERN uLong ZEXPORT adler32 OF((uLong adler, const Bytef *buf, uInt len));
if (adler != original_adler) error();
*/
-ZEXTERN uLong ZEXPORT adler32_z OF((uLong adler, const Bytef *buf,
- z_size_t len));
+ZEXTERN uLong ZEXPORT adler32_z(uLong adler, const Bytef *buf,
+ z_size_t len);
/*
Same as adler32(), but with a size_t length.
*/
/*
-ZEXTERN uLong ZEXPORT adler32_combine OF((uLong adler1, uLong adler2,
- z_off_t len2));
+ZEXTERN uLong ZEXPORT adler32_combine(uLong adler1, uLong adler2,
+ z_off_t len2);
Combine two Adler-32 checksums into one. For two sequences of bytes, seq1
and seq2 with lengths len1 and len2, Adler-32 checksums were calculated for
@@ -1724,7 +1727,7 @@ ZEXTERN uLong ZEXPORT adler32_combine OF((uLong adler1, uLong adler2,
negative, the result has no meaning or utility.
*/
-ZEXTERN uLong ZEXPORT crc32 OF((uLong crc, const Bytef *buf, uInt len));
+ZEXTERN uLong ZEXPORT crc32(uLong crc, const Bytef *buf, uInt len);
/*
Update a running CRC-32 with the bytes buf[0..len-1] and return the
updated CRC-32. A CRC-32 value is in the range of a 32-bit unsigned integer.
@@ -1742,30 +1745,30 @@ ZEXTERN uLong ZEXPORT crc32 OF((uLong crc, const Bytef *buf, uInt len));
if (crc != original_crc) error();
*/
-ZEXTERN uLong ZEXPORT crc32_z OF((uLong crc, const Bytef *buf,
- z_size_t len));
+ZEXTERN uLong ZEXPORT crc32_z(uLong crc, const Bytef *buf,
+ z_size_t len);
/*
Same as crc32(), but with a size_t length.
*/
/*
-ZEXTERN uLong ZEXPORT crc32_combine OF((uLong crc1, uLong crc2, z_off_t len2));
+ZEXTERN uLong ZEXPORT crc32_combine(uLong crc1, uLong crc2, z_off_t len2);
Combine two CRC-32 check values into one. For two sequences of bytes,
seq1 and seq2 with lengths len1 and len2, CRC-32 check values were
calculated for each, crc1 and crc2. crc32_combine() returns the CRC-32
check value of seq1 and seq2 concatenated, requiring only crc1, crc2, and
- len2.
+ len2. len2 must be non-negative.
*/
/*
-ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t len2));
+ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t len2);
Return the operator corresponding to length len2, to be used with
- crc32_combine_op().
+ crc32_combine_op(). len2 must be non-negative.
*/
-ZEXTERN uLong ZEXPORT crc32_combine_op OF((uLong crc1, uLong crc2, uLong op));
+ZEXTERN uLong ZEXPORT crc32_combine_op(uLong crc1, uLong crc2, uLong op);
/*
Give the same result as crc32_combine(), using op in place of len2. op is
is generated from len2 by crc32_combine_gen(). This will be faster than
@@ -1778,20 +1781,20 @@ ZEXTERN uLong ZEXPORT crc32_combine_op OF((uLong crc1, uLong crc2, uLong op));
/* deflateInit and inflateInit are macros to allow checking the zlib version
* and the compiler's view of z_stream:
*/
-ZEXTERN int ZEXPORT deflateInit_ OF((z_streamp strm, int level,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT inflateInit_ OF((z_streamp strm,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT deflateInit2_ OF((z_streamp strm, int level, int method,
- int windowBits, int memLevel,
- int strategy, const char *version,
- int stream_size));
-ZEXTERN int ZEXPORT inflateInit2_ OF((z_streamp strm, int windowBits,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT inflateBackInit_ OF((z_streamp strm, int windowBits,
- unsigned char FAR *window,
- const char *version,
- int stream_size));
+ZEXTERN int ZEXPORT deflateInit_(z_streamp strm, int level,
+ const char *version, int stream_size);
+ZEXTERN int ZEXPORT inflateInit_(z_streamp strm,
+ const char *version, int stream_size);
+ZEXTERN int ZEXPORT deflateInit2_(z_streamp strm, int level, int method,
+ int windowBits, int memLevel,
+ int strategy, const char *version,
+ int stream_size);
+ZEXTERN int ZEXPORT inflateInit2_(z_streamp strm, int windowBits,
+ const char *version, int stream_size);
+ZEXTERN int ZEXPORT inflateBackInit_(z_streamp strm, int windowBits,
+ unsigned char FAR *window,
+ const char *version,
+ int stream_size);
#ifdef Z_PREFIX_SET
# define z_deflateInit(strm, level) \
deflateInit_((strm), (level), ZLIB_VERSION, (int)sizeof(z_stream))
@@ -1836,7 +1839,7 @@ struct gzFile_s {
unsigned char *next;
z_off64_t pos;
};
-ZEXTERN int ZEXPORT gzgetc_ OF((gzFile file)); /* backward compatibility */
+ZEXTERN int ZEXPORT gzgetc_(gzFile file); /* backward compatibility */
#ifdef Z_PREFIX_SET
# undef z_gzgetc
# define z_gzgetc(g) \
@@ -1853,13 +1856,13 @@ ZEXTERN int ZEXPORT gzgetc_ OF((gzFile file)); /* backward compatibility */
* without large file support, _LFS64_LARGEFILE must also be true
*/
#ifdef Z_LARGE64
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off64_t ZEXPORT gzseek64 OF((gzFile, z_off64_t, int));
- ZEXTERN z_off64_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off64_t ZEXPORT gzoffset64 OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off64_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off64_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off64_t));
+ ZEXTERN gzFile ZEXPORT gzopen64(const char *, const char *);
+ ZEXTERN z_off64_t ZEXPORT gzseek64(gzFile, z_off64_t, int);
+ ZEXTERN z_off64_t ZEXPORT gztell64(gzFile);
+ ZEXTERN z_off64_t ZEXPORT gzoffset64(gzFile);
+ ZEXTERN uLong ZEXPORT adler32_combine64(uLong, uLong, z_off64_t);
+ ZEXTERN uLong ZEXPORT crc32_combine64(uLong, uLong, z_off64_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen64(z_off64_t);
#endif
#if !defined(ZLIB_INTERNAL) && defined(Z_WANT64)
@@ -1881,50 +1884,50 @@ ZEXTERN int ZEXPORT gzgetc_ OF((gzFile file)); /* backward compatibility */
# define crc32_combine_gen crc32_combine_gen64
# endif
# ifndef Z_LARGE64
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off_t ZEXPORT gzseek64 OF((gzFile, z_off_t, int));
- ZEXTERN z_off_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off_t ZEXPORT gzoffset64 OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off_t));
+ ZEXTERN gzFile ZEXPORT gzopen64(const char *, const char *);
+ ZEXTERN z_off_t ZEXPORT gzseek64(gzFile, z_off_t, int);
+ ZEXTERN z_off_t ZEXPORT gztell64(gzFile);
+ ZEXTERN z_off_t ZEXPORT gzoffset64(gzFile);
+ ZEXTERN uLong ZEXPORT adler32_combine64(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine64(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen64(z_off_t);
# endif
#else
- ZEXTERN gzFile ZEXPORT gzopen OF((const char *, const char *));
- ZEXTERN z_off_t ZEXPORT gzseek OF((gzFile, z_off_t, int));
- ZEXTERN z_off_t ZEXPORT gztell OF((gzFile));
- ZEXTERN z_off_t ZEXPORT gzoffset OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t));
+ ZEXTERN gzFile ZEXPORT gzopen(const char *, const char *);
+ ZEXTERN z_off_t ZEXPORT gzseek(gzFile, z_off_t, int);
+ ZEXTERN z_off_t ZEXPORT gztell(gzFile);
+ ZEXTERN z_off_t ZEXPORT gzoffset(gzFile);
+ ZEXTERN uLong ZEXPORT adler32_combine(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t);
#endif
#else /* Z_SOLO */
- ZEXTERN uLong ZEXPORT adler32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t));
+ ZEXTERN uLong ZEXPORT adler32_combine(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t);
#endif /* !Z_SOLO */
/* undocumented functions */
-ZEXTERN const char * ZEXPORT zError OF((int));
-ZEXTERN int ZEXPORT inflateSyncPoint OF((z_streamp));
-ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table OF((void));
-ZEXTERN int ZEXPORT inflateUndermine OF((z_streamp, int));
-ZEXTERN int ZEXPORT inflateValidate OF((z_streamp, int));
-ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF((z_streamp));
-ZEXTERN int ZEXPORT inflateResetKeep OF((z_streamp));
-ZEXTERN int ZEXPORT deflateResetKeep OF((z_streamp));
+ZEXTERN const char * ZEXPORT zError(int);
+ZEXTERN int ZEXPORT inflateSyncPoint(z_streamp);
+ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table(void);
+ZEXTERN int ZEXPORT inflateUndermine(z_streamp, int);
+ZEXTERN int ZEXPORT inflateValidate(z_streamp, int);
+ZEXTERN unsigned long ZEXPORT inflateCodesUsed(z_streamp);
+ZEXTERN int ZEXPORT inflateResetKeep(z_streamp);
+ZEXTERN int ZEXPORT deflateResetKeep(z_streamp);
#if defined(_WIN32) && !defined(Z_SOLO)
-ZEXTERN gzFile ZEXPORT gzopen_w OF((const wchar_t *path,
- const char *mode));
+ZEXTERN gzFile ZEXPORT gzopen_w(const wchar_t *path,
+ const char *mode);
#endif
#if defined(STDC) || defined(Z_HAVE_STDARG_H)
# ifndef Z_SOLO
-ZEXTERN int ZEXPORTVA gzvprintf Z_ARG((gzFile file,
- const char *format,
- va_list va));
+ZEXTERN int ZEXPORTVA gzvprintf(gzFile file,
+ const char *format,
+ va_list va);
# endif
#endif
diff --git a/src/native/external/zlib/zlib2ansi b/src/native/external/zlib/zlib2ansi
deleted file mode 100644
index 23b2a1d5a3ec..000000000000
--- a/src/native/external/zlib/zlib2ansi
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/perl
-
-# Transform K&R C function definitions into ANSI equivalent.
-#
-# Author: Paul Marquess
-# Version: 1.0
-# Date: 3 October 2006
-
-# TODO
-#
-# Assumes no function pointer parameters. unless they are typedefed.
-# Assumes no literal strings that look like function definitions
-# Assumes functions start at the beginning of a line
-
-use strict;
-use warnings;
-
-local $/;
-$_ = <>;
-
-my $sp = qr{ \s* (?: /\* .*? \*/ )? \s* }x; # assume no nested comments
-
-my $d1 = qr{ $sp (?: [\w\*\s]+ $sp)* $sp \w+ $sp [\[\]\s]* $sp }x ;
-my $decl = qr{ $sp (?: \w+ $sp )+ $d1 }xo ;
-my $dList = qr{ $sp $decl (?: $sp , $d1 )* $sp ; $sp }xo ;
-
-
-while (s/^
- ( # Start $1
- ( # Start $2
- .*? # Minimal eat content
- ( ^ \w [\w\s\*]+ ) # $3 -- function name
- \s* # optional whitespace
- ) # $2 - Matched up to before parameter list
-
- \( \s* # Literal "(" + optional whitespace
- ( [^\)]+ ) # $4 - one or more anythings except ")"
- \s* \) # optional whitespace surrounding a Literal ")"
-
- ( (?: $dList )+ ) # $5
-
- $sp ^ { # literal "{" at start of line
- ) # Remember to $1
- //xsom
- )
-{
- my $all = $1 ;
- my $prefix = $2;
- my $param_list = $4 ;
- my $params = $5;
-
- StripComments($params);
- StripComments($param_list);
- $param_list =~ s/^\s+//;
- $param_list =~ s/\s+$//;
-
- my $i = 0 ;
- my %pList = map { $_ => $i++ }
- split /\s*,\s*/, $param_list;
- my $pMatch = '(\b' . join('|', keys %pList) . '\b)\W*$' ;
-
- my @params = split /\s*;\s*/, $params;
- my @outParams = ();
- foreach my $p (@params)
- {
- if ($p =~ /,/)
- {
- my @bits = split /\s*,\s*/, $p;
- my $first = shift @bits;
- $first =~ s/^\s*//;
- push @outParams, $first;
- $first =~ /^(\w+\s*)/;
- my $type = $1 ;
- push @outParams, map { $type . $_ } @bits;
- }
- else
- {
- $p =~ s/^\s+//;
- push @outParams, $p;
- }
- }
-
-
- my %tmp = map { /$pMatch/; $_ => $pList{$1} }
- @outParams ;
-
- @outParams = map { " $_" }
- sort { $tmp{$a} <=> $tmp{$b} }
- @outParams ;
-
- print $prefix ;
- print "(\n" . join(",\n", @outParams) . ")\n";
- print "{" ;
-
-}
-
-# Output any trailing code.
-print ;
-exit 0;
-
-
-sub StripComments
-{
-
- no warnings;
-
- # Strip C & C++ comments
- # From the perlfaq
- $_[0] =~
-
- s{
- /\* ## Start of /* ... */ comment
- [^*]*\*+ ## Non-* followed by 1-or-more *'s
- (
- [^/*][^*]*\*+
- )* ## 0-or-more things which don't start with /
- ## but do end with '*'
- / ## End of /* ... */ comment
-
- | ## OR C++ Comment
- // ## Start of C++ comment //
- [^\n]* ## followed by 0-or-more non end of line characters
-
- | ## OR various things which aren't comments:
-
- (
- " ## Start of " ... " string
- (
- \\. ## Escaped char
- | ## OR
- [^"\\] ## Non "\
- )*
- " ## End of " ... " string
-
- | ## OR
-
- ' ## Start of ' ... ' string
- (
- \\. ## Escaped char
- | ## OR
- [^'\\] ## Non '\
- )*
- ' ## End of ' ... ' string
-
- | ## OR
-
- . ## Anything other char
- [^/"'\\]* ## Chars which doesn't start a comment, string or escape
- )
- }{$2}gxs;
-
-}
diff --git a/src/native/external/zlib/zutil.c b/src/native/external/zlib/zutil.c
index 9543ae825e32..b1c5d2d3c6da 100644
--- a/src/native/external/zlib/zutil.c
+++ b/src/native/external/zlib/zutil.c
@@ -24,13 +24,11 @@ z_const char * const z_errmsg[10] = {
};
-const char * ZEXPORT zlibVersion()
-{
+const char * ZEXPORT zlibVersion(void) {
return ZLIB_VERSION;
}
-uLong ZEXPORT zlibCompileFlags()
-{
+uLong ZEXPORT zlibCompileFlags(void) {
uLong flags;
flags = 0;
@@ -121,9 +119,7 @@ uLong ZEXPORT zlibCompileFlags()
# endif
int ZLIB_INTERNAL z_verbose = verbose;
-void ZLIB_INTERNAL z_error(m)
- char *m;
-{
+void ZLIB_INTERNAL z_error(char *m) {
fprintf(stderr, "%s\n", m);
exit(1);
}
@@ -132,9 +128,7 @@ void ZLIB_INTERNAL z_error(m)
/* exported to allow conversion of error code to string for compress() and
* uncompress()
*/
-const char * ZEXPORT zError(err)
- int err;
-{
+const char * ZEXPORT zError(int err) {
return ERR_MSG(err);
}
@@ -148,22 +142,14 @@ const char * ZEXPORT zError(err)
#ifndef HAVE_MEMCPY
-void ZLIB_INTERNAL zmemcpy(dest, source, len)
- Bytef* dest;
- const Bytef* source;
- uInt len;
-{
+void ZLIB_INTERNAL zmemcpy(Bytef* dest, const Bytef* source, uInt len) {
if (len == 0) return;
do {
*dest++ = *source++; /* ??? to be unrolled */
} while (--len != 0);
}
-int ZLIB_INTERNAL zmemcmp(s1, s2, len)
- const Bytef* s1;
- const Bytef* s2;
- uInt len;
-{
+int ZLIB_INTERNAL zmemcmp(const Bytef* s1, const Bytef* s2, uInt len) {
uInt j;
for (j = 0; j < len; j++) {
@@ -172,10 +158,7 @@ int ZLIB_INTERNAL zmemcmp(s1, s2, len)
return 0;
}
-void ZLIB_INTERNAL zmemzero(dest, len)
- Bytef* dest;
- uInt len;
-{
+void ZLIB_INTERNAL zmemzero(Bytef* dest, uInt len) {
if (len == 0) return;
do {
*dest++ = 0; /* ??? to be unrolled */
@@ -216,8 +199,7 @@ local ptr_table table[MAX_PTR];
* a protected system like OS/2. Use Microsoft C instead.
*/
-voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size)
-{
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size) {
voidpf buf;
ulg bsize = (ulg)items*size;
@@ -242,8 +224,7 @@ voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size)
return buf;
}
-void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
-{
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr) {
int n;
(void)opaque;
@@ -279,14 +260,12 @@ void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
# define _hfree hfree
#endif
-voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, uInt items, uInt size)
-{
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, uInt items, uInt size) {
(void)opaque;
return _halloc((long)items, size);
}
-void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
-{
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr) {
(void)opaque;
_hfree(ptr);
}
@@ -299,25 +278,18 @@ void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
#ifndef MY_ZCALLOC /* Any system without a special alloc function */
#ifndef STDC
-extern voidp malloc OF((uInt size));
-extern voidp calloc OF((uInt items, uInt size));
-extern void free OF((voidpf ptr));
+extern voidp malloc(uInt size);
+extern voidp calloc(uInt items, uInt size);
+extern void free(voidpf ptr);
#endif
-voidpf ZLIB_INTERNAL zcalloc(opaque, items, size)
- voidpf opaque;
- unsigned items;
- unsigned size;
-{
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size) {
(void)opaque;
return sizeof(uInt) > 2 ? (voidpf)malloc(items * size) :
(voidpf)calloc(items, size);
}
-void ZLIB_INTERNAL zcfree(opaque, ptr)
- voidpf opaque;
- voidpf ptr;
-{
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr) {
(void)opaque;
free(ptr);
}
diff --git a/src/native/external/zlib/zutil.h b/src/native/external/zlib/zutil.h
index 0bc7f4ecd1c0..48dd7febae65 100644
--- a/src/native/external/zlib/zutil.h
+++ b/src/native/external/zlib/zutil.h
@@ -1,5 +1,5 @@
/* zutil.h -- internal interface and configuration of the compression library
- * Copyright (C) 1995-2022 Jean-loup Gailly, Mark Adler
+ * Copyright (C) 1995-2024 Jean-loup Gailly, Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
@@ -56,7 +56,7 @@ typedef unsigned long ulg;
extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
/* (size given to avoid silly warnings with Visual C++) */
-#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]
+#define ERR_MSG(err) z_errmsg[(err) < -6 || (err) > 2 ? 9 : 2 - (err)]
#define ERR_RETURN(strm,err) \
return (strm->msg = ERR_MSG(err), (err))
@@ -137,17 +137,8 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
# endif
#endif
-#if defined(MACOS) || defined(TARGET_OS_MAC)
+#if defined(MACOS)
# define OS_CODE 7
-# ifndef Z_SOLO
-# if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os
-# include /* for fdopen */
-# else
-# ifndef fdopen
-# define fdopen(fd,mode) NULL /* No fdopen() */
-# endif
-# endif
-# endif
#endif
#ifdef __acorn
@@ -170,18 +161,6 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
# define OS_CODE 19
#endif
-#if defined(_BEOS_) || defined(RISCOS)
-# define fdopen(fd,mode) NULL /* No fdopen() */
-#endif
-
-#if (defined(_MSC_VER) && (_MSC_VER > 600)) && !defined __INTERIX
-# if defined(_WIN32_WCE)
-# define fdopen(fd,mode) NULL /* No fdopen() */
-# else
-# define fdopen(fd,type) _fdopen(fd,type)
-# endif
-#endif
-
#if defined(__BORLANDC__) && !defined(MSDOS)
#pragma warn -8004
#pragma warn -8008
@@ -191,9 +170,9 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
/* provide prototypes for these when building zlib without LFS */
#if !defined(_WIN32) && \
(!defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0)
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off_t));
+ ZEXTERN uLong ZEXPORT adler32_combine64(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine64(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen64(z_off_t);
#endif
/* common defaults */
@@ -232,16 +211,16 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
# define zmemzero(dest, len) memset(dest, 0, len)
# endif
#else
- void ZLIB_INTERNAL zmemcpy OF((Bytef* dest, const Bytef* source, uInt len));
- int ZLIB_INTERNAL zmemcmp OF((const Bytef* s1, const Bytef* s2, uInt len));
- void ZLIB_INTERNAL zmemzero OF((Bytef* dest, uInt len));
+ void ZLIB_INTERNAL zmemcpy(Bytef* dest, const Bytef* source, uInt len);
+ int ZLIB_INTERNAL zmemcmp(const Bytef* s1, const Bytef* s2, uInt len);
+ void ZLIB_INTERNAL zmemzero(Bytef* dest, uInt len);
#endif
/* Diagnostic functions */
#ifdef ZLIB_DEBUG
# include
extern int ZLIB_INTERNAL z_verbose;
- extern void ZLIB_INTERNAL z_error OF((char *m));
+ extern void ZLIB_INTERNAL z_error(char *m);
# define Assert(cond,msg) {if(!(cond)) z_error(msg);}
# define Trace(x) {if (z_verbose>=0) fprintf x ;}
# define Tracev(x) {if (z_verbose>0) fprintf x ;}
@@ -258,9 +237,9 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
#endif
#ifndef Z_SOLO
- voidpf ZLIB_INTERNAL zcalloc OF((voidpf opaque, unsigned items,
- unsigned size));
- void ZLIB_INTERNAL zcfree OF((voidpf opaque, voidpf ptr));
+ voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items,
+ unsigned size);
+ void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr);
#endif
#define ZALLOC(strm, items, size) \
diff --git a/src/native/libs/System.Globalization.Native/CMakeLists.txt b/src/native/libs/System.Globalization.Native/CMakeLists.txt
index 25544b9e8c63..6ae44ea889de 100644
--- a/src/native/libs/System.Globalization.Native/CMakeLists.txt
+++ b/src/native/libs/System.Globalization.Native/CMakeLists.txt
@@ -66,7 +66,15 @@ set(NATIVEGLOBALIZATION_SOURCES
if (DEFINED CMAKE_ICU_DIR)
include_directories(${CMAKE_ICU_DIR}/include)
- link_libraries(${CMAKE_ICU_DIR}/lib/libicuuc.a ${CMAKE_ICU_DIR}/lib/libicui18n.a ${CMAKE_ICU_DIR}/lib/libicudata.a)
+ if (CLR_CMAKE_TARGET_MACCATALYST OR CLR_CMAKE_TARGET_IOS OR CLR_CMAKE_TARGET_TVOS)
+ add_linker_flag(-Wl,-L${CMAKE_ICU_DIR}/lib)
+ add_linker_flag(-Wl,-hidden-licuuc)
+ add_linker_flag(-Wl,-hidden-licui18n)
+ add_linker_flag(-Wl,-hidden-licudata)
+ else()
+ link_libraries(${CMAKE_ICU_DIR}/lib/libicuuc.a ${CMAKE_ICU_DIR}/lib/libicui18n.a ${CMAKE_ICU_DIR}/lib/libicudata.a)
+ endif()
+
link_libraries(stdc++)
endif()
diff --git a/src/native/libs/System.Security.Cryptography.Native.Apple/entrypoints.c b/src/native/libs/System.Security.Cryptography.Native.Apple/entrypoints.c
index 9f91b6d2488f..099fb3439471 100644
--- a/src/native/libs/System.Security.Cryptography.Native.Apple/entrypoints.c
+++ b/src/native/libs/System.Security.Cryptography.Native.Apple/entrypoints.c
@@ -70,6 +70,7 @@ static const Entry s_cryptoAppleNative[] =
DllImportEntry(AppleCryptoNative_RsaGenerateKey)
DllImportEntry(AppleCryptoNative_RsaDecryptOaep)
DllImportEntry(AppleCryptoNative_RsaDecryptPkcs)
+ DllImportEntry(AppleCryptoNative_RsaDecryptRaw)
DllImportEntry(AppleCryptoNative_RsaEncryptOaep)
DllImportEntry(AppleCryptoNative_RsaEncryptPkcs)
DllImportEntry(AppleCryptoNative_RsaSignaturePrimitive)
diff --git a/src/native/libs/System.Security.Cryptography.Native.Apple/pal_rsa.c b/src/native/libs/System.Security.Cryptography.Native.Apple/pal_rsa.c
index a9aece35fb0f..1746828d5b0d 100644
--- a/src/native/libs/System.Security.Cryptography.Native.Apple/pal_rsa.c
+++ b/src/native/libs/System.Security.Cryptography.Native.Apple/pal_rsa.c
@@ -134,6 +134,13 @@ int32_t AppleCryptoNative_RsaDecryptOaep(SecKeyRef privateKey,
privateKey, pbData, cbData, pDecryptedOut, pErrorOut, mgfAlgorithm, SecKeyCreateDecryptedData);
}
+int32_t AppleCryptoNative_RsaDecryptRaw(
+ SecKeyRef privateKey, uint8_t* pbData, int32_t cbData, CFDataRef* pDecryptedOut, CFErrorRef* pErrorOut)
+{
+ return RsaPrimitive(
+ privateKey, pbData, cbData, pDecryptedOut, pErrorOut, kSecKeyAlgorithmRSAEncryptionRaw, SecKeyCreateDecryptedData);
+}
+
int32_t AppleCryptoNative_RsaDecryptPkcs(
SecKeyRef privateKey, uint8_t* pbData, int32_t cbData, CFDataRef* pDecryptedOut, CFErrorRef* pErrorOut)
{
diff --git a/src/native/libs/System.Security.Cryptography.Native.Apple/pal_rsa.h b/src/native/libs/System.Security.Cryptography.Native.Apple/pal_rsa.h
index 253fdae78e4b..34a350f80f9b 100644
--- a/src/native/libs/System.Security.Cryptography.Native.Apple/pal_rsa.h
+++ b/src/native/libs/System.Security.Cryptography.Native.Apple/pal_rsa.h
@@ -31,6 +31,14 @@ PALEXPORT int32_t AppleCryptoNative_RsaDecryptOaep(SecKeyRef privateKey,
CFDataRef* pDecryptedOut,
CFErrorRef* pErrorOut);
+/*
+Decrypt the contents of pbData using the provided privateKey without validating or removing padding.
+
+Follows pal_seckey return conventions.
+*/
+PALEXPORT int32_t AppleCryptoNative_RsaDecryptRaw(
+ SecKeyRef privateKey, uint8_t* pbData, int32_t cbData, CFDataRef* pDecryptedOut, CFErrorRef* pErrorOut);
+
/*
Decrypt the contents of pbData using the provided privateKey under PKCS#1 padding.
diff --git a/src/native/libs/System.Security.Cryptography.Native.Apple/pal_x509.c b/src/native/libs/System.Security.Cryptography.Native.Apple/pal_x509.c
index 8335c1c362c0..94e22f28c879 100644
--- a/src/native/libs/System.Security.Cryptography.Native.Apple/pal_x509.c
+++ b/src/native/libs/System.Security.Cryptography.Native.Apple/pal_x509.c
@@ -70,7 +70,6 @@ PAL_X509ContentType AppleCryptoNative_X509GetContentType(uint8_t* pbData, int32_
// The sniffing order is:
// * X509 DER
// * PKCS7 PEM/DER
- // * PKCS12 DER (or PEM if Apple has non-standard support for that)
// * X509 PEM or PEM aggregate (or DER, but that already matched)
//
// If the X509 PEM check is done first SecItemImport will erroneously match
@@ -78,6 +77,11 @@ PAL_X509ContentType AppleCryptoNative_X509GetContentType(uint8_t* pbData, int32_
//
// Likewise, if the X509 DER check isn't done first, Apple will report it as
// being a PKCS#7.
+ //
+ // This does not attempt to open a PFX / PKCS12 as Apple does not provide
+ // a suitable API to determine if it is PKCS12 without doing potentially
+ // unbound MAC / KDF work. Instead, let that return Unknown and let the managed
+ // decoding do the check.
SecCertificateRef certref = SecCertificateCreateWithData(NULL, cfData);
if (certref != NULL)
@@ -104,41 +108,6 @@ PAL_X509ContentType AppleCryptoNative_X509GetContentType(uint8_t* pbData, int32_
}
}
- dataFormat = kSecFormatPKCS12;
- actualFormat = dataFormat;
- itemType = kSecItemTypeAggregate;
- actualType = itemType;
-
- osStatus = SecItemImport(cfData, NULL, &actualFormat, &actualType, 0, NULL, NULL, NULL);
-
- if (osStatus == errSecPassphraseRequired)
- {
- dataFormat = kSecFormatPKCS12;
- actualFormat = dataFormat;
- itemType = kSecItemTypeAggregate;
- actualType = itemType;
-
- SecItemImportExportKeyParameters importParams;
- memset(&importParams, 0, sizeof(SecItemImportExportKeyParameters));
-
- importParams.version = SEC_KEY_IMPORT_EXPORT_PARAMS_VERSION;
- importParams.passphrase = CFSTR("");
-
- osStatus = SecItemImport(cfData, NULL, &actualFormat, &actualType, 0, &importParams, NULL, NULL);
-
- CFRelease(importParams.passphrase);
- importParams.passphrase = NULL;
- }
-
- if (osStatus == noErr || osStatus == errSecPkcs12VerifyFailure)
- {
- if (actualType == itemType && actualFormat == dataFormat)
- {
- CFRelease(cfData);
- return PAL_Pkcs12;
- }
- }
-
dataFormat = kSecFormatX509Cert;
actualFormat = dataFormat;
itemType = kSecItemTypeCertificate;
diff --git a/src/native/libs/System.Security.Cryptography.Native/entrypoints.c b/src/native/libs/System.Security.Cryptography.Native/entrypoints.c
index f69959f36a4b..1045645f1713 100644
--- a/src/native/libs/System.Security.Cryptography.Native/entrypoints.c
+++ b/src/native/libs/System.Security.Cryptography.Native/entrypoints.c
@@ -299,6 +299,7 @@ static const Entry s_cryptoNative[] =
DllImportEntry(CryptoNative_IsSslStateOK)
DllImportEntry(CryptoNative_SslCtxAddExtraChainCert)
DllImportEntry(CryptoNative_SslCtxSetCaching)
+ DllImportEntry(CryptoNative_SslCtxRemoveSession)
DllImportEntry(CryptoNative_SslCtxSetCiphers)
DllImportEntry(CryptoNative_SslCtxSetDefaultOcspCallback)
DllImportEntry(CryptoNative_SslCtxSetEncryptionPolicy)
diff --git a/src/native/libs/System.Security.Cryptography.Native/openssl.c b/src/native/libs/System.Security.Cryptography.Native/openssl.c
index 738220458954..fa2302b780e3 100644
--- a/src/native/libs/System.Security.Cryptography.Native/openssl.c
+++ b/src/native/libs/System.Security.Cryptography.Native/openssl.c
@@ -627,8 +627,8 @@ BIO* CryptoNative_GetX509NameInfo(X509* x509, int32_t nameType, int32_t forIssue
break;
}
- STACK_OF(GENERAL_NAME)* altNames = (STACK_OF(GENERAL_NAME)*)(
- X509_get_ext_d2i(x509, forIssuer ? NID_issuer_alt_name : NID_subject_alt_name, NULL, NULL));
+ GENERAL_NAMES* altNames = (GENERAL_NAMES*)
+ X509_get_ext_d2i(x509, forIssuer ? NID_issuer_alt_name : NID_subject_alt_name, NULL, NULL);
if (altNames)
{
@@ -686,13 +686,13 @@ BIO* CryptoNative_GetX509NameInfo(X509* x509, int32_t nameType, int32_t forIssue
{
BIO* b = BIO_new(BIO_s_mem());
ASN1_STRING_print_ex(b, str, ASN1_STRFLGS_UTF8_CONVERT);
- sk_GENERAL_NAME_free(altNames);
+ GENERAL_NAMES_free(altNames);
return b;
}
}
}
- sk_GENERAL_NAME_free(altNames);
+ GENERAL_NAMES_free(altNames);
}
}
diff --git a/src/native/libs/System.Security.Cryptography.Native/opensslshim.h b/src/native/libs/System.Security.Cryptography.Native/opensslshim.h
index 1f1b1851f098..f94ddc01274c 100644
--- a/src/native/libs/System.Security.Cryptography.Native/opensslshim.h
+++ b/src/native/libs/System.Security.Cryptography.Native/opensslshim.h
@@ -525,6 +525,7 @@ int EVP_DigestFinalXOF(EVP_MD_CTX *ctx, unsigned char *md, size_t len);
REQUIRED_FUNCTION(SSL_CTX_new) \
REQUIRED_FUNCTION(SSL_CTX_sess_set_new_cb) \
REQUIRED_FUNCTION(SSL_CTX_sess_set_remove_cb) \
+ REQUIRED_FUNCTION(SSL_CTX_remove_session) \
LIGHTUP_FUNCTION(SSL_CTX_set_alpn_protos) \
LIGHTUP_FUNCTION(SSL_CTX_set_alpn_select_cb) \
REQUIRED_FUNCTION(SSL_CTX_set_cipher_list) \
@@ -1040,6 +1041,7 @@ FOR_ALL_OPENSSL_FUNCTIONS
#define SSL_CTX_new SSL_CTX_new_ptr
#define SSL_CTX_sess_set_new_cb SSL_CTX_sess_set_new_cb_ptr
#define SSL_CTX_sess_set_remove_cb SSL_CTX_sess_set_remove_cb_ptr
+#define SSL_CTX_remove_session SSL_CTX_remove_session_ptr
#define SSL_CTX_set_alpn_protos SSL_CTX_set_alpn_protos_ptr
#define SSL_CTX_set_alpn_select_cb SSL_CTX_set_alpn_select_cb_ptr
#define SSL_CTX_set_cipher_list SSL_CTX_set_cipher_list_ptr
diff --git a/src/native/libs/System.Security.Cryptography.Native/pal_ssl.c b/src/native/libs/System.Security.Cryptography.Native/pal_ssl.c
index e6bd41143c16..e320d1c73d77 100644
--- a/src/native/libs/System.Security.Cryptography.Native/pal_ssl.c
+++ b/src/native/libs/System.Security.Cryptography.Native/pal_ssl.c
@@ -701,6 +701,11 @@ int CryptoNative_SslCtxSetCaching(SSL_CTX* ctx, int mode, int cacheSize, int con
return retValue;
}
+int CryptoNative_SslCtxRemoveSession(SSL_CTX* ctx, SSL_SESSION* session)
+{
+ return SSL_CTX_remove_session(ctx, session);
+}
+
const char* CryptoNative_SslGetServerName(SSL* ssl)
{
return SSL_get_servername(ssl, TLSEXT_NAMETYPE_host_name);
diff --git a/src/native/libs/System.Security.Cryptography.Native/pal_ssl.h b/src/native/libs/System.Security.Cryptography.Native/pal_ssl.h
index 3c63564cc4e5..9c9d7026119c 100644
--- a/src/native/libs/System.Security.Cryptography.Native/pal_ssl.h
+++ b/src/native/libs/System.Security.Cryptography.Native/pal_ssl.h
@@ -167,6 +167,11 @@ Sets session caching. 0 is disabled.
*/
PALEXPORT int CryptoNative_SslCtxSetCaching(SSL_CTX* ctx, int mode, int cacheSize, int contextIdLength, uint8_t* contextId, SslCtxNewSessionCallback newSessionCb, SslCtxRemoveSessionCallback removeSessionCb);
+/*
+Removes a session from internal cache.
+*/
+PALEXPORT int CryptoNative_SslCtxRemoveSession(SSL_CTX* ctx, SSL_SESSION* session);
+
/*
Sets callback to log TLS session keys
*/
diff --git a/src/tasks/AotCompilerTask/MonoAOTCompiler.cs b/src/tasks/AotCompilerTask/MonoAOTCompiler.cs
index 9e52322770b0..0a761b44a4c8 100644
--- a/src/tasks/AotCompilerTask/MonoAOTCompiler.cs
+++ b/src/tasks/AotCompilerTask/MonoAOTCompiler.cs
@@ -951,7 +951,7 @@ private PrecompileArguments GetPrecompileArgumentsFor(ITaskItem assemblyItem, st
if (isDedup)
{
foreach (var aItem in _assembliesToCompile!)
- processArgs.Add(aItem.ItemSpec);
+ processArgs.Add($"\"{aItem.ItemSpec}\"");
}
else
{
diff --git a/src/tasks/AppleAppBuilder/Xcode.cs b/src/tasks/AppleAppBuilder/Xcode.cs
index 0e05d9168423..f253b41a2b18 100644
--- a/src/tasks/AppleAppBuilder/Xcode.cs
+++ b/src/tasks/AppleAppBuilder/Xcode.cs
@@ -396,9 +396,14 @@ public string GenerateCMake(
}
else if (forceAOT || !(preferDylibs && dylibExists))
{
- // these libraries are pinvoked
- // -force_load will be removed once we enable direct-pinvokes for AOT
- toLink += $" \"-force_load {lib}\"{Environment.NewLine}";
+ // do not export symbols from ICU libraries
+ if (libName == "libicui18n" || libName == "libicudata" || libName == "libicuuc") {
+ toLink += $" \"-load_hidden {lib}\"{Environment.NewLine}";
+ } else {
+ // these libraries are pinvoked
+ // -force_load will be removed once we enable direct-pinvokes for AOT
+ toLink += $" \"-force_load {lib}\"{Environment.NewLine}";
+ }
}
}
diff --git a/src/tests/Common/helixpublishwitharcade.proj b/src/tests/Common/helixpublishwitharcade.proj
index db4ce658d6c7..a439693adf21 100644
--- a/src/tests/Common/helixpublishwitharcade.proj
+++ b/src/tests/Common/helixpublishwitharcade.proj
@@ -46,7 +46,6 @@
<_RuntimeVariant>
BundledNETCoreAppPackageVersion
- <_PALTestsDir>
<_SuperPmiCollect>false
@@ -105,7 +104,6 @@
RuntimeVariant=$(_RuntimeVariant);
BundledNETCoreAppPackageVersion=$(BundledNETCoreAppPackageVersion);
HelixRuntimeRid=$(HelixRuntimeRid);
- PALTestsDir=$(_PALTestsDir);
SuperPmiCollect=$(_SuperPmiCollect)
@@ -132,7 +130,6 @@
-
<_Scenarios Include="$(_Scenarios.Split(','))" />
@@ -324,12 +321,6 @@
-
-
-
-
@@ -929,12 +920,6 @@
$(AppleTestTarget)
$([System.TimeSpan]::FromMinutes($(TimeoutPerTestCollectionInMinutes)))
-
-
- $(LegacyPayloadsRootDirectory)paltests.tar.gz
- $(_WorkaroundForNuGetMigrationsForPrepending) ./runpaltestshelix.sh
- $([System.TimeSpan]::FromMinutes($(TimeoutPerTestCollectionInMinutes)))
-
diff --git a/src/tests/Interop/IJW/CopyConstructorMarshaler/CopyConstructorMarshaler.cs b/src/tests/Interop/IJW/CopyConstructorMarshaler/CopyConstructorMarshaler.cs
index 95afea492be7..baf3ebd4579e 100644
--- a/src/tests/Interop/IJW/CopyConstructorMarshaler/CopyConstructorMarshaler.cs
+++ b/src/tests/Interop/IJW/CopyConstructorMarshaler/CopyConstructorMarshaler.cs
@@ -25,34 +25,62 @@ static int Main()
object testInstance = Activator.CreateInstance(testType);
MethodInfo testMethod = testType.GetMethod("PInvokeNumCopies");
+ // On x86, we have an additional copy on every P/Invoke from the "native" parameter to the actual location on the stack.
+ int platformExtra = 0;
+ if (RuntimeInformation.ProcessArchitecture == Architecture.X86)
+ {
+ platformExtra = 1;
+ }
+
// PInvoke will copy twice. Once from argument to parameter, and once from the managed to native parameter.
- Assert.Equal(2, (int)testMethod.Invoke(testInstance, null));
+ Assert.Equal(2 + platformExtra, (int)testMethod.Invoke(testInstance, null));
testMethod = testType.GetMethod("ReversePInvokeNumCopies");
// Reverse PInvoke will copy 3 times. Two are from the same paths as the PInvoke,
// and the third is from the reverse P/Invoke call.
- Assert.Equal(3, (int)testMethod.Invoke(testInstance, null));
+ Assert.Equal(3 + platformExtra, (int)testMethod.Invoke(testInstance, null));
testMethod = testType.GetMethod("PInvokeNumCopiesDerivedType");
// PInvoke will copy twice. Once from argument to parameter, and once from the managed to native parameter.
- Assert.Equal(2, (int)testMethod.Invoke(testInstance, null));
+ Assert.Equal(2 + platformExtra, (int)testMethod.Invoke(testInstance, null));
testMethod = testType.GetMethod("ReversePInvokeNumCopiesDerivedType");
// Reverse PInvoke will copy 3 times. Two are from the same paths as the PInvoke,
// and the third is from the reverse P/Invoke call.
- Assert.Equal(3, (int)testMethod.Invoke(testInstance, null));
+ Assert.Equal(3 + platformExtra, (int)testMethod.Invoke(testInstance, null));
}
catch (Exception ex)
{
Console.WriteLine(ex);
return 101;
}
+
+ try
+ {
+ CopyConstructorsInArgumentStackSlots();
+ }
+ catch (Exception ex)
+ {
+ Console.WriteLine(ex);
+ return 101;
+ }
+
return 100;
}
+ public static void CopyConstructorsInArgumentStackSlots()
+ {
+ Assembly ijwNativeDll = Assembly.Load("IjwCopyConstructorMarshaler");
+ Type testType = ijwNativeDll.GetType("TestClass");
+ object testInstance = Activator.CreateInstance(testType);
+ MethodInfo testMethod = testType.GetMethod("ExposedThisCopyConstructorScenario");
+
+ Assert.Equal(0, (int)testMethod.Invoke(testInstance, null));
+ }
+
[DllImport("kernel32.dll")]
static extern IntPtr LoadLibraryEx(string lpFileName, IntPtr hReservedNull, int dwFlags);
diff --git a/src/tests/Interop/IJW/CopyConstructorMarshaler/IjwCopyConstructorMarshaler.cpp b/src/tests/Interop/IJW/CopyConstructorMarshaler/IjwCopyConstructorMarshaler.cpp
index bd1d1b80829d..c3d50cf77836 100644
--- a/src/tests/Interop/IJW/CopyConstructorMarshaler/IjwCopyConstructorMarshaler.cpp
+++ b/src/tests/Interop/IJW/CopyConstructorMarshaler/IjwCopyConstructorMarshaler.cpp
@@ -1,5 +1,90 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
+#pragma unmanaged
+#include
+#include
+
+namespace ExposedThis
+{
+ struct Relative;
+
+ std::vector relatives;
+
+ int numMissedCopies = 0;
+
+ struct Relative
+ {
+ void* relative;
+ Relative()
+ {
+ std::cout << "Registering " << std::hex << this << "\n";
+ relatives.push_back(this);
+ relative = this - 1;
+ }
+
+ Relative(const Relative& other)
+ {
+ std::cout << "Registering copy of " << std::hex << &other << " at " << this << "\n";
+ relatives.push_back(this);
+ relative = this - 1;
+ }
+
+ ~Relative()
+ {
+ auto location = std::find(relatives.begin(), relatives.end(), this);
+ if (location != relatives.end())
+ {
+ std::cout << "Unregistering " << std::hex << this << "\n";
+ relatives.erase(location);
+ }
+ else
+ {
+ std::cout << "Error: Relative object " << std::hex << this << " not registered\n";
+ numMissedCopies++;
+ }
+
+ if (relative != this - 1)
+ {
+ std::cout << " Error: Relative object " << std::hex << this << " has invalid relative pointer " << std::hex << relative << "\n";
+ numMissedCopies++;
+ }
+ }
+ };
+
+ void UseRelative(Relative rel)
+ {
+ std::cout << "Unmanaged: Using relative at address " << std::hex << &rel << "\n";
+ }
+
+ void UseRelativeManaged(Relative rel);
+
+ void CallRelative()
+ {
+ Relative rel;
+ UseRelativeManaged(rel);
+ }
+
+#pragma managed
+
+ int RunScenario()
+ {
+ // Managed to unmanaged
+ {
+ Relative rel;
+ UseRelative(rel);
+ }
+
+ // Unmanaged to managed
+ CallRelative();
+
+ return numMissedCopies;
+ }
+
+ void UseRelativeManaged(Relative rel)
+ {
+ std::cout << "Managed: Using relative at address " << std::hex << &rel << "\n";
+ }
+}
#pragma managed
class A
@@ -102,4 +187,9 @@ public ref class TestClass
B b;
return GetCopyCount_ViaManaged(b);
}
+
+ int ExposedThisCopyConstructorScenario()
+ {
+ return ExposedThis::RunScenario();
+ }
};
diff --git a/src/tests/Interop/PInvoke/Miscellaneous/CopyCtor/CopyCtorTest.cs b/src/tests/Interop/PInvoke/Miscellaneous/CopyCtor/CopyCtorTest.cs
index 37e8e3c48f52..ace0f2ba3152 100644
--- a/src/tests/Interop/PInvoke/Miscellaneous/CopyCtor/CopyCtorTest.cs
+++ b/src/tests/Interop/PInvoke/Miscellaneous/CopyCtor/CopyCtorTest.cs
@@ -13,21 +13,43 @@ static unsafe class CopyCtor
public static unsafe int StructWithCtorTest(StructWithCtor* ptrStruct, ref StructWithCtor refStruct)
{
if (ptrStruct->_instanceField != 1)
+ {
+ Console.WriteLine($"Fail: {ptrStruct->_instanceField} != {1}");
return 1;
+ }
if (refStruct._instanceField != 2)
+ {
+ Console.WriteLine($"Fail: {refStruct._instanceField} != {2}");
return 2;
+ }
- if (StructWithCtor.CopyCtorCallCount != 2)
+ int expectedCallCount = 2;
+ if (RuntimeInformation.ProcessArchitecture == Architecture.X86)
+ {
+ expectedCallCount = 4;
+ }
+
+ if (StructWithCtor.CopyCtorCallCount != expectedCallCount)
+ {
+ Console.WriteLine($"Fail: {StructWithCtor.CopyCtorCallCount} != {expectedCallCount}");
return 3;
- if (StructWithCtor.DtorCallCount != 2)
+ }
+ if (StructWithCtor.DtorCallCount != expectedCallCount)
+ {
+ Console.WriteLine($"Fail: {StructWithCtor.DtorCallCount} != {expectedCallCount}");
return 4;
-
+ }
return 100;
}
public static unsafe int Main()
{
+ if (!TestLibrary.PlatformDetection.IsWindows)
+ {
+ return 100;
+ }
+
TestDelegate del = (TestDelegate)Delegate.CreateDelegate(typeof(TestDelegate), typeof(CopyCtor).GetMethod("StructWithCtorTest"));
StructWithCtor s1 = new StructWithCtor();
StructWithCtor s2 = new StructWithCtor();
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_100404/Runtime_100404.cs b/src/tests/JIT/Regression/JitBlue/Runtime_100404/Runtime_100404.cs
new file mode 100644
index 000000000000..0725a973faed
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_100404/Runtime_100404.cs
@@ -0,0 +1,31 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Runtime.CompilerServices;
+using System.Runtime.Intrinsics;
+using Xunit;
+
+public static class Runtime_100404
+{
+ [Fact]
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ public static void TestMultiplyVector128DoubleByConstant()
+ {
+ Vector128 result = Map(Vector128.One, new FloatPoint(2.0, 3.0));
+ Assert.Equal(2.0, result[0]);
+ Assert.Equal(2.0, result[1]);
+ }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private static Vector128 Map(Vector128 m0, FloatPoint point)
+ {
+ return m0 * Vector128.Create(point.X);
+ }
+
+ private struct FloatPoint(double x, double y)
+ {
+ public double X = x;
+ public double Y = y;
+ }
+}
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_100404/Runtime_100404.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_100404/Runtime_100404.csproj
new file mode 100644
index 000000000000..15edd99711a1
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_100404/Runtime_100404.csproj
@@ -0,0 +1,8 @@
+
+
+ True
+
+
+
+
+
\ No newline at end of file
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_100437/Runtime_100437.cs b/src/tests/JIT/Regression/JitBlue/Runtime_100437/Runtime_100437.cs
new file mode 100644
index 000000000000..810b99acab30
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_100437/Runtime_100437.cs
@@ -0,0 +1,112 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.Loader;
+using Xunit;
+
+public class Runtime_100437
+{
+ [Fact]
+ [SkipOnMono("PlatformDetection.IsPreciseGcSupported false on mono", TestPlatforms.Any)]
+ public static void TestNonCollectibleType() => TestCollectibleReadOnlyStatics(nameof(NonCollectibleType));
+
+ [Fact]
+ [SkipOnMono("PlatformDetection.IsPreciseGcSupported false on mono", TestPlatforms.Any)]
+ public static void TestNonCollectibleTypeInSharedGenericCode() => TestCollectibleReadOnlyStatics(nameof(NonCollectibleTypeInSharedGenericCode));
+
+ [Fact]
+ [SkipOnMono("PlatformDetection.IsPreciseGcSupported false on mono", TestPlatforms.Any)]
+ public static void TestNonCollectibleArrayTypeInSharedGenericCode() => TestCollectibleReadOnlyStatics(nameof(NonCollectibleArrayTypeInSharedGenericCode));
+
+ [Fact]
+ [SkipOnMono("PlatformDetection.IsPreciseGcSupported false on mono", TestPlatforms.Any)]
+ public static void TestCollectibleEmptyArray() => TestCollectibleReadOnlyStatics(nameof(CollectibleEmptyArray));
+
+ private static void TestCollectibleReadOnlyStatics(string methodName)
+ {
+ string assemblyPath = typeof(Runtime_100437).Assembly.Location;
+
+ // Skip this test for single file
+ if (string.IsNullOrEmpty(assemblyPath))
+ return;
+
+ WeakReference wr = CreateReadOnlyStaticWeakReference();
+
+ for (int i = 0; i < 10; i++)
+ {
+ GC.Collect();
+ GC.WaitForPendingFinalizers();
+
+ if (!IsTargetAlive(wr))
+ return;
+ }
+
+ throw new Exception("Test failed - readonly static has not been collected.");
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ WeakReference CreateReadOnlyStaticWeakReference()
+ {
+ AssemblyLoadContext alc = new CollectibleAssemblyLoadContext();
+ Assembly a = alc.LoadFromAssemblyPath(assemblyPath);
+ return (WeakReference)a.GetType(nameof(Runtime_100437)).GetMethod(methodName).Invoke(null, new object[] { typeof(Runtime_100437).Assembly });
+ }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ bool IsTargetAlive(WeakReference wr)
+ {
+ return wr.Target != null;
+ }
+ }
+
+ public static WeakReference NonCollectibleType(Assembly assemblyInDefaultContext)
+ {
+ return new WeakReference(Holder.Singleton, trackResurrection: true);
+ }
+
+ public static WeakReference NonCollectibleTypeInSharedGenericCode(Assembly assemblyInDefaultContext)
+ {
+ // Create instance of a non-collectible generic type definition over a collectible type
+ var type = assemblyInDefaultContext.GetType("Runtime_100437+GenericHolder`1", throwOnError: true).MakeGenericType(typeof(Runtime_100437));
+ var field = type.GetField("Singleton", BindingFlags.Static | BindingFlags.Public);
+ return new WeakReference(field.GetValue(null), trackResurrection: true);
+ }
+
+ public static WeakReference NonCollectibleArrayTypeInSharedGenericCode(Assembly assemblyInDefaultContext)
+ {
+ // Create instance of a non-collectible generic type definition over a collectible type
+ var type = assemblyInDefaultContext.GetType("Runtime_100437+GenericArrayHolder`1", throwOnError: true).MakeGenericType(typeof(Runtime_100437));
+ var field = type.GetField("Singleton", BindingFlags.Static | BindingFlags.Public);
+ return new WeakReference(field.GetValue(null), trackResurrection: true);
+ }
+
+ public static WeakReference CollectibleEmptyArray(Assembly assemblyInDefaultContext)
+ {
+ return new WeakReference(Array.Empty(), trackResurrection: true);
+ }
+
+ private class CollectibleAssemblyLoadContext : AssemblyLoadContext
+ {
+ public CollectibleAssemblyLoadContext()
+ : base(isCollectible: true)
+ {
+ }
+ }
+
+ private class Holder
+ {
+ public static readonly object Singleton = new object();
+ }
+
+ private class GenericHolder
+ {
+ public static readonly object Singleton = new object();
+ }
+
+ private class GenericArrayHolder
+ {
+ public static readonly int[] Singleton = new int[0];
+ }
+}
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_100437/Runtime_100437.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_100437/Runtime_100437.csproj
new file mode 100644
index 000000000000..197767e2c4e2
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_100437/Runtime_100437.csproj
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_100809/Runtime_100809.cs b/src/tests/JIT/Regression/JitBlue/Runtime_100809/Runtime_100809.cs
new file mode 100644
index 000000000000..e7c94f11ac2d
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_100809/Runtime_100809.cs
@@ -0,0 +1,23 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Runtime.CompilerServices;
+using Xunit;
+
+public static class Runtime_100809
+{
+ [Fact]
+ public static int TestEntryPoint()
+ {
+ return AlwaysFalse(96) ? -1 : 100;
+ }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private static bool AlwaysFalse(int x)
+ {
+ var result = new byte[x];
+ int count = result.Length - 2;
+ return (x < 0 || result.Length - count < 0);
+ }
+}
\ No newline at end of file
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_100809/Runtime_100809.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_100809/Runtime_100809.csproj
new file mode 100644
index 000000000000..197767e2c4e2
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_100809/Runtime_100809.csproj
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_92201/Runtime_92201.cs b/src/tests/JIT/Regression/JitBlue/Runtime_92201/Runtime_92201.cs
new file mode 100644
index 000000000000..445cc3f915ef
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_92201/Runtime_92201.cs
@@ -0,0 +1,174 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+using System.Runtime.Intrinsics;
+using Xunit;
+
+namespace JIT.HardwareIntrinsics.General._Vector128
+{
+ public static partial class Program
+ {
+ [Fact]
+ public static void Test()
+ {
+ var test = new VectorBooleanBinaryOpTest__LessThanOrEqualAnySingle();
+
+ // Validates basic functionality works, using Unsafe.Read
+ test.RunBasicScenario_UnsafeRead();
+
+ if (!test.Succeeded)
+ {
+ throw new Exception("One or more scenarios did not complete as expected.");
+ }
+ }
+ }
+
+ public sealed unsafe class VectorBooleanBinaryOpTest__LessThanOrEqualAnySingle
+ {
+ private struct DataTable
+ {
+ private byte[] inArray1;
+ private byte[] inArray2;
+
+ private GCHandle inHandle1;
+ private GCHandle inHandle2;
+
+ private ulong alignment;
+
+ public DataTable(Single[] inArray1, Single[] inArray2, int alignment)
+ {
+ int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
+ int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
+ if (!int.IsPow2(alignment) || (alignment > 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2)
+ {
+ throw new ArgumentException("Invalid value of alignment");
+ }
+
+ this.inArray1 = new byte[alignment * 2];
+ this.inArray2 = new byte[alignment * 2];
+
+ this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
+ this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
+
+ this.alignment = (ulong)alignment;
+
+ Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
+ Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
+ }
+
+ public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
+ public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
+
+ public void Dispose()
+ {
+ inHandle1.Free();
+ inHandle2.Free();
+ }
+
+ private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
+ {
+ return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
+ }
+ }
+
+ private static readonly int LargestVectorSize = 16;
+
+ private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
+ private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector128<Single>>() / sizeof(Single);
+
+ private static Single[] _data1 = new Single[Op1ElementCount];
+ private static Single[] _data2 = new Single[Op2ElementCount];
+
+ private DataTable _dataTable;
+
+ public VectorBooleanBinaryOpTest__LessThanOrEqualAnySingle()
+ {
+ Succeeded = true;
+
+ _data1[0] = 0.168625f;
+ _data1[1] = 0.5899811f;
+ _data1[2] = 0.8042229f;
+ _data1[3] = 0.8173325f;
+
+ _data2[0] = 0.059660614f;
+ _data2[1] = 0.13952714f;
+ _data2[2] = 0.23523656f;
+ _data2[3] = 0.48773053f;
+
+ _dataTable = new DataTable(_data1, _data2, LargestVectorSize);
+ }
+
+ public bool Succeeded { get; set; }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ public bool LessThanOrEqualAnyProblem()
+ {
+ return Vector128.LessThanOrEqualAny(
+ Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
+ Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
+ );
+ }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ public void* GetPtr1()
+ {
+ return _dataTable.inArray1Ptr;
+ }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ public void* GetPtr2()
+ {
+ return _dataTable.inArray2Ptr;
+ }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ public void CheckResult(bool result)
+ {
+ ValidateResult(GetPtr1(), GetPtr2(), result);
+ }
+
+ public void RunBasicScenario_UnsafeRead()
+ {
+ var result = Vector128.LessThanOrEqualAny(
+ Unsafe.Read<Vector128<Single>>(GetPtr1()),
+ Unsafe.Read<Vector128<Single>>(GetPtr2())
+ );
+
+ CheckResult(result);
+ }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private void ValidateResult(void* op1, void* op2, bool result, [CallerMemberName] string method = "")
+ {
+ Single[] inArray1 = new Single[Op1ElementCount];
+ Single[] inArray2 = new Single[Op2ElementCount];
+
+ Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector128<Single>>());
+ Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector128<Single>>());
+
+ ValidateResult(inArray1, inArray2, result, method);
+ }
+
+ private void ValidateResult(Single[] left, Single[] right, bool result, [CallerMemberName] string method = "")
+ {
+ bool succeeded = true;
+
+ var expectedResult = false;
+
+ for (var i = 0; i < Op1ElementCount; i++)
+ {
+ expectedResult |= (left[i] <= right[i]);
+ }
+
+ succeeded = (expectedResult == result);
+
+ if (!succeeded)
+ {
+ Succeeded = false;
+ }
+ }
+ }
+}
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_92201/Runtime_92201.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_92201/Runtime_92201.csproj
new file mode 100644
index 000000000000..717798d9d4e2
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_92201/Runtime_92201.csproj
@@ -0,0 +1,13 @@
+
+
+ True
+ True
+
+
+
+
+
+
+
+
+
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_94467/Runtime_94467.cs b/src/tests/JIT/Regression/JitBlue/Runtime_94467/Runtime_94467.cs
new file mode 100644
index 000000000000..85aa6e62655d
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_94467/Runtime_94467.cs
@@ -0,0 +1,44 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using Xunit;
+
+public static class Runtime_94467
+{
+ public interface ITypeChecker
+ {
+ static abstract bool Test<T>(T value);
+ }
+
+ public interface IHandler
+ {
+ bool Test<T>(T value);
+ }
+
+ public struct TypeChecker : ITypeChecker
+ {
+ public static bool Test<T>(T value) => true;
+ }
+
+ public class Handler<TChecker> : IHandler where TChecker : ITypeChecker
+ {
+ public bool Test<T>(T value) => TChecker.Test(value);
+ }
+
+ public static IHandler GetHandler() => new Handler<TypeChecker>();
+
+ [Fact]
+ public static int Test()
+ {
+ try {
+ var handler = GetHandler();
+ if (handler.Test(true) && handler.Test(true))
+ return 100;
+ else
+ return 101;
+ } catch (Exception) {
+ return -1;
+ }
+ }
+}
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_94467/Runtime_94467.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_94467/Runtime_94467.csproj
new file mode 100644
index 000000000000..15edd99711a1
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_94467/Runtime_94467.csproj
@@ -0,0 +1,8 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <Optimize>True</Optimize>
+  </PropertyGroup>
+  <ItemGroup>
+    <Compile Include="$(MSBuildProjectName).cs" />
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_96306/Runtime_96306.cs b/src/tests/JIT/Regression/JitBlue/Runtime_96306/Runtime_96306.cs
new file mode 100644
index 000000000000..043e15ad9052
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_96306/Runtime_96306.cs
@@ -0,0 +1,37 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Numerics;
+using System.Runtime.CompilerServices;
+using Xunit;
+
+public class Runtime_96306
+{
+ [Fact]
+ public static int TestEntryPoint()
+ {
+ return Foo(new Point2D { V = new Vector2(101, -1) }, 100);
+ }
+
+ // 'a' is passed in rcx but homed into xmm1 after promotion.
+ // 'scale' is passed in xmm1 but spilled because of the call to Bar.
+ // We must take care that we spill 'scale' before we home 'a'.
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private static int Foo(Point2D a, float scale)
+ {
+ Bar();
+ return ReturnValue(scale);
+ }
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private static int ReturnValue(float value) => (int)value;
+
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private static void Bar() { }
+
+ private struct Point2D
+ {
+ public Vector2 V;
+ }
+}
+
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_96306/Runtime_96306.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_96306/Runtime_96306.csproj
new file mode 100644
index 000000000000..6c2e6c34ba40
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_96306/Runtime_96306.csproj
@@ -0,0 +1,10 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <Optimize>True</Optimize>
+    <DebugType>None</DebugType>
+    <RequiresProcessIsolation>true</RequiresProcessIsolation>
+  </PropertyGroup>
+  <ItemGroup>
+    <Compile Include="$(MSBuildProjectName).cs" />
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/workloads/workloads.csproj b/src/workloads/workloads.csproj
index 3db05c0a8034..a986fdfdbe6b 100644
--- a/src/workloads/workloads.csproj
+++ b/src/workloads/workloads.csproj
@@ -186,6 +186,15 @@
+
+
+
+
+
+
+
+
+