diff --git a/eng/pipelines/common/platform-matrix.yml b/eng/pipelines/common/platform-matrix.yml index 9262b5d36a165..d990b827e0d89 100644 --- a/eng/pipelines/common/platform-matrix.yml +++ b/eng/pipelines/common/platform-matrix.yml @@ -17,10 +17,6 @@ parameters: # for the given platform and helixQueueGroup. helixQueuesTemplate: '' stagedBuild: false - # When set to false, suppresses reuse of OSX managed build artifacts (for pipelines without an OSX obj) - # When set to true, passes the 'platforms' value as a job parameter also named 'platforms'. - # Handled as an opt-in parameter to avoid excessive yaml. - passPlatforms: false container: '' shouldContinueOnError: false jobParameters: {} @@ -47,8 +43,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/arm' @@ -73,8 +67,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/armv6' @@ -103,8 +95,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/arm64' @@ -131,8 +121,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -157,8 +145,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/arm' @@ -185,8 +171,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/arm64' @@ -216,8 +200,6 @@ jobs: runScriptWindowsCmd: true stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -242,8 +224,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -270,8 +250,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ 
parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -295,8 +273,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/x86' @@ -323,8 +299,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} buildingOnSourceBuildImage: true @@ -349,8 +323,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/s390x' @@ -376,8 +348,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/ppc64le' @@ -404,8 +374,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} ${{ insert }}: ${{ parameters.jobParameters }} # WebAssembly Linux Firefox @@ -429,8 +397,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} ${{ insert }}: ${{ parameters.jobParameters }} # WebAssembly on Windows @@ -451,8 +417,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} ${{ insert }}: ${{ parameters.jobParameters }} # FreeBSD @@ -476,8 +440,6 @@ jobs: helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/x64' - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} ${{ insert }}: ${{ parameters.jobParameters }} # Android x64 @@ -500,8 +462,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -525,8 +485,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ 
parameters.jobParameters }} @@ -550,8 +508,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -575,8 +531,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -597,8 +551,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -619,8 +571,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -641,8 +591,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -663,8 +611,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -685,8 +631,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -707,8 +651,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -729,8 +671,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -751,8 +691,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -773,8 +711,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} managedTestBuildOsGroup: OSX ${{ insert }}: ${{ 
parameters.jobParameters }} @@ -796,8 +732,6 @@ jobs: runtimeFlavor: mono stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -818,8 +752,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true ${{ insert }}: ${{ parameters.jobParameters }} @@ -841,8 +773,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -866,8 +796,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} crossBuild: true crossrootfsDir: '/crossrootfs/armel' @@ -891,8 +819,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -913,8 +839,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -934,8 +858,6 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} @@ -956,7 +878,5 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} stagedBuild: ${{ parameters.stagedBuild }} buildConfig: ${{ parameters.buildConfig }} - ${{ if eq(parameters.passPlatforms, true) }}: - platforms: ${{ parameters.platforms }} helixQueueGroup: ${{ parameters.helixQueueGroup }} ${{ insert }}: ${{ parameters.jobParameters }} diff --git a/eng/pipelines/common/templates/runtimes/run-test-job.yml b/eng/pipelines/common/templates/runtimes/run-test-job.yml index 4bc3e519aec45..7e8fcaf4175f8 100644 --- a/eng/pipelines/common/templates/runtimes/run-test-job.yml +++ b/eng/pipelines/common/templates/runtimes/run-test-job.yml @@ -24,6 +24,7 @@ parameters: shouldContinueOnError: false dependsOn: [] dependOnEvaluatePaths: false + SuperPmiCollect: false ### Test run job @@ -76,6 +77,9 @@ jobs: - ${{ format('{0}_{1}_product_build_{2}{3}_{4}_{5}', parameters.runtimeFlavor, parameters.runtimeVariant, parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.buildConfig) }} - ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}: - ${{ 
format('libraries_build_{0}{1}_{2}_{3}', parameters.osGroup, parameters.osSubgroup, parameters.archType, parameters.liveLibrariesBuildConfig) }} + # SuperPMI collection needs to run mcs.exe on the AzDO machine. Assume that's an x64 machine, and download an x64 product build if needed. + - ${{ if and(eq(parameters.SuperPmiCollect, true), ne(parameters.archType, 'x64')) }}: + - ${{ format('coreclr_{0}_product_build_{1}{2}_{3}_{4}', '', parameters.osGroup, parameters.osSubgroup, 'x64', parameters.buildConfig) }} # Compute job name from template parameters ${{ if in(parameters.testGroup, 'innerloop', 'clrinterpreter') }}: @@ -147,6 +151,36 @@ jobs: - name: testTreeFilterArg value: '-tree:GC/Scenarios/GCSimulator' + # Variables used for SuperPMI collection + - ${{ if eq(parameters.SuperPmiCollect, true) }}: + - MchFileTag: '${{ parameters.osGroup }}.${{ parameters.archType }}.${{ parameters.buildConfig }}' + - name: CollectionType + value: 'run' + - name: CollectionName + value: 'coreclr_tests' + - ${{ if eq(parameters.osGroup, 'windows') }}: + - name: PythonScript + value: 'py -3' + - name: PipScript + value: 'py -3 -m pip' + - name: MchFilesLocation + value: '$(Build.SourcesDirectory)\artifacts\helixresults\' + - name: MergedMchFileLocation + value: '$(Build.SourcesDirectory)\artifacts\spmi_collection\' + - name: SpmiLogsLocation + value: '$(Build.SourcesDirectory)\artifacts\spmi_logs\' + - ${{ if ne(parameters.osGroup, 'windows') }}: + - name: PythonScript + value: 'python3' + - name: PipScript + value: 'pip3' + - name: MchFilesLocation + value: '$(Build.SourcesDirectory)/artifacts/helixresults/' + - name: MergedMchFileLocation + value: '$(Build.SourcesDirectory)/artifacts/spmi_collection/' + - name: SpmiLogsLocation + value: '$(Build.SourcesDirectory)/artifacts/spmi_logs/' + # Set job timeouts # # "timeoutPerTestCollectionInMinutes" is the time needed for the "biggest" xUnit test collection to complete. @@ -298,6 +332,16 @@ jobs: displayName: 'native test artifacts' + # SuperPMI collection: Download x64 coreclr if running on non-x64 configuration (needed for mcs.exe) + - ${{ if and(eq(parameters.SuperPmiCollect, true), ne(parameters.archType, 'x64')) }}: + - template: /eng/pipelines/common/download-artifact-step.yml + parameters: + unpackFolder: '$(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).x64.$(buildConfigUpper)' + artifactFileName: 'CoreCLRProduct___$(osGroup)$(osSubgroup)_x64_$(buildConfig)$(archiveExtension)' + artifactName: 'CoreCLRProduct___$(osGroup)$(osSubgroup)_x64_$(buildConfig)' + displayName: 'CoreCLR product build (x64)' + + # Publish native test components to test output folder. Sadly we cannot do this # during product build (so that we could zip up the files in their final test location # and directly unzip them there after download). 
Unfortunately the logic to copy @@ -353,6 +397,7 @@ jobs: runtimeFlavor: ${{ parameters.runtimeFlavor }} shouldContinueOnError: ${{ parameters.shouldContinueOnError }} runtimeVariant: ${{ parameters.runtimeVariant }} + SuperPmiCollect: ${{ parameters.SuperPmiCollect }} ${{ if eq(variables['System.TeamProject'], 'public') }}: creator: $(Build.DefinitionName) @@ -603,3 +648,74 @@ jobs: artifactName: '${{ parameters.runtimeFlavor }}_${{ parameters.runtimeVariant }}_$(LogNamePrefix)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)_${{ parameters.testGroup }}' continueOnError: true condition: always() + + ######################################################################################################## + # + # Finalize SuperPMI collection: (1) merge all MCH files generated by all Helix jobs, (2) upload MCH file to Azure Storage, (3) upload log files + # Note that all these steps are "condition: always()" because we want to upload as much of the collection + # as possible, even if there were test failures. + # + ######################################################################################################## + + - ${{ if eq(parameters.SuperPmiCollect, true) }}: + + # Create required directories for merged mch collection and superpmi logs + - ${{ if ne(parameters.osGroup, 'windows') }}: + - script: | + mkdir -p $(MergedMchFileLocation) + mkdir -p $(SpmiLogsLocation) + displayName: Create SuperPMI directories + condition: always() + - ${{ if eq(parameters.osGroup, 'windows') }}: + - script: | + mkdir $(MergedMchFileLocation) + mkdir $(SpmiLogsLocation) + displayName: Create SuperPMI directories + condition: always() + + - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py merge-mch -log_level DEBUG -pattern $(MchFilesLocation)$(CollectionName).$(CollectionType)*.mch -output_mch_path $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch + displayName: 'Merge $(CollectionName)-$(CollectionType) SuperPMI collections' + condition: always() + + - template: /eng/pipelines/common/upload-artifact-step.yml + parameters: + rootFolder: $(MergedMchFileLocation) + includeRootFolder: false + archiveType: $(archiveType) + tarCompression: $(tarCompression) + archiveExtension: $(archiveExtension) + artifactName: 'SuperPMI_Collection_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)' + displayName: 'Upload artifacts SuperPMI $(CollectionName)-$(CollectionType) collection' + condition: always() + + # Ensure the Python azure-storage-blob package is installed before doing the upload. 
+ - script: $(PipScript) install --user --upgrade pip && $(PipScript) install --user azure.storage.blob==12.5.0 --force-reinstall + displayName: Upgrade Pip to latest and install azure-storage-blob Python package + condition: always() + + - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py upload -log_level DEBUG -arch $(archType) -build_type $(buildConfig) -mch_files $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch -core_root $(Build.SourcesDirectory)/artifacts/bin/coreclr/$(osGroup).x64.$(buildConfigUpper) + displayName: 'Upload SuperPMI $(CollectionName)-$(CollectionType) collection to Azure Storage' + condition: always() + env: + CLRJIT_AZ_KEY: $(clrjit_key1) # secret key stored as variable in pipeline + + - task: CopyFiles@2 + displayName: Copying superpmi.log of all partitions + inputs: + sourceFolder: '$(MchFilesLocation)' + contents: '**/$(CollectionName).$(CollectionType)*.log' + targetFolder: '$(SpmiLogsLocation)' + condition: always() + + - task: PublishPipelineArtifact@1 + displayName: Publish SuperPMI logs + inputs: + targetPath: $(SpmiLogsLocation) + artifactName: 'SuperPMI_Logs_$(CollectionName)_$(CollectionType)_$(osGroup)$(osSubgroup)_$(archType)_$(buildConfig)' + condition: always() + + ######################################################################################################## + # + # End of SuperPMI processing + # + ######################################################################################################## diff --git a/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml b/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml index 0a0242529eb9d..dfad8c58d880a 100644 --- a/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml +++ b/eng/pipelines/common/templates/runtimes/send-to-helix-step.yml @@ -26,6 +26,7 @@ parameters: runtimeFlavor: 'CoreCLR' runtimeVariant: '' shouldContinueOnError: false + SuperPmiCollect: '' steps: @@ -57,6 +58,7 @@ steps: _TimeoutPerTestInMinutes: ${{ parameters.timeoutPerTestInMinutes }} RuntimeFlavor: ${{ parameters.runtimeFlavor }} _RuntimeVariant: ${{ parameters.runtimeVariant }} + _SuperPmiCollect: ${{ parameters.SuperPmiCollect }} ${{ if eq(parameters.publishTestResults, 'true') }}: SYSTEM_ACCESSTOKEN: $(System.AccessToken) # TODO: remove NUGET_PACKAGES once https://github.com/dotnet/arcade/issues/1578 is fixed diff --git a/eng/pipelines/common/upload-artifact-step.yml b/eng/pipelines/common/upload-artifact-step.yml index df753b1ed0321..249da066c7aae 100644 --- a/eng/pipelines/common/upload-artifact-step.yml +++ b/eng/pipelines/common/upload-artifact-step.yml @@ -6,6 +6,7 @@ parameters: archiveExtension: '' artifactName: '' displayName: '' + condition: succeeded() steps: # Zip Artifact @@ -17,9 +18,11 @@ steps: archiveType: ${{ parameters.archiveType }} tarCompression: ${{ parameters.tarCompression }} includeRootFolder: ${{ parameters.includeRootFolder }} + condition: ${{ parameters.condition }} - task: PublishBuildArtifacts@1 displayName: 'Publish ${{ parameters.displayName }}' inputs: pathtoPublish: $(Build.StagingDirectory)/${{ parameters.artifactName }}${{ parameters.archiveExtension }} artifactName: ${{ parameters.artifactName }} + condition: ${{ parameters.condition }} diff --git a/eng/pipelines/coreclr/superpmi-collect.yml b/eng/pipelines/coreclr/superpmi-collect.yml index 57093bb758a1a..f95a500a3d5fb 100644 --- a/eng/pipelines/coreclr/superpmi-collect.yml +++ b/eng/pipelines/coreclr/superpmi-collect.yml @@ -46,6 
+46,8 @@ jobs: - windows_x64 # superpmi-collect-job that targets macOS/arm64 depends on coreclr binaries produced by the macOS/x64 job +# We don't collect osx-x64 (it's essentially the same as linux-x64). If we did, we'd add OSX_x64 in the +# build-coreclr-and-libraries-job.yml above, and remove this. - template: /eng/pipelines/common/platform-matrix.yml parameters: jobTemplate: /eng/pipelines/coreclr/templates/build-job.yml @@ -69,8 +71,6 @@ jobs: jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml buildConfig: checked platforms: - # Linux tests are built on the OSX machines. - # - OSX_x64 - OSX_arm64 - Linux_arm - Linux_arm64 @@ -91,8 +91,6 @@ jobs: jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml buildConfig: checked platforms: - # Linux tests are built on the OSX machines. - # - OSX_x64 - OSX_arm64 - Linux_arm - Linux_arm64 @@ -100,22 +98,19 @@ jobs: - windows_x64 - windows_x86 - windows_arm64 - - CoreClrTestBuildHost # Either OSX_x64 or Linux_x64 helixQueueGroup: ci helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml jobParameters: testGroup: outerloop liveLibrariesBuildConfig: Release collectionType: pmi - collectionName: coreclr_tests + collectionName: libraries_tests - template: /eng/pipelines/common/platform-matrix.yml parameters: jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml buildConfig: checked platforms: - # Linux tests are built on the OSX machines. - # - OSX_x64 - OSX_arm64 - Linux_arm - Linux_arm64 @@ -123,22 +118,19 @@ jobs: - windows_x64 - windows_x86 - windows_arm64 - - CoreClrTestBuildHost # Either OSX_x64 or Linux_x64 helixQueueGroup: ci helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml jobParameters: testGroup: outerloop liveLibrariesBuildConfig: Release - collectionType: pmi - collectionName: libraries_tests + collectionType: crossgen2 + collectionName: libraries - template: /eng/pipelines/common/platform-matrix.yml parameters: jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml buildConfig: checked platforms: - # Linux tests are built on the OSX machines. - # - OSX_x64 - OSX_arm64 - Linux_arm - Linux_arm64 @@ -151,16 +143,17 @@ jobs: jobParameters: testGroup: outerloop liveLibrariesBuildConfig: Release - collectionType: crossgen2 - collectionName: libraries + collectionType: run + collectionName: benchmarks +# +# Collection of coreclr test run +# - template: /eng/pipelines/common/platform-matrix.yml parameters: - jobTemplate: /eng/pipelines/coreclr/templates/superpmi-collect-job.yml + jobTemplate: /eng/pipelines/common/templates/runtimes/run-test-job.yml buildConfig: checked platforms: - # Linux tests are built on the OSX machines. 
- # - OSX_x64 - OSX_arm64 - Linux_arm - Linux_arm64 @@ -168,10 +161,9 @@ jobs: - windows_x64 - windows_x86 - windows_arm64 - helixQueueGroup: ci + helixQueueGroup: superpmi helixQueuesTemplate: /eng/pipelines/coreclr/templates/helix-queues-setup.yml jobParameters: testGroup: outerloop liveLibrariesBuildConfig: Release - collectionType: run - collectionName: benchmarks + SuperPmiCollect: true \ No newline at end of file diff --git a/eng/pipelines/coreclr/templates/crossdac-pack.yml b/eng/pipelines/coreclr/templates/crossdac-pack.yml index fc3ef404c171a..99a8b115a0cf9 100644 --- a/eng/pipelines/coreclr/templates/crossdac-pack.yml +++ b/eng/pipelines/coreclr/templates/crossdac-pack.yml @@ -38,12 +38,10 @@ jobs: - name: officialBuildIdArg value: '' - name: crossDacArgs - value: '' + value: '/p:CrossDacArtifactsDir=$(crossDacArtifactPath)/$(buildCrossDacArtifactName)' - ${{ if and(eq(variables['System.TeamProject'], 'internal'), ne(variables['Build.Reason'], 'PullRequest')) }}: - name: officialBuildIdArg value: '/p:OfficialBuildId=$(Build.BuildNumber)' - - name: crossDacArgs - value: '/p:CrossDacArtifactsDir=$(crossDacArtifactPath)/$(buildCrossDacArtifactName)' - name: SignType value: $[ coalesce(variables.OfficialSignType, 'real') ] - ${{ parameters.variables }} diff --git a/eng/pipelines/coreclr/templates/helix-queues-setup.yml b/eng/pipelines/coreclr/templates/helix-queues-setup.yml index 8160f934a9d04..796a36abbda25 100644 --- a/eng/pipelines/coreclr/templates/helix-queues-setup.yml +++ b/eng/pipelines/coreclr/templates/helix-queues-setup.yml @@ -11,6 +11,13 @@ parameters: dependOnEvaluatePaths: false jobParameters: {} +# parameters.jobParameters.helixQueueGroup values: +# 'pr' -- pull request +# 'ci' -- continuous integration ("merge") +# 'libraries' -- libraries tests +# 'cet' -- machines supporting CET technology +# 'superpmi' -- for TeamProject 'internal', a smaller set of queues (one per architecture, not several) for SuperPMI collection + jobs: - template: ${{ parameters.jobTemplate }} parameters: @@ -45,7 +52,9 @@ jobs: - ${{ if eq(parameters.platform, 'Linux_arm') }}: - ${{ if eq(variables['System.TeamProject'], 'public') }}: - (Ubuntu.1804.Arm32.Open)Ubuntu.1804.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7-bfcd90a-20200121150440 - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), in(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: + - (Ubuntu.1804.Arm32)Ubuntu.1804.Armarch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7-bfcd90a-20200121150440 + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), notIn(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: - (Debian.10.Arm32)Ubuntu.1804.Armarch@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-10-helix-arm32v7-20210304164340-6616c63 - (Debian.11.Arm32)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-arm32v7-20210304164347-5a7c380 - (Ubuntu.1804.Arm32)Ubuntu.1804.Armarch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7-bfcd90a-20200121150440 @@ -57,7 +66,9 @@ jobs: - ${{ if and(eq(variables['System.TeamProject'], 'public'), notIn(parameters.jobParameters.helixQueueGroup, 'pr', 'ci', 'libraries')) }}: - (Debian.10.Arm64.Open)Ubuntu.1804.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-10-helix-arm64v8-20220818195427-06f234f - 
(Debian.11.Arm64.Open)Ubuntu.1804.Armarch.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-arm64v8-20220818195437-06f234f - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), in(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: + - (Ubuntu.1804.Arm64)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8-20220824230426-06f234f + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), notIn(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: - (Debian.10.Arm64)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-10-helix-arm64v8-20220818195427-06f234f - (Debian.11.Arm64)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-arm64v8-20220818195437-06f234f - (Ubuntu.1804.Arm64)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8-20220824230426-06f234f @@ -93,7 +104,9 @@ jobs: - Ubuntu.1804.Amd64.Open - (Centos.8.Amd64.Open)Ubuntu.1804.Amd64.Open@mcr.microsoft.com/dotnet-buildtools/prereqs:centos-8-helix-20201229003624-c1bf759 - RedHat.7.Amd64.Open - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), in(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: + - Ubuntu.1804.Amd64 + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), notIn(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: - (Debian.10.Amd64)Ubuntu.1804.amd64@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-10-helix-amd64-20220810215022-f344011 - (Debian.11.Amd64)Ubuntu.1804.amd64@mcr.microsoft.com/dotnet-buildtools/prereqs:debian-11-helix-amd64-20220810215032-f344011 - Ubuntu.1804.Amd64 @@ -126,7 +139,9 @@ jobs: - (Windows.Nano.1809.Amd64.Open)windows.10.amd64.serverrs5.open@mcr.microsoft.com/dotnet-buildtools/prereqs:nanoserver-1809-helix-amd64-08e8e40-20200107182504 - Windows.7.Amd64.Open - Windows.10.Amd64.Open - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), in(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: + - Windows.10.Amd64.X86.Rt + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), notIn(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: - Windows.7.Amd64 - Windows.81.Amd64 - Windows.10.Amd64 @@ -140,7 +155,9 @@ jobs: - ${{ if and(eq(variables['System.TeamProject'], 'public'), notIn(parameters.jobParameters.helixQueueGroup, 'pr', 'ci', 'libraries')) }}: - Windows.7.Amd64.Open - Windows.10.Amd64.Open - - ${{ if eq(variables['System.TeamProject'], 'internal') }}: + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), in(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: + - Windows.10.Amd64.X86.Rt + - ${{ if and(eq(variables['System.TeamProject'], 'internal'), notIn(parameters.jobParameters.helixQueueGroup, 'superpmi')) }}: - Windows.7.Amd64 - Windows.81.Amd64 - Windows.10.Amd64 diff --git a/eng/pipelines/coreclr/templates/run-performance-job.yml b/eng/pipelines/coreclr/templates/run-performance-job.yml index 218c8262ad52d..574b131247f1d 100644 --- a/eng/pipelines/coreclr/templates/run-performance-job.yml +++ b/eng/pipelines/coreclr/templates/run-performance-job.yml @@ -92,7 +92,7 @@ jobs: export PERFLAB_UPLOAD_TOKEN="$(HelixPerfUploadTokenValue)" || export PERF_PREREQS_INSTALL_FAILED=1; test "x$PERF_PREREQS_INSTALL_FAILED" = "x1" && echo "** Error: Failed to install 
prerequites **" - - HelixPreCommandStemMusl: 'export ORIGPYPATH=$PYTHONPATH;sudo apk add icu-libs krb5-libs libgcc libintl libssl1.1 libstdc++ zlib cargo;sudo apk add libgdiplus --repository http://dl-cdn.alpinelinux.org/alpine/edge/testing; python3 -m venv $HELIX_WORKITEM_PAYLOAD/.venv;source $HELIX_WORKITEM_PAYLOAD/.venv/bin/activate;export PYTHONPATH=;python3 -m pip install --user -U pip;pip3 install --user azure.storage.blob==12.7.1;pip3 install --user azure.storage.queue==12.1.5;export PERFLAB_UPLOAD_TOKEN="$(HelixPerfUploadTokenValue)"' + - HelixPreCommandStemMusl: 'export ORIGPYPATH=$PYTHONPATH;sudo apk add icu-libs krb5-libs libgcc libintl libssl1.1 libstdc++ zlib cargo;sudo apk add libgdiplus --repository http://dl-cdn.alpinelinux.org/alpine/edge/testing; python3 -m venv $HELIX_WORKITEM_PAYLOAD/.venv;source $HELIX_WORKITEM_PAYLOAD/.venv/bin/activate;export PYTHONPATH=;python3 -m pip install -U pip;pip3 install azure.storage.blob==12.7.1;pip3 install azure.storage.queue==12.1.5;export PERFLAB_UPLOAD_TOKEN="$(HelixPerfUploadTokenValue)"' - ExtraMSBuildLogsWindows: 'set MSBUILDDEBUGCOMM=1;set "MSBUILDDEBUGPATH=%HELIX_WORKITEM_UPLOAD_ROOT%"' - ExtraMSBuildLogsLinux: 'export MSBUILDDEBUGCOMM=1;export "MSBUILDDEBUGPATH=$HELIX_WORKITEM_UPLOAD_ROOT"' - HelixPreCommand: '' diff --git a/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml b/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml index 5ea9e3f03b1a6..893857887ef28 100644 --- a/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml +++ b/eng/pipelines/coreclr/templates/run-superpmi-collect-job.yml @@ -73,6 +73,8 @@ jobs: value: '$(Build.SourcesDirectory)\artifacts\spmi_collection\' - name: SpmiLogsLocation value: '$(Build.SourcesDirectory)\artifacts\spmi_logs\' + - name: PayloadLocation + value: '$(Build.SourcesDirectory)\payload' - ${{ if ne(parameters.osGroup, 'windows') }}: - name: PythonScript value: 'python3' @@ -86,6 +88,9 @@ jobs: value: '$(Build.SourcesDirectory)/artifacts/spmi_collection/' - name: SpmiLogsLocation value: '$(Build.SourcesDirectory)/artifacts/spmi_logs/' + - name: PayloadLocation + value: '$(Build.SourcesDirectory)/payload' + - ${{ if eq(parameters.collectionName, 'libraries') }}: - name: InputDirectory value: '$(Core_Root_Dir)' @@ -98,6 +103,7 @@ jobs: - ${{ if eq(parameters.collectionName, 'libraries_tests') }}: - name: InputDirectory value: '$(Build.SourcesDirectory)/artifacts/tests/libraries/$(osGroup).$(archType).$(buildConfigUpper)' + workspace: clean: all pool: @@ -106,7 +112,7 @@ jobs: steps: - ${{ parameters.steps }} - - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_collect_setup.py -source_directory $(Build.SourcesDirectory) -core_root_directory $(Core_Root_Dir) -arch $(archType) -platform $(osGroup) -mch_file_tag $(MchFileTag) -input_directory $(InputDirectory) -collection_name $(CollectionName) -collection_type $(CollectionType) -max_size 25 # size in MB + - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi_collect_setup.py -payload_directory $(PayloadLocation) -source_directory $(Build.SourcesDirectory) -core_root_directory $(Core_Root_Dir) -arch $(archType) -platform $(osGroup) -mch_file_tag $(MchFileTag) -input_directory $(InputDirectory) -collection_name $(CollectionName) -collection_type $(CollectionType) -max_size 25 # size in MB displayName: ${{ format('SuperPMI setup ({0})', parameters.osGroup) }} # Create required directories for merged mch collection and superpmi logs @@ -144,7 +150,7 @@ jobs: # 
Always run merge step even if collection of some partition fails so we can store collection # of the partitions that succeeded. If all the partitions fail, merge-mch would fail and we won't # run future steps like uploading superpmi collection. - - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py merge-mch -log_level DEBUG -pattern $(MchFilesLocation)$(CollectionName).$(CollectionType)*.mch -output_mch_path $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch + - script: $(PythonScript) $(Build.SourcesDirectory)/src/coreclr/scripts/superpmi.py merge-mch --ci -log_level DEBUG -pattern $(MchFilesLocation)$(CollectionName).$(CollectionType)*.mch -output_mch_path $(MergedMchFileLocation)$(CollectionName).$(CollectionType).$(MchFileTag).mch displayName: ${{ format('Merge {0}-{1} SuperPMI collections', parameters.collectionName, parameters.collectionType) }} condition: always() diff --git a/eng/pipelines/installer/installer-matrix.yml b/eng/pipelines/installer/installer-matrix.yml deleted file mode 100644 index e9ae06bf26769..0000000000000 --- a/eng/pipelines/installer/installer-matrix.yml +++ /dev/null @@ -1,22 +0,0 @@ -parameters: - runtimeFlavor: 'coreclr' - platforms: [] - jobParameters: [] - buildConfig: Release - runtimeVariant: '' - -jobs: - -# -# Build and Test -# -- template: /eng/pipelines/common/platform-matrix.yml - parameters: - jobTemplate: /eng/pipelines/installer/jobs/base-job.yml - buildConfig: ${{ parameters.buildConfig }} - platforms: ${{ parameters.platforms }} - passPlatforms: true - runtimeFlavor: ${{ parameters.runtimeFlavor }} - runtimeVariant: ${{ parameters.runtimeVariant }} - jobParameters: - ${{ insert }}: ${{ parameters.jobParameters }} diff --git a/eng/pipelines/installer/jobs/base-job.yml b/eng/pipelines/installer/jobs/base-job.yml deleted file mode 100644 index d5b83707d7725..0000000000000 --- a/eng/pipelines/installer/jobs/base-job.yml +++ /dev/null @@ -1,489 +0,0 @@ -parameters: - buildConfig: '' - osGroup: '' - archType: '' - osSubgroup: '' - platform: '' - crossBuild: false - crossrootfsDir: '' - timeoutInMinutes: 120 - condition: true - shouldContinueOnError: false - container: '' - buildSteps: [] - dependsOn: [] - dependsOnGlobalBuild: false - dependOnEvaluatePaths: false - globalBuildSuffix: '' - variables: [] - name: '' - displayName: '' - runtimeVariant: '' - pool: '' - pgoType: '' - - packageDistroList: - - image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-debpkg-20220504035737-cfdd435 - packageType: deb - packagingArgs: /p:BuildDebPackage=true - - image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-8-rpmpkg-20210714125410-daa5116 - packageType: rpm - packagingArgs: /p:BuildRpmPackage=true - - isOfficialBuild: false - buildFullPlatformManifest: false - - liveRuntimeBuildConfig: '' - liveLibrariesBuildConfig: '' - runtimeFlavor: 'coreclr' - platforms: [] - -jobs: -- job: ${{ format('installer_{0}_{1}_{2}_{3}_{4}_', parameters.pgoType, parameters.runtimeFlavor, parameters.runtimeVariant, coalesce(parameters.name, parameters.platform), parameters.buildConfig) }} - displayName: ${{ format('{0} Installer Build and Test {1} {2} {3} {4}', parameters.pgoType, parameters.runtimeFlavor, parameters.runtimeVariant, coalesce(parameters.name, parameters.platform), parameters.buildConfig) }} - - condition: and(succeeded(), ${{ parameters.condition }}) - pool: ${{ parameters.pool }} - timeoutInMinutes: ${{ parameters.timeoutInMinutes }} - - # Do not attempt to clean workspace on Linux: the 
agent might not be able to remove the files - # because they may be owned by "root" due to the way this job uses Docker. We do our own cleanup - # in this case as a prepare step. - ${{ if ne(parameters.osGroup, 'Linux') }}: - workspace: - clean: all - - variables: - - ${{ each variable in parameters.variables }}: - - ${{ variable }} - - - name: OfficialBuildArg - value: '' - - - name: SkipTests - value: ${{ or( - not(in(parameters.archType, 'x64', 'x86')), - eq(parameters.runtimeFlavor, 'mono'), - eq(parameters.isOfficialBuild, true), - eq(parameters.crossBuild, true), - eq(parameters.pgoType, 'PGO')) }} - - - name: BuildAction - value: -test - - - ${{ if eq(variables.SkipTests, true) }}: - - name: BuildAction - value: '' - - - name: SignType - value: test - - - name: pgoInstrumentArg - value: '' - - ${{ if eq(parameters.pgoType, 'PGO' )}}: - - name: pgoInstrumentArg - value: '-pgoinstrument ' - - # Set up non-PR build from internal project - - ${{ if eq(parameters.isOfficialBuild, true) }}: - - name: SignType - value: $[ coalesce(variables.OfficialSignType, 'real') ] - - name: OfficialBuildArg - value: /p:OfficialBuildId=$(Build.BuildNumber) - - - name: buildCommandSourcesDirectory - ${{ if not(in(parameters.osGroup, 'Linux', 'FreeBSD')) }}: - value: '$(Build.SourcesDirectory)/' - # This job runs within Docker containers, so Build.SourcesDirectory is not accurate. - ${{ if in(parameters.osGroup, 'Linux', 'FreeBSD') }}: - value: '/root/runtime/' - - ### - ### Platform-specific variable setup - ### - - - ${{ if eq(parameters.osGroup, 'windows') }}: - - - name: CommonMSBuildArgs - value: >- - /p:TargetArchitecture=${{ parameters.archType }} - /p:PortableBuild=true - /p:SkipTests=$(SkipTests) - /p:RuntimeFlavor=${{ parameters.runtimeFlavor }} - $(OfficialBuildArg) - - name: MsbuildSigningArguments - value: >- - /p:CertificateId=400 - /p:DotNetSignType=$(SignType) - - name: TargetArchitecture - value: ${{ parameters.archType }} - - - name: BaseJobBuildCommand - value: >- - build.cmd -subset host+packs -ci - $(BuildAction) - -configuration $(_BuildConfig) - $(pgoInstrumentArg) - $(LiveOverridePathArgs) - $(CommonMSBuildArgs) - $(MsbuildSigningArguments) - - - ${{ if eq(parameters.osGroup, 'OSX') }}: - - - name: CommonMSBuildArgs - value: >- - /p:PortableBuild=true - /p:SkipTests=$(SkipTests) - /p:RuntimeFlavor=${{ parameters.runtimeFlavor }} - /p:TargetArchitecture=${{ parameters.archType }} - /p:CrossBuild=${{ parameters.crossBuild }} - - - name: BaseJobBuildCommand - value: >- - $(Build.SourcesDirectory)/build.sh -ci - $(BuildAction) - -configuration $(_BuildConfig) - -arch ${{ parameters.archType }} - $(LiveOverridePathArgs) - $(CommonMSBuildArgs) - $(OfficialBuildArg) - - - ${{ if in(parameters.osGroup, 'iOS', 'tvOS', 'Android', 'Browser') }}: - - - name: CommonMSBuildArgs - value: >- - /p:PortableBuild=true - /p:SkipTests=$(SkipTests) - - - name: BaseJobBuildCommand - value: >- - $(Build.SourcesDirectory)/build.sh -subset packs -ci - $(BuildAction) - -configuration $(_BuildConfig) - -os ${{ parameters.osGroup }} - -arch ${{ parameters.archType }} - /p:StripSymbols=true - $(LiveOverridePathArgs) - $(CommonMSBuildArgs) - $(OfficialBuildArg) - - - ${{ if in(parameters.osGroup, 'Linux', 'FreeBSD') }}: - - # Preserve the NuGet authentication env vars into the Docker container. - # The 'NuGetAuthenticate' build step may have set these. 
- - name: PreserveNuGetAuthDockerArgs - value: >- - -e VSS_NUGET_URI_PREFIXES - -e VSS_NUGET_ACCESSTOKEN - - - ${{ if ne(parameters.container, '') }}: - - name: RunArguments - value: >- - docker run --privileged --rm - -v "$(Build.SourcesDirectory):/root/runtime" - -w="/root/runtime" - $(PreserveNuGetAuthDockerArgs) - -e ROOTFS_DIR=${{ parameters.crossrootfsDir }} - ${{ parameters.container }} - - - name: BuildScript - value: ./build.sh - - name: MSBuildScript - value: /root/runtime/eng/common/msbuild.sh - - - ${{ if eq(parameters.isOfficialBuild, true) }}: - - name: BuildScript - value: ./eng/install-nuget-credprovider-then-build.sh --subset host+packs - - name: MSBuildScript - value: /root/runtime/eng/install-nuget-credprovider-then-msbuild.sh - - - name: CommonMSBuildArgs - value: >- - /p:Configuration=$(_BuildConfig) - /p:TargetOS=${{ parameters.osGroup }} - /p:TargetArchitecture=${{ parameters.archType }} - /p:RuntimeFlavor=${{ parameters.runtimeFlavor }} - $(OfficialBuildArg) - - - name: _PortableBuild - value: ${{ eq(parameters.osSubgroup, '') }} - - - ${{ if and(eq(parameters.osSubgroup, '_musl'), eq(parameters.osGroup, 'Linux')) }}: - # Set output RID manually: musl isn't properly detected. Make sure to also convert linux to - # lowercase for RID format. (Detection normally converts, but we're preventing it.) - - name: OutputRidArg - value: /p:OutputRid=linux-musl-${{ parameters.archType }} - - name: RuntimeOSArg - value: /p:RuntimeOS=linux-musl - - name: _PortableBuild - value: true - - - name: BuildArguments - value: >- - -subset host+packs -ci - $(BuildAction) - /p:CrossBuild=${{ parameters.crossBuild }} - /p:PortableBuild=$(_PortableBuild) - /p:SkipTests=$(SkipTests) - $(pgoInstrumentArg) - $(LiveOverridePathArgs) - $(CommonMSBuildArgs) - $(OutputRidArg) - $(RuntimeOSArg) - - - name: PublishArguments - value: >- - /p:PortableBuild=$(_PortableBuild) - $(CommonMSBuildArgs) - $(OutputRidArg) - /bl:msbuild.publish.binlog - - - name: DockerRunMSBuild - value: >- - docker run - -v $(Build.SourcesDirectory):/root/runtime - -w=/root/runtime - $(PreserveNuGetAuthDockerArgs) - - - name: installersSubsetArg - value: --subset packs.installers - - - name: BaseJobBuildCommand - value: | - set -x - df -h - docker info - $(RunArguments) $(BuildScript) $(BuildArguments) - - ### - ### Common Live build override variable setup - ### - - - name: LiveOverridePathArgs - value: >- - $(RuntimeArtifactsArgs) - $(LibrariesConfigurationArg) - - - name: RuntimeArtifactsArgs - value: '' - - name: LibrariesConfigurationArg - value: '' - - - name: RuntimeDownloadPath - value: '' - - name: LibrariesDownloadPath - value: '' - - - ${{ if ne(parameters.liveRuntimeBuildConfig, '') }}: - - name: liveRuntimeLegName - value: ${{ format('{0}{1}_{2}_{3}', - parameters.osGroup, - parameters.osSubgroup, - parameters.archType, - parameters.liveRuntimeBuildConfig) }} - - name: RuntimeDownloadPath - value: 'artifacts/transport/${{ parameters.runtimeFlavor }}' - - name: RuntimeArtifactsArgs - value: >- - /p:RuntimeArtifactsPath=$(buildCommandSourcesDirectory)$(RuntimeDownloadPath) - /p:RuntimeConfiguration=${{ parameters.liveRuntimeBuildConfig }} - - name: RuntimeArtifactName - value: $(runtimeFlavorName)Product_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(liveRuntimeLegName) - - - ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}: - - name: liveLibrariesLegName - value: ${{ format('{0}{1}_{2}_{3}', - parameters.osGroup, - parameters.osSubgroup, - parameters.archType, - 
parameters.liveLibrariesBuildConfig) }} - - name: LibrariesDownloadPath - value: 'artifacts' - - name: LibrariesArtifactName - value: libraries_bin_$(liveLibrariesLegName) - - name: LibrariesConfigurationArg - value: ' /p:LibrariesConfiguration=${{ parameters.liveLibrariesBuildConfig }}' - - dependsOn: - - ${{ if eq(parameters.dependOnEvaluatePaths, true) }}: - - evaluate_paths - - ${{ parameters.dependsOn }} - - ${{ if ne(parameters.liveRuntimeBuildConfig, '') }}: - - ${{ format('{0}_{1}_product_build_{2}{3}_{4}_{5}{6}', - parameters.runtimeFlavor, - parameters.runtimeVariant, - parameters.osGroup, - parameters.osSubgroup, - parameters.archType, - parameters.liveRuntimeBuildConfig, - parameters.pgoType) }} - - ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}: - - libraries_build_${{ format('{0}{1}_{2}_{3}', - parameters.osGroup, - parameters.osSubgroup, - parameters.archType, - parameters.liveLibrariesBuildConfig) }} - - steps: - - - ${{ if ne(parameters.container, '') }}: - # Builds don't set user ID, so files might be owned by root and unable to be cleaned up by AzDO. - # Clean up the build dirs ourselves in another Docker container to avoid failures. - # Using hosted agents is tracked by https://github.com/dotnet/runtime/issues/3416 - - script: | - set -x - docker run --rm \ - -v "$(Agent.BuildDirectory):/root/build" \ - -w /root/build \ - ${{ parameters.container }} \ - bash -c ' - rm -v -rf a b s' - mkdir "$(Agent.BuildDirectory)/s" - displayName: Clean up old artifacts owned by root - - - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - task: NuGetAuthenticate@0 - - - ${{ if eq(parameters.osGroup, 'windows') }}: - # NuGet's http cache lasts 30 minutes. If we're on a static machine, this may interfere with - # auto-update PRs by preventing the CI build from fetching the new version. Delete the cache. 
- - powershell: Remove-Item -Recurse -ErrorAction Ignore "$env:LocalAppData\NuGet\v3-cache" - displayName: Clear NuGet http cache (if exists) - - - task: MicroBuildSigningPlugin@2 - displayName: Install MicroBuild plugin for Signing - inputs: - signType: $(SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - continueOnError: false - condition: and(succeeded(), in(variables['SignType'], 'real', 'test'), eq(${{ parameters.isOfficialBuild }}, true)) - - - checkout: self - clean: true - fetchDepth: $(checkoutFetchDepth) - - - ${{ if ne(parameters.liveRuntimeBuildConfig, '') }}: - - template: /eng/pipelines/common/download-artifact-step.yml - parameters: - unpackFolder: $(Build.SourcesDirectory)/$(RuntimeDownloadPath) - artifactFileName: '$(RuntimeArtifactName)$(archiveExtension)' - artifactName: '$(RuntimeArtifactName)' - displayName: '$(runtimeFlavorName) artifacts' - - - ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}: - - template: /eng/pipelines/common/download-artifact-step.yml - parameters: - unpackFolder: $(Build.SourcesDirectory)/$(LibrariesDownloadPath) - artifactFileName: '$(LibrariesArtifactName)$(archiveExtension)' - artifactName: '$(LibrariesArtifactName)' - displayName: 'Libraries artifacts' - cleanUnpackFolder: false - - - ${{ if in(parameters.osGroup, 'OSX', 'iOS', 'tvOS') }}: - - script: $(Build.SourcesDirectory)/eng/install-native-dependencies.sh ${{ parameters.osGroup }} ${{ parameters.archType }} azDO - displayName: Install Build Dependencies - - - script: | - du -sh $(Build.SourcesDirectory)/* - df -h - displayName: Disk Usage before Build - - # Build the default subset non-MacOS platforms - - ${{ if ne(parameters.osGroup, 'OSX') }}: - - script: $(BaseJobBuildCommand) - displayName: Build - continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }} - - # Build corehost, sign and add entitlements to MacOS binaries - - ${{ if eq(parameters.osGroup, 'OSX') }}: - - script: $(BaseJobBuildCommand) -subset host.native - displayName: Build CoreHost - continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }} - - - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: - - template: /eng/pipelines/common/macos-sign-with-entitlements.yml - parameters: - filesToSign: - - name: dotnet - path: $(Build.SourcesDirectory)/artifacts/bin/osx-${{ parameters.archType }}.$(_BuildConfig)/corehost - entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist - - name: apphost - path: $(Build.SourcesDirectory)/artifacts/bin/osx-${{ parameters.archType }}.$(_BuildConfig)/corehost - entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist - - - script: $(BaseJobBuildCommand) -subset host.pkg+host.tools+host.tests+packs - displayName: Build and Package - continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }} - - - ${{ if in(parameters.osGroup, 'OSX', 'iOS', 'tvOS') }}: - - script: | - du -sh $(Build.SourcesDirectory)/* - df -h - displayName: Disk Usage after Build - - # Only in glibc leg, we produce RPMs and Debs - - ${{ if and(eq(parameters.runtimeFlavor, 'coreclr'), or(eq(parameters.platform, 'Linux_x64'), eq(parameters.platform, 'Linux_arm64')), eq(parameters.osSubgroup, ''), eq(parameters.pgoType, ''))}}: - - ${{ each packageBuild in parameters.packageDistroList }}: - # 
This leg's RID matches the build image. Build its distro-dependent packages, as well as - # the distro-independent installers. (There's no particular reason to build the distro- - # independent installers on this leg, but we need to do it somewhere.) - # Currently, Linux_arm64 supports 'rpm' type only. - - ${{ if or(not(eq(parameters.platform, 'Linux_arm64')), eq(packageBuild.packageType, 'rpm')) }}: - - template: steps/build-linux-package.yml - parameters: - packageType: ${{ packageBuild.packageType }} - image: ${{ packageBuild.image }} - packageStepDescription: Runtime Deps, Runtime, Framework Packs installers - subsetArg: $(installersSubsetArg) - packagingArgs: ${{ packageBuild.packagingArgs }} - - - ${{ if ne(parameters.container, '') }}: - # Files may be owned by root because builds don't set user ID. Later build steps run 'find' in - # the source tree, which fails due to permissions in the 'NetCore*-Int-Pool' queues. This step - # prevents the failure by using chown to clean up our source tree. - - script: | - set -x - docker run --rm \ - -v "$(Agent.BuildDirectory):/root/build" \ - -w /root/build \ - ${{ parameters.container }} \ - bash -c "chown -R $(id -u):$(id -g) *" - displayName: Update file ownership from root to build agent account - continueOnError: true - condition: succeededOrFailed() - - - ${{ if and(eq(parameters.osGroup, 'windows'), eq(parameters.isOfficialBuild, true)) }}: - - task: NuGetCommand@2 - displayName: Push Visual Studio NuPkgs - inputs: - command: push - packagesToPush: '$(Build.SourcesDirectory)/artifacts/packages/$(_BuildConfig)/*/VS.Redist.Common.*.nupkg' - nuGetFeedType: external - publishFeedCredentials: 'DevDiv - VS package feed' - condition: and( - succeeded(), - eq(variables['_BuildConfig'], 'Release'), - ne(variables['DisableVSPublish'], 'true'), - ne(variables['PostBuildSign'], 'true')) - - - template: steps/upload-job-artifacts.yml - parameters: - name: ${{ coalesce(parameters.name, parameters.platform) }} - runtimeFlavor: ${{ parameters.runtimeFlavor }} - runtimeVariant: ${{ parameters.runtimeVariant }} - skipTests: ${{ variables.SkipTests }} - isOfficialBuild: ${{ eq(parameters.isOfficialBuild, true) }} - pgoType: ${{ parameters.pgoType }} - - - ${{ if ne(parameters.osGroup, 'windows') }}: - - script: set -x && df -h - displayName: Check remaining storage space - condition: always() - continueOnError: true - - # Force clean up machine in case any docker images are left behind - - ${{ if ne(parameters.container, '') }}: - - script: docker system prune -af && df -h - displayName: Run Docker clean up - condition: succeededOrFailed() diff --git a/eng/pipelines/installer/jobs/build-job.yml b/eng/pipelines/installer/jobs/build-job.yml new file mode 100644 index 0000000000000..44abaeddb39c1 --- /dev/null +++ b/eng/pipelines/installer/jobs/build-job.yml @@ -0,0 +1,402 @@ +parameters: + buildConfig: '' + osGroup: '' + archType: '' + osSubgroup: '' + platform: '' + crossBuild: false + crossrootfsDir: '' + timeoutInMinutes: 120 + condition: true + shouldContinueOnError: false + container: '' + buildSteps: [] + dependsOn: [] + dependsOnGlobalBuild: false + dependOnEvaluatePaths: false + globalBuildSuffix: '' + variables: [] + name: '' + displayName: '' + runtimeVariant: '' + pool: '' + pgoType: '' + + # The target names here should match container names in the resources section in our pipelines, like runtime.yml + packageDistroList: + - target: debpkg + packageType: deb + packagingArgs: /p:BuildDebPackage=true + - target: rpmpkg + packageType: rpm + 
packagingArgs: /p:BuildRpmPackage=true + + isOfficialBuild: false + buildFullPlatformManifest: false + + liveRuntimeBuildConfig: '' + liveLibrariesBuildConfig: '' + runtimeFlavor: 'coreclr' + +### Product build +jobs: +- template: /eng/common/templates/job/job.yml + parameters: + buildConfig: ${{ parameters.buildConfig }} + archType: ${{ parameters.archType }} + osGroup: ${{ parameters.osGroup }} + osSubgroup: ${{ parameters.osSubgroup }} + runtimeVariant: ${{ parameters.runtimeVariant }} + testGroup: ${{ parameters.testGroup }} + helixType: 'build/product/' + enableMicrobuild: true + stagedBuild: ${{ parameters.stagedBuild }} + pool: ${{ parameters.pool }} + condition: ${{ parameters.condition }} + dependOnEvaluatePaths: ${{ parameters.dependOnEvaluatePaths }} + disableClrTest: ${{ parameters.disableClrTest }} + pgoType: ${{ parameters.pgoType }} + + # Compute job name from template parameters + name: ${{ format('installer_{0}_{1}_{2}_{3}_{4}_', parameters.pgoType, parameters.runtimeFlavor, parameters.runtimeVariant, coalesce(parameters.name, parameters.platform), parameters.buildConfig) }} + displayName: ${{ format('{0} Installer Build and Test {1} {2} {3} {4}', parameters.pgoType, parameters.runtimeFlavor, parameters.runtimeVariant, coalesce(parameters.name, parameters.platform), parameters.buildConfig) }} + + # Run all steps in the container. + # Note that the containers are defined in platform-matrix.yml + container: ${{ parameters.container }} + + timeoutInMinutes: ${{ parameters.timeoutInMinutes }} + + crossBuild: ${{ parameters.crossBuild }} + crossrootfsDir: ${{ parameters.crossrootfsDir }} + + gatherAssetManifests: true + variables: + - ${{ each variable in parameters.variables }}: + - ${{ variable }} + + - name: OfficialBuildArg + value: '' + + - name: SkipTests + value: ${{ or( + not(in(parameters.archType, 'x64', 'x86')), + eq(parameters.runtimeFlavor, 'mono'), + eq(parameters.isOfficialBuild, true), + eq(parameters.crossBuild, true), + eq(parameters.pgoType, 'PGO')) }} + + - name: BuildAction + value: -test + + - ${{ if eq(or(not(in(parameters.archType, 'x64', 'x86')), eq(parameters.runtimeFlavor, 'mono'), eq(parameters.isOfficialBuild, true), eq(parameters.crossBuild, true), eq(parameters.pgoType, 'PGO')), true) }}: + - name: BuildAction + value: '' + + - name: SignType + value: test + + - name: pgoInstrumentArg + value: '' + - ${{ if eq(parameters.pgoType, 'PGO' )}}: + - name: pgoInstrumentArg + value: '-pgoinstrument ' + + # Set up non-PR build from internal project + - ${{ if eq(parameters.isOfficialBuild, true) }}: + - name: SignType + value: $[ coalesce(variables.OfficialSignType, 'real') ] + - name: OfficialBuildArg + value: /p:OfficialBuildId=$(Build.BuildNumber) + + - name: buildCommandSourcesDirectory + value: '$(Build.SourcesDirectory)/' + + ### + ### Platform-specific variable setup + ### + + - ${{ if eq(parameters.osGroup, 'windows') }}: + + - name: CommonMSBuildArgs + value: >- + /p:TargetArchitecture=${{ parameters.archType }} + /p:PortableBuild=true + /p:SkipTests=$(SkipTests) + /p:RuntimeFlavor=${{ parameters.runtimeFlavor }} + $(OfficialBuildArg) + - name: MsbuildSigningArguments + value: >- + /p:CertificateId=400 + /p:DotNetSignType=$(SignType) + - name: TargetArchitecture + value: ${{ parameters.archType }} + + - name: BaseJobBuildCommand + value: >- + build.cmd -subset host+packs -ci + $(BuildAction) + -configuration $(_BuildConfig) + $(pgoInstrumentArg) + $(LiveOverridePathArgs) + $(CommonMSBuildArgs) + $(MsbuildSigningArguments) + + - ${{ if 
eq(parameters.osGroup, 'OSX') }}: + + - name: CommonMSBuildArgs + value: >- + /p:PortableBuild=true + /p:SkipTests=$(SkipTests) + /p:RuntimeFlavor=${{ parameters.runtimeFlavor }} + /p:TargetArchitecture=${{ parameters.archType }} + /p:CrossBuild=${{ parameters.crossBuild }} + + - name: BaseJobBuildCommand + value: >- + $(Build.SourcesDirectory)/build.sh -ci + $(BuildAction) + -configuration $(_BuildConfig) + -arch ${{ parameters.archType }} + $(LiveOverridePathArgs) + $(CommonMSBuildArgs) + $(OfficialBuildArg) + + - ${{ if in(parameters.osGroup, 'iOS', 'tvOS', 'Android', 'Browser') }}: + + - name: CommonMSBuildArgs + value: >- + /p:PortableBuild=true + /p:SkipTests=$(SkipTests) + + - name: BaseJobBuildCommand + value: >- + $(Build.SourcesDirectory)/build.sh -subset packs -ci + $(BuildAction) + -configuration $(_BuildConfig) + -os ${{ parameters.osGroup }} + -arch ${{ parameters.archType }} + /p:StripSymbols=true + $(LiveOverridePathArgs) + $(CommonMSBuildArgs) + $(OfficialBuildArg) + + - ${{ if in(parameters.osGroup, 'Linux', 'FreeBSD') }}: + - name: CommonMSBuildArgs + value: >- + /p:Configuration=$(_BuildConfig) + /p:TargetOS=${{ parameters.osGroup }} + /p:TargetArchitecture=${{ parameters.archType }} + /p:RuntimeFlavor=${{ parameters.runtimeFlavor }} + $(OfficialBuildArg) + + - name: _PortableBuild + value: ${{ eq(parameters.osSubgroup, '') }} + + - ${{ if and(eq(parameters.osSubgroup, '_musl'), eq(parameters.osGroup, 'Linux')) }}: + # Set output RID manually: musl isn't properly detected. Make sure to also convert linux to + # lowercase for RID format. (Detection normally converts, but we're preventing it.) + - name: OutputRidArg + value: /p:OutputRid=linux-musl-${{ parameters.archType }} + - name: RuntimeOSArg + value: /p:RuntimeOS=linux-musl + - name: _PortableBuild + value: true + + - name: BaseJobBuildCommand + value: >- + $(Build.SourcesDirectory)/build.sh -subset host+packs -ci + $(BuildAction) + /p:CrossBuild=${{ parameters.crossBuild }} + /p:PortableBuild=$(_PortableBuild) + /p:SkipTests=$(SkipTests) + $(pgoInstrumentArg) + $(LiveOverridePathArgs) + $(CommonMSBuildArgs) + $(OutputRidArg) + $(RuntimeOSArg) + + - name: PublishArguments + value: >- + /p:PortableBuild=$(_PortableBuild) + $(CommonMSBuildArgs) + $(OutputRidArg) + /bl:msbuild.publish.binlog + + - name: installersSubsetArg + value: --subset packs.installers + + ### + ### Common Live build override variable setup + ### + + - name: LiveOverridePathArgs + value: >- + $(RuntimeArtifactsArgs) + $(LibrariesConfigurationArg) + + - name: RuntimeArtifactsArgs + value: '' + - name: LibrariesConfigurationArg + value: '' + + - name: RuntimeDownloadPath + value: '' + - name: LibrariesDownloadPath + value: '' + + - ${{ if ne(parameters.liveRuntimeBuildConfig, '') }}: + - name: liveRuntimeLegName + value: ${{ format('{0}{1}_{2}_{3}', + parameters.osGroup, + parameters.osSubgroup, + parameters.archType, + parameters.liveRuntimeBuildConfig) }} + - name: RuntimeDownloadPath + value: 'artifacts/transport/${{ parameters.runtimeFlavor }}' + - name: RuntimeArtifactsArgs + value: >- + /p:RuntimeArtifactsPath=$(buildCommandSourcesDirectory)$(RuntimeDownloadPath) + /p:RuntimeConfiguration=${{ parameters.liveRuntimeBuildConfig }} + - name: RuntimeArtifactName + value: $(runtimeFlavorName)Product_${{ parameters.pgoType }}_${{ parameters.runtimeVariant }}_$(liveRuntimeLegName) + + - ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}: + - name: liveLibrariesLegName + value: ${{ format('{0}{1}_{2}_{3}', + parameters.osGroup, + 
parameters.osSubgroup, + parameters.archType, + parameters.liveLibrariesBuildConfig) }} + - name: LibrariesDownloadPath + value: 'artifacts' + - name: LibrariesArtifactName + value: libraries_bin_$(liveLibrariesLegName) + - name: LibrariesConfigurationArg + value: ' /p:LibrariesConfiguration=${{ parameters.liveLibrariesBuildConfig }}' + + dependsOn: + - ${{ if eq(parameters.dependOnEvaluatePaths, true) }}: + - evaluate_paths + - ${{ parameters.dependsOn }} + - ${{ if ne(parameters.liveRuntimeBuildConfig, '') }}: + - ${{ format('{0}_{1}_product_build_{2}{3}_{4}_{5}{6}', + parameters.runtimeFlavor, + parameters.runtimeVariant, + parameters.osGroup, + parameters.osSubgroup, + parameters.archType, + parameters.liveRuntimeBuildConfig, + parameters.pgoType) }} + - ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}: + - libraries_build_${{ format('{0}{1}_{2}_{3}', + parameters.osGroup, + parameters.osSubgroup, + parameters.archType, + parameters.liveLibrariesBuildConfig) }} + steps: + - checkout: self + clean: true + fetchDepth: $(checkoutFetchDepth) + - ${{ if ne(parameters.liveRuntimeBuildConfig, '') }}: + - template: /eng/pipelines/common/download-artifact-step.yml + parameters: + unpackFolder: $(Build.SourcesDirectory)/$(RuntimeDownloadPath) + artifactFileName: '$(RuntimeArtifactName)$(archiveExtension)' + artifactName: '$(RuntimeArtifactName)' + displayName: '$(runtimeFlavorName) artifacts' + + - ${{ if ne(parameters.liveLibrariesBuildConfig, '') }}: + - template: /eng/pipelines/common/download-artifact-step.yml + parameters: + unpackFolder: $(Build.SourcesDirectory)/$(LibrariesDownloadPath) + artifactFileName: '$(LibrariesArtifactName)$(archiveExtension)' + artifactName: '$(LibrariesArtifactName)' + displayName: 'Libraries artifacts' + cleanUnpackFolder: false + + - ${{ if in(parameters.osGroup, 'OSX', 'iOS', 'tvOS') }}: + - script: $(Build.SourcesDirectory)/eng/install-native-dependencies.sh ${{ parameters.osGroup }} ${{ parameters.archType }} azDO + displayName: Install Build Dependencies + + - script: | + du -sh $(Build.SourcesDirectory)/* + df -h + displayName: Disk Usage before Build + + # Build the default subset on non-MacOS platforms + - ${{ if ne(parameters.osGroup, 'OSX') }}: + - script: $(BaseJobBuildCommand) + displayName: Build + continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }} + + # Build corehost, sign and add entitlements to MacOS binaries + - ${{ if eq(parameters.osGroup, 'OSX') }}: + - script: $(BaseJobBuildCommand) -subset host.native + displayName: Build CoreHost + continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }} + + - ${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}: + - template: /eng/pipelines/common/macos-sign-with-entitlements.yml + parameters: + filesToSign: + - name: dotnet + path: $(Build.SourcesDirectory)/artifacts/bin/osx-${{ parameters.archType }}.$(_BuildConfig)/corehost + entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist + - name: apphost + path: $(Build.SourcesDirectory)/artifacts/bin/osx-${{ parameters.archType }}.$(_BuildConfig)/corehost + entitlementsFile: $(Build.SourcesDirectory)/eng/pipelines/common/entitlements.plist + + - script: $(BaseJobBuildCommand) -subset host.pkg+host.tools+host.tests+packs + displayName: Build and Package + continueOnError: ${{ and(eq(variables.SkipTests, false), eq(parameters.shouldContinueOnError, true)) }} + + - ${{ if
in(parameters.osGroup, 'OSX', 'iOS', 'tvOS') }}: + - script: | + du -sh $(Build.SourcesDirectory)/* + df -h + displayName: Disk Usage after Build + + # We produce RPMs and Debs only in the glibc legs + - ${{ if and(eq(parameters.runtimeFlavor, 'coreclr'), or(eq(parameters.platform, 'Linux_x64'), eq(parameters.platform, 'Linux_arm64')), eq(parameters.osSubgroup, ''), eq(parameters.pgoType, ''))}}: + - ${{ each packageBuild in parameters.packageDistroList }}: + # This leg's RID matches the build image. Build its distro-dependent packages, as well as + # the distro-independent installers. (There's no particular reason to build the distro- + # independent installers on this leg, but we need to do it somewhere.) + # Currently, Linux_arm64 supports 'rpm' type only. + - ${{ if or(not(eq(parameters.platform, 'Linux_arm64')), eq(packageBuild.packageType, 'rpm')) }}: + - template: /eng/pipelines/installer/jobs/steps/build-linux-package.yml + parameters: + packageType: ${{ packageBuild.packageType }} + target: ${{ packageBuild.target }} + packageStepDescription: Runtime Deps, Runtime, Framework Packs installers + subsetArg: $(installersSubsetArg) + packagingArgs: ${{ packageBuild.packagingArgs }} + + - ${{ if and(eq(parameters.osGroup, 'windows'), eq(parameters.isOfficialBuild, true)) }}: + - task: NuGetCommand@2 + displayName: Push Visual Studio NuPkgs + inputs: + command: push + packagesToPush: '$(Build.SourcesDirectory)/artifacts/packages/$(_BuildConfig)/*/VS.Redist.Common.*.nupkg' + nuGetFeedType: external + publishFeedCredentials: 'DevDiv - VS package feed' + condition: and( + succeeded(), + eq(variables['_BuildConfig'], 'Release'), + ne(variables['DisableVSPublish'], 'true'), + ne(variables['PostBuildSign'], 'true')) + + - template: /eng/pipelines/installer/jobs/steps/upload-job-artifacts.yml + parameters: + name: ${{ coalesce(parameters.name, parameters.platform) }} + runtimeFlavor: ${{ parameters.runtimeFlavor }} + runtimeVariant: ${{ parameters.runtimeVariant }} + isOfficialBuild: ${{ eq(parameters.isOfficialBuild, true) }} + pgoType: ${{ parameters.pgoType }} + + - ${{ if ne(parameters.osGroup, 'windows') }}: + - script: set -x && df -h + displayName: Check remaining storage space + condition: always() + continueOnError: true diff --git a/eng/pipelines/installer/jobs/steps/build-linux-package.yml b/eng/pipelines/installer/jobs/steps/build-linux-package.yml index 95bae9624037a..86c2c54b3351a 100644 --- a/eng/pipelines/installer/jobs/steps/build-linux-package.yml +++ b/eng/pipelines/installer/jobs/steps/build-linux-package.yml @@ -1,15 +1,17 @@ parameters: packageType: null - image: null + target: '' packageStepDescription: null packagingArgs: '' subsetArg: '' steps: +## Run NuGet Authentication for each of the side containers +- ${{ if ne(variables['System.TeamProject'], 'public') }}: + - task: NuGetAuthenticate@0 + target: ${{ parameters.target }} - script: | - set -x - df -h - $(DockerRunMSBuild) ${{ parameters.image }} $(BuildScript) \ + $(Build.SourcesDirectory)/build.sh \ --ci \ ${{ parameters.subsetArg }} \ ${{ parameters.packagingArgs }} \ @@ -17,3 +19,4 @@ steps: $(LiveOverridePathArgs) \ /bl:artifacts/log/$(_BuildConfig)/msbuild.${{ parameters.packageType }}.installers.binlog displayName: Package ${{ parameters.packageStepDescription }} - ${{ parameters.packageType }} + target: ${{ parameters.target }} diff --git a/eng/pipelines/runtime-extra-platforms-wasm.yml b/eng/pipelines/runtime-extra-platforms-wasm.yml index 60d0105dedcc5..887d4be4e2595 ---
a/eng/pipelines/runtime-extra-platforms-wasm.yml +++ b/eng/pipelines/runtime-extra-platforms-wasm.yml @@ -86,7 +86,7 @@ jobs: isWasmOnlyBuild: ${{ parameters.isWasmOnlyBuild }} alwaysRun: ${{ parameters.isWasmOnlyBuild }} scenarios: - - WasmTestOnNodeJs + - WasmTestOnNodeJS # Library tests - Windows - template: /eng/pipelines/common/templates/wasm-library-tests.yml @@ -98,7 +98,7 @@ jobs: isWasmOnlyBuild: ${{ parameters.isWasmOnlyBuild }} scenarios: - WasmTestOnBrowser - - WasmTestOnNodeJs + - WasmTestOnNodeJS # Library tests with full threading - template: /eng/pipelines/common/templates/wasm-library-tests.yml @@ -118,7 +118,7 @@ jobs: scenarios: - normal - WasmTestOnBrowser - - WasmTestOnNodeJs + - WasmTestOnNodeJS # Library tests with internal threads only - template: /eng/pipelines/common/templates/wasm-library-tests.yml @@ -138,7 +138,7 @@ jobs: scenarios: - normal - WasmTestOnBrowser - - WasmTestOnNodeJs + - WasmTestOnNodeJS # EAT Library tests - only run on linux - template: /eng/pipelines/common/templates/wasm-library-aot-tests.yml diff --git a/eng/pipelines/runtime-official.yml b/eng/pipelines/runtime-official.yml index d011ddb32f28f..98cac08563dfd 100644 --- a/eng/pipelines/runtime-official.yml +++ b/eng/pipelines/runtime-official.yml @@ -22,6 +22,14 @@ trigger: # there is no public pipeline associated with it. pr: none +resources: + containers: + # Define auxiliary containers used by the installer jobs + - container: debpkg + image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-debpkg-20220504035737-cfdd435 + - container: rpmpkg + image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-8-rpmpkg-20210714125410-daa5116 + variables: - template: /eng/pipelines/common/variables.yml # TODO: (Consolidation) Switch away from old signing/validation variables from former Core-Setup. https://github.com/dotnet/runtime/issues/1027 @@ -382,8 +390,10 @@ stages: # # Installer Build # - - template: /eng/pipelines/installer/installer-matrix.yml + - template: /eng/pipelines/common/platform-matrix.yml parameters: + jobTemplate: /eng/pipelines/installer/jobs/build-job.yml + buildConfig: Release jobParameters: liveRuntimeBuildConfig: release liveLibrariesBuildConfig: Release @@ -424,8 +434,9 @@ # # PGO Build # - - template: /eng/pipelines/installer/installer-matrix.yml + - template: /eng/pipelines/common/platform-matrix.yml parameters: + jobTemplate: /eng/pipelines/installer/jobs/build-job.yml buildConfig: Release jobParameters: isOfficialBuild: ${{ variables.isOfficialBuild }} diff --git a/eng/pipelines/runtime.yml b/eng/pipelines/runtime.yml index 18490d6aafff2..a32b0155816d1 100644 --- a/eng/pipelines/runtime.yml +++ b/eng/pipelines/runtime.yml @@ -51,6 +51,14 @@ pr: variables: - template: /eng/pipelines/common/variables.yml +resources: + containers: + # Define auxiliary containers used by the installer jobs + - container: debpkg + image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-debpkg-20220504035737-cfdd435 + - container: rpmpkg + image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-8-rpmpkg-20210714125410-daa5116 + jobs: # @@ -876,8 +884,9 @@ jobs: # These are always built since they only take like 15 minutes # we expect these to be done before we finish libraries or coreclr testing.
# -- template: /eng/pipelines/installer/installer-matrix.yml +- template: /eng/pipelines/common/platform-matrix.yml parameters: + jobTemplate: /eng/pipelines/installer/jobs/build-job.yml buildConfig: ${{ variables.debugOnPrReleaseOnRolling }} platforms: - Linux_musl_arm @@ -894,8 +903,9 @@ eq(dependencies.evaluate_paths.outputs['SetPathVars_non_mono_and_wasm.containsChange'], true), eq(variables['isRollingBuild'], true)) -- template: /eng/pipelines/installer/installer-matrix.yml +- template: /eng/pipelines/common/platform-matrix.yml parameters: + jobTemplate: /eng/pipelines/installer/jobs/build-job.yml buildConfig: Release platforms: - OSX_arm64 diff --git a/eng/pipelines/runtimelab.yml b/eng/pipelines/runtimelab.yml index e2fd1e51b3252..4f1780ab1d30c 100644 --- a/eng/pipelines/runtimelab.yml +++ b/eng/pipelines/runtimelab.yml @@ -39,6 +39,14 @@ pr: - PATENTS.TXT - THIRD-PARTY-NOTICES.TXT +resources: + containers: + # Define auxiliary containers used by the installer jobs + - container: debpkg + image: mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-debpkg-20220504035737-cfdd435 + - container: rpmpkg + image: mcr.microsoft.com/dotnet-buildtools/prereqs:centos-8-rpmpkg-20210714125410-daa5116 + variables: - template: /eng/pipelines/common/variables.yml - ${{ if and(ne(variables['System.TeamProject'], 'public'), ne(variables['Build.Reason'], 'PullRequest')) }}: @@ -114,8 +122,9 @@ stages: # Installer official builds need to build installers and need the libraries all configurations build - ${{ if eq(variables.isOfficialBuild, true) }}: - - template: /eng/pipelines/installer/installer-matrix.yml + - template: /eng/pipelines/common/platform-matrix.yml parameters: + jobTemplate: /eng/pipelines/installer/jobs/build-job.yml jobParameters: liveRuntimeBuildConfig: Release liveLibrariesBuildConfig: Release diff --git a/eng/testing/WasmRunnerAOTTemplate.sh b/eng/testing/WasmRunnerAOTTemplate.sh index 7348a25674c00..e44faf7f8c0aa 100644 --- a/eng/testing/WasmRunnerAOTTemplate.sh +++ b/eng/testing/WasmRunnerAOTTemplate.sh @@ -33,7 +33,7 @@ fi if [[ "$XHARNESS_COMMAND" == "test" ]]; then if [[ -z "$JS_ENGINE" ]]; then - if [[ "$SCENARIO" == "WasmTestOnNodeJs" || "$SCENARIO" == "wasmtestonnodejs" ]]; then + if [[ "$SCENARIO" == "WasmTestOnNodeJS" || "$SCENARIO" == "wasmtestonnodejs" ]]; then JS_ENGINE="--engine=NodeJS" else JS_ENGINE="--engine=V8" diff --git a/eng/testing/WasmRunnerTemplate.cmd b/eng/testing/WasmRunnerTemplate.cmd index 026a06066c318..1fc0e6ef4f769 100644 --- a/eng/testing/WasmRunnerTemplate.cmd +++ b/eng/testing/WasmRunnerTemplate.cmd @@ -34,7 +34,7 @@ if [%XHARNESS_COMMAND%] == [] ( if /I [%XHARNESS_COMMAND%] == [test] ( if [%JS_ENGINE%] == [] ( - if /I [%SCENARIO%] == [WasmTestOnNodeJs] ( + if /I [%SCENARIO%] == [WasmTestOnNodeJS] ( set "JS_ENGINE=--engine^=NodeJS" ) else ( set "JS_ENGINE=--engine^=V8" diff --git a/eng/testing/WasmRunnerTemplate.sh b/eng/testing/WasmRunnerTemplate.sh index 6c054ba9d23e1..2d5152215635a 100644 --- a/eng/testing/WasmRunnerTemplate.sh +++ b/eng/testing/WasmRunnerTemplate.sh @@ -33,7 +33,7 @@ fi if [[ "$XHARNESS_COMMAND" == "test" ]]; then if [[ -z "$JS_ENGINE" ]]; then - if [[ "$SCENARIO" == "WasmTestOnNodeJs" || "$SCENARIO" == "wasmtestonnodejs" ]]; then + if [[ "$SCENARIO" == "WasmTestOnNodeJS" || "$SCENARIO" == "wasmtestonnodejs" ]]; then JS_ENGINE="--engine=NodeJS" else JS_ENGINE="--engine=V8" diff --git a/eng/testing/scenarios/BuildWasmAppsJobsList.txt index bbbca109e575c..143b2fa2d3a21
100644 --- a/eng/testing/scenarios/BuildWasmAppsJobsList.txt +++ b/eng/testing/scenarios/BuildWasmAppsJobsList.txt @@ -17,6 +17,7 @@ Wasm.Build.Tests.RebuildTests Wasm.Build.Tests.SatelliteAssembliesTests Wasm.Build.Tests.WasmBuildAppTest Wasm.Build.Tests.WasmNativeDefaultsTests -Wasm.Build.Tests.WorkloadTests Wasm.Build.Tests.WasmRunOutOfAppBundleTests +Wasm.Build.Tests.WasmSIMDTests Wasm.Build.Tests.WasmTemplateTests +Wasm.Build.Tests.WorkloadTests diff --git a/eng/testing/tests.wasm.targets b/eng/testing/tests.wasm.targets index 02af8e04189c7..0d6a88c5be658 100644 --- a/eng/testing/tests.wasm.targets +++ b/eng/testing/tests.wasm.targets @@ -303,8 +303,8 @@ - - + + diff --git a/src/coreclr/gc/gc.cpp b/src/coreclr/gc/gc.cpp index 2781c1845a4eb..959b3b013d42d 100644 --- a/src/coreclr/gc/gc.cpp +++ b/src/coreclr/gc/gc.cpp @@ -2715,10 +2715,11 @@ uint64_t gc_heap::total_loh_a_last_bgc = 0; #endif //BGC_SERVO_TUNING size_t gc_heap::eph_gen_starts_size = 0; -heap_segment* gc_heap::segment_standby_list; #if defined(USE_REGIONS) region_free_list gc_heap::global_regions_to_decommit[count_free_region_kinds]; region_free_list gc_heap::global_free_huge_regions; +#else +heap_segment* gc_heap::segment_standby_list; #endif //USE_REGIONS bool gc_heap::use_large_pages_p = 0; #ifdef HEAP_BALANCE_INSTRUMENTATION @@ -2756,8 +2757,6 @@ BOOL gc_heap::fgn_last_gc_was_concurrent = FALSE; VOLATILE(bool) gc_heap::full_gc_approach_event_set; -bool gc_heap::special_sweep_p = false; - size_t gc_heap::full_gc_counts[gc_type_max]; bool gc_heap::maxgen_size_inc_p = false; @@ -2856,6 +2855,8 @@ size_t gc_heap::interesting_mechanism_bits_per_heap[max_gc_mechanism_bits_co mark_queue_t gc_heap::mark_queue; +bool gc_heap::special_sweep_p = false; + #endif // MULTIPLE_HEAPS /* end of per heap static initialization */ @@ -5751,11 +5752,7 @@ gc_heap::get_segment (size_t size, gc_oh_num oh) if (result) { - init_heap_segment (result, __this -#ifdef USE_REGIONS - , 0, size, (uoh_p ? max_generation : 0) -#endif //USE_REGIONS - ); + init_heap_segment (result, __this); #ifdef BACKGROUND_GC if (is_bgc_in_progress()) { @@ -13652,7 +13649,9 @@ gc_heap::init_semi_shared() goto cleanup; #endif //FEATURE_BASICFREEZE +#ifndef USE_REGIONS segment_standby_list = 0; +#endif //USE_REGIONS if (!full_gc_approach_event.CreateManualEventNoThrow(FALSE)) { @@ -14124,6 +14123,8 @@ gc_heap::init_gc_heap (int h_number) #ifdef RECORD_LOH_STATE loh_state_index = 0; #endif //RECORD_LOH_STATE + + special_sweep_p = false; #endif //MULTIPLE_HEAPS #ifdef MULTIPLE_HEAPS @@ -30414,11 +30415,6 @@ void gc_heap::plan_phase (int condemned_gen_number) if (condemned_gen_number >= (max_generation -1)) { #ifdef MULTIPLE_HEAPS - // this needs be serialized just because we have one - // segment_standby_list/seg_table for all heaps. We should make it at least - // so that when hoarding is not on we don't need this join because - // decommitting memory can take a long time. 
- //must serialize on deleting segments gc_t_join.join(this, gc_join_rearrange_segs_compaction); if (gc_t_join.joined()) #endif //MULTIPLE_HEAPS @@ -44599,6 +44595,7 @@ HRESULT GCHeap::StaticShutdown() #endif // FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP } +#ifndef USE_REGIONS //destroy all segments on the standby list while(gc_heap::segment_standby_list != 0) { @@ -44610,6 +44607,7 @@ HRESULT GCHeap::StaticShutdown() #endif //MULTIPLE_HEAPS gc_heap::segment_standby_list = next_seg; } +#endif // USE_REGIONS #ifdef MULTIPLE_HEAPS diff --git a/src/coreclr/gc/gcpriv.h b/src/coreclr/gc/gcpriv.h index c70ea95c85bd9..8cea1afa29230 100644 --- a/src/coreclr/gc/gcpriv.h +++ b/src/coreclr/gc/gcpriv.h @@ -3852,7 +3852,7 @@ class gc_heap PER_HEAP_ISOLATED VOLATILE(bool) full_gc_approach_event_set; - PER_HEAP_ISOLATED + PER_HEAP bool special_sweep_p; #ifdef BACKGROUND_GC @@ -4962,8 +4962,10 @@ class gc_heap PER_HEAP heap_segment* freeable_uoh_segment; +#ifndef USE_REGIONS PER_HEAP_ISOLATED heap_segment* segment_standby_list; +#endif #ifdef USE_REGIONS PER_HEAP_ISOLATED diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h index a3b4dba68ccf1..4ef80e2ee019b 100644 --- a/src/coreclr/jit/compiler.h +++ b/src/coreclr/jit/compiler.h @@ -5890,15 +5890,6 @@ class Compiler Statement* fgInlinePrependStatements(InlineInfo* inlineInfo); void fgInlineAppendStatements(InlineInfo* inlineInfo, BasicBlock* block, Statement* stmt); -#if FEATURE_MULTIREG_RET - GenTree* fgGetStructAsStructPtr(GenTree* tree); - GenTree* fgAssignStructInlineeToVar(GenTree* child, CORINFO_CLASS_HANDLE retClsHnd); - void fgAttachStructInlineeToAsg(GenTree* tree, GenTree* child, CORINFO_CLASS_HANDLE retClsHnd); -#endif // FEATURE_MULTIREG_RET - - static fgWalkPreFn fgUpdateInlineReturnExpressionPlaceHolder; - static fgWalkPostFn fgLateDevirtualization; - #ifdef DEBUG static fgWalkPreFn fgDebugCheckInlineCandidates; diff --git a/src/coreclr/jit/fginline.cpp b/src/coreclr/jit/fginline.cpp index 1deb759ac46c8..206197a1102f2 100644 --- a/src/coreclr/jit/fginline.cpp +++ b/src/coreclr/jit/fginline.cpp @@ -251,7 +251,7 @@ class SubstitutePlaceholdersAndDevirtualizeWalker : public GenTreeVisitorgtOper == GT_ASG) + if (parent->OperIs(GT_ASG)) { - // Either lhs is a call V05 = call(); or lhs is addr, and asg becomes a copyBlk. - AttachStructInlineeToAsg(parent, tree, retClsHnd); + // The inlinee can only be the RHS. + assert(parent->gtGetOp2() == tree); + AttachStructInlineeToAsg(parent->AsOp(), retClsHnd); } else { @@ -310,141 +307,99 @@ class SubstitutePlaceholdersAndDevirtualizeWalker : public GenTreeVisitorgtOper != GT_RET_EXPR && child->gtOper != GT_MKREFANY); + assert(asg->OperIs(GT_ASG)); - unsigned tmpNum = m_compiler->lvaGrabTemp(false DEBUGARG("RetBuf for struct inline return candidates.")); - m_compiler->lvaSetStruct(tmpNum, retClsHnd, false); - var_types structType = m_compiler->lvaGetDesc(tmpNum)->lvType; + GenTree* dst = asg->gtGetOp1(); + GenTree* inlinee = asg->gtGetOp2(); - GenTree* dst = m_compiler->gtNewLclvNode(tmpNum, structType); - - // If we have a call, we'd like it to be: V00 = call(), but first check if - // we have a ", , , call()" -- this is very defensive as we may never get - // an inlinee that is made of commas. If the inlinee is not a call, then - // we use a copy block to do the assignment. - GenTree* src = child; - GenTree* lastComma = nullptr; - while (src->gtOper == GT_COMMA) + // We need to force all assignments from multi-reg nodes into the "lcl = node()" form. 
+ if (inlinee->IsMultiRegNode()) { - lastComma = src; - src = src->AsOp()->gtOp2; - } - - GenTree* newInlinee = nullptr; - if (src->gtOper == GT_CALL) - { - // If inlinee was just a call, new inlinee is v05 = call() - newInlinee = m_compiler->gtNewAssignNode(dst, src); - - // When returning a multi-register value in a local var, make sure the variable is - // marked as lvIsMultiRegRet, so it does not get promoted. - if (src->AsCall()->HasMultiRegRetVal()) + // Special case: we already have a local, the only thing to do is mark it appropriately. Except + // if it may turn into an indirection. + if (dst->OperIs(GT_LCL_VAR) && !m_compiler->lvaIsImplicitByRefLocal(dst->AsLclVar()->GetLclNum())) { - m_compiler->lvaGetDesc(tmpNum)->lvIsMultiRegRet = true; + m_compiler->lvaGetDesc(dst->AsLclVar())->lvIsMultiRegRet = true; } - - // If inlinee was comma, but a deeper call, new inlinee is (, , , v05 = call()) - if (child->gtOper == GT_COMMA) + else { - lastComma->AsOp()->gtOp2 = newInlinee; - newInlinee = child; + // Here, we assign our node into a fresh temp and then use that temp as the new value. + asg->gtOp2 = AssignStructInlineeToVar(inlinee, retClsHnd); } } - else + else if (dst->IsMultiRegLclVar()) { - // Inlinee is not a call, so just create a copy block to the tmp. - src = child; - GenTree* dstAddr = GetStructAsStructPtr(dst); - GenTree* srcAddr = GetStructAsStructPtr(src); - newInlinee = m_compiler->gtNewCpObjNode(dstAddr, srcAddr, retClsHnd, false); + // This is no longer a multi-reg assignment -- clear the flag. + dst->AsLclVar()->ClearMultiReg(); } - - GenTree* production = m_compiler->gtNewLclvNode(tmpNum, structType); - return m_compiler->gtNewOperNode(GT_COMMA, structType, newInlinee, production); } - /*************************************************************************************************** - * tree - The tree pointer that has one of its child nodes as retExpr. - * child - The inlinee child. - * retClsHnd - The struct class handle of the type of the inlinee. - * - * V04 = call() assignments are okay as we codegen it. Everything else needs to be a copy block or - * would need a temp. For example, a cast(ldobj) will then be, cast(v05 = ldobj, v05); But it is - * a very rare (or impossible) scenario that we'd have a retExpr transform into a ldobj other than - * a lclVar/call. So it is not worthwhile to do pattern matching optimizations like addr(ldobj(op1)) - * can just be op1. - */ - void AttachStructInlineeToAsg(GenTree* tree, GenTree* child, CORINFO_CLASS_HANDLE retClsHnd) + //------------------------------------------------------------------------ + // AssignStructInlineeToVar: Assign the struct inlinee to a temp local. + // + // Arguments: + // inlinee - The inlinee of the RET_EXPR node + // retClsHnd - The struct class handle of the type of the inlinee. + // + // Return Value: + // Value representing the freshly assigned temp. + // + GenTree* AssignStructInlineeToVar(GenTree* inlinee, CORINFO_CLASS_HANDLE retClsHnd) { - // We are okay to have: - // 1. V02 = call(); - // 2. copyBlk(dstAddr, srcAddr); - assert(tree->gtOper == GT_ASG); + assert(!inlinee->OperIs(GT_MKREFANY, GT_RET_EXPR)); + + unsigned lclNum = m_compiler->lvaGrabTemp(false DEBUGARG("RetBuf for struct inline return candidates.")); + LclVarDsc* varDsc = m_compiler->lvaGetDesc(lclNum); + m_compiler->lvaSetStruct(lclNum, retClsHnd, false); - // We have an assignment, we codegen only V05 = call(). 
- if (child->gtOper == GT_CALL && tree->AsOp()->gtOp1->gtOper == GT_LCL_VAR) + // Sink the assignment below any COMMAs: this is required for multi-reg nodes. + GenTree* src = inlinee; + GenTree* lastComma = nullptr; + while (src->OperIs(GT_COMMA)) { - // If it is a multireg return on x64/ux, the local variable should be marked as lvIsMultiRegRet - if (child->AsCall()->HasMultiRegRetVal()) - { - unsigned lclNum = tree->AsOp()->gtOp1->AsLclVarCommon()->GetLclNum(); - m_compiler->lvaGetDesc(lclNum)->lvIsMultiRegRet = true; - } - return; + lastComma = src; + src = src->AsOp()->gtOp2; } - GenTree* dstAddr = GetStructAsStructPtr(tree->AsOp()->gtOp1); - GenTree* srcAddr = GetStructAsStructPtr( - (child->gtOper == GT_CALL) - ? AssignStructInlineeToVar(child, retClsHnd) // Assign to a variable if it is a call. - : child); // Just get the address, if not a call. - - tree->ReplaceWith(m_compiler->gtNewCpObjNode(dstAddr, srcAddr, retClsHnd, false), m_compiler); - } + // When assigning a multi-register value to a local var, make sure the variable is marked as lvIsMultiRegRet. + if (src->IsMultiRegNode()) + { + varDsc->lvIsMultiRegRet = true; + } - /********************************************************************************* - * - * tree - The node which needs to be converted to a struct pointer. - * - * Return the pointer by either __replacing__ the tree node with a suitable pointer - * type or __without replacing__ and just returning a subtree or by __modifying__ - * a subtree. - */ - GenTree* GetStructAsStructPtr(GenTree* tree) - { - noway_assert(tree->OperIs(GT_LCL_VAR, GT_FIELD, GT_IND, GT_BLK, GT_OBJ, GT_COMMA) || - tree->OperIsSimdOrHWintrinsic() || tree->IsCnsVec()); - // GT_CALL, cannot get address of call. - // GT_MKREFANY, inlining should've been aborted due to mkrefany opcode. - // GT_RET_EXPR, cannot happen after fgUpdateInlineReturnExpressionPlaceHolder + GenTree* dst = m_compiler->gtNewLclvNode(lclNum, varDsc->TypeGet()); + GenTree* asg = m_compiler->gtNewBlkOpNode(dst, src, /* isVolatile */ false, /* isCopyBlock */ true); - switch (tree->OperGet()) + // If inlinee was comma, new inlinee is (, , , lcl = inlinee). + if (inlinee->OperIs(GT_COMMA)) { - case GT_BLK: - case GT_OBJ: - case GT_IND: - return tree->AsOp()->gtOp1; - - case GT_COMMA: - tree->AsOp()->gtOp2 = GetStructAsStructPtr(tree->AsOp()->gtOp2); - tree->gtType = TYP_BYREF; - return tree; - - default: - return m_compiler->gtNewOperNode(GT_ADDR, TYP_BYREF, tree); + lastComma->AsOp()->gtOp2 = asg; + asg = inlinee; } - } + // Block morphing does not support (promoted) locals under commas, as such, instead of "COMMA(asg, lcl)" we + // do "OBJ(COMMA(asg, ADDR(LCL)))". TODO-1stClassStructs: improve block morphing and delete this workaround. + // + GenTree* lcl = m_compiler->gtNewLclvNode(lclNum, varDsc->TypeGet()); + GenTree* addr = m_compiler->gtNewOperNode(GT_ADDR, TYP_I_IMPL, lcl); + addr = m_compiler->gtNewOperNode(GT_COMMA, addr->TypeGet(), asg, addr); + GenTree* obj = m_compiler->gtNewObjNode(varDsc->GetLayout(), addr); + + return obj; + } #endif // FEATURE_MULTIREG_RET //------------------------------------------------------------------------ @@ -1453,7 +1408,7 @@ void Compiler::fgInsertInlineeBlocks(InlineInfo* pInlineInfo) // Replace the call with the return expression. Note that iciCall won't be part of the IR // but may still be referenced from a GT_RET_EXPR node. We will replace GT_RET_EXPR node - // in fgUpdateInlineReturnExpressionPlaceHolder. 
At that time we will also update the flags + // in UpdateInlineReturnExpressionPlaceHolder. At that time we will also update the flags // on the basic block of GT_RET_EXPR node. if (iciCall->gtInlineCandidateInfo->retExpr->OperGet() == GT_RET_EXPR) { diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp index e75b1a94889f4..6e5b0db78d7d5 100644 --- a/src/coreclr/jit/gentree.cpp +++ b/src/coreclr/jit/gentree.cpp @@ -7151,6 +7151,15 @@ GenTree* Compiler::gtNewZeroConNode(var_types type) zero = gtNewDconNode(0.0); break; +#ifdef FEATURE_SIMD + case TYP_SIMD8: + case TYP_SIMD12: + case TYP_SIMD16: + case TYP_SIMD32: + zero = gtNewZeroConNode(type, CORINFO_TYPE_FLOAT); + break; +#endif // FEATURE_SIMD + default: noway_assert(!"Bad type in gtNewZeroConNode"); zero = nullptr; diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp index 13175d58a056a..78336fed4a3e9 100644 --- a/src/coreclr/jit/importer.cpp +++ b/src/coreclr/jit/importer.cpp @@ -17910,8 +17910,7 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode) assert(info.compRetNativeType != TYP_VOID && (fgMoreThanOneReturnBlock() || impInlineInfo->HasGcRefLocals())); - // If this method returns a ref type, track the actual types seen - // in the returns. + // If this method returns a ref type, track the actual types seen in the returns. if (info.compRetType == TYP_REF) { bool isExact = false; @@ -17968,13 +17967,12 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode) else { // compRetNativeType is TYP_STRUCT. - // This implies that struct return via RetBuf arg or multi-reg struct return + // This implies that struct return via RetBuf arg or multi-reg struct return. GenTreeCall* iciCall = impInlineInfo->iciCall->AsCall(); // Assign the inlinee return into a spill temp. - // spill temp only exists if there are multiple return points - if (lvaInlineeReturnSpillTemp != BAD_VAR_NUM) + if (fgNeedReturnSpillTemp()) { // in this case we have to insert multiple struct copies to the temp // and the retexpr is just the temp. @@ -17985,49 +17983,6 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode) (unsigned)CHECK_SPILL_ALL); } -#if defined(TARGET_ARM) || defined(UNIX_AMD64_ABI) -#if defined(TARGET_ARM) - // TODO-ARM64-NYI: HFA - // TODO-AMD64-Unix and TODO-ARM once the ARM64 functionality is implemented the - // next ifdefs could be refactored in a single method with the ifdef inside. - if (IsHfa(retClsHnd)) - { -// Same as !IsHfa but just don't bother with impAssignStructPtr. -#else // defined(UNIX_AMD64_ABI) - ReturnTypeDesc retTypeDesc; - retTypeDesc.InitializeStructReturnType(this, retClsHnd, info.compCallConv); - unsigned retRegCount = retTypeDesc.GetReturnRegCount(); - - if (retRegCount != 0) - { - // If single eightbyte, the return type would have been normalized and there won't be a temp var. - // This code will be called only if the struct return has not been normalized (i.e. 2 eightbytes - - // max allowed.) - assert(retRegCount == MAX_RET_REG_COUNT); - // Same as !structDesc.passedInRegisters but just don't bother with impAssignStructPtr. - CLANG_FORMAT_COMMENT_ANCHOR; -#endif // defined(UNIX_AMD64_ABI) - - if (fgNeedReturnSpillTemp()) - { - if (!impInlineInfo->retExpr) - { -#if defined(TARGET_ARM) - impInlineInfo->retExpr = gtNewLclvNode(lvaInlineeReturnSpillTemp, info.compRetType); -#else // defined(UNIX_AMD64_ABI) - // The inlinee compiler has figured out the type of the temp already. Use it here. 
- impInlineInfo->retExpr = - gtNewLclvNode(lvaInlineeReturnSpillTemp, lvaTable[lvaInlineeReturnSpillTemp].lvType); -#endif // defined(UNIX_AMD64_ABI) - } - } - else - { - impInlineInfo->retExpr = op2; - } - } - else -#elif defined(TARGET_ARM64) || defined(TARGET_LOONGARCH64) ReturnTypeDesc retTypeDesc; retTypeDesc.InitializeStructReturnType(this, retClsHnd, info.compCallConv); unsigned retRegCount = retTypeDesc.GetReturnRegCount(); @@ -18035,34 +17990,11 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode) if (retRegCount != 0) { assert(!iciCall->ShouldHaveRetBufArg()); - assert(retRegCount >= 2); - if (fgNeedReturnSpillTemp()) - { - if (!impInlineInfo->retExpr) - { - // The inlinee compiler has figured out the type of the temp already. Use it here. - impInlineInfo->retExpr = - gtNewLclvNode(lvaInlineeReturnSpillTemp, lvaTable[lvaInlineeReturnSpillTemp].lvType); - } - } - else - { - impInlineInfo->retExpr = op2; - } - } - else -#elif defined(TARGET_X86) - ReturnTypeDesc retTypeDesc; - retTypeDesc.InitializeStructReturnType(this, retClsHnd, info.compCallConv); - unsigned retRegCount = retTypeDesc.GetReturnRegCount(); + assert(retRegCount >= 2); // Otherwise "compRetNativeType" wouldn't have been TYP_STRUCT. - if (retRegCount != 0) - { - assert(!iciCall->ShouldHaveRetBufArg()); - assert(retRegCount == MAX_RET_REG_COUNT); if (fgNeedReturnSpillTemp()) { - if (!impInlineInfo->retExpr) + if (impInlineInfo->retExpr == nullptr) { // The inlinee compiler has figured out the type of the temp already. Use it here. impInlineInfo->retExpr = @@ -18074,16 +18006,15 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode) impInlineInfo->retExpr = op2; } } - else -#endif // defined(TARGET_ARM64) + else // The struct was to be returned via a return buffer. { assert(iciCall->gtArgs.HasRetBuffer()); GenTree* dest = gtCloneExpr(iciCall->gtArgs.GetRetBufferArg()->GetEarlyNode()); - // spill temp only exists if there are multiple return points + if (fgNeedReturnSpillTemp()) { - // if this is the first return we have seen set the retExpr - if (!impInlineInfo->retExpr) + // If this is the first return we have seen set the retExpr. + if (impInlineInfo->retExpr == nullptr) { impInlineInfo->retExpr = impAssignStructPtr(dest, gtNewLclvNode(lvaInlineeReturnSpillTemp, info.compRetType), @@ -18109,12 +18040,7 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode) return true; } - if (info.compRetType == TYP_VOID) - { - // return void - op1 = new (this, GT_RETURN) GenTreeOp(GT_RETURN, TYP_VOID); - } - else if (info.compRetBuffArg != BAD_VAR_NUM) + if (info.compRetBuffArg != BAD_VAR_NUM) { // Assign value to return buff (first param) GenTree* retBuffAddr = @@ -18123,43 +18049,16 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode) op2 = impAssignStructPtr(retBuffAddr, op2, retClsHnd, (unsigned)CHECK_SPILL_ALL); impAppendTree(op2, (unsigned)CHECK_SPILL_NONE, impCurStmtDI); - // There are cases where the address of the implicit RetBuf should be returned explicitly (in RAX). - CLANG_FORMAT_COMMENT_ANCHOR; - -#if defined(TARGET_AMD64) - - // x64 (System V and Win64) calling convention requires to - // return the implicit return buffer explicitly (in RAX). - // Change the return type to be BYREF. - op1 = gtNewOperNode(GT_RETURN, TYP_BYREF, gtNewLclvNode(info.compRetBuffArg, TYP_BYREF)); -#else // !defined(TARGET_AMD64) - // In case of non-AMD64 targets the profiler hook requires to return the implicit RetBuf explicitly (in RAX). 
- // In such case the return value of the function is changed to BYREF. - // If profiler hook is not needed the return type of the function is TYP_VOID. - if (compIsProfilerHookNeeded()) - { - op1 = gtNewOperNode(GT_RETURN, TYP_BYREF, gtNewLclvNode(info.compRetBuffArg, TYP_BYREF)); - } -#if defined(TARGET_ARM64) - // On ARM64, the native instance calling convention variant - // requires the implicit ByRef to be explicitly returned. - else if (TargetOS::IsWindows && callConvIsInstanceMethodCallConv(info.compCallConv)) - { - op1 = gtNewOperNode(GT_RETURN, TYP_BYREF, gtNewLclvNode(info.compRetBuffArg, TYP_BYREF)); - } -#endif -#if defined(TARGET_X86) - else if (info.compCallConv != CorInfoCallConvExtension::Managed) + // There are cases where the address of the implicit RetBuf should be returned explicitly. + // + if (compMethodReturnsRetBufAddr()) { op1 = gtNewOperNode(GT_RETURN, TYP_BYREF, gtNewLclvNode(info.compRetBuffArg, TYP_BYREF)); } -#endif else { - // return void op1 = new (this, GT_RETURN) GenTreeOp(GT_RETURN, TYP_VOID); } -#endif // !defined(TARGET_AMD64) } else if (varTypeIsStruct(info.compRetType)) { @@ -18169,15 +18068,16 @@ bool Compiler::impReturnInstruction(int prefixFlags, OPCODE& opcode) noway_assert(info.compRetNativeType != TYP_STRUCT); #endif op2 = impFixupStructReturnType(op2, retClsHnd, info.compCallConv); - // return op2 - var_types returnType = info.compRetType; - op1 = gtNewOperNode(GT_RETURN, genActualType(returnType), op2); + op1 = gtNewOperNode(GT_RETURN, genActualType(info.compRetType), op2); } - else + else if (info.compRetType != TYP_VOID) { - // return op2 op1 = gtNewOperNode(GT_RETURN, genActualType(info.compRetType), op2); } + else + { + op1 = new (this, GT_RETURN) GenTreeOp(GT_RETURN, TYP_VOID); + } // We must have imported a tailcall and jumped to RET if (isTailCall) diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp index d99a39cbc566c..6da47320d59a4 100644 --- a/src/coreclr/jit/morph.cpp +++ b/src/coreclr/jit/morph.cpp @@ -6918,32 +6918,8 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call) // if the root node was an `ASG`, `RET` or `CAST`. // Return a zero con node to exit morphing of the old trees without asserts // and forbid POST_ORDER morphing doing something wrong with our call. - var_types callType; - if (varTypeIsStruct(origCallType)) - { - CORINFO_CLASS_HANDLE retClsHnd = call->gtRetClsHnd; - Compiler::structPassingKind howToReturnStruct; - callType = getReturnTypeForStruct(retClsHnd, call->GetUnmanagedCallConv(), &howToReturnStruct); - assert((howToReturnStruct != SPK_Unknown) && (howToReturnStruct != SPK_ByReference)); - if (howToReturnStruct == SPK_ByValue) - { - callType = TYP_I_IMPL; - } - else if (howToReturnStruct == SPK_ByValueAsHfa || varTypeIsSIMD(callType)) - { - callType = TYP_FLOAT; - } - assert((callType != TYP_UNKNOWN) && !varTypeIsStruct(callType)); - } - else - { - callType = origCallType; - } - assert((callType != TYP_UNKNOWN) && !varTypeIsStruct(callType)); - callType = genActualType(callType); - - GenTree* zero = gtNewZeroConNode(callType); - result = fgMorphTree(zero); + var_types zeroType = (origCallType == TYP_STRUCT) ? TYP_INT : genActualType(origCallType); + result = fgMorphTree(gtNewZeroConNode(zeroType)); } else { @@ -8666,12 +8642,6 @@ GenTree* Compiler::fgMorphConst(GenTree* tree) // Return value: // GenTreeLclVar if the obj can be replaced by it, null otherwise. 
// -// Notes: -// TODO-CQ: currently this transformation is done only under copy block, -// but it is beneficial to do for each OBJ node. However, `PUT_ARG_STACK` -// for some platforms does not expect struct `LCL_VAR` as a source, so -// it needs more work. -// GenTreeLclVar* Compiler::fgMorphTryFoldObjAsLclVar(GenTreeObj* obj, bool destroyNodes) { if (opts.OptimizationEnabled()) @@ -11301,13 +11271,14 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac, bool* optA temp = nullptr; } } - else if (op1->OperGet() == GT_ADD) + else { #ifdef TARGET_ARM + GenTree* effOp1 = op1->gtEffectiveVal(true); // Check for a misalignment floating point indirection. - if (varTypeIsFloating(typ)) + if (effOp1->OperIs(GT_ADD) && varTypeIsFloating(typ)) { - GenTree* addOp2 = op1->AsOp()->gtGetOp2(); + GenTree* addOp2 = effOp1->gtGetOp2(); if (addOp2->IsCnsIntOrI()) { ssize_t offset = addOp2->AsIntCon()->gtIconVal; diff --git a/src/coreclr/scripts/superpmi-collect.proj b/src/coreclr/scripts/superpmi-collect.proj index a2ca69af7f995..85979cdc265af 100644 --- a/src/coreclr/scripts/superpmi-collect.proj +++ b/src/coreclr/scripts/superpmi-collect.proj [The XML markup of this MSBuild/Helix project file was lost during extraction; only comment and property text survives. The recoverable content shows the hunks renaming the PMI-specific payload properties to collection-type-neutral ones: AssembliesPayload (the path that will be sent to the Helix machine to run collection on) and AssembliesDirectoryOnHelix (the path on the Helix machine itself where superpmi.py will discover the sent assemblies); splitting the Windows (%HELIX_WORKITEM_PAYLOAD%\binaries, %HELIX_WORKITEM_PAYLOAD%\performance) and non-Windows ($HELIX_WORKITEM_PAYLOAD/binaries, $HELIX_WORKITEM_PAYLOAD/performance) directory definitions; pointing AssembliesPayload at $(WorkItemDirectory)$(FileSeparatorChar)collectAssembliesDirectory$(FileSeparatorChar)$(CollectionName); factoring the PMI options (-pmi_location $(SuperPMIDirectory)\pmi.dll -pmi_path @(PmiPathDirectories->'$(SuperPMIDirectory)\%(Identity)', ' ') and the forward-slash variant) into a PmiArguments property; and rebuilding the work-item command as $(Python) $(SuperPMIDirectory)$(FileSeparatorChar)superpmi.py collect --clean -log_level DEBUG --$(CollectionType) $(PmiArguments) -assemblies $(AssembliesDirectoryOnHelix) -arch $(Architecture) -build_type $(BuildConfig) -core_root $(SuperPMIDirectory) with a 2:00 timeout; the benchmarks variant runs $(Python) $(SuperPMIDirectory)$(FileSeparatorChar)superpmi_benchmarks.py -performance_directory $(PerformanceDirectory) -superpmi_directory $(SuperPMIDirectory) -core_root $(SuperPMIDirectory) -arch $(Architecture) with a 3:00 timeout. Each HelixWorkItem's payload becomes $(AssembliesPayload)$(FileSeparatorChar)%(HelixWorkItem.CollectAssemblies) instead of $(PmiAssembliesPayload)$(FileSeparatorChar)$(CollectionName)$(FileSeparatorChar)%(HelixWorkItem.PmiAssemblies), and its command is $(WorkItemCommand) -output_mch_path $(OutputMchPath)$(FileSeparatorChar)%(OutputFileName).mch -log_file $(OutputMchPath)$(FileSeparatorChar)%(OutputFileName).log with downloaded results %(OutputFileName).mch;%(OutputFileName).mch.mct;%(OutputFileName).log.]
diff --git a/src/coreclr/scripts/superpmi.py b/src/coreclr/scripts/superpmi.py index b33983133b0aa..627d71d7255c5 100644 --- a/src/coreclr/scripts/superpmi.py +++ b/src/coreclr/scripts/superpmi.py @@ -295,12 +295,14 @@ def add_core_root_arguments(parser, build_type_default, build_type_help): collect_parser.add_argument("-mch_files", metavar="MCH_FILE", nargs='+', help="Pass a sequence of MCH files which will be merged. Required by --merge_mch_files.") collect_parser.add_argument("--use_zapdisable", action="store_true", help="Sets COMPlus_ZapDisable=1 and COMPlus_ReadyToRun=0 when doing collection to cause NGEN/ReadyToRun images to not be used, and thus causes JIT compilation and SuperPMI collection of these methods.") collect_parser.add_argument("--tiered_compilation", action="store_true", help="Sets COMPlus_TieredCompilation=1 when doing collections.") +collect_parser.add_argument("--ci", action="store_true", help="Special collection mode for handling zero-sized files in Azure DevOps + Helix pipelines collections.") # Allow for continuing a collection in progress collect_parser.add_argument("-temp_dir", help="Specify an existing temporary directory to use. Useful if continuing an ongoing collection process, or forcing a temporary directory to a particular hard drive. Optional; default is to create a temporary directory in the usual TEMP location.") collect_parser.add_argument("--clean", action="store_true", help="Clean the collection by removing contexts that fail to replay without error.") collect_parser.add_argument("--skip_collection_step", action="store_true", help="Do not run the collection step.") collect_parser.add_argument("--skip_merge_step", action="store_true", help="Do not run the merge step.") +collect_parser.add_argument("--skip_toc_step", action="store_true", help="Do not run the TOC creation step.") collect_parser.add_argument("--skip_collect_mc_files", action="store_true", help="Do not collect .MC files") # Create a set of arguments common to all SuperPMI replay commands, namely basic replay and ASM diffs.
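For context on the `--ci` flag added to the collect parser above: together with the `__process_for_ci__` and `merge_mch` hunks below, it implements a sentinel-file workaround for Helix silently dropping zero-sized artifacts. A minimal standalone sketch of the round trip, with hypothetical helper names (the real logic lives in the hunks that follow):

import logging
import os
import re

SENTINEL = b"ZEROLENGTH"

def convert_zero_length_to_sentinel(mch_path):
    # Collection side: Helix will not upload a zero-byte result file,
    # so give an empty MCH a tiny recognizable body instead.
    if os.path.getsize(mch_path) == 0:
        logging.info("Converting zero-length MCH file %s to sentinel", mch_path)
        with open(mch_path, "w") as fh:
            fh.write("ZEROLENGTH")

def delete_sentinels(root_dir):
    # Merge side: remove the sentinels again before "mcs -merge" sees them.
    for dirpath, _, filenames in os.walk(root_dir):
        for name in filenames:
            path = os.path.join(dirpath, name)
            if os.path.getsize(path) < 15:
                with open(path, "rb") as fh:
                    if re.search(SENTINEL, fh.read()) is not None:
                        logging.info("Removing zero-length MCH sentinel file %s", path)
                        os.remove(path)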
@@ -387,6 +389,7 @@ def add_core_root_arguments(parser, build_type_default, build_type_help): merge_mch_parser.add_argument("-output_mch_path", required=True, help="Location to place the final MCH file.") merge_mch_parser.add_argument("-pattern", required=True, help=merge_mch_pattern_help) +merge_mch_parser.add_argument("--ci", action="store_true", help="Special collection mode for handling zero-sized files in Azure DevOps + Helix pipelines collections.") ################################################################################ # Helper functions @@ -731,12 +734,16 @@ def collect(self): else: self.__copy_to_final_mch_file__() - self.__create_toc__() + if not self.coreclr_args.skip_toc_step: + self.__create_toc__() if self.coreclr_args.clean: # There is no point to verify unless we have run the clean step. self.__verify_final_mch__() + if self.coreclr_args.ci: + self.__process_for_ci__() + passed = True except Exception as exception: @@ -1175,6 +1182,21 @@ def __verify_final_mch__(self): if not passed: raise RuntimeError("Error, unclean replay.") + def __process_for_ci__(self): + """ Helix doesn't upload zero-sized files. Sometimes we end up with zero-sized .mch files if + there is no data collected. Convert these to special "sentinel" files that "merge-mch" later + detects and deletes. This allows the + file download to succeed (because the file exists) and the MCH merge to also succeed. + """ + + logging.info("Process MCH files for CI") + + if os.path.getsize(self.final_mch_file) == 0: + # Convert to sentinel file + logging.info("Converting zero-length MCH file {} to ZEROLENGTH sentinel file".format(self.final_mch_file)) + with open(self.final_mch_file, "w") as write_fh: + write_fh.write("ZEROLENGTH") + ################################################################################ # SuperPMI Replay helpers ################################################################################ @@ -2909,6 +2931,12 @@ def merge_mch(coreclr_args): mcs -merge -recursive -dedup -thin mcs -toc + If `--ci` is passed, it looks for special "ZEROLENGTH" files and deletes them + before invoking the merge. This is to handle a CI issue where we generate zero-length + .MCH files in Helix, convert them to non-zero-length sentinel files so they will be + successfully uploaded to artifacts storage, then downloaded on AzDO using the + HelixWorkItem.DownloadFilesFromResults mechanism. Then, we delete them before merging. + Args: coreclr_args (CoreclrArguments) : parsed args @@ -2916,6 +2944,23 @@ True on success, else False """ + if coreclr_args.ci: + # Handle zero-length sentinel files. + # Recurse looking for small files that have the text ZEROLENGTH, and delete them.
+ try: + for dirpath, _, filenames in os.walk(os.path.dirname(coreclr_args.pattern)): + for file_name in filenames: + file_path = os.path.join(dirpath, file_name) + if os.path.getsize(file_path) < 15: + with open(file_path, "rb") as fh: + contents = fh.read() + match = re.search(b'ZEROLENGTH', contents) + if match is not None: + logging.info("Removing zero-length MCH sentinel file {}".format(file_path)) + os.remove(file_path) + except Exception: + pass + logging.info("Merging %s -> %s", coreclr_args.pattern, coreclr_args.output_mch_path) mcs_path = determine_mcs_tool_path(coreclr_args) command = [mcs_path, "-merge", coreclr_args.output_mch_path, coreclr_args.pattern, "-recursive", "-dedup", "-thin"] @@ -3523,6 +3568,16 @@ def verify_base_diff_args(): lambda unused: True, "Unable to set skip_merge_step.") + coreclr_args.verify(args, + "skip_toc_step", + lambda unused: True, + "Unable to set skip_toc_step.") + + coreclr_args.verify(args, + "ci", + lambda unused: True, + "Unable to set ci.") + coreclr_args.verify(args, "clean", lambda unused: True, @@ -3543,8 +3598,8 @@ def verify_base_diff_args(): lambda unused: True, "Unable to set pmi_path") - if (args.collection_command is None) and (args.pmi is False) and (args.crossgen2 is False): - print("Either a collection command or `--pmi` or `--crossgen2` must be specified") + if (args.collection_command is None) and (args.pmi is False) and (args.crossgen2 is False) and not coreclr_args.skip_collection_step: + print("Either a collection command or `--pmi` or `--crossgen2` or `--skip_collection_step` must be specified") sys.exit(1) if (args.collection_command is not None) and (len(args.assemblies) > 0): @@ -3565,7 +3620,7 @@ def verify_base_diff_args(): if args.pmi_location is not None: logging.warning("Warning: -pmi_location is set but --pmi is not.") - if args.collection_command is None and args.merge_mch_files is not True: + if args.collection_command is None and args.merge_mch_files is not True and not coreclr_args.skip_collection_step: assert args.collection_args is None assert (args.pmi is True) or (args.crossgen2 is True) assert len(args.assemblies) > 0 @@ -3872,6 +3927,11 @@ def verify_base_diff_args(): lambda unused: True, "Unable to set pattern") + coreclr_args.verify(args, + "ci", + lambda unused: True, + "Unable to set ci.") + if coreclr_args.mode == "replay" or coreclr_args.mode == "asmdiffs" or coreclr_args.mode == "tpdiff" or coreclr_args.mode == "download": if hasattr(coreclr_args, "private_store") and coreclr_args.private_store is not None: logging.info("Using private stores:") diff --git a/src/coreclr/scripts/superpmi_aspnet.py b/src/coreclr/scripts/superpmi_aspnet.py index eac558e161f46..5be3cacd68c85 100644 --- a/src/coreclr/scripts/superpmi_aspnet.py +++ b/src/coreclr/scripts/superpmi_aspnet.py @@ -16,6 +16,7 @@ import shutil import sys import zipfile +import stat from os import path from coreclr_arguments import * @@ -123,12 +124,29 @@ def build_and_run(coreclr_args): checked_root = path.join(source_directory, "artifacts", "bin", "coreclr", target_os + "." + coreclr_args.arch + ".Checked") release_root = path.join(source_directory, "artifacts", "bin", "coreclr", target_os + "." 
+ coreclr_args.arch + ".Release") + spmi_temp = path.join(source_directory, "artifacts", "spmi_aspnet_collection") + + # Set up/clean up temp dir + if not os.path.exists(spmi_temp): + os.makedirs(spmi_temp) + + def remove_readonly(func, path, _): + "Clear the readonly bit and reattempt the removal" + os.chmod(path, stat.S_IWRITE) + func(path) + + spmi_temp_items = [os.path.join(spmi_temp, item) for item in os.listdir(spmi_temp)] + for item in spmi_temp_items: + if os.path.isdir(item): + shutil.rmtree(item, onerror=remove_readonly) + else: + os.remove(item) # We'll use repo script to install dotnet dotnet_install_script_name = "dotnet-install.cmd" if is_windows else "dotnet-install.sh" dotnet_install_script_path = path.join(source_directory, "eng", "common", dotnet_install_script_name) - with TempDir(skip_cleanup=True) as temp_location: + with TempDir(spmi_temp, skip_cleanup=True) as temp_location: print ("Executing in " + temp_location) @@ -179,9 +197,14 @@ def build_and_run(coreclr_args): # note tricks to get one element tuples - runtime_options_list = [("Dummy=0",), ("TieredCompilation=0", ), ("TieredPGO=1",), ("TieredPGO=1", "ReadyToRun=0"), + runtime_options_list = [ + ("Dummy=0",), + ("TieredCompilation=0", ), + ("TieredPGO=1",), + ("TieredPGO=1", "ReadyToRun=0"), ("ReadyToRun=0", "OSR_HitLimit=0", "TC_OnStackReplacement_InitialCounter=10"), - ("TieredPGO=1", "ReadyToRun=0", "OSR_HitLimit=0", "TC_OnStackReplacement_InitialCounter=10")] + ("TieredPGO=1", "ReadyToRun=0", "OSR_HitLimit=0", "TC_OnStackReplacement_InitialCounter=10") + ] # runtime_options_list = [("TieredCompilation=0", )] diff --git a/src/coreclr/scripts/superpmi_collect_setup.py b/src/coreclr/scripts/superpmi_collect_setup.py index 497ff9f64d57b..815041c97685d 100644 --- a/src/coreclr/scripts/superpmi_collect_setup.py +++ b/src/coreclr/scripts/superpmi_collect_setup.py @@ -9,22 +9,25 @@ # # Script to setup directory structure required to perform SuperPMI collection in CI. # It does the following steps: -# 1. It creates `correlation_payload_directory` that contains files from CORE_ROOT, src\coreclr\scripts. -# This directory is the one that is sent to all the helix machines that performs SPMI collection. -# 2. It clones dotnet/jitutils, builds it and then copies the `pmi.dll` to `correlation_payload_directory` folder. -# This file is needed to do pmi SPMI runs. -# 3. The script takes `input_artifacts` parameter which contains managed .dlls and .exes on -# which SPMI needs to be run. This script will partition these folders into equal buckets of approximately `max_size` -# bytes and stores them under `payload` directory. Each sub-folder inside `payload` directory is sent to individual -# helix machine to do SPMI collection on. E.g. for `input_artifacts` to be run on libraries, the parameter would be path to -# `CORE_ROOT` folder and this script will copy `max_size` bytes of those files under `payload/libraries/0/binaries`, -# `payload/libraries/1/binaries` and so forth. -# 4. Lastly, it sets the pipeline variables. +# 1. Create `correlation_payload_directory` that contains files from CORE_ROOT, src\coreclr\scripts. +# This directory is the one that is sent to all the helix machines that perform SPMI collections. +# 2. For PMI collections, clone dotnet/jitutils, build it and then copy the `pmi.dll` to +# `correlation_payload_directory` folder. +# 3. For PMI/crossgen2 collections, the `input_directory` directory contains the set of assemblies +# to collect over. 
This script will partition these folders into equal buckets of approximately +# `max_size` bytes and stores them under the workitem payload directory. Each sub-folder inside +# this directory is sent to an individual helix machine to do SPMI collection on. E.g. for +# `input_directory` to be run on libraries, the parameter is the path to `CORE_ROOT` folder and +# this script will copy `max_size` bytes of those files under +# `payload/collectAssembliesDirectory/libraries/0/binaries`, +# `payload/collectAssembliesDirectory/libraries/1/binaries` and so forth. +# 4. For benchmarks collections, a specialized script is called to set up the benchmarks collection. +# 5. Lastly, it sets the pipeline variables. # # Below are the helix queues it sets depending on the OS/architecture: # | Arch | windows | Linux | macOS | # |-------|-------------------------|--------------------------------------------------------------------------------------------------------------------------------------|----------------| -# | x86 | Windows.10.Amd64.X86.Rt | | - | +# | x86 | Windows.10.Amd64.X86.Rt | - | - | # | x64 | Windows.10.Amd64.X86.Rt | Ubuntu.1804.Amd64 | OSX.1014.Amd64 | # | arm | - | (Ubuntu.1804.Arm32)Ubuntu.1804.Armarch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7-bfcd90a-20200121150440 | - | # | arm64 | Windows.10.Arm64 | (Ubuntu.1804.Arm64)Ubuntu.1804.ArmArch@mcr.microsoft.com/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8-20210531091519-97d8652 | OSX.1100.ARM64 | @@ -39,22 +42,25 @@ from coreclr_arguments import * from jitutil import run_command, copy_directory, copy_files, set_pipeline_variable, ChangeDir, TempDir - # Start of parser object creation. parser = argparse.ArgumentParser(description="description") -parser.add_argument("-source_directory", help="path to source directory") -parser.add_argument("-core_root_directory", help="path to core_root directory") -parser.add_argument("-arch", help="Architecture") -parser.add_argument("-platform", help="OS platform") +parser.add_argument("-collection_type", required=True, help="Type of the SPMI collection to be done (crossgen2, pmi, run)") +parser.add_argument("-collection_name", required=True, help="Name of the SPMI collection to be done (e.g., libraries, libraries_tests, coreclr_tests, benchmarks)") +parser.add_argument("-payload_directory", required=True, help="Path to payload directory to create: subdirectories are created for the correlation payload as well as the per-partition work items") +parser.add_argument("-source_directory", required=True, help="Path to source directory") +parser.add_argument("-core_root_directory", required=True, help="Path to Core_Root directory") +parser.add_argument("-arch", required=True, help="Architecture") +parser.add_argument("-platform", required=True, help="OS platform") parser.add_argument("-mch_file_tag", help="Tag to be used to mch files") -parser.add_argument("-collection_name", help="Name of the SPMI collection to be done (e.g., libraries, tests)") -parser.add_argument("-collection_type", help="Type of the SPMI collection to be done (crossgen, crossgen2, pmi)") -parser.add_argument("-input_directory", help="directory containing assemblies for which superpmi collection to be done") -parser.add_argument("-max_size", help="Max size of each partition in MB") +parser.add_argument("-input_directory", help="Directory containing assemblies which SuperPMI will use for collection (for pmi/crossgen2 collections)") +parser.add_argument("-max_size", help="Max size of each partition in MB 
(for pmi/crossgen2 collections)") + is_windows = platform.system() == "Windows" +legal_collection_types = [ "crossgen2", "pmi", "run" ] + native_binaries_to_ignore = [ "api-ms-win-core-console-l1-1-0.dll", "api-ms-win-core-datetime-l1-1-0.dll", @@ -187,15 +193,28 @@ def setup_args(args): coreclr_args = CoreclrArguments(args, require_built_core_root=False, require_built_product_dir=False, require_built_test_dir=False, default_build_type="Checked") + coreclr_args.verify(args, + "payload_directory", + lambda unused: True, + "Unable to set payload_directory", + modify_arg=lambda payload_directory: os.path.abspath(payload_directory)) + coreclr_args.verify(args, "source_directory", lambda source_directory: os.path.isdir(source_directory), - "source_directory doesn't exist") + "source_directory doesn't exist", + modify_arg=lambda source_directory: os.path.abspath(source_directory)) + + check_dir = os.path.join(coreclr_args.source_directory, 'src', 'coreclr', 'scripts') + if not os.path.isdir(check_dir): + print("Specified directory {0} doesn't look like a source directory".format(coreclr_args.source_directory)) + sys.exit(1) coreclr_args.verify(args, "core_root_directory", lambda core_root_directory: os.path.isdir(core_root_directory), - "core_root_directory doesn't exist") + "core_root_directory doesn't exist", + modify_arg=lambda core_root_directory: os.path.abspath(core_root_directory)) coreclr_args.verify(args, "arch", @@ -219,17 +238,18 @@ def setup_args(args): coreclr_args.verify(args, "collection_type", - lambda unused: True, - "Unable to set collection_type") + lambda collection_type: collection_type in legal_collection_types, + "Please specify one of the allowed collection types: " + ' '.join(legal_collection_types)) coreclr_args.verify(args, "input_directory", - lambda input_directory: os.path.isdir(input_directory), - "input_directory doesn't exist") + lambda input_directory: coreclr_args.collection_type not in [ "pmi", "crossgen2" ] or os.path.isdir(input_directory), + "input_directory doesn't exist", + modify_arg=lambda input_directory: None if input_directory is None else os.path.abspath(input_directory)) coreclr_args.verify(args, "max_size", - lambda max_size: max_size > 0, + lambda max_size: coreclr_args.collection_type not in [ "pmi", "crossgen2" ] or max_size > 0, "Please enter a valid positive numeric max_size", modify_arg=lambda max_size: int( max_size) * 1000 * 1000 if max_size is not None and max_size.isnumeric() else 0 @@ -390,15 +410,31 @@ def main(main_args): coreclr_args = setup_args(main_args) source_directory = coreclr_args.source_directory - # CorrelationPayload directories - correlation_payload_directory = os.path.join(coreclr_args.source_directory, "payload") + # If the payload directory doesn't already exist (it probably shouldn't), then create it. + if not os.path.isdir(coreclr_args.payload_directory): + os.makedirs(coreclr_args.payload_directory) + + correlation_payload_directory = os.path.join(coreclr_args.payload_directory, 'correlation') + workitem_payload_directory = os.path.join(coreclr_args.payload_directory, 'workitem') + superpmi_src_directory = os.path.join(source_directory, 'src', 'coreclr', 'scripts') + + # Correlation payload directories (sent to every Helix machine). + # Currently, all the Core_Root files, superpmi script files, and pmi.dll go in the same place.
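# (Illustrative note, not part of this change: with the arguments above, the layout this
# script produces is roughly
#
#   <payload_directory>/
#       correlation/superpmi/   <- Core_Root files, the *.py scripts, and (for pmi) pmi.dll
#       workitem/collectAssembliesDirectory/<collection_name>/0/binaries/
#       workitem/collectAssembliesDirectory/<collection_name>/1/binaries/
#       ...
#
# where each numbered folder under the workitem payload becomes one Helix work item of
# approximately max_size bytes.)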
superpmi_dst_directory = os.path.join(correlation_payload_directory, "superpmi") + core_root_dst_directory = superpmi_dst_directory + + # Workitem directories + # input_artifacts is only used for pmi/crossgen2 collections. + input_artifacts = "" + arch = coreclr_args.arch platform_name = coreclr_args.platform.lower() helix_source_prefix = "official" creator = "" ci = True + + # Determine the Helix queue name to use when running jobs. if platform_name == "windows": helix_queue = "Windows.10.Arm64" if arch == "arm64" else "Windows.10.Amd64.X86.Rt" elif platform_name == "linux": @@ -411,10 +447,13 @@ def main(main_args): elif platform_name == "osx": helix_queue = "OSX.1100.ARM64" if arch == "arm64" else "OSX.1014.Amd64" - # create superpmi directory + # Copy the superpmi scripts + print('Copying {} -> {}'.format(superpmi_src_directory, superpmi_dst_directory)) copy_directory(superpmi_src_directory, superpmi_dst_directory, verbose_output=True, match_func=lambda path: any(path.endswith(extension) for extension in [".py"])) + # Copy Core_Root + if platform_name == "windows": acceptable_copy = lambda path: any(path.endswith(extension) for extension in [".py", ".dll", ".exe", ".json"]) else: @@ -423,80 +462,78 @@ def main(main_args): # Need to accept files without any extension, which is how executable file's names look. acceptable_copy = lambda path: (os.path.basename(path).find(".") == -1) or any(path.endswith(extension) for extension in acceptable_extensions) - print('Copying {} -> {}'.format(coreclr_args.core_root_directory, superpmi_dst_directory)) - copy_directory(coreclr_args.core_root_directory, superpmi_dst_directory, verbose_output=True, match_func=acceptable_copy) - - # Copy all the test files to CORE_ROOT - # The reason is there are lot of dependencies with *.Tests.dll and to ensure we do not get - # Reflection errors, just copy everything to CORE_ROOT so for all individual partitions, the - # references will be present in CORE_ROOT. 
- if coreclr_args.collection_name == "libraries_tests": - print('Copying {} -> {}'.format(coreclr_args.input_directory, superpmi_dst_directory)) - - def make_readable(folder_name): - """Make file executable by changing the permission - - Args: - folder_name (string): folder to mark with 744 - """ - if is_windows: - return - - print("Inside make_readable") - run_command(["ls", "-l", folder_name]) - for file_path, dirs, files in os.walk(folder_name, topdown=True): - for d in dirs: - os.chmod(os.path.join(file_path, d), - # read+write+execute for owner - (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) | - # read for group - (stat.S_IRGRP) | - # read for other - (stat.S_IROTH)) - - for f in files: - os.chmod(os.path.join(file_path, f), - # read+write+execute for owner - (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) | - # read for group - (stat.S_IRGRP) | - # read for other - (stat.S_IROTH)) - run_command(["ls", "-l", folder_name]) - - make_readable(coreclr_args.input_directory) - copy_directory(coreclr_args.input_directory, superpmi_dst_directory, verbose_output=True, match_func=acceptable_copy) - - # Workitem directories - workitem_directory = os.path.join(source_directory, "workitem") - input_artifacts = "" + print('Copying {} -> {}'.format(coreclr_args.core_root_directory, core_root_dst_directory)) + copy_directory(coreclr_args.core_root_directory, core_root_dst_directory, verbose_output=True, match_func=acceptable_copy) if coreclr_args.collection_name == "benchmarks": # Setup microbenchmarks - setup_microbenchmark(workitem_directory, arch) + setup_microbenchmark(workitem_payload_directory, arch) else: - # Setup for pmi/crossgen runs + # Setup for pmi/crossgen2 runs - # Clone and build jitutils - try: - with TempDir() as jitutils_directory: - run_command( - ["git", "clone", "--quiet", "--depth", "1", "https://github.com/dotnet/jitutils", jitutils_directory]) - - # Make sure ".dotnet" directory exists, by running the script at least once - dotnet_script_name = "dotnet.cmd" if is_windows else "dotnet.sh" - dotnet_script_path = os.path.join(source_directory, dotnet_script_name) - run_command([dotnet_script_path, "--info"], jitutils_directory) - - # Set dotnet path to run build - os.environ["PATH"] = os.path.join(source_directory, ".dotnet") + os.pathsep + os.environ["PATH"] - build_file = "build.cmd" if is_windows else "build.sh" - run_command([os.path.join(jitutils_directory, build_file), "-p"], jitutils_directory) + # For libraries tests, copy all the test files to the single Core_Root directory. + # The reason is there are a lot of dependencies with *.Tests.dll and to ensure we do not get + # Reflection errors, just copy everything to CORE_ROOT so for all individual partitions, the + # references will be present in CORE_ROOT.
+ if coreclr_args.collection_name == "libraries_tests": - copy_files(os.path.join(jitutils_directory, "bin"), superpmi_dst_directory, [os.path.join(jitutils_directory, "bin", "pmi.dll")]) - except PermissionError as pe_error: - # Details: https://bugs.python.org/issue26660 - print('Ignoring PermissionError: {0}'.format(pe_error)) + def make_readable(folder_name): + """Make files readable by changing their permissions to 744 + + Args: + folder_name (string): folder to mark with 744 + """ + if is_windows: + return + + print("Inside make_readable") + run_command(["ls", "-l", folder_name]) + for file_path, dirs, files in os.walk(folder_name, topdown=True): + for d in dirs: + os.chmod(os.path.join(file_path, d), + # read+write+execute for owner + (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) | + # read for group + (stat.S_IRGRP) | + # read for other + (stat.S_IROTH)) + + for f in files: + os.chmod(os.path.join(file_path, f), + # read+write+execute for owner + (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) | + # read for group + (stat.S_IRGRP) | + # read for other + (stat.S_IROTH)) + run_command(["ls", "-l", folder_name]) + + make_readable(coreclr_args.input_directory) + print('Copying {} -> {}'.format(coreclr_args.input_directory, core_root_dst_directory)) + copy_directory(coreclr_args.input_directory, core_root_dst_directory, verbose_output=True, match_func=acceptable_copy) + + # We need the PMI tool if we're doing a PMI collection. We could download a cached copy from Azure DevOps JIT blob + # storage, but instead we clone and build jitutils to build pmi.dll. + if coreclr_args.collection_type == "pmi": + try: + with TempDir() as jitutils_directory: + run_command( + ["git", "clone", "--quiet", "--depth", "1", "https://github.com/dotnet/jitutils", jitutils_directory]) + + # Make sure ".dotnet" directory exists, by running the script at least once + dotnet_script_name = "dotnet.cmd" if is_windows else "dotnet.sh" + dotnet_script_path = os.path.join(source_directory, dotnet_script_name) + run_command([dotnet_script_path, "--info"], jitutils_directory) + + # Set dotnet path to run build + os.environ["PATH"] = os.path.join(source_directory, ".dotnet") + os.pathsep + os.environ["PATH"] + build_file = "build.cmd" if is_windows else "build.sh" + run_command([os.path.join(jitutils_directory, build_file), "-p"], jitutils_directory) + + copy_files(os.path.join(jitutils_directory, "bin"), core_root_dst_directory, [os.path.join(jitutils_directory, "bin", "pmi.dll")]) + except PermissionError as pe_error: + # Details: https://bugs.python.org/issue26660 + print('Ignoring PermissionError: {0}'.format(pe_error)) # NOTE: we can't use the build machine ".dotnet" to run on all platforms. E.g., the Windows x86 build uses a # Windows x64 .dotnet\dotnet.exe that can't load a 32-bit shim. Thus, we always use corerun from Core_Root to invoke crossgen2.
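# (Illustrative sketch, not part of this change: the "equal buckets of approximately
# max_size bytes" partitioning described in the header comment amounts to a greedy
# grouping like the following. partition_by_size is a hypothetical name used only for
# exposition; the script's real partitioning code is not shown in this diff. Assumes
# `os` is imported, as it is in this script.)
def partition_by_size(file_paths, max_size):
    """Greedily group files into buckets of approximately max_size bytes each."""
    buckets = []
    current, current_size = [], 0
    for file_path in file_paths:
        size = os.path.getsize(file_path)
        if current and current_size + size > max_size:
            # The current bucket would overflow; seal it and start a new one.
            buckets.append(current)
            current, current_size = [], 0
        current.append(file_path)
        current_size += size
    if current:
        buckets.append(current)
    return buckets
# Each bucket then maps to one payload/collectAssembliesDirectory/<collection_name>/<N>/binaries
# folder, i.e., one Helix work item.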
@@ -509,9 +546,7 @@ def make_readable(folder_name): # print('Copying {} -> {}'.format(dotnet_src_directory, dotnet_dst_directory)) # copy_directory(dotnet_src_directory, dotnet_dst_directory, verbose_output=False) - # payload - pmiassemblies_directory = os.path.join(workitem_directory, "pmiAssembliesDirectory") - input_artifacts = os.path.join(pmiassemblies_directory, coreclr_args.collection_name) + input_artifacts = os.path.join(workitem_payload_directory, "collectAssembliesDirectory", coreclr_args.collection_name) exclude_directory = ['Core_Root'] if coreclr_args.collection_name == "coreclr_tests" else [] exclude_files = native_binaries_to_ignore if coreclr_args.collection_type == "crossgen2": @@ -531,7 +566,7 @@ def make_readable(folder_name): # Set variables print('Setting pipeline variables:') set_pipeline_variable("CorrelationPayloadDirectory", correlation_payload_directory) - set_pipeline_variable("WorkItemDirectory", workitem_directory) + set_pipeline_variable("WorkItemDirectory", workitem_payload_directory) set_pipeline_variable("InputArtifacts", input_artifacts) set_pipeline_variable("Python", ' '.join(get_python_name())) set_pipeline_variable("Architecture", arch) diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/ArgIterator.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/ArgIterator.cs index 73e0a1d4087d7..70e258d8d297d 100644 --- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/ArgIterator.cs +++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/ArgIterator.cs @@ -1364,6 +1364,7 @@ public int GetNextOffset() { if (isValueType && ((floatFieldFlags & (uint)StructFloatFieldInfoFlags.STRUCT_HAS_ONE_FLOAT_MASK) != 0)) { + Debug.Assert(cFPRegs == 1); if ((_loongarch64IdxFPReg < 8) && (_loongarch64IdxGenReg < 8)) { _argLocDescForStructInRegs = new ArgLocDesc(); @@ -1381,15 +1382,23 @@ public int GetNextOffset() _loongarch64IdxGenReg++; return argOfsInner; } - else - { - _loongarch64IdxFPReg = 8; - } } else if (cFPRegs + _loongarch64IdxFPReg <= 8) { // Each floating point register in the argument area is 8 bytes. int argOfsInner = _transitionBlock.OffsetOfFloatArgumentRegisters + _loongarch64IdxFPReg * 8; + if (floatFieldFlags == (uint)StructFloatFieldInfoFlags.STRUCT_FLOAT_FIELD_ONLY_TWO) + { + // struct with two single-float fields. + _argLocDescForStructInRegs = new ArgLocDesc(); + _argLocDescForStructInRegs.m_idxFloatReg = _loongarch64IdxFPReg; + _argLocDescForStructInRegs.m_cFloatReg = 2; + Debug.Assert(cFPRegs == 2); + Debug.Assert(argSize == 8); + + _hasArgLocDescForStructInRegs = true; + _argLocDescForStructInRegs.m_floatFlags = (uint)StructFloatFieldInfoFlags.STRUCT_FLOAT_FIELD_ONLY_TWO; + } _loongarch64IdxFPReg += cFPRegs; return argOfsInner; } @@ -1418,11 +1427,6 @@ public int GetNextOffset() _loongarch64OfsStack += 8; return argOfsInner; } - else - { - // Don't use reg slots for this. It will be passed purely on the stack arg space. 
- _loongarch64IdxGenReg = 8; - } } argOfs = _transitionBlock.OffsetOfArgs + _loongarch64OfsStack; diff --git a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/TransitionBlock.cs b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/TransitionBlock.cs index 8cf01a8193871..2228cb248e760 100644 --- a/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/TransitionBlock.cs +++ b/src/coreclr/tools/aot/ILCompiler.ReadyToRun/Compiler/DependencyAnalysis/ReadyToRun/TransitionBlock.cs @@ -383,7 +383,12 @@ public void ComputeReturnValueTreatment(CorElementType type, TypeHandle thRetTyp } if (size <= EnregisteredReturnTypeIntegerMaxSize) + { + if (IsLoongArch64) + fpReturnSize = LoongArch64PassStructInRegister.GetLoongArch64PassStructInRegisterFlags(thRetType.GetRuntimeTypeHandle()) & 0xff; break; + } + } } diff --git a/src/coreclr/tools/superpmi/mcs/commandline.cpp b/src/coreclr/tools/superpmi/mcs/commandline.cpp index d7b29b2c279bb..86ce6023375d1 100644 --- a/src/coreclr/tools/superpmi/mcs/commandline.cpp +++ b/src/coreclr/tools/superpmi/mcs/commandline.cpp @@ -121,6 +121,8 @@ void CommandLine::DumpHelp(const char* program) printf(" -strip range inputfile outputfile\n"); printf(" Copy method contexts from one file to another, skipping ranged items.\n"); printf(" inputfile is read and records not in range are written to outputfile.\n"); + printf(" If range is empty (e.g., from an empty .mcl file due to a clean replay),\n"); + printf(" the file is simply copied.\n"); printf(" e.g. -strip 2 a.mc b.mc\n"); printf("\n"); printf(" -toc inputfile\n"); @@ -627,12 +629,6 @@ bool CommandLine::Parse(int argc, char* argv[], /* OUT */ Options* o) DumpHelp(argv[0]); return false; } - if (o->indexCount == 0) - { - LogError("CommandLine::Parse() -strip requires a range."); - DumpHelp(argv[0]); - return false; - } return true; } if (o->actionPrintJITEEVersion) diff --git a/src/coreclr/tools/superpmi/superpmi-shared/methodcontext.cpp b/src/coreclr/tools/superpmi/superpmi-shared/methodcontext.cpp index 6536e5b8cb0a8..7bac6fdb36575 100644 --- a/src/coreclr/tools/superpmi/superpmi-shared/methodcontext.cpp +++ b/src/coreclr/tools/superpmi/superpmi-shared/methodcontext.cpp @@ -7007,8 +7007,15 @@ int MethodContext::dumpMethodIdentityInfoToBuffer(char* buff, int len, bool igno char* obuff = buff; - // Add the Method Signature - int t = sprintf_s(buff, len, "%s -- ", CallUtils::GetMethodFullName(this, pInfo->ftn, pInfo->args, ignoreMethodName)); + // Add the Method Signature. Be careful about potentially huge method signatures; truncate if necessary. + const char* methodFullName = CallUtils::GetMethodFullName(this, pInfo->ftn, pInfo->args, ignoreMethodName); + int t = _snprintf_s(buff, len - METHOD_IDENTITY_INFO_NON_NAME_RESERVE, _TRUNCATE, "%s -- ", methodFullName); + if (t == -1) + { + // We truncated the name string, meaning we wrote exactly `len - METHOD_IDENTITY_INFO_NON_NAME_RESERVE` characters + // (including the terminating null). We advance the buffer pointer by this amount, not including that terminating null. 
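// (Worked example, assuming the caller passes a buffer of METHOD_IDENTITY_INFO_SIZE bytes:
// len == 0x10000, so a truncated name keeps 0x10000 - 0x400 - 1 characters, and after the
// buff/len adjustment below, METHOD_IDENTITY_INFO_NON_NAME_RESERVE + 1 bytes remain for
// the non-name parts of the identity string.)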
+ t = len - METHOD_IDENTITY_INFO_NON_NAME_RESERVE - 1; + } buff += t; len -= t; diff --git a/src/coreclr/tools/superpmi/superpmi-shared/methodcontext.h b/src/coreclr/tools/superpmi/superpmi-shared/methodcontext.h index 66a3b90e49a60..f9769119e20b5 100644 --- a/src/coreclr/tools/superpmi/superpmi-shared/methodcontext.h +++ b/src/coreclr/tools/superpmi/superpmi-shared/methodcontext.h @@ -42,6 +42,7 @@ extern bool g_debugRep; const char* toString(CorInfoType cit); #define METHOD_IDENTITY_INFO_SIZE 0x10000 // We assume that the METHOD_IDENTITY_INFO_SIZE will not exceed 64KB +#define METHOD_IDENTITY_INFO_NON_NAME_RESERVE 0x400 // Reserve 1KB of METHOD_IDENTITY_INFO_SIZE for everything except for the method name. // Special "jit flags" for noting some method context features diff --git a/src/coreclr/vm/arm64/stubs.cpp b/src/coreclr/vm/arm64/stubs.cpp index e7c73097640e9..5f8673b702da5 100644 --- a/src/coreclr/vm/arm64/stubs.cpp +++ b/src/coreclr/vm/arm64/stubs.cpp @@ -2026,7 +2026,7 @@ PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, *(DWORD*)p = 0xd280000a | ((UINT32)slotOffset << 5); p += 4; dataOffset -= 4; - // cmp x9,x10 + // cmp x11,x10 *(DWORD*)p = 0xeb0a017f; p += 4; dataOffset -= 4; diff --git a/src/coreclr/vm/loongarch64/stubs.cpp b/src/coreclr/vm/loongarch64/stubs.cpp index d868fd9c1e6ea..12d56e177ebac 100644 --- a/src/coreclr/vm/loongarch64/stubs.cpp +++ b/src/coreclr/vm/loongarch64/stubs.cpp @@ -1791,22 +1791,26 @@ PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, { int codeSize = 0; int indirectionsDataSize = 0; - if (pLookup->sizeOffset != CORINFO_NO_SIZE_CHECK) + if (pLookup->testForNull || pLookup->sizeOffset != CORINFO_NO_SIZE_CHECK) { - codeSize += (pLookup->sizeOffset > 2047 ? 8 : 4); - indirectionsDataSize += (pLookup->sizeOffset > 2047 ? 4 : 0); - codeSize += 12; + codeSize += 4; } for (WORD i = 0; i < pLookup->indirections; i++) { _ASSERTE(pLookup->offsets[i] >= 0); + if (i == pLookup->indirections - 1 && pLookup->sizeOffset != CORINFO_NO_SIZE_CHECK) + { + codeSize += (pLookup->sizeOffset > 2047 ? 24 : 16); + indirectionsDataSize += (pLookup->sizeOffset > 2047 ? 4 : 0); + } + codeSize += (pLookup->offsets[i] > 2047 ? 8 : 4); // if( > 2047) (8 bytes) else 4 bytes for instructions. indirectionsDataSize += (pLookup->offsets[i] > 2047 ? 4 : 0); // 4 bytes for storing indirection offset values } codeSize += indirectionsDataSize ? 4 : 0; // pcaddi - if(pLookup->testForNull) + if (pLookup->testForNull) { codeSize += 12; // ori-beq-jr @@ -1821,7 +1825,7 @@ PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, codeSize += 4; /* jilr */ } - // the offset value of data. + // the offset value of data_label. 
uint dataOffset = codeSize; codeSize += indirectionsDataSize; @@ -1877,9 +1881,9 @@ PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, *(DWORD*)p = 0x03800210 | (((UINT32)slotOffset & 0xfff) << 10); p += 4; dataOffset -= 8; - // bge $t4,$t3, // CALL HELPER: + // bge $t4,$t5, // CALL HELPER: pBLECall = p; // Offset filled later - *(DWORD*)p = 0x6400020f; p += 4; + *(DWORD*)p = 0x64000211; p += 4; dataOffset -= 4; } @@ -1889,10 +1893,12 @@ PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, // ld.wu $t4,$r21,0 *(DWORD*)p = 0x2a8002b0 | (dataOffset<<10); p += 4; - dataOffset += 4; // ldx.d $a0,$a0,$t4 *(DWORD*)p = 0x380c4084; p += 4; + + // move to next indirection offset data + dataOffset = dataOffset - 8 + 4; // subtract 8 as we have moved PC by 8 and add 4 as next data is at 4 bytes from previous data } else { @@ -1902,6 +1908,7 @@ PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, // ld.d $a0,$a0,pLookup->offsets[i] *(DWORD*)p = 0x28c00084 | ((pLookup->offsets[i] & 0xfff)<<10); p += 4; + dataOffset -= 4; // subtract 4 as we have moved PC by 4 } } @@ -1925,7 +1932,7 @@ PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, // CALL HELPER: if(pBLECall != NULL) - *(DWORD*)pBLECall |= ((UINT32)(p - pBLECall) << 10); + *(DWORD*)pBLECall |= ((UINT32)(p - pBLECall) << 8); // ori $a0,$t3,0 *(DWORD*)p = 0x038001e4; @@ -1943,7 +1950,7 @@ PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, EmitHelperWithArg(p, rxOffset, pAllocator, (TADDR)pArgs, helperAddress); } - // datalabel: + // data_label: for (WORD i = 0; i < pLookup->indirections; i++) { if (i == pLookup->indirections - 1 && pLookup->sizeOffset != CORINFO_NO_SIZE_CHECK && pLookup->sizeOffset > 2047) diff --git a/src/libraries/Common/src/Interop/Linux/procfs/Interop.ProcFsStat.cs b/src/libraries/Common/src/Interop/Linux/procfs/Interop.ProcFsStat.cs index 8dd29a54221d0..ee9c2b7597188 100644 --- a/src/libraries/Common/src/Interop/Linux/procfs/Interop.ProcFsStat.cs +++ b/src/libraries/Common/src/Interop/Linux/procfs/Interop.ProcFsStat.cs @@ -19,10 +19,6 @@ internal static partial class @procfs private const string FileDescriptorDirectoryName = "/fd/"; private const string TaskDirectoryName = "/task/"; - internal const string SelfExeFilePath = RootPath + "self" + ExeFileName; - internal const string SelfCmdLineFilePath = RootPath + "self" + CmdLineFileName; - internal const string ProcStatFilePath = RootPath + "stat"; - internal struct ParsedStat { // Commented out fields are available in the stat data file but diff --git a/src/libraries/Common/src/Interop/Windows/BCrypt/Interop.BCryptKeyDataBlob.cs b/src/libraries/Common/src/Interop/Unix/System.Native/Interop.GetBootTimeTicks.cs similarity index 51% rename from src/libraries/Common/src/Interop/Windows/BCrypt/Interop.BCryptKeyDataBlob.cs rename to src/libraries/Common/src/Interop/Unix/System.Native/Interop.GetBootTimeTicks.cs index 10b6b905ee62a..d6c327578c218 100644 --- a/src/libraries/Common/src/Interop/Windows/BCrypt/Interop.BCryptKeyDataBlob.cs +++ b/src/libraries/Common/src/Interop/Unix/System.Native/Interop.GetBootTimeTicks.cs @@ -1,15 +1,14 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
-using System; -using System.Diagnostics; using System.Runtime.InteropServices; internal static partial class Interop { - internal static partial class BCrypt + internal static partial class Sys { - internal const int BCRYPT_KEY_DATA_BLOB_MAGIC = 0x4d42444b; // 'KDBM' - internal const int BCRYPT_KEY_DATA_BLOB_VERSION1 = 1; + [LibraryImport(Libraries.SystemNative, EntryPoint = "SystemNative_GetBootTimeTicks")] + [SuppressGCTransition] + internal static partial long GetBootTimeTicks(); } } diff --git a/src/libraries/Common/src/Interop/Windows/BCrypt/Interop.BCryptChainingModes.cs b/src/libraries/Common/src/Interop/Windows/BCrypt/Interop.BCryptChainingModes.cs deleted file mode 100644 index 4a5b7df46c67d..0000000000000 --- a/src/libraries/Common/src/Interop/Windows/BCrypt/Interop.BCryptChainingModes.cs +++ /dev/null @@ -1,16 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System; -using System.Diagnostics; -using System.Runtime.InteropServices; - -internal static partial class Interop -{ - internal static partial class BCrypt - { - internal const string BCRYPT_CHAIN_MODE_CBC = "ChainingModeCBC"; - internal const string BCRYPT_CHAIN_MODE_ECB = "ChainingModeECB"; - internal const string BCRYPT_CHAIN_MODE_CFB = "ChainingModeCFB"; - } -} diff --git a/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptAlgorithms.cs b/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptAlgorithms.cs deleted file mode 100644 index 22ca92d254c5d..0000000000000 --- a/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptAlgorithms.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System; - -internal static partial class Interop -{ - internal static partial class NCrypt - { - internal const string NCRYPT_3DES_ALGORITHM = "3DES"; - internal const string NCRYPT_AES_ALGORITHM = "AES"; - } -} diff --git a/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptCipherKeyBlob.cs b/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptCipherKeyBlob.cs deleted file mode 100644 index 68c1561bae603..0000000000000 --- a/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptCipherKeyBlob.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System; - -internal static partial class Interop -{ - internal static partial class NCrypt - { - internal const string NCRYPT_CIPHER_KEY_BLOB = "CipherKeyBlob"; - internal const int NCRYPT_CIPHER_KEY_BLOB_MAGIC = 0x52485043; //'CPHR' - } -} diff --git a/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptPropertyNames.cs b/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptPropertyNames.cs deleted file mode 100644 index 1c20f73651307..0000000000000 --- a/src/libraries/Common/src/Interop/Windows/NCrypt/Interop.NCryptPropertyNames.cs +++ /dev/null @@ -1,13 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. 
- -using System; - -internal static partial class Interop -{ - internal static partial class NCrypt - { - internal const string NCRYPT_CHAINING_MODE_PROPERTY = "Chaining Mode"; - internal const string NCRYPT_INITIALIZATION_VECTOR = "IV"; - } -} diff --git a/src/libraries/Common/src/System/Security/Cryptography/ECDiffieHellmanSecurityTransforms.macOS.cs b/src/libraries/Common/src/System/Security/Cryptography/ECDiffieHellmanSecurityTransforms.macOS.cs deleted file mode 100644 index ae92c6b48f6e2..0000000000000 --- a/src/libraries/Common/src/System/Security/Cryptography/ECDiffieHellmanSecurityTransforms.macOS.cs +++ /dev/null @@ -1,21 +0,0 @@ -// Licensed to the .NET Foundation under one or more agreements. -// The .NET Foundation licenses this file to you under the MIT license. - -using System.Diagnostics; -using System.Security.Cryptography.Apple; - -namespace System.Security.Cryptography -{ - internal static partial class ECDiffieHellmanImplementation - { - public sealed partial class ECDiffieHellmanSecurityTransforms : ECDiffieHellman - { - public override void ImportSubjectPublicKeyInfo( - ReadOnlySpan source, - out int bytesRead) - { - KeySizeValue = _ecc.ImportSubjectPublicKeyInfo(source, out bytesRead); - } - } - } -} diff --git a/src/libraries/Common/tests/System/Net/Prerequisites/LocalEchoServer.props b/src/libraries/Common/tests/System/Net/Prerequisites/LocalEchoServer.props index 29501d4d6c47f..9b7b8670f463a 100644 --- a/src/libraries/Common/tests/System/Net/Prerequisites/LocalEchoServer.props +++ b/src/libraries/Common/tests/System/Net/Prerequisites/LocalEchoServer.props @@ -19,7 +19,7 @@ - + diff --git a/src/libraries/Microsoft.NETCore.Platforms/src/runtime.compatibility.json b/src/libraries/Microsoft.NETCore.Platforms/src/runtime.compatibility.json index 15c8c0a74777f..5a39f5d745044 100644 --- a/src/libraries/Microsoft.NETCore.Platforms/src/runtime.compatibility.json +++ b/src/libraries/Microsoft.NETCore.Platforms/src/runtime.compatibility.json @@ -3454,6 +3454,38 @@ "any", "base" ], + "fedora.38": [ + "fedora.38", + "fedora", + "linux", + "unix", + "any", + "base" + ], + "fedora.38-arm64": [ + "fedora.38-arm64", + "fedora.38", + "fedora-arm64", + "fedora", + "linux-arm64", + "linux", + "unix-arm64", + "unix", + "any", + "base" + ], + "fedora.38-x64": [ + "fedora.38-x64", + "fedora.38", + "fedora-x64", + "fedora", + "linux-x64", + "linux", + "unix-x64", + "unix", + "any", + "base" + ], "freebsd": [ "freebsd", "unix", @@ -4285,22 +4317,22 @@ "any", "base" ], - "linux-musl-s390x": [ - "linux-musl-s390x", + "linux-musl-ppc64le": [ + "linux-musl-ppc64le", "linux-musl", - "linux-s390x", + "linux-ppc64le", "linux", - "unix-s390x", + "unix-ppc64le", "unix", "any", "base" ], - "linux-musl-ppc64le": [ - "linux-musl-ppc64le", + "linux-musl-s390x": [ + "linux-musl-s390x", "linux-musl", - "linux-ppc64le", + "linux-s390x", "linux", - "unix-ppc64le", + "unix-s390x", "unix", "any", "base" @@ -4325,18 +4357,18 @@ "any", "base" ], - "linux-s390x": [ - "linux-s390x", + "linux-ppc64le": [ + "linux-ppc64le", "linux", - "unix-s390x", + "unix-ppc64le", "unix", "any", "base" ], - "linux-ppc64le": [ - "linux-ppc64le", + "linux-s390x": [ + "linux-s390x", "linux", - "unix-ppc64le", + "unix-s390x", "unix", "any", "base" @@ -8895,14 +8927,14 @@ "any", "base" ], - "unix-s390x": [ - "unix-s390x", + "unix-ppc64le": [ + "unix-ppc64le", "unix", "any", "base" ], - "unix-ppc64le": [ - "unix-ppc64le", + "unix-s390x": [ + "unix-s390x", "unix", "any", "base" @@ -9549,4 +9581,4 @@ "any", "base" ] -} +} \ No newline 
at end of file diff --git a/src/libraries/Microsoft.NETCore.Platforms/src/runtime.json b/src/libraries/Microsoft.NETCore.Platforms/src/runtime.json index 050ce1e4e8ce3..17a0a1c4db0b7 100644 --- a/src/libraries/Microsoft.NETCore.Platforms/src/runtime.json +++ b/src/libraries/Microsoft.NETCore.Platforms/src/runtime.json @@ -1215,6 +1215,23 @@ "fedora-x64" ] }, + "fedora.38": { + "#import": [ + "fedora" + ] + }, + "fedora.38-arm64": { + "#import": [ + "fedora.38", + "fedora-arm64" + ] + }, + "fedora.38-x64": { + "#import": [ + "fedora.38", + "fedora-x64" + ] + }, "freebsd": { "#import": [ "unix" @@ -1637,16 +1654,16 @@ "linux-armel" ] }, - "linux-musl-s390x": { + "linux-musl-ppc64le": { "#import": [ "linux-musl", - "linux-s390x" + "linux-ppc64le" ] }, - "linux-musl-ppc64le": { + "linux-musl-s390x": { "#import": [ "linux-musl", - "linux-ppc64le" + "linux-s390x" ] }, "linux-musl-x64": { @@ -1661,16 +1678,16 @@ "linux-x86" ] }, - "linux-s390x": { + "linux-ppc64le": { "#import": [ "linux", - "unix-s390x" + "unix-ppc64le" ] }, - "linux-ppc64le": { + "linux-s390x": { "#import": [ "linux", - "unix-ppc64le" + "unix-s390x" ] }, "linux-x64": { @@ -3636,12 +3653,12 @@ "unix" ] }, - "unix-s390x": { + "unix-ppc64le": { "#import": [ "unix" ] }, - "unix-ppc64le": { + "unix-s390x": { "#import": [ "unix" ] @@ -3980,4 +3997,4 @@ ] } } -} +} \ No newline at end of file diff --git a/src/libraries/Microsoft.NETCore.Platforms/src/runtimeGroups.props b/src/libraries/Microsoft.NETCore.Platforms/src/runtimeGroups.props index 1b10a2604af28..38deb3f186335 100644 --- a/src/libraries/Microsoft.NETCore.Platforms/src/runtimeGroups.props +++ b/src/libraries/Microsoft.NETCore.Platforms/src/runtimeGroups.props @@ -81,7 +81,7 @@ linux x64;arm64 - 23;24;25;26;27;28;29;30;31;32;33;34;35;36;37 + 23;24;25;26;27;28;29;30;31;32;33;34;35;36;37;38 false diff --git a/src/libraries/System.Diagnostics.Process/src/System.Diagnostics.Process.csproj b/src/libraries/System.Diagnostics.Process/src/System.Diagnostics.Process.csproj index 2c273820afb4c..6f729f78ed0a4 100644 --- a/src/libraries/System.Diagnostics.Process/src/System.Diagnostics.Process.csproj +++ b/src/libraries/System.Diagnostics.Process/src/System.Diagnostics.Process.csproj @@ -302,6 +302,8 @@ Link="Common\Interop\Linux\Interop.ProcFsStat.ParseMapModules.cs" /> + diff --git a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.Linux.cs b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.Linux.cs index cfad0e51326b6..ca59d944dc970 100644 --- a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.Linux.cs +++ b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.Linux.cs @@ -9,6 +9,7 @@ using System.IO; using System.Runtime.Versioning; using System.Text; +using System.Threading; namespace System.Diagnostics { @@ -79,33 +80,25 @@ internal static DateTime BootTimeToDateTime(TimeSpan timespanAfterBoot) return dt.ToLocalTime(); } + private static long s_bootTimeTicks; /// Gets the system boot time. private static DateTime BootTime { get { - // '/proc/stat -> btime' gets the boot time. - // btime is the time of system boot in seconds since the Unix epoch. - // It includes suspended time and is updated based on the system time (settimeofday). 
- const string StatFile = Interop.procfs.ProcStatFilePath; - string text = File.ReadAllText(StatFile); - int btimeLineStart = text.IndexOf("\nbtime ", StringComparison.Ordinal); - if (btimeLineStart >= 0) - { - int btimeStart = btimeLineStart + "\nbtime ".Length; - int btimeEnd = text.IndexOf('\n', btimeStart); - if (btimeEnd > btimeStart) + long bootTimeTicks = Interlocked.Read(ref s_bootTimeTicks); + if (bootTimeTicks == 0) + { + bootTimeTicks = Interop.Sys.GetBootTimeTicks(); + long oldValue = Interlocked.CompareExchange(ref s_bootTimeTicks, bootTimeTicks, 0); + if (oldValue != 0) // a different thread has managed to update the ticks first { - if (long.TryParse(text.AsSpan(btimeStart, btimeEnd - btimeStart), out long bootTimeSeconds)) - { - return DateTime.UnixEpoch + TimeSpan.FromSeconds(bootTimeSeconds); - } + bootTimeTicks = oldValue; // consistency } - } - - return DateTime.UtcNow; - } - } + } + return new DateTime(bootTimeTicks); + } + } /// Gets the parent process ID private int ParentProcessId => @@ -260,11 +253,8 @@ private static void SetWorkingSetLimitsCore(IntPtr? newMin, IntPtr? newMax, out /// The pid for the target process, or -1 for the current process. internal static string? GetExePath(int processId = -1) { - string exeFilePath = processId == -1 ? - Interop.procfs.SelfExeFilePath : - Interop.procfs.GetExeFilePathForProcess(processId); - - return Interop.Sys.ReadLink(exeFilePath); + return processId == -1 ? Environment.ProcessPath : + Interop.Sys.ReadLink(Interop.procfs.GetExeFilePathForProcess(processId)); } /// Gets the name that was used to start the process, or null if it could not be retrieved. diff --git a/src/libraries/System.Diagnostics.Process/tests/ProcessTests.cs b/src/libraries/System.Diagnostics.Process/tests/ProcessTests.cs index b5f6fdb9c54c2..7d977b0d4e468 100644 --- a/src/libraries/System.Diagnostics.Process/tests/ProcessTests.cs +++ b/src/libraries/System.Diagnostics.Process/tests/ProcessTests.cs @@ -1152,13 +1152,13 @@ public void TestGetProcesses() // Get all the processes running on the machine, and check if the current process is one of them. 
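// (Note: the StartTime comparison added below appears to depend on the boot-time caching
// change above; when the boot time could be recomputed on every read, two StartTime values
// for the same process were not guaranteed to compare equal.)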
var foundCurrentProcess = (from p in Process.GetProcesses() - where (p.Id == currentProcess.Id) && (p.ProcessName.Equals(currentProcess.ProcessName)) + where (p.Id == currentProcess.Id) && (p.ProcessName.Equals(currentProcess.ProcessName)) && (p.StartTime == currentProcess.StartTime) select p).Any(); Assert.True(foundCurrentProcess, "TestGetProcesses001 failed"); foundCurrentProcess = (from p in Process.GetProcesses(currentProcess.MachineName) - where (p.Id == currentProcess.Id) && (p.ProcessName.Equals(currentProcess.ProcessName)) + where (p.Id == currentProcess.Id) && (p.ProcessName.Equals(currentProcess.ProcessName)) && (p.StartTime == currentProcess.StartTime) select p).Any(); Assert.True(foundCurrentProcess, "TestGetProcesses002 failed"); @@ -1248,6 +1248,7 @@ public void GetProcessesByName_ProcessName_ReturnsExpected() Assert.All(processes, process => Assert.Equal(currentProcess.ProcessName, process.ProcessName)); Assert.All(processes, process => Assert.Equal(".", process.MachineName)); + Assert.All(processes, process => Assert.Equal(currentProcess.StartTime, process.StartTime)); } // Outputs a list of active processes in case of failure: https://github.com/dotnet/runtime/issues/28874 diff --git a/src/libraries/System.Net.WebProxy/src/System/Net/WebProxy.cs b/src/libraries/System.Net.WebProxy/src/System/Net/WebProxy.cs index e76ae87717ad4..2c8cb1374cfc4 100644 --- a/src/libraries/System.Net.WebProxy/src/System/Net/WebProxy.cs +++ b/src/libraries/System.Net.WebProxy/src/System/Net/WebProxy.cs @@ -8,6 +8,7 @@ using System.Globalization; using System.Runtime.Serialization; using System.Text.RegularExpressions; +using System.Threading; namespace System.Net { @@ -127,10 +128,9 @@ public bool UseDefaultCredentials private void UpdateRegexList() { - Regex[]? regexBypassList = null; if (_bypassList is ChangeTrackingArrayList bypassList) { - bypassList.IsChanged = false; + Regex[]? regexBypassList = null; if (bypassList.Count > 0) { regexBypassList = new Regex[bypassList.Count]; @@ -139,9 +139,14 @@ private void UpdateRegexList() regexBypassList[i] = new Regex((string)bypassList[i]!, RegexOptions.IgnoreCase | RegexOptions.CultureInvariant); } } - } - _regexBypassList = regexBypassList; + _regexBypassList = regexBypassList; + bypassList.IsChanged = false; + } + else + { + _regexBypassList = null; + } } private bool IsMatchInBypassList(Uri input) @@ -219,7 +224,10 @@ public ChangeTrackingArrayList() { } public ChangeTrackingArrayList(ICollection c) : base(c) { } - public bool IsChanged { get; set; } + // While this type isn't intended to be mutated concurrently with reads, non-concurrent updates + // to the list might result in lazy initialization, and it's possible concurrent requests could race + // to trigger that initialization. + public volatile bool IsChanged; // Override the methods that can add, remove, or change the regexes in the bypass list. // Methods that only read (like CopyTo, BinarySearch, etc.) 
and methods that reorder diff --git a/src/libraries/System.Private.CoreLib/src/System/Threading/PortableThreadPool.GateThread.cs b/src/libraries/System.Private.CoreLib/src/System/Threading/PortableThreadPool.GateThread.cs index 4564af6f9ca89..9eff225e7941d 100644 --- a/src/libraries/System.Private.CoreLib/src/System/Threading/PortableThreadPool.GateThread.cs +++ b/src/libraries/System.Private.CoreLib/src/System/Threading/PortableThreadPool.GateThread.cs @@ -229,10 +229,8 @@ private static int GetRunningStateForNumRuns(int numRuns) return GateThreadRunningMask | numRuns; } - [MethodImpl(MethodImplOptions.NoInlining)] private static void CreateGateThread(PortableThreadPool threadPoolInstance) { - bool created = false; try { // Thread pool threads must start in the default execution context without transferring the context, so @@ -244,14 +242,10 @@ private static void CreateGateThread(PortableThreadPool threadPoolInstance) Name = ".NET ThreadPool Gate" }; gateThread.UnsafeStart(); - created = true; } - finally + catch (Exception e) { - if (!created) - { - Interlocked.Exchange(ref threadPoolInstance._separated.gateThreadRunningState, 0); - } + Environment.FailFast("Failed to create the thread pool Gate thread.", e); } } diff --git a/src/libraries/System.Private.CoreLib/src/System/Threading/PortableThreadPool.WorkerThread.cs b/src/libraries/System.Private.CoreLib/src/System/Threading/PortableThreadPool.WorkerThread.cs index 18777f4a555ab..0e0323e3bc446 100644 --- a/src/libraries/System.Private.CoreLib/src/System/Threading/PortableThreadPool.WorkerThread.cs +++ b/src/libraries/System.Private.CoreLib/src/System/Threading/PortableThreadPool.WorkerThread.cs @@ -239,27 +239,8 @@ internal static void MaybeAddWorkingWorker(PortableThreadPool threadPoolInstance while (toCreate > 0) { - if (TryCreateWorkerThread()) - { - toCreate--; - continue; - } - - counts = threadPoolInstance._separated.counts; - while (true) - { - ThreadCounts newCounts = counts; - newCounts.NumProcessingWork -= (short)toCreate; - newCounts.NumExistingThreads -= (short)toCreate; - - ThreadCounts oldCounts = threadPoolInstance._separated.counts.InterlockedCompareExchange(newCounts, counts); - if (oldCounts == counts) - { - break; - } - counts = oldCounts; - } - break; + CreateWorkerThread(); + toCreate--; } } @@ -314,28 +295,15 @@ private static bool TakeActiveRequest(PortableThreadPool threadPoolInstance) return false; } - private static bool TryCreateWorkerThread() + private static void CreateWorkerThread() { - try - { - // Thread pool threads must start in the default execution context without transferring the context, so - // using UnsafeStart() instead of Start() - Thread workerThread = new Thread(s_workerThreadStart); - workerThread.IsThreadPoolThread = true; - workerThread.IsBackground = true; - // thread name will be set in thread proc - workerThread.UnsafeStart(); - } - catch (ThreadStartException) - { - return false; - } - catch (OutOfMemoryException) - { - return false; - } - - return true; + // Thread pool threads must start in the default execution context without transferring the context, so + // using UnsafeStart() instead of Start() + Thread workerThread = new Thread(s_workerThreadStart); + workerThread.IsThreadPoolThread = true; + workerThread.IsBackground = true; + // thread name will be set in thread proc + workerThread.UnsafeStart(); } } } diff --git a/src/libraries/System.Reflection.Metadata/src/System/Reflection/Internal/Utilities/MemoryBlock.cs 
b/src/libraries/System.Reflection.Metadata/src/System/Reflection/Internal/Utilities/MemoryBlock.cs index c205f5875f9c2..5931cb2043ab9 100644 --- a/src/libraries/System.Reflection.Metadata/src/System/Reflection/Internal/Utilities/MemoryBlock.cs +++ b/src/libraries/System.Reflection.Metadata/src/System/Reflection/Internal/Utilities/MemoryBlock.cs @@ -2,7 +2,6 @@ // The .NET Foundation licenses this file to you under the MIT license. using System.Diagnostics; -using System.Diagnostics.CodeAnalysis; using System.Reflection.Metadata; using System.Reflection.Metadata.Ecma335; using System.Runtime.CompilerServices; @@ -315,33 +314,23 @@ internal string PeekUtf8NullTerminated(int offset, byte[]? prefix, MetadataStrin /// If a value other than '\0' is passed we still stop at the null terminator if encountered first. /// The number of bytes read, which includes the terminator if we did not hit the end of the block. /// Length (byte count) not including terminator. - internal int GetUtf8NullTerminatedLength(int offset, out int numberOfBytesRead, char terminator = '\0') + internal int GetUtf8NullTerminatedLength(int offset, out int numberOfBytesRead, char terminator) { CheckBounds(offset, 0); Debug.Assert(terminator <= 0x7f); - byte* start = Pointer + offset; - byte* end = Pointer + Length; - byte* current = start; - - while (current < end) + ReadOnlySpan<byte> span = new ReadOnlySpan<byte>(Pointer + offset, Length - offset); + int length = terminator != '\0' ? + span.IndexOfAny((byte)0, (byte)terminator) : + span.IndexOf((byte)0); + if (length >= 0) { - byte b = *current; - if (b == 0 || b == terminator) - { - break; - } - - current++; + numberOfBytesRead = length + 1; // we also read the terminator } - - int length = (int)(current - start); - numberOfBytesRead = length; - if (current < end) + else { - // we also read the terminator - numberOfBytesRead++; + numberOfBytesRead = length = span.Length; } return length; @@ -353,22 +342,11 @@ internal int Utf8NullTerminatedOffsetOfAsciiChar(int startOffset, char asciiChar Debug.Assert(asciiChar != 0 && asciiChar <= 0x7f); - for (int i = startOffset; i < Length; i++) - { - byte b = Pointer[i]; - - if (b == 0) - { - break; - } - - if (b == asciiChar) - { - return i; - } - } - - return -1; + ReadOnlySpan<byte> span = new ReadOnlySpan<byte>(Pointer + startOffset, Length - startOffset); + int i = span.IndexOfAny((byte)asciiChar, (byte)0); + return i >= 0 && span[i] == asciiChar ? + startOffset + i : + -1; } // comparison stops at null terminator, terminator parameter, or end-of-block -- whichever comes first. @@ -545,19 +523,10 @@ internal int IndexOf(byte b, int start) internal int IndexOfUnchecked(byte b, int start) { - byte* p = Pointer + start; - byte* end = Pointer + Length; - while (p < end) - { - if (*p == b) - { - return (int)(p - Pointer); - } - - p++; - } - - return -1; + int i = new ReadOnlySpan<byte>(Pointer + start, Length - start).IndexOf(b); + return i >= 0 ? 
+ i + start : + -1; } // same as Array.BinarySearch, but without using IComparer diff --git a/src/libraries/System.Security.Cryptography/ref/System.Security.Cryptography.cs b/src/libraries/System.Security.Cryptography/ref/System.Security.Cryptography.cs index 5be736fec669e..99e9063d8ec78 100644 --- a/src/libraries/System.Security.Cryptography/ref/System.Security.Cryptography.cs +++ b/src/libraries/System.Security.Cryptography/ref/System.Security.Cryptography.cs @@ -215,6 +215,7 @@ public void Dispose() { } protected virtual void Dispose(bool disposing) { } public virtual byte[] ExportEncryptedPkcs8PrivateKey(System.ReadOnlySpan passwordBytes, System.Security.Cryptography.PbeParameters pbeParameters) { throw null; } public virtual byte[] ExportEncryptedPkcs8PrivateKey(System.ReadOnlySpan password, System.Security.Cryptography.PbeParameters pbeParameters) { throw null; } + public string ExportEncryptedPkcs8PrivateKeyPem(System.ReadOnlySpan passwordBytes, System.Security.Cryptography.PbeParameters pbeParameters) { throw null; } public string ExportEncryptedPkcs8PrivateKeyPem(System.ReadOnlySpan password, System.Security.Cryptography.PbeParameters pbeParameters) { throw null; } public virtual byte[] ExportPkcs8PrivateKey() { throw null; } public string ExportPkcs8PrivateKeyPem() { throw null; } @@ -231,6 +232,7 @@ public virtual void ImportFromPem(System.ReadOnlySpan input) { } public virtual string ToXmlString(bool includePrivateParameters) { throw null; } public virtual bool TryExportEncryptedPkcs8PrivateKey(System.ReadOnlySpan passwordBytes, System.Security.Cryptography.PbeParameters pbeParameters, System.Span destination, out int bytesWritten) { throw null; } public virtual bool TryExportEncryptedPkcs8PrivateKey(System.ReadOnlySpan password, System.Security.Cryptography.PbeParameters pbeParameters, System.Span destination, out int bytesWritten) { throw null; } + public bool TryExportEncryptedPkcs8PrivateKeyPem(System.ReadOnlySpan passwordBytes, System.Security.Cryptography.PbeParameters pbeParameters, System.Span destination, out int charsWritten) { throw null; } public bool TryExportEncryptedPkcs8PrivateKeyPem(System.ReadOnlySpan password, System.Security.Cryptography.PbeParameters pbeParameters, System.Span destination, out int charsWritten) { throw null; } public virtual bool TryExportPkcs8PrivateKey(System.Span destination, out int bytesWritten) { throw null; } public bool TryExportPkcs8PrivateKeyPem(System.Span destination, out int charsWritten) { throw null; } diff --git a/src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/AsymmetricAlgorithm.cs b/src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/AsymmetricAlgorithm.cs index 6d671cd841482..04eed09e0f869 100644 --- a/src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/AsymmetricAlgorithm.cs +++ b/src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/AsymmetricAlgorithm.cs @@ -462,6 +462,54 @@ public unsafe string ExportEncryptedPkcs8PrivateKeyPem(ReadOnlySpan passwo } } + /// + /// Exports the current key in the PKCS#8 EncryptedPrivateKeyInfo format + /// with a byte-based password, PEM encoded. + /// + /// + /// The bytes to use as a password when encrypting the key material. + /// + /// + /// The password-based encryption (PBE) parameters to use when encrypting the key material. + /// + /// A string containing the PEM-encoded PKCS#8 EncryptedPrivateKeyInfo. + /// + /// An implementation for or + /// has not been provided. 
+ /// + /// + /// The key could not be exported. + /// + /// + /// + /// A PEM-encoded PKCS#8 EncryptedPrivateKeyInfo will begin with + /// -----BEGIN ENCRYPTED PRIVATE KEY----- and end with + /// -----END ENCRYPTED PRIVATE KEY-----, with the base64 encoded DER + /// contents of the key between the PEM boundaries. + /// + /// + /// The PEM is encoded according to the IETF RFC 7468 "strict" + /// encoding rules. + /// + /// + public unsafe string ExportEncryptedPkcs8PrivateKeyPem(ReadOnlySpan passwordBytes, PbeParameters pbeParameters) + { + byte[] exported = ExportEncryptedPkcs8PrivateKey(passwordBytes, pbeParameters); + + // Fixed to prevent GC moves. + fixed (byte* pExported = exported) + { + try + { + return PemEncoding.WriteString(PemLabels.EncryptedPkcs8PrivateKey, exported); + } + finally + { + CryptographicOperations.ZeroMemory(exported); + } + } + } + /// /// Exports the public-key portion of the current key in the X.509 /// SubjectPublicKeyInfo format, PEM encoded. @@ -658,6 +706,68 @@ static bool Export( out charsWritten); } + /// + /// Attempts to export the current key in the PKCS#8 EncryptedPrivateKeyInfo format + /// with a byte-based password, PEM encoded. + /// + /// + /// The bytes to use as a password when encrypting the key material. + /// + /// + /// The password-based encryption (PBE) parameters to use when encrypting the key material. + /// + /// + /// The character span to receive the PEM-encoded PKCS#8 EncryptedPrivateKeyInfo data. + /// + /// + /// When this method returns, contains a value that indicates the number + /// of characters written to . This + /// parameter is treated as uninitialized. + /// + /// + /// if is big enough + /// to receive the output; otherwise, . + /// + /// + /// An implementation for + /// has not been provided. + /// + /// + /// The key could not be exported. + /// + /// + /// + /// A PEM-encoded PKCS#8 EncryptedPrivateKeyInfo will begin with + /// -----BEGIN ENCRYPTED PRIVATE KEY----- and end with + /// -----END ENCRYPTED PRIVATE KEY-----, with the base64 encoded DER + /// contents of the key between the PEM boundaries. + /// + /// + /// The PEM is encoded according to the IETF RFC 7468 "strict" + /// encoding rules. 
+ /// + /// + public bool TryExportEncryptedPkcs8PrivateKeyPem(ReadOnlySpan<byte> passwordBytes, PbeParameters pbeParameters, Span<char> destination, out int charsWritten) + { + static bool Export( + AsymmetricAlgorithm alg, + ReadOnlySpan<byte> passwordBytes, + PbeParameters pbeParameters, + Span<byte> destination, + out int bytesWritten) + { + return alg.TryExportEncryptedPkcs8PrivateKey(passwordBytes, pbeParameters, destination, out bytesWritten); + } + + return PemKeyHelpers.TryExportToEncryptedPem( + this, + passwordBytes, + pbeParameters, + Export, + destination, + out charsWritten); + } + private delegate bool TryExportPbe<T>( ReadOnlySpan<T> password, PbeParameters pbeParameters, diff --git a/src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/PemKeyHelpers.cs index b5324601c5bf5..e8f1cd95c61f5 100644 --- a/src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/PemKeyHelpers.cs +++ b/src/libraries/System.Security.Cryptography/src/System/Security/Cryptography/PemKeyHelpers.cs @@ -10,18 +10,18 @@ namespace System.Security.Cryptography internal static class PemKeyHelpers { public delegate bool TryExportKeyAction<T>(T arg, Span<byte> destination, out int bytesWritten); - public delegate bool TryExportEncryptedKeyAction<T>( + public delegate bool TryExportEncryptedKeyAction<T, TPass>( T arg, - ReadOnlySpan<char> password, + ReadOnlySpan<TPass> password, PbeParameters pbeParameters, Span<byte> destination, out int bytesWritten); - public static unsafe bool TryExportToEncryptedPem<T>( + public static unsafe bool TryExportToEncryptedPem<T, TPass>( T arg, - ReadOnlySpan<char> password, + ReadOnlySpan<TPass> password, PbeParameters pbeParameters, - TryExportEncryptedKeyAction<T> exporter, + TryExportEncryptedKeyAction<T, TPass> exporter, Span<char> destination, out int charsWritten) { diff --git a/src/libraries/System.Security.Cryptography/tests/AsymmetricAlgorithmTests.cs index ca40479e6c258..54a6fabb3197d 100644 --- a/src/libraries/System.Security.Cryptography/tests/AsymmetricAlgorithmTests.cs +++ b/src/libraries/System.Security.Cryptography/tests/AsymmetricAlgorithmTests.cs @@ -224,7 +224,7 @@ public static void ExportPem_ExportSubjectPublicKeyInfoPem() "-----BEGIN PUBLIC KEY-----\n" + "cGVubnk=\n" + "-----END PUBLIC KEY-----"; - + using (StubAsymmetricAlgorithm alg = new StubAsymmetricAlgorithm()) { alg.ExportSubjectPublicKeyInfoImpl = static () => new byte[] { 0x70, 0x65, 0x6e, 0x6e, 0x79 }; @@ -344,7 +344,7 @@ public static void ExportPem_ExportPkcs8PrivateKeyPem() "-----END PRIVATE KEY-----"; byte[] exportedBytes = new byte[] { 0x70, 0x65, 0x6e, 0x6e, 0x79 }; - + using (StubAsymmetricAlgorithm alg = new StubAsymmetricAlgorithm()) { alg.ExportPkcs8PrivateKeyPemImpl = () => exportedBytes; @@ -358,7 +358,7 @@ } [Fact] - public static void ExportPem_ExportEncryptedPkcs8PrivateKeyPem() + public static void ExportPem_CharPassword_ExportEncryptedPkcs8PrivateKeyPem() { string expectedPem = "-----BEGIN ENCRYPTED PRIVATE KEY-----\n" + @@ -381,10 +381,10 @@ byte[] ExportEncryptedPkcs8PrivateKey(ReadOnlySpan<char> password, PbeParameters return exportedBytes; } - + using (StubAsymmetricAlgorithm alg = new StubAsymmetricAlgorithm()) { - alg.ExportEncryptedPkcs8PrivateKeyImpl = ExportEncryptedPkcs8PrivateKey; + alg.ExportEncryptedPkcs8PrivateKeyCharImpl = ExportEncryptedPkcs8PrivateKey; string pem = 
alg.ExportEncryptedPkcs8PrivateKeyPem(expectedPassword, expectedPbeParameters); Assert.Equal(expectedPem, pem); @@ -395,7 +395,44 @@ byte[] ExportEncryptedPkcs8PrivateKey(ReadOnlySpan password, PbeParameters } [Fact] - public static void ExportPem_TryExportEncryptedPkcs8PrivateKeyPem() + public static void ExportPem_BytePassword_ExportEncryptedPkcs8PrivateKeyPem() + { + string expectedPem = + "-----BEGIN ENCRYPTED PRIVATE KEY-----\n" + + "cGVubnk=\n" + + "-----END ENCRYPTED PRIVATE KEY-----"; + + byte[] exportedBytes = new byte[] { 0x70, 0x65, 0x6e, 0x6e, 0x79 }; + byte[] expectedPassword = new byte[] { 0x01, 0x02, 0x0FF }; + PbeParameters expectedPbeParameters = new PbeParameters( + PbeEncryptionAlgorithm.Aes256Cbc, + HashAlgorithmName.SHA384, + RandomNumberGenerator.GetInt32(0, 100_000)); + + byte[] ExportEncryptedPkcs8PrivateKey(ReadOnlySpan passwordBytes, PbeParameters pbeParameters) + { + Assert.Equal(expectedPbeParameters.EncryptionAlgorithm, pbeParameters.EncryptionAlgorithm); + Assert.Equal(expectedPbeParameters.HashAlgorithm, pbeParameters.HashAlgorithm); + Assert.Equal(expectedPbeParameters.IterationCount, pbeParameters.IterationCount); + AssertExtensions.SequenceEqual(expectedPassword, passwordBytes); + + return exportedBytes; + } + + using (StubAsymmetricAlgorithm alg = new StubAsymmetricAlgorithm()) + { + alg.ExportEncryptedPkcs8PrivateKeyByteImpl = ExportEncryptedPkcs8PrivateKey; + string pem = alg.ExportEncryptedPkcs8PrivateKeyPem(expectedPassword, expectedPbeParameters); + Assert.Equal(expectedPem, pem); + + // Test that the PEM export cleared the PKCS8 bytes from memory + // that were returned from ExportEncryptedPkcs8PrivateKey. + AssertExtensions.FilledWith((byte)0, exportedBytes); + } + } + + [Fact] + public static void ExportPem_CharPassword_TryExportEncryptedPkcs8PrivateKeyPem() { string expectedPem = "-----BEGIN ENCRYPTED PRIVATE KEY-----\n" + @@ -427,7 +464,73 @@ bool TryExportEncryptedPkcs8PrivateKey( using (StubAsymmetricAlgorithm alg = new StubAsymmetricAlgorithm()) { - alg.TryExportEncryptedPkcs8PrivateKeyImpl = TryExportEncryptedPkcs8PrivateKey; + alg.TryExportEncryptedPkcs8PrivateKeyCharImpl = TryExportEncryptedPkcs8PrivateKey; + int written; + bool result; + char[] buffer; + + // buffer not enough + buffer = new char[expectedPem.Length - 1]; + result = alg.TryExportEncryptedPkcs8PrivateKeyPem(expectedPassword, expectedPbeParameters, buffer, out written); + Assert.False(result, nameof(alg.TryExportEncryptedPkcs8PrivateKeyPem)); + Assert.Equal(0, written); + + // buffer just enough + buffer = new char[expectedPem.Length]; + result = alg.TryExportEncryptedPkcs8PrivateKeyPem(expectedPassword, expectedPbeParameters, buffer, out written); + Assert.True(result, nameof(alg.TryExportEncryptedPkcs8PrivateKeyPem)); + Assert.Equal(expectedPem.Length, written); + Assert.Equal(expectedPem, new string(buffer)); + + // buffer more than enough + buffer = new char[expectedPem.Length + 20]; + buffer.AsSpan().Fill('!'); + Span bufferSpan = buffer.AsSpan(10); + result = alg.TryExportEncryptedPkcs8PrivateKeyPem(expectedPassword, expectedPbeParameters, bufferSpan, out written); + Assert.True(result, nameof(alg.TryExportEncryptedPkcs8PrivateKeyPem)); + Assert.Equal(expectedPem.Length, written); + Assert.Equal(expectedPem, new string(bufferSpan.Slice(0, written))); + + // Ensure padding has not been touched. 
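[Editor's note] The test vector used throughout these cases decodes by hand: 0x70 0x65 0x6e 0x6e 0x79 is ASCII "penny", and its base64 form is the cGVubnk= payload in every expected PEM. A quick check (mine, not part of the tests):

using System;
using System.Text;

class Base64Check
{
    static void Main()
    {
        byte[] exported = { 0x70, 0x65, 0x6e, 0x6e, 0x79 };
        Console.WriteLine(Encoding.ASCII.GetString(exported)); // penny
        Console.WriteLine(Convert.ToBase64String(exported));   // cGVubnk=
    }
}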
+ AssertExtensions.FilledWith('!', buffer[0..10]); + AssertExtensions.FilledWith('!', buffer[^10..]); + } + } + + [Fact] + public static void ExportPem_BytePassword_TryExportEncryptedPkcs8PrivateKeyPem() + { + string expectedPem = + "-----BEGIN ENCRYPTED PRIVATE KEY-----\n" + + "cGVubnk=\n" + + "-----END ENCRYPTED PRIVATE KEY-----"; + + byte[] exportedBytes = new byte[] { 0x70, 0x65, 0x6e, 0x6e, 0x79 }; + byte[] expectedPassword = new byte[] { 0x01, 0x02, 0x03 }; + PbeParameters expectedPbeParameters = new PbeParameters( + PbeEncryptionAlgorithm.Aes256Cbc, + HashAlgorithmName.SHA384, + RandomNumberGenerator.GetInt32(0, 100_000)); + + bool TryExportEncryptedPkcs8PrivateKey( + ReadOnlySpan passwordBytes, + PbeParameters pbeParameters, + Span destination, + out int bytesWritten) + { + Assert.Equal(expectedPbeParameters.EncryptionAlgorithm, pbeParameters.EncryptionAlgorithm); + Assert.Equal(expectedPbeParameters.HashAlgorithm, pbeParameters.HashAlgorithm); + Assert.Equal(expectedPbeParameters.IterationCount, pbeParameters.IterationCount); + AssertExtensions.SequenceEqual(expectedPassword, passwordBytes); + + exportedBytes.AsSpan().CopyTo(destination); + bytesWritten = exportedBytes.Length; + return true; + } + + using (StubAsymmetricAlgorithm alg = new StubAsymmetricAlgorithm()) + { + alg.TryExportEncryptedPkcs8PrivateKeyByteImpl = TryExportEncryptedPkcs8PrivateKey; int written; bool result; char[] buffer; @@ -464,7 +567,7 @@ private class StubAsymmetricAlgorithm : AsymmetricAlgorithm { public delegate byte[] ExportSubjectPublicKeyInfoFunc(); public delegate byte[] ExportPkcs8PrivateKeyPemFunc(); - public delegate byte[] ExportEncryptedPkcs8PrivateKeyFunc(ReadOnlySpan password, PbeParameters pbeParameters); + public delegate byte[] ExportEncryptedPkcs8PrivateKeyFunc(ReadOnlySpan password, PbeParameters pbeParameters); public delegate bool TryExportSubjectPublicKeyInfoFunc(Span destination, out int bytesWritten); public delegate bool TryExportPkcs8PrivateKeyFunc(Span destination, out int bytesWritten); public delegate void ImportSubjectPublicKeyInfoFunc(ReadOnlySpan source, out int bytesRead); @@ -473,8 +576,8 @@ public delegate void ImportEncryptedPkcs8PrivateKeyFunc( ReadOnlySpan password, ReadOnlySpan source, out int bytesRead); - public delegate bool TryExportEncryptedPkcs8PrivateKeyFunc( - ReadOnlySpan password, + public delegate bool TryExportEncryptedPkcs8PrivateKeyFunc( + ReadOnlySpan password, PbeParameters pbeParameters, Span destination, out int bytesWritten); @@ -487,8 +590,10 @@ public delegate bool TryExportEncryptedPkcs8PrivateKeyFunc( public TryExportSubjectPublicKeyInfoFunc TryExportSubjectPublicKeyInfoImpl { get; set; } public ExportPkcs8PrivateKeyPemFunc ExportPkcs8PrivateKeyPemImpl { get; set; } public TryExportPkcs8PrivateKeyFunc TryExportPkcs8PrivateKeyImpl { get; set; } - public ExportEncryptedPkcs8PrivateKeyFunc ExportEncryptedPkcs8PrivateKeyImpl { get; set; } - public TryExportEncryptedPkcs8PrivateKeyFunc TryExportEncryptedPkcs8PrivateKeyImpl { get; set; } + public ExportEncryptedPkcs8PrivateKeyFunc ExportEncryptedPkcs8PrivateKeyCharImpl { get; set; } + public TryExportEncryptedPkcs8PrivateKeyFunc TryExportEncryptedPkcs8PrivateKeyCharImpl { get; set; } + public ExportEncryptedPkcs8PrivateKeyFunc ExportEncryptedPkcs8PrivateKeyByteImpl { get; set; } + public TryExportEncryptedPkcs8PrivateKeyFunc TryExportEncryptedPkcs8PrivateKeyByteImpl { get; set; } public override void ImportSubjectPublicKeyInfo(ReadOnlySpan source, out int bytesRead) => 
ImportSubjectPublicKeyInfoImpl(source, out bytesRead); @@ -501,7 +606,12 @@ public override void ImportPkcs8PrivateKey(ReadOnlySpan source, out int by public override byte[] ExportEncryptedPkcs8PrivateKey(ReadOnlySpan password, PbeParameters pbeParameters) { - return ExportEncryptedPkcs8PrivateKeyImpl(password, pbeParameters); + return ExportEncryptedPkcs8PrivateKeyCharImpl(password, pbeParameters); + } + + public override byte[] ExportEncryptedPkcs8PrivateKey(ReadOnlySpan passwordBytes, PbeParameters pbeParameters) + { + return ExportEncryptedPkcs8PrivateKeyByteImpl(passwordBytes, pbeParameters); } public override void ImportEncryptedPkcs8PrivateKey( @@ -536,7 +646,16 @@ public override bool TryExportEncryptedPkcs8PrivateKey( Span destination, out int bytesWritten) { - return TryExportEncryptedPkcs8PrivateKeyImpl(password, pbeParameters, destination, out bytesWritten); + return TryExportEncryptedPkcs8PrivateKeyCharImpl(password, pbeParameters, destination, out bytesWritten); + } + + public override bool TryExportEncryptedPkcs8PrivateKey( + ReadOnlySpan passwordBytes, + PbeParameters pbeParameters, + Span destination, + out int bytesWritten) + { + return TryExportEncryptedPkcs8PrivateKeyByteImpl(passwordBytes, pbeParameters, destination, out bytesWritten); } } } diff --git a/src/libraries/sendtohelix-wasm.targets b/src/libraries/sendtohelix-wasm.targets index acf037e988b27..1d0008ab7799f 100644 --- a/src/libraries/sendtohelix-wasm.targets +++ b/src/libraries/sendtohelix-wasm.targets @@ -30,7 +30,7 @@ $(Scenario)- true - true + true @@ -53,7 +53,7 @@ true true - true + true true true @@ -76,7 +76,7 @@ - + @@ -89,7 +89,7 @@ - + @@ -196,8 +196,8 @@ <_WasmWorkItem Include="$(TestArchiveRoot)browseronly/**/*.zip" Condition="'$(Scenario)' == 'WasmTestOnBrowser'" /> <_WasmWorkItem Include="$(TestArchiveRoot)browserornodejs/**/*.zip" Condition="'$(Scenario)' == 'WasmTestOnBrowser'" /> - <_WasmWorkItem Include="$(TestArchiveRoot)browserornodejs/**/*.zip" Condition="'$(Scenario)' == 'WasmTestOnNodeJs'" /> - <_WasmWorkItem Include="$(TestArchiveRoot)nodejsonly/**/*.zip" Condition="'$(Scenario)' == 'WasmTestOnNodeJs'" /> + <_WasmWorkItem Include="$(TestArchiveRoot)browserornodejs/**/*.zip" Condition="'$(Scenario)' == 'WasmTestOnNodeJS'" /> + <_WasmWorkItem Include="$(TestArchiveRoot)nodejsonly/**/*.zip" Condition="'$(Scenario)' == 'WasmTestOnNodeJS'" /> %(Identity) @@ -209,7 +209,7 @@ <_WasmSampleZipFile Condition="'$(Scenario)' == 'normal' or '$(Scenario)' == ''" Include="$(TestArchiveRoot)runonly/**/*.Console.V8.*.Sample.zip" /> - <_WasmSampleZipFile Condition="'$(Scenario)' == 'WasmTestOnNodeJs'" Include="$(TestArchiveRoot)runonly/**/*.Console.Node.*.Sample.zip" /> + <_WasmSampleZipFile Condition="'$(Scenario)' == 'WasmTestOnNodeJS'" Include="$(TestArchiveRoot)runonly/**/*.Console.Node.*.Sample.zip" /> <_WasmSampleZipFile Condition="'$(Scenario)' == 'WasmTestOnBrowser'" Include="$(TestArchiveRoot)runonly/**/*.Browser.*.Sample.zip" /> diff --git a/src/mono/mono/mini/llvm-intrinsics.h b/src/mono/mono/mini/llvm-intrinsics.h index 9a585e7f3d38e..ce6e4ef7ac5dd 100644 --- a/src/mono/mono/mini/llvm-intrinsics.h +++ b/src/mono/mono/mini/llvm-intrinsics.h @@ -262,6 +262,7 @@ INTRINS_OVR(WASM_BITMASK_V8, wasm_bitmask, Wasm, sse_i1_t) INTRINS_OVR(WASM_BITMASK_V4, wasm_bitmask, Wasm, sse_i4_t) INTRINS_OVR(WASM_BITMASK_V2, wasm_bitmask, Wasm, sse_i8_t) INTRINS(WASM_SHUFFLE, wasm_shuffle, Wasm) +INTRINS(WASM_SWIZZLE, wasm_swizzle, Wasm) #endif #if defined(TARGET_ARM64) INTRINS_OVR(BITREVERSE_I32, bitreverse, 
Generic, LLVMInt32Type ())
diff --git a/src/mono/mono/mini/mini-llvm.c b/src/mono/mono/mini/mini-llvm.c
index d6fb90c6c6fad..30f0e88a58b07 100644
--- a/src/mono/mono/mini/mini-llvm.c
+++ b/src/mono/mono/mini/mini-llvm.c
@@ -9726,6 +9726,12 @@ MONO_RESTORE_WARNING
 		}
 		case OP_WASM_SIMD_SWIZZLE: {
 			int nelems = LLVMGetVectorSize (LLVMTypeOf (lhs));
+			if (nelems == 16) {
+				LLVMValueRef args [] = { lhs, rhs };
+				values [ins->dreg] = call_intrins (ctx, INTRINS_WASM_SWIZZLE, args, "");
+				break;
+			}
+
 			LLVMValueRef indexes [16];
 			for (int i = 0; i < nelems; ++i)
 				indexes [i] = LLVMBuildExtractElement (builder, rhs, const_int32 (i), "");
diff --git a/src/mono/mono/mini/mini.c b/src/mono/mono/mini/mini.c
index 168d3d3a3c093..46586039a1a67 100644
--- a/src/mono/mono/mini/mini.c
+++ b/src/mono/mono/mini/mini.c
@@ -3027,7 +3027,7 @@ static gboolean
 is_simd_supported (MonoCompile *cfg)
 {
 #ifdef DISABLE_SIMD
-	return FALSE;
+	return FALSE;
 #endif
 	// FIXME: Clean this up
 #ifdef TARGET_WASM
@@ -3036,6 +3036,10 @@ is_simd_supported (MonoCompile *cfg)
 #else
 	if (cfg->llvm_only)
 		return FALSE;
+	// FIXME: We disable SIMD intrinsics when mixing llvmaot and jit, since the LLVM backend can
+	// report certain SIMD operations as supported while the JIT fails to emit correct code for them.
+	if (cfg->compile_aot && cfg->compile_llvm && !cfg->full_aot)
+		return FALSE;
 #endif
 	return TRUE;
 }
diff --git a/src/mono/mono/mini/simd-intrinsics.c b/src/mono/mono/mini/simd-intrinsics.c
index 3c045fae1b7bc..2640109e05181 100644
--- a/src/mono/mono/mini/simd-intrinsics.c
+++ b/src/mono/mono/mini/simd-intrinsics.c
@@ -719,12 +719,6 @@ emit_hardware_intrinsics (
 		goto support_probe_complete;
 	id = info->id;

-#ifdef TARGET_ARM64
-	if (!(cfg->compile_aot && cfg->full_aot && !cfg->interp) && !intrin_group->jit_supported) {
-		goto support_probe_complete;
-	}
-#endif
-
 	// Hardware intrinsics are LLVM-only.
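[Editor's note] The 16-lane case of OP_WASM_SIMD_SWIZZLE now emits LLVM's @llvm.wasm.swizzle intrinsic in one call instead of 16 extract/insert pairs. On the managed side, the shape that can lower to a single i8x16.swizzle is a byte shuffle with a runtime index vector; an illustration of that shape (mine, not from this diff, assuming a SIMD-enabled .NET 7+ runtime):

using System;
using System.Runtime.Intrinsics;

class SwizzleDemo
{
    static void Main()
    {
        Vector128<byte> data = Vector128.Create(
            (byte)0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
        // Reverse the lanes via a variable (runtime) index vector.
        Vector128<byte> indices = Vector128.Create(
            (byte)15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0);
        Vector128<byte> shuffled = Vector128.Shuffle(data, indices);
        Console.WriteLine(shuffled); // <15, 14, ..., 1, 0>
    }
}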
 	if (!COMPILE_LLVM (cfg) && !intrin_group->jit_supported)
 		goto support_probe_complete;
diff --git a/src/mono/nuget/Microsoft.NET.Workload.Mono.Toolchain.Manifest/WorkloadManifest.targets.in b/src/mono/nuget/Microsoft.NET.Workload.Mono.Toolchain.Manifest/WorkloadManifest.targets.in
index f9b7e05164a03..2bbf37d3d038f 100644
--- a/src/mono/nuget/Microsoft.NET.Workload.Mono.Toolchain.Manifest/WorkloadManifest.targets.in
+++ b/src/mono/nuget/Microsoft.NET.Workload.Mono.Toolchain.Manifest/WorkloadManifest.targets.in
@@ -9,7 +9,7 @@
-      true
+      true
       $(WasmNativeWorkload)
diff --git a/src/mono/sample/wasm/browser-bench/BenchTask.cs b/src/mono/sample/wasm/browser-bench/BenchTask.cs
index 6f5314e6e999f..d9da4502b2d16 100644
--- a/src/mono/sample/wasm/browser-bench/BenchTask.cs
+++ b/src/mono/sample/wasm/browser-bench/BenchTask.cs
@@ -14,9 +14,11 @@ public abstract class BenchTask

     public virtual bool BrowserOnly => false;

-    public async Task RunBatch(List<Result> results, int measurementIdx, int milliseconds = 5000)
+    public async Task RunBatch(List<Result> results, int measurementIdx, int milliseconds = -1)
     {
         var measurement = Measurements[measurementIdx];
+        if (milliseconds == -1)
+            milliseconds = measurement.RunLength;
         await measurement.BeforeBatch();
         var result = await measurement.RunBatch(this, milliseconds);
         results.Add(result);
@@ -44,8 +46,9 @@ public abstract class Measurement
         protected int currentStep = 0;
         public abstract string Name { get; }

-        public virtual int InitialSamples { get { return 10; } }
-        public virtual int NumberOfRuns { get { return 5; } }
+        public virtual int InitialSamples => 10;
+        public virtual int NumberOfRuns => 5;
+        public virtual int RunLength => 5000;

         public virtual Task BeforeBatch() { return Task.CompletedTask; }
diff --git a/src/mono/sample/wasm/browser-bench/Vector.cs b/src/mono/sample/wasm/browser-bench/Vector.cs
index 4a13e41d6fb4e..5a0e6bdf5eea6 100644
--- a/src/mono/sample/wasm/browser-bench/Vector.cs
+++ b/src/mono/sample/wasm/browser-bench/Vector.cs
@@ -16,6 +16,12 @@ public VectorTask()
             new Create(),
             new Add(),
             new Multiply(),
+            new DotInt(),
+            new DotULong(),
+            new DotFloat(),
+            new DotDouble(),
+            new SumUInt(),
+            new SumDouble(),
         };
     }
@@ -30,6 +36,7 @@ public override Measurement[] Measurements
     public abstract class VectorMeasurement : BenchTask.Measurement
     {
         public override int InitialSamples => 100000;
+        public override int RunLength => 500;
     }

     class Create : VectorMeasurement
@@ -70,5 +77,113 @@ public Multiply()

         public override void RunStep() => vector3 = vector1 * vector2;
     }
+
+    class DotInt : VectorMeasurement
+    {
+        Vector128<int> vector1, vector2;
+        float result;
+
+        public override string Name => "Dot product int";
+
+        public DotInt()
+        {
+            vector1 = Vector128.Create(12, 34, 56, 78);
+            vector2 = Vector128.Create(23, 45, 67, 89);
+        }
+
+        public override void RunStep()
+        {
+            result = Vector128.Dot(vector1, vector2);
+        }
+    }
+
+    class DotULong : VectorMeasurement
+    {
+        Vector128<ulong> vector1, vector2;
+        float result;
+
+        public override string Name => "Dot product ulong";
+
+        public DotULong()
+        {
+            vector1 = Vector128.Create(12ul, 34);
+            vector2 = Vector128.Create(23ul, 45);
+        }
+
+        public override void RunStep()
+        {
+            result = Vector128.Dot(vector1, vector2);
+        }
+    }
+
+    class DotFloat : VectorMeasurement
+    {
+        Vector128<float> vector1, vector2;
+        float result;
+
+        public override string Name => "Dot product float";
+
+        public DotFloat()
+        {
+            vector1 = Vector128.Create(12f, 34, 56, 78);
+            vector2 = Vector128.Create(23f, 45, 67, 89);
+        }
+
+        public override void RunStep() {
+            result = Vector128.Dot(vector1, vector2);
+        }
+    }
+
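[Editor's note] The new measurements are easy to sanity-check by hand (my sketch, not part of the benchmark): the int dot product above is 12*23 + 34*45 + 56*67 + 78*89 = 12500, and the uint sum below is 12 + 34 + 56 + 78 = 180.

using System;
using System.Runtime.Intrinsics;

class DotCheck
{
    static void Main()
    {
        Vector128<int> v1 = Vector128.Create(12, 34, 56, 78);
        Vector128<int> v2 = Vector128.Create(23, 45, 67, 89);
        Console.WriteLine(Vector128.Dot(v1, v2)); // 12500

        Vector128<uint> v3 = Vector128.Create(12u, 34, 56, 78);
        Console.WriteLine(Vector128.Sum(v3));     // 180
    }
}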
+    class DotDouble : VectorMeasurement
+    {
+        Vector128<double> vector1, vector2;
+        double result;
+
+        public override string Name => "Dot product double";
+
+        public DotDouble()
+        {
+            vector1 = Vector128.Create(12d, 34);
+            vector2 = Vector128.Create(23d, 45);
+        }
+
+        public override void RunStep() {
+            result = Vector128.Dot(vector1, vector2);
+        }
+    }
+
+    class SumUInt : VectorMeasurement
+    {
+        Vector128<uint> vector1;
+        uint result;
+
+        public override string Name => "Sum uint";
+
+        public SumUInt()
+        {
+            vector1 = Vector128.Create(12u, 34, 56, 78);
+        }
+
+        public override void RunStep() {
+            result = Vector128.Sum(vector1);
+        }
+    }
+
+    class SumDouble : VectorMeasurement
+    {
+        Vector128<double> vector1;
+        double result;
+
+        public override string Name => "Sum double";
+
+        public SumDouble()
+        {
+            vector1 = Vector128.Create(12d, 34);
+        }
+
+        public override void RunStep() {
+            result = Vector128.Sum(vector1);
+        }
+    }
 }
 }
diff --git a/src/mono/wasm/Wasm.Build.Tests/BlazorWasmBuildPublishTests.cs b/src/mono/wasm/Wasm.Build.Tests/BlazorWasmBuildPublishTests.cs
index f034d2fcc40a8..8123c9cfc1c9e 100644
--- a/src/mono/wasm/Wasm.Build.Tests/BlazorWasmBuildPublishTests.cs
+++ b/src/mono/wasm/Wasm.Build.Tests/BlazorWasmBuildPublishTests.cs
@@ -21,7 +21,7 @@ public BlazorWasmBuildPublishTests(ITestOutputHelper output, SharedBuildPerTestC
         _enablePerTestCleanup = true;
     }

-    [Theory]
+    [Theory, TestCategory("no-workload")]
     [InlineData("Debug")]
     [InlineData("Release")]
     public void DefaultTemplate_WithoutWorkload(string config)
diff --git a/src/mono/wasm/Wasm.Build.Tests/BuildTestBase.cs b/src/mono/wasm/Wasm.Build.Tests/BuildTestBase.cs
index 9e3b2e5b0b7ff..bba739eee756a 100644
--- a/src/mono/wasm/Wasm.Build.Tests/BuildTestBase.cs
+++ b/src/mono/wasm/Wasm.Build.Tests/BuildTestBase.cs
@@ -136,6 +136,7 @@ protected string RunAndTestWasmApp(BuildArgs buildArgs,
                                        Dictionary<string, string>? envVars = null,
                                        string targetFramework = DefaultTargetFramework,
                                        string? extraXHarnessMonoArgs = null,
+                                       string? extraXHarnessArgs = null,
                                        string jsRelativePath = "test-main.js")
     {
         buildDir ??= _projectDir;
@@ -158,13 +159,15 @@ protected string RunAndTestWasmApp(BuildArgs buildArgs,
             throw new InvalidOperationException("Running tests with V8 on windows isn't supported");

         // Use wasm-console.log to get the xharness output for non-browser cases
-        (string testCommand, string extraXHarnessArgs, bool useWasmConsoleOutput) = host switch
+        (string testCommand, string xharnessArgs, bool useWasmConsoleOutput) = host switch
         {
             RunHost.V8 => ("wasm test", $"--js-file={jsRelativePath} --engine=V8 -v trace", true),
             RunHost.NodeJS => ("wasm test", $"--js-file={jsRelativePath} --engine=NodeJS -v trace", true),
             _ => ("wasm test-browser", $"-v trace -b {host} --web-server-use-cop", false)
         };

+        extraXHarnessArgs += " " + xharnessArgs;
+
         string testLogPath = Path.Combine(_logPath, host.ToString());
         string output = RunWithXHarness(
                             testCommand,
@@ -213,6 +216,16 @@ protected static string RunWithXHarness(string testCommand, string testLogPath,
             args.Append($" --expected-exit-code={expectedAppExitCode}");
             args.Append($" {extraXHarnessArgs ??
string.Empty}"); + if (File.Exists("/.dockerenv")) + args.Append(" --browser-arg=--no-sandbox"); + + if (!string.IsNullOrEmpty(EnvironmentVariables.BrowserPathForTests)) + { + if (!File.Exists(EnvironmentVariables.BrowserPathForTests)) + throw new Exception($"Cannot find BROWSER_PATH_FOR_TESTS={EnvironmentVariables.BrowserPathForTests}"); + args.Append($" --browser-path=\"{EnvironmentVariables.BrowserPathForTests}\""); + } + args.Append(" -- "); if (extraXHarnessMonoArgs != null) { @@ -325,7 +338,8 @@ protected static BuildArgs ExpandBuildArgs(BuildArgs buildArgs, string extraProp { _testOutput.WriteLine ($"Using existing build found at {product.ProjectDir}, with build log at {product.LogFile}"); - Assert.True(product.Result, $"Found existing build at {product.ProjectDir}, but it had failed. Check build log at {product.LogFile}"); + if (!product.Result) + throw new XunitException($"Found existing build at {product.ProjectDir}, but it had failed. Check build log at {product.LogFile}"); _projectDir = product.ProjectDir; // use this test's id for the run logs @@ -359,7 +373,6 @@ protected static BuildArgs ExpandBuildArgs(BuildArgs buildArgs, string extraProp string logFilePath = Path.Combine(_logPath, $"{buildArgs.ProjectName}{logFileSuffix}.binlog"); _testOutput.WriteLine($"-------- Building ---------"); _testOutput.WriteLine($"Binlog path: {logFilePath}"); - _testOutput.WriteLine($"Binlog path: {logFilePath}"); sb.Append($" /bl:\"{logFilePath}\" /nologo"); sb.Append($" /v:{options.Verbosity ?? "minimal"}"); if (buildArgs.ExtraBuildArgs != null) @@ -635,10 +648,10 @@ protected static void AssertFile(string file0, string file1, string? label=null, protected (int exitCode, string buildOutput) AssertBuild(string args, string label="build", bool expectSuccess=true, IDictionary? envVars=null, int? timeoutMs=null) { var result = RunProcess(s_buildEnv.DotNet, _testOutput, args, workingDir: _projectDir, label: label, envVars: envVars, timeoutMs: timeoutMs ?? s_defaultPerTestTimeoutMs); - if (expectSuccess) - Assert.True(0 == result.exitCode, $"Build process exited with non-zero exit code: {result.exitCode}"); - else - Assert.True(0 != result.exitCode, $"Build should have failed, but it didn't. Process exited with exitCode : {result.exitCode}"); + if (expectSuccess && result.exitCode != 0) + throw new XunitException($"Build process exited with non-zero exit code: {result.exitCode}"); + if (!expectSuccess && result.exitCode == 0) + throw new XunitException($"Build should have failed, but it didn't. Process exited with exitCode : {result.exitCode}"); return result; } diff --git a/src/mono/wasm/Wasm.Build.Tests/EnvironmentVariables.cs b/src/mono/wasm/Wasm.Build.Tests/EnvironmentVariables.cs index a243979756684..ff3d5d565ae54 100644 --- a/src/mono/wasm/Wasm.Build.Tests/EnvironmentVariables.cs +++ b/src/mono/wasm/Wasm.Build.Tests/EnvironmentVariables.cs @@ -16,5 +16,6 @@ internal static class EnvironmentVariables internal static readonly string? TestLogPath = Environment.GetEnvironmentVariable("TEST_LOG_PATH"); internal static readonly string? SkipProjectCleanup = Environment.GetEnvironmentVariable("SKIP_PROJECT_CLEANUP"); internal static readonly string? XHarnessCliPath = Environment.GetEnvironmentVariable("XHARNESS_CLI_PATH"); + internal static readonly string? 
BrowserPathForTests = Environment.GetEnvironmentVariable("BROWSER_PATH_FOR_TESTS");
     }
 }
diff --git a/src/mono/wasm/Wasm.Build.Tests/Wasm.Build.Tests.csproj b/src/mono/wasm/Wasm.Build.Tests/Wasm.Build.Tests.csproj
index a56ba59191d84..45116c2db286b 100644
--- a/src/mono/wasm/Wasm.Build.Tests/Wasm.Build.Tests.csproj
+++ b/src/mono/wasm/Wasm.Build.Tests/Wasm.Build.Tests.csproj
@@ -56,7 +56,7 @@
-
+
     <_XUnitTraitArg Condition="'$(TestUsingWorkloads)' == 'true'">-notrait category=no-workload</_XUnitTraitArg>
     <_XUnitTraitArg Condition="'$(TestUsingWorkloads)' != 'true'">-trait category=no-workload</_XUnitTraitArg>
diff --git a/src/mono/wasm/Wasm.Build.Tests/WasmSIMDTests.cs b/src/mono/wasm/Wasm.Build.Tests/WasmSIMDTests.cs
index 8c5c397b82c3d..26aa0a7e62ce4 100644
--- a/src/mono/wasm/Wasm.Build.Tests/WasmSIMDTests.cs
+++ b/src/mono/wasm/Wasm.Build.Tests/WasmSIMDTests.cs
@@ -1,6 +1,7 @@
 // Licensed to the .NET Foundation under one or more agreements.
 // The .NET Foundation licenses this file to you under the MIT license.

+using System.IO;
 using Xunit;
 using Xunit.Abstractions;

@@ -16,25 +17,114 @@ public WasmSIMDTests(ITestOutputHelper output, SharedBuildPerTestClassFixture bu
     }

     [Theory]
-    [MemberData(nameof(MainMethodTestData), parameters: new object[] { /*aot*/ true, RunHost.All })]
-    public void BuildWithSIMD(BuildArgs buildArgs, RunHost host, string id)
-        => TestMain("main_simd_aot",
-            @"
-            using System;
-            using System.Runtime.Intrinsics;
-
-            public class TestClass {
-                public static int Main()
-                {
-                    var v1 = Vector128.Create(0x12345678);
-                    var v2 = Vector128.Create(0x23456789);
-                    var v3 = v1*v2;
-                    Console.WriteLine(v3);
-                    Console.WriteLine(""Hello, World!"");
-
-                    return 42;
-                }
-            }",
-            buildArgs, host, id, extraProperties: "<WasmSIMD>true</WasmSIMD>");
+    [MemberData(nameof(MainMethodTestData), parameters: new object[] { /*aot*/ false, RunHost.All })]
+    public void BuildWithSIMD_NoAOT_ShouldRelink(BuildArgs buildArgs, RunHost host, string id)
+    {
+        string projectName = $"sim_with_workload_no_aot";
+        buildArgs = buildArgs with { ProjectName = projectName };
+        buildArgs = ExpandBuildArgs(buildArgs, "<WasmEnableSIMD>true</WasmEnableSIMD>");
+
+        (_, string output) = BuildProject(buildArgs,
+                                id: id,
+                                new BuildProjectOptions(
+                                    InitProject: () => File.WriteAllText(Path.Combine(_projectDir!, "Program.cs"), s_simdProgramText),
+                                    Publish: false,
+                                    DotnetWasmFromRuntimePack: false));
+
+        if (!_buildContext.TryGetBuildFor(buildArgs, out _))
+        {
+            // Check if this is not a cached build
+            Assert.Contains("Compiling native assets with emcc", output);
+        }
+
+        RunAndTestWasmApp(buildArgs,
+                            extraXHarnessArgs: host == RunHost.NodeJS ?
"--engine-arg=--experimental-wasm-simd" : "", + expectedExitCode: 42, + test: output => + { + Assert.Contains("<-2094756296, -2094756296, -2094756296, -2094756296>", output); + Assert.Contains("Hello, World!", output); + }, host: host, id: id); + } + + [Theory] + // https://github.com/dotnet/runtime/issues/75044 - disabled for V8, and NodeJS + //[MemberData(nameof(MainMethodTestData), parameters: new object[] { /*aot*/ true, RunHost.All })] + [MemberData(nameof(MainMethodTestData), parameters: new object[] { /*aot*/ true, RunHost.Chrome })] + [MemberData(nameof(MainMethodTestData), parameters: new object[] { /*aot*/ false, RunHost.All })] + public void PublishWithSIMD_AOT(BuildArgs buildArgs, RunHost host, string id) + { + string projectName = $"sim_with_workload_aot"; + buildArgs = buildArgs with { ProjectName = projectName }; + buildArgs = ExpandBuildArgs(buildArgs, "true"); + + BuildProject(buildArgs, + id: id, + new BuildProjectOptions( + InitProject: () => File.WriteAllText(Path.Combine(_projectDir!, "Program.cs"), s_simdProgramText), + DotnetWasmFromRuntimePack: false)); + + RunAndTestWasmApp(buildArgs, + extraXHarnessArgs: host == RunHost.NodeJS ? "--engine-arg=--experimental-wasm-simd" : "", + expectedExitCode: 42, + test: output => + { + Assert.Contains("<-2094756296, -2094756296, -2094756296, -2094756296>", output); + Assert.Contains("Hello, World!", output); + }, host: host, id: id); + } + + [Theory, TestCategory("no-workload")] + [InlineData("Debug", /*aot*/true, /*publish*/true)] + [InlineData("Debug", /*aot*/false, /*publish*/false)] + [InlineData("Debug", /*aot*/false, /*publish*/true)] + [InlineData("Release", /*aot*/true, /*publish*/true)] + [InlineData("Release", /*aot*/false, /*publish*/false)] + [InlineData("Release", /*aot*/false, /*publish*/true)] + public void BuildWithSIMDNeedsWorkload(string config, bool aot, bool publish) + { + string id = Path.GetRandomFileName(); + string projectName = $"simd_no_workload_{config}_aot_{aot}"; + BuildArgs buildArgs = new + ( + ProjectName: projectName, + Config: config, + AOT: aot, + ProjectFileContents: "placeholder", + ExtraBuildArgs: string.Empty + ); + + string extraProperties = """ + browser-wasm + true + """; + buildArgs = ExpandBuildArgs(buildArgs, extraProperties); + + (_, string output) = BuildProject(buildArgs, + id: id, + new BuildProjectOptions( + InitProject: () => File.WriteAllText(Path.Combine(_projectDir!, "Program.cs"), s_simdProgramText), + Publish: publish, + ExpectSuccess: false, + UseCache: false)); + Assert.Contains("following workloads must be installed: wasm-tools", output); + } + + private static string s_simdProgramText = @" + using System; + using System.Runtime.Intrinsics; + + public class TestClass { + public static int Main() + { + var v1 = Vector128.Create(0x12345678); + var v2 = Vector128.Create(0x23456789); + var v3 = v1*v2; + Console.WriteLine(v3); + Console.WriteLine(""Hello, World!""); + + return 42; + } + }"; } } diff --git a/src/mono/wasm/Wasm.Build.Tests/data/RunScriptTemplate.cmd b/src/mono/wasm/Wasm.Build.Tests/data/RunScriptTemplate.cmd index d3c47af35c02f..464d95e36c7ea 100644 --- a/src/mono/wasm/Wasm.Build.Tests/data/RunScriptTemplate.cmd +++ b/src/mono/wasm/Wasm.Build.Tests/data/RunScriptTemplate.cmd @@ -34,7 +34,7 @@ if [%XHARNESS_COMMAND%] == [] ( if /I [%XHARNESS_COMMAND%] == [test] ( if [%JS_ENGINE%] == [] ( - if /I [%SCENARIO%] == [WasmTestOnNodeJs] ( + if /I [%SCENARIO%] == [WasmTestOnNodeJS] ( set "JS_ENGINE=--engine^=NodeJS" ) else ( set "JS_ENGINE=--engine^=V8" diff --git 
a/src/mono/wasm/Wasm.Build.Tests/data/RunScriptTemplate.sh b/src/mono/wasm/Wasm.Build.Tests/data/RunScriptTemplate.sh index 0c3be485422f9..67ffeabba709e 100644 --- a/src/mono/wasm/Wasm.Build.Tests/data/RunScriptTemplate.sh +++ b/src/mono/wasm/Wasm.Build.Tests/data/RunScriptTemplate.sh @@ -33,7 +33,7 @@ fi if [[ "$XHARNESS_COMMAND" == "test" ]]; then if [[ -z "$JS_ENGINE" ]]; then - if [[ "$SCENARIO" == "WasmTestOnNodeJs" || "$SCENARIO" == "wasmtestonnodejs" ]]; then + if [[ "$SCENARIO" == "WasmTestOnNodeJS" || "$SCENARIO" == "wasmtestonnodejs" ]]; then JS_ENGINE="--engine=NodeJS" else JS_ENGINE="--engine=V8" diff --git a/src/mono/wasm/build/WasmApp.Native.targets b/src/mono/wasm/build/WasmApp.Native.targets index dfe3a169d5604..abd8cd6aef3ab 100644 --- a/src/mono/wasm/build/WasmApp.Native.targets +++ b/src/mono/wasm/build/WasmApp.Native.targets @@ -108,6 +108,8 @@ true + true + true false @@ -118,6 +120,7 @@ true true + true false @@ -193,9 +196,9 @@ <_EmccCommonFlags Include="-s EXPORT_ES6=1" /> <_EmccCommonFlags Include="-g" Condition="'$(WasmNativeStrip)' == 'false'" /> <_EmccCommonFlags Include="-v" Condition="'$(EmccVerbose)' != 'false'" /> - <_EmccCommonFlags Include="-s DISABLE_EXCEPTION_CATCHING=0" Condition="'$(WasmExceptionHandling)' == 'false'" /> - <_EmccCommonFlags Include="-fwasm-exceptions" Condition="'$(WasmExceptionHandling)' == 'true'" /> - <_EmccCommonFlags Include="-msimd128" Condition="'$(WasmSIMD)' == 'true'" /> + <_EmccCommonFlags Include="-s DISABLE_EXCEPTION_CATCHING=0" Condition="'$(WasmEnableExceptionHandling)' == 'false'" /> + <_EmccCommonFlags Include="-fwasm-exceptions" Condition="'$(WasmEnableExceptionHandling)' == 'true'" /> + <_EmccCommonFlags Include="-msimd128" Condition="'$(WasmEnableSIMD)' == 'true'" /> <_EmccIncludePaths Include="$(_WasmIntermediateOutputPath.TrimEnd('\/'))" /> <_EmccIncludePaths Include="$(_WasmRuntimePackIncludeDir)mono-2.0" /> @@ -367,10 +370,10 @@ - <_WasmEHLib Condition="'$(WasmExceptionHandling)' == 'true'">libmono-wasm-eh-wasm.a - <_WasmEHLib Condition="'$(WasmExceptionHandling)' != 'true'">libmono-wasm-eh-js.a - <_WasmEHLibToExclude Condition="'$(WasmExceptionHandling)' == 'true'">libmono-wasm-eh-js.a - <_WasmEHLibToExclude Condition="'$(WasmExceptionHandling)' != 'true'">libmono-wasm-eh-wasm.a + <_WasmEHLib Condition="'$(WasmEnableExceptionHandling)' == 'true'">libmono-wasm-eh-wasm.a + <_WasmEHLib Condition="'$(WasmEnableExceptionHandling)' != 'true'">libmono-wasm-eh-js.a + <_WasmEHLibToExclude Condition="'$(WasmEnableExceptionHandling)' == 'true'">libmono-wasm-eh-js.a + <_WasmEHLibToExclude Condition="'$(WasmEnableExceptionHandling)' != 'true'">libmono-wasm-eh-wasm.a @@ -524,8 +527,8 @@ - - + + diff --git a/src/mono/wasm/build/WasmApp.targets b/src/mono/wasm/build/WasmApp.targets index 50f220060b0e0..641ee99b1906c 100644 --- a/src/mono/wasm/build/WasmApp.targets +++ b/src/mono/wasm/build/WasmApp.targets @@ -65,8 +65,8 @@ - $(RunAOTCompilationAfterBuild) - Run AOT compilation even after Build. By default, it is run only for publish. Defaults to false. - $(WasmAotProfilePath) - Path to an AOT profile file. - - $(WasmExceptionHandling) - Enable support for the WASM Exception Handling feature. - - $(WasmSIMD) - Enable support for the WASM SIMD feature. + - $(WasmEnableExceptionHandling) - Enable support for the WASM Exception Handling feature. + - $(WasmEnableSIMD) - Enable support for the WASM SIMD feature. Public items: - @(WasmExtraFilesToDeploy) - Files to copy to $(WasmAppDir). 
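[Editor's note] Because an unrecognized MSBuild property is silently ignored, the WasmSIMD -> WasmEnableSIMD and WasmExceptionHandling -> WasmEnableExceptionHandling renames above are easy to get wrong without any build error. A wasm app can verify at runtime that SIMD actually got enabled; a minimal probe (mine, assuming .NET 7+ intrinsics and top-level statements):

using System;
using System.Runtime.Intrinsics;

// Prints false if the app was linked without -msimd128.
Console.WriteLine($"Vector128.IsHardwareAccelerated = {Vector128.IsHardwareAccelerated}");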
@@ -86,8 +86,8 @@ false - false - false + false + false diff --git a/src/native/libs/System.Native/entrypoints.c b/src/native/libs/System.Native/entrypoints.c index a9ae62ce446f4..e254f526a1a43 100644 --- a/src/native/libs/System.Native/entrypoints.c +++ b/src/native/libs/System.Native/entrypoints.c @@ -245,6 +245,7 @@ static const Entry s_sysNative[] = DllImportEntry(SystemNative_UTimensat) DllImportEntry(SystemNative_FUTimens) DllImportEntry(SystemNative_GetTimestamp) + DllImportEntry(SystemNative_GetBootTimeTicks) DllImportEntry(SystemNative_GetCpuUtilization) DllImportEntry(SystemNative_GetPwUidR) DllImportEntry(SystemNative_GetPwNamR) diff --git a/src/native/libs/System.Native/pal_time.c b/src/native/libs/System.Native/pal_time.c index 588277afdf592..4a7bbeca4d59b 100644 --- a/src/native/libs/System.Native/pal_time.c +++ b/src/native/libs/System.Native/pal_time.c @@ -18,9 +18,10 @@ enum { - SecondsToMicroSeconds = 1000000, // 10^6 - SecondsToNanoSeconds = 1000000000, // 10^9 - MicroSecondsToNanoSeconds = 1000 // 10^3 + MicroSecondsToNanoSeconds = 1000, // 10^3 + SecondsToNanoSeconds = 1000000000, // 10^9 + SecondsToTicks = 10000000, // 10^7 + TicksToNanoSeconds = 100, // 10^2 }; int32_t SystemNative_UTimensat(const char* path, TimeSpec* times) @@ -95,6 +96,29 @@ uint64_t SystemNative_GetTimestamp() #endif } +int64_t SystemNative_GetBootTimeTicks() +{ +#if defined(TARGET_LINUX) || defined(TARGET_ANDROID) + struct timespec ts; + + int result = clock_gettime(CLOCK_BOOTTIME, &ts); + assert(result == 0); // only possible errors are if the given clockId isn't supported or &ts is an invalid address + (void)result; // suppress unused parameter warning in release builds + + int64_t sinceBootTicks = ((int64_t)ts.tv_sec * SecondsToTicks) + (ts.tv_nsec / TicksToNanoSeconds); + + result = clock_gettime(CLOCK_REALTIME_COARSE, &ts); + assert(result == 0); + + int64_t sinceEpochTicks = ((int64_t)ts.tv_sec * SecondsToTicks) + (ts.tv_nsec / TicksToNanoSeconds); + const int64_t UnixEpochTicks = 621355968000000000; + + return UnixEpochTicks + sinceEpochTicks - sinceBootTicks; +#else + return -1; +#endif +} + double SystemNative_GetCpuUtilization(ProcessCpuInformation* previousCpuInfo) { uint64_t kernelTime = 0; diff --git a/src/native/libs/System.Native/pal_time.h b/src/native/libs/System.Native/pal_time.h index 8660902f4761f..26db2c20c102e 100644 --- a/src/native/libs/System.Native/pal_time.h +++ b/src/native/libs/System.Native/pal_time.h @@ -39,6 +39,11 @@ PALEXPORT int32_t SystemNative_FUTimens(intptr_t fd, TimeSpec* times); */ PALEXPORT uint64_t SystemNative_GetTimestamp(void); +/** + * Gets system boot time ticks. (Linux only) + */ +PALEXPORT int64_t SystemNative_GetBootTimeTicks(void); + /** * The main purpose of this function is to compute the overall CPU utilization * for the CLR thread pool to regulate the number of worker threads. 
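[Editor's note] The arithmetic in SystemNative_GetBootTimeTicks is: boot time (in .NET ticks, UTC) = Unix epoch in ticks + ticks since the epoch - ticks since boot, where a tick is 100 ns. A managed mirror of that math (my sketch; the two elapsed values are made-up example figures, and 621355968000000000 is DateTime.UnixEpoch.Ticks):

using System;

class BootTimeMath
{
    static void Main()
    {
        const long UnixEpochTicks = 621355968000000000;
        Console.WriteLine(UnixEpochTicks == DateTime.UnixEpoch.Ticks); // True

        // Example figures: 100 s since boot, 1,000,000 s since the Unix epoch.
        long sinceBootTicks = 100L * 10_000_000;        // SecondsToTicks == 10^7
        long sinceEpochTicks = 1_000_000L * 10_000_000;

        DateTime bootTimeUtc = new DateTime(
            UnixEpochTicks + sinceEpochTicks - sinceBootTicks, DateTimeKind.Utc);
        Console.WriteLine(bootTimeUtc); // 1970-01-12T13:45:00Z
    }
}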
diff --git a/src/tasks/AndroidAppBuilder/Templates/MonoRunner.java b/src/tasks/AndroidAppBuilder/Templates/MonoRunner.java index 4bd5a68d41623..bd91f93f6b760 100644 --- a/src/tasks/AndroidAppBuilder/Templates/MonoRunner.java +++ b/src/tasks/AndroidAppBuilder/Templates/MonoRunner.java @@ -24,6 +24,7 @@ import java.io.OutputStream; import java.io.BufferedInputStream; import java.util.ArrayList; +import java.util.Calendar; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.time.OffsetDateTime; @@ -90,7 +91,7 @@ public static int initialize(String entryPointLibName, String[] args, Context co unzipAssets(context, filesDir, "assets.zip"); Log.i("DOTNET", "MonoRunner initialize,, entryPointLibName=" + entryPointLibName); - int localDateTimeOffset = OffsetDateTime.now().getOffset().getTotalSeconds(); + int localDateTimeOffset = getLocalDateTimeOffset(); return initRuntime(filesDir, cacheDir, testResultsDir, entryPointLibName, args, localDateTimeOffset); } @@ -152,6 +153,15 @@ static void unzipAssets(Context context, String toPath, String zipName) { } } + static int getLocalDateTimeOffset() { + if (android.os.Build.VERSION.SDK_INT >= 26) { + return OffsetDateTime.now().getOffset().getTotalSeconds(); + } else { + int offsetInMillis = Calendar.getInstance().getTimeZone().getRawOffset(); + return offsetInMillis / 1000; + } + } + static native int initRuntime(String libsDir, String cacheDir, String testResultsDir, String entryPointLibName, String[] args, int local_date_time_offset); static native int setEnv(String key, String value); diff --git a/src/tests/Common/CLRTest.Execute.Bash.targets b/src/tests/Common/CLRTest.Execute.Bash.targets index ac0a86a8699a5..1c20d1e730c81 100644 --- a/src/tests/Common/CLRTest.Execute.Bash.targets +++ b/src/tests/Common/CLRTest.Execute.Bash.targets @@ -53,7 +53,7 @@ WARNING: When setting properties based on their current state (for example: + DependsOnTargets="$(BashScriptSnippetGen);GetIlasmRoundTripBashScript;GetSuperPMICollectionBashScript"> @@ -534,6 +534,7 @@ $(BashCLRTestEnvironmentCompatibilityCheck) $(BashCLRTestArgPrep) $(BashCLRTestExitCodePrep) $(IlasmRoundTripBashScript) +$(SuperPMICollectionBashScript) # Allow precommands to override the ExePath ExePath=$(InputAssemblyName) export TestExclusionListPath=$CORE_ROOT/TestExclusionList.txt diff --git a/src/tests/Common/CLRTest.Execute.Batch.targets b/src/tests/Common/CLRTest.Execute.Batch.targets index 2907e65e394f5..04c9e017d7e51 100644 --- a/src/tests/Common/CLRTest.Execute.Batch.targets +++ b/src/tests/Common/CLRTest.Execute.Batch.targets @@ -52,7 +52,7 @@ WARNING: When setting properties based on their current state (for example: + DependsOnTargets="$(BatchScriptSnippetGen);GetIlasmRoundTripBatchScript;GetSuperPMICollectionBatchScript"> @@ -434,6 +434,8 @@ $(BatchCLRTestEnvironmentCompatibilityCheck) $(IlasmRoundTripBatchScript) +$(SuperPMICollectionBatchScript) + REM Allow precommands to override the ExePath set ExePath=$(InputAssemblyName) set TestExclusionListPath=%CORE_ROOT%\TestExclusionList.txt diff --git a/src/tests/Common/CLRTest.Jit.targets b/src/tests/Common/CLRTest.Jit.targets index f4cfed6594cf9..df93c54cd7e4d 100644 --- a/src/tests/Common/CLRTest.Jit.targets +++ b/src/tests/Common/CLRTest.Jit.targets @@ -187,6 +187,72 @@ IF NOT DEFINED DoLink ( + + + + + + + + + + + + + + + + + + + + @@ -44,7 +100,8 @@ RuntimeVariant=$(_RuntimeVariant); BundledNETCoreAppPackageVersion=$(BundledNETCoreAppPackageVersion); HelixRuntimeRid=$(HelixRuntimeRid); - 
PALTestsDir=$(_PALTestsDir) + PALTestsDir=$(_PALTestsDir); + SuperPmiCollect=$(_SuperPmiCollect) <_PropertiesToPass Condition="'$(TargetOS)' == 'Browser' Or '$(TargetsAndroid)' == 'true'"> @@ -67,7 +124,7 @@ - + @@ -230,6 +287,13 @@ + + + + <_SuperPmiScriptsFiles Include="$(RepoRoot)src\coreclr\scripts\*.py" /> + + + @@ -298,6 +362,8 @@ <_MergedWrapperParentDirectory>$([System.IO.Path]::GetDirectoryName('$(_MergedWrapperDirectory)')) <_MergedWrapperName>%(_MergedWrapperRunScript.FileName) <_MergedWrapperRunScriptRelative Condition="'%(_MergedWrapperRunScript.Identity)' != ''">$([System.IO.Path]::GetRelativePath($(TestBinDir), %(_MergedWrapperRunScript.FullPath))) + + <_MergedWrapperRunScriptPrefix Condition="'$(TestWrapperTargetsWindows)' == 'true'">call <_MergedWrapperOutOfProcessTestMarkers Include="$(_MergedWrapperParentDirectory)/**/*.OutOfProcessTest" /> @@ -327,8 +393,8 @@ - - + + @@ -502,12 +568,26 @@ <_XUnitParallelMode Condition=" '$(LongRunningGCTests)' == 'true' ">none <_XUnitParallelMode Condition=" '$(GcSimulatorTests)' == 'true' ">none -parallel $(_XUnitParallelMode) -nocolor -noshadow -xml testResults.xml + false + + + + + $(BUILD_SOURCESDIRECTORY)\artifacts\helixresults + + + + $(BUILD_SOURCESDIRECTORY)/artifacts/helixresults + + - + @@ -526,10 +606,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + @@ -547,6 +654,27 @@ + + + + + + + + + + + + + + + + + + + + + @(HelixPreCommand) @(HelixPostCommand) @@ -616,12 +744,14 @@ dotnet $(XUnitRunnerDll) %(XUnitWrapperDlls) $(XUnitRunnerArgs) dotnet $(XUnitRunnerDll) %(XUnitWrapperDlls) $(XUnitRunnerArgs) -trait TestGroup=%(TestGroup) $([System.TimeSpan]::FromMinutes($(TimeoutPerTestCollectionInMinutes))) + coreclr_tests.run.$(TargetOS).$(TargetArchitecture).$(Configuration).mch;coreclr_tests.run.$(TargetOS).$(TargetArchitecture).$(Configuration).log %(PayloadDirectory) %(MergedTestHelixCommand) $([System.TimeSpan]::FromMinutes($(TimeoutPerTestCollectionInMinutes))) + coreclr_tests.run.$(TargetOS).$(TargetArchitecture).$(Configuration).mch;coreclr_tests.run.$(TargetOS).$(TargetArchitecture).$(Configuration).log diff --git a/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/Program.cs b/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/Program.cs index f899919ef9dab..8fcdabc5d6df6 100644 --- a/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/Program.cs +++ b/src/tests/Interop/ObjectiveC/ObjectiveCMarshalAPI/Program.cs @@ -7,6 +7,7 @@ namespace ObjectiveCMarshalAPI using System.Collections.Generic; using System.Diagnostics; using System.Reflection; + using System.Runtime.CompilerServices; using System.Runtime.InteropServices; using System.Runtime.InteropServices.ObjectiveC; @@ -140,6 +141,7 @@ static void InitializeObjectiveCMarshal() ObjectiveCMarshal.Initialize(beginEndCallback, isReferencedCallback, trackedObjectEnteredFinalization, OnUnhandledExceptionPropagationHandler); } + [MethodImpl(MethodImplOptions.NoInlining)] static GCHandle AllocAndTrackObject(uint count) where T : Base, new() { var obj = new T(); diff --git a/src/tests/JIT/Directed/arglist/vararg.cs b/src/tests/JIT/Directed/arglist/vararg.cs index ccc4db38c8af7..1292f6a161dcd 100644 --- a/src/tests/JIT/Directed/arglist/vararg.cs +++ b/src/tests/JIT/Directed/arglist/vararg.cs @@ -4441,6 +4441,26 @@ static bool TestEchoFourDoubleStructManagedViaAddress() return equal; } + // Miscellaneous tests + + [MethodImpl(MethodImplOptions.NoInlining)] + static bool TestEchoFourDoubleStructViaParameterAssign() + { + FourDoubleStruct arg = new FourDoubleStruct(); + arg.a = 1.0; + 
arg.b = 2.0; + arg.c = 3.0; + arg.d = 4.0; + + FourDoubleStruct returnValue = ManagedNativeVarargTests.TestEchoFourDoubleStructViaParameterAssign(arg, __arglist()); + bool equal = arg.a == returnValue.a && + arg.b == returnValue.b && + arg.c == returnValue.c && + arg.d == returnValue.d; + + return equal; + } + //////////////////////////////////////////////////////////////////////// // Report Failure //////////////////////////////////////////////////////////////////////// @@ -5065,6 +5085,9 @@ static int Main(string[] args) // Parameter address tests success = ReportFailure(TestEchoFourDoubleStructManagedViaAddress(), "TestEchoFourDoubleStructManagedViaAddress()", success, 155); + // Miscellaneous tests + success = ReportFailure(TestEchoFourDoubleStructViaParameterAssign(), "TestEchoFourDoubleStructViaParameterAssign()", success, 156); + printf("\n", __arglist()); printf("%d Tests run. %d Passed, %d Failed.\n", __arglist(m_testCount, m_passCount, m_failCount)); diff --git a/src/tests/JIT/Directed/arglist/varargmanaged.cs b/src/tests/JIT/Directed/arglist/varargmanaged.cs index 3ec476c54e393..1b94046a11f52 100644 --- a/src/tests/JIT/Directed/arglist/varargmanaged.cs +++ b/src/tests/JIT/Directed/arglist/varargmanaged.cs @@ -1331,5 +1331,31 @@ private static FourDoubleStruct NewFourDoubleStructViaAddress(ref double a, ref { return new FourDoubleStruct { a = a, b = b, c = c, d = d }; } + + // Miscellaneous tests + + [MethodImpl(MethodImplOptions.NoInlining)] + public static FourDoubleStruct TestEchoFourDoubleStructViaParameterAssign(FourDoubleStruct a, __arglist) + { + // Tests that a multi-reg return from an inline candidate can be assigned successfully to a by-reference + // parameter on Windows ARM64. + a = ReturnDoubleStructInlineCandidate(a); + + return a; + } + + private static FourDoubleStruct ReturnDoubleStructInlineCandidate(FourDoubleStruct a) + { + [MethodImpl(MethodImplOptions.NoInlining)] + static void Call() { } + + Call(); + Call(); + Call(); + Call(); + Call(); + + return a; + } } } diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_34170/Runtime_34170.cs b/src/tests/JIT/Regression/JitBlue/Runtime_34170/Runtime_34170.cs index 19fd90aff05ef..622bbc1d04fd2 100644 --- a/src/tests/JIT/Regression/JitBlue/Runtime_34170/Runtime_34170.cs +++ b/src/tests/JIT/Regression/JitBlue/Runtime_34170/Runtime_34170.cs @@ -21,7 +21,7 @@ public FloatNonAlignedFieldWithSmallOffset(float a) [StructLayout(LayoutKind.Explicit)] internal struct FloatNonAlignedFieldWithLargeOffset { - [FieldOffset(1021)] + [FieldOffset(0x10001)] public float field; public FloatNonAlignedFieldWithLargeOffset(float a) @@ -45,7 +45,7 @@ public DoubleNonAlignedFieldWithSmallOffset(float a) [StructLayout(LayoutKind.Explicit)] internal struct DoubleNonAlignedFieldWithLargeOffset { - [FieldOffset(1021)] + [FieldOffset(0x10001)] public double field; public DoubleNonAlignedFieldWithLargeOffset(float a) diff --git a/src/tests/issues.targets b/src/tests/issues.targets index ac3e9975606fd..f9351841eeb87 100644 --- a/src/tests/issues.targets +++ b/src/tests/issues.targets @@ -562,6 +562,9 @@ https://github.com/dotnet/runtime/issues/60152 + + https://github.com/dotnet/runtime/issues/74631 +
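[Editor's note] Stepping back to the Runtime_34170 change above: moving the explicit field offset from 1021 to 0x10001 pushes it past 64 KB, which exercises large-displacement (and still unaligned) field addressing in the JIT. A self-contained sketch of the shape being tested (mine, not the test itself):

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

[StructLayout(LayoutKind.Explicit)]
struct FloatAtLargeOffset
{
    [FieldOffset(0x10001)] // > 64 KB and odd, so both large and unaligned
    public float field;
}

class Probe
{
    static void Main()
    {
        var s = new FloatAtLargeOffset { field = 1.5f };
        Console.WriteLine(s.field);                              // 1.5
        Console.WriteLine(Unsafe.SizeOf<FloatAtLargeOffset>());  // at least 0x10005, padded per alignment
    }
}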